code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""
Run all test modules in current directory.
"""
import os, sys
import unittest
import doctest
import glob
import logging
from StringIO import StringIO
try:
import fms
except ImportError:
# runnning from source, not installed, add fms source path to system path
fmspath = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.append(fmspath)
import fms
import fms.core
from fms.utils.parsers import YamlParamsParser
def sourceList():
    """Collect importable test module names from the current directory.

    Every ``*.py`` file except this runner itself contributes its
    basename (text before the first dot), ready for ``__import__``.
    """
    return [filename.split('.')[0]
            for filename in glob.glob("*.py")
            if filename != 'runalltests.py']
def expList():
    """List the experiment configuration files under fixtures/experiments.

    Globs for ``*.yml`` conffiles; paths come back relative to the
    current working directory.
    """
    pattern = "fixtures/experiments/*.yml"
    return glob.glob(pattern)
# --- run everything from within the tests directory ------------------------
# Fixture paths below are relative, so move there first; the caller's cwd is
# remembered and restored at the very end.
old_dir = os.getcwd()
try:
    os.chdir(os.path.dirname(__file__))
except OSError:
    pass
logger = fms.core.set_logger('info','fms-tests')
logger.info("Running unittests")

# Build one combined suite: each sibling test module contributes either its
# own suite() factory or whatever the default loader finds in it.
suite = unittest.TestSuite()
for modtestname in sourceList():
    # level=-1 (implicit relative imports) is Python 2 only
    modtest = __import__(modtestname, globals(), locals(), [], -1)
    if hasattr(modtest, 'suite'):
        suite.addTest(modtest.suite())
    else:
        suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(modtest))

# Add doctests for every .py file in the installed fms package, mapping the
# on-disk directory to the matching package name (fms, fms.contrib,
# fms.contrib.<sub>, or a first-level fms subpackage addressed relative
# to 'fms').
for root, dir, files in os.walk(os.path.dirname(fms.__file__)):
    for f in files:
        if os.path.splitext(f)[1] == '.py':
            head, tail = os.path.split(root)
            if tail == 'fms':
                # file sits directly in the fms package
                suite.addTest(doctest.DocFileSuite(f, package='fms',
                    optionflags=+doctest.ELLIPSIS))
            else:
                if 'contrib' in head:
                    h, t = os.path.split(head)
                    if t == 'contrib':
                        suite.addTest(doctest.DocFileSuite(
                            os.path.join(os.path.split(root)[1], f),
                            package='fms.contrib',
                            optionflags=+doctest.ELLIPSIS))
                    else:
                        suite.addTest(doctest.DocFileSuite(
                            os.path.join(os.path.split(root)[1], f),
                            package='fms.contrib.%s' % t,
                            optionflags=+doctest.ELLIPSIS))
                else:
                    suite.addTest(doctest.DocFileSuite(
                        os.path.join(os.path.split(root)[1], f),
                        package='fms',
                        optionflags=+doctest.ELLIPSIS))
unittest.TextTestRunner(verbosity=2).run(suite)

# --- functional tests: run each experiment, diff output against its .csv ---
for simconffile in expList():
    logger.info("Running %s" % simconffile)
    params = YamlParamsParser(simconffile)
    params['show_books'] = False
    params['timer'] = False
    # capture simulation output in memory instead of a real file
    params.outputfile = StringIO()
    (world, engineslist, agentslist) = fms.core.set_classes(params)
    for e in engineslist:
        e['instance'].run(world, agentslist, e['market']['instance'])
    # the benchmark shares the conffile's basename (up to the first dot)
    benchfile = "%s.csv" % simconffile.split('.')[0]
    benchmark = open(benchfile).read()
    testresult = params.outputfile.getvalue()
    if not benchmark == testresult:
        logger.error("%s failed" % simconffile)
        print testresult
    else:
        logger.info("%s ok" % simconffile)
    params.close_files(1)
    agentslist[0].reset()
os.chdir(old_dir)
|
jcbagneris/fms
|
tests/runalltests.py
|
Python
|
bsd-3-clause
| 3,413
|
from configurations import values
class Email(object):
    """Email settings for SMTP.

    Settings mixin in the django-configurations ``values`` style: each
    entry can be overridden through the environment; ``SecretValue``
    entries ship no default and must be supplied externally.
    """
    EMAIL_HOST = values.Value('localhost')
    EMAIL_HOST_PASSWORD = values.SecretValue()  # no default: required at runtime
    EMAIL_HOST_USER = values.Value('max@max-brauer.de')
    EMAIL_PORT = values.IntegerValue(465)  # 465 is the conventional SMTPS port
    EMAIL_USE_SSL = values.BooleanValue(True)
    EMAIL_USE_TLS = values.BooleanValue(False)  # SSL on / TLS off pairing
class Mailgun(object):
    """Email settings for Mailgun (django-mailgun backend)."""
    EMAIL_BACKEND = 'django_mailgun.MailgunBackend'
    MAILGUN_ACCESS_KEY = values.SecretValue()  # API key; supplied via environment
    MAILGUN_SERVER_NAME = values.Value('mg.transcode.de')
|
DebVortex/max-brauer.de
|
maxbrauer/config/settings/email.py
|
Python
|
bsd-3-clause
| 601
|
from __future__ import absolute_import, division, print_function
from glue.core.data import Data, Component
from glue.config import data_factory
from glue.core.data_factories.helpers import has_extension
__all__ = ['is_npy', 'npy_reader', 'is_npz', 'npz_reader']
# TODO: implement support for regular arrays, e.g., not just structured arrays?
def is_npy(filename):
    """Identify a numpy ``.npy`` save file by its magic number.

    Such files start with the six bytes ``\\x93NUMPY``; see
    https://github.com/numpy/numpy/blob/master/doc/neps/npy-format.rst
    """
    from numpy.lib.format import MAGIC_PREFIX
    with open(filename, 'rb') as handle:
        header = handle.read(6)
    return header == MAGIC_PREFIX
@data_factory(label="Numpy save file", identifier=is_npy, priority=100)
def npy_reader(filename, format='auto', auto_merge=False, **kwargs):
    """
    Read in a Numpy structured array saved to a .npy file.

    Parameters
    ----------
    filename : str
        The pathname to the Numpy save file.

    Returns
    -------
    `~glue.core.data.Data`
        One Data object with a component per field of the structured array.

    Raises
    ------
    ValueError
        If the saved array is not a structured array.
    """
    import numpy as np

    npy_data = np.load(filename)

    # BUGFIX: every dtype object has a ``names`` attribute (it is None for
    # plain, unstructured dtypes), so the previous ``hasattr`` test was
    # always True and unstructured input crashed later with an unhelpful
    # TypeError while iterating None. Test the value instead.
    if npy_data.dtype.names is None:
        raise ValueError("Numpy save file loading currently only supports structured"
                         " arrays, e.g., with specified names.")

    d = Data()
    for name in npy_data.dtype.names:
        comp = Component.autotyped(npy_data[name])
        d.add_component(comp, label=name)
    return d
def is_npz(filename):
    """Identify a numpy ``.npz`` save file.

    A ``.npz`` archive is an ordinary zipfile, so the check combines the
    zipfile magic number with an extension test (the ``\\x93NUMPY`` magic
    belongs to the individual members, not the archive itself); see
    https://github.com/numpy/numpy/blob/master/doc/neps/npy-format.rst
    """
    extension_ok = has_extension('npz .npz')
    ZIP_MAGIC = b'PK\x03\x04'  # first 4 bytes for a zipfile
    with open(filename, 'rb') as infile:
        magic_ok = infile.read(4) == ZIP_MAGIC
    return magic_ok and extension_ok(filename)
@data_factory(label="Numpy multiple array save file", identifier=is_npz, priority=100)
def npz_reader(filename, format='auto', auto_merge=False, **kwargs):
    """
    Read in Numpy structured arrays saved to a .npz file.

    Parameters
    ----------
    filename : str
        The pathname to the Numpy save file.

    Returns
    -------
    list of `~glue.core.data.Data`
        One Data object per stored group, in sorted group-name order.

    Raises
    ------
    ValueError
        If any stored array is not a structured array.
    """
    import numpy as np

    npy_data = np.load(filename)

    groups = []
    for groupname in sorted(npy_data.files):
        d = Data(label=groupname)
        arr = npy_data[groupname]

        # BUGFIX: ``dtype.names`` is None for unstructured arrays, but the
        # attribute itself always exists, so the previous ``hasattr`` guard
        # could never fire; test the value instead.
        if arr.dtype.names is None:
            raise ValueError("Numpy save file loading currently only supports structured"
                             " arrays, e.g., with specified names.")

        for name in arr.dtype.names:
            comp = Component.autotyped(arr[name])
            d.add_component(comp, label=name)
        groups.append(d)
    return groups
|
saimn/glue
|
glue/core/data_factories/npy.py
|
Python
|
bsd-3-clause
| 2,643
|
from datetime import timedelta
from django.core.validators import ValidationError
from django.test import SimpleTestCase
from django.utils import timezone
from glitter.publisher.validators import future_date
class TestFutureDateValidator(SimpleTestCase):
    """Checks for the ``future_date`` validator."""

    def test_valid_date(self):
        # A date one week ahead passes validation (validator returns None).
        upcoming = timezone.now() + timedelta(weeks=1)
        self.assertIsNone(future_date(upcoming))

    def test_invalid_past_date(self):
        # A date one week in the past must raise ValidationError.
        past = timezone.now() - timedelta(weeks=1)
        with self.assertRaises(ValidationError):
            future_date(past)
|
developersociety/django-glitter
|
glitter/publisher/tests/test_validators.py
|
Python
|
bsd-3-clause
| 610
|
import numpy as np
import pytest
import pandas as pd
from pandas import (
Index,
MultiIndex,
date_range,
period_range,
)
import pandas._testing as tm
def test_shift(idx):
    # GH8083: the Index base class implements shift() only for datetime-like
    # indexes, so MultiIndex must raise NotImplementedError.
    #
    # BUGFIX: the message was written as two adjacent string literals on
    # separate *statements*, so the second line was a no-op expression and
    # never part of ``msg``; parenthesize so the full message is matched.
    msg = (
        "This method is only implemented for DatetimeIndex, PeriodIndex and "
        "TimedeltaIndex; Got type MultiIndex"
    )
    with pytest.raises(NotImplementedError, match=msg):
        idx.shift(1)
    with pytest.raises(NotImplementedError, match=msg):
        idx.shift(1, 2)
def test_groupby(idx):
groups = idx.groupby(np.array([1, 1, 1, 2, 2, 2]))
labels = idx.tolist()
exp = {1: labels[:3], 2: labels[3:]}
tm.assert_dict_equal(groups, exp)
# GH5620
groups = idx.groupby(idx)
exp = {key: [key] for key in idx}
tm.assert_dict_equal(groups, exp)
def test_truncate_multiindex():
    # GH 34564: truncate() must preserve the MultiIndex level names.
    outer_level = Index(list(range(4)))
    inner_level = Index(list(range(2)))
    outer_codes = np.array([0, 0, 1, 2, 3, 3])
    inner_codes = np.array([0, 1, 0, 1, 0, 1])
    index = MultiIndex(
        levels=[outer_level, inner_level],
        codes=[outer_codes, inner_codes],
        names=["L1", "L2"],
    )

    result = index.truncate(before=1)
    assert "foo" not in result.levels[0]
    assert 1 in result.levels[0]
    assert index.names == result.names

    result = index.truncate(after=1)
    assert 2 not in result.levels[0]
    assert 1 in result.levels[0]
    assert index.names == result.names

    result = index.truncate(before=1, after=2)
    assert len(result.levels[0]) == 2
    assert index.names == result.names

    # before > after must be rejected outright
    msg = "after < before"
    with pytest.raises(ValueError, match=msg):
        index.truncate(3, 1)
# TODO: reshape
def test_reorder_levels(idx):
    # Asking for more levels than the index has must raise IndexError.
    bad_order = [2, 1, 0]
    with pytest.raises(IndexError, match="^Too many levels"):
        idx.reorder_levels(bad_order)
def test_numpy_repeat():
    # np.repeat on a MultiIndex repeats each label in place.
    reps = 2
    numbers = [1, 2, 3]
    names = np.array(["foo", "bar"])

    base = MultiIndex.from_product([numbers, names], names=names)
    expected = MultiIndex.from_product([numbers, names.repeat(reps)], names=names)
    tm.assert_index_equal(np.repeat(base, reps), expected)

    # numpy's axis argument is meaningless for an Index and is rejected.
    msg = "the 'axis' parameter is not supported"
    with pytest.raises(ValueError, match=msg):
        np.repeat(base, reps, axis=1)
def test_append_mixed_dtypes():
    # GH 13660: appending MultiIndexes must preserve per-level dtypes
    # (int, float-with-NaN, object, datetime, tz-aware datetime, period).
    dti = date_range("2011-01-01", freq="M", periods=3)
    dti_tz = date_range("2011-01-01", freq="M", periods=3, tz="US/Eastern")
    pi = period_range("2011-01", freq="M", periods=3)

    mi = MultiIndex.from_arrays(
        [[1, 2, 3], [1.1, np.nan, 3.3], ["a", "b", "c"], dti, dti_tz, pi]
    )
    assert mi.nlevels == 6

    # self-append: every level doubles, dtype unchanged
    res = mi.append(mi)
    exp = MultiIndex.from_arrays(
        [
            [1, 2, 3, 1, 2, 3],
            [1.1, np.nan, 3.3, 1.1, np.nan, 3.3],
            ["a", "b", "c", "a", "b", "c"],
            dti.append(dti),
            dti_tz.append(dti_tz),
            pi.append(pi),
        ]
    )
    tm.assert_index_equal(res, exp)

    # appending all-object levels coerces each level to object
    other = MultiIndex.from_arrays(
        [
            ["x", "y", "z"],
            ["x", "y", "z"],
            ["x", "y", "z"],
            ["x", "y", "z"],
            ["x", "y", "z"],
            ["x", "y", "z"],
        ]
    )

    res = mi.append(other)
    exp = MultiIndex.from_arrays(
        [
            [1, 2, 3, "x", "y", "z"],
            [1.1, np.nan, 3.3, "x", "y", "z"],
            ["a", "b", "c", "x", "y", "z"],
            dti.append(Index(["x", "y", "z"])),
            dti_tz.append(Index(["x", "y", "z"])),
            pi.append(Index(["x", "y", "z"])),
        ]
    )
    tm.assert_index_equal(res, exp)
def test_iter(idx):
result = list(idx)
expected = [
("foo", "one"),
("foo", "two"),
("bar", "one"),
("baz", "two"),
("qux", "one"),
("qux", "two"),
]
assert result == expected
def test_sub(idx):
    first = idx

    # - now raises: it is no longer treated as set difference for MultiIndex
    sub_msg = "cannot perform __sub__ with this index type: MultiIndex"
    with pytest.raises(TypeError, match=sub_msg):
        first - idx[-3:]
    with pytest.raises(TypeError, match=sub_msg):
        idx[-3:] - first
    with pytest.raises(TypeError, match=sub_msg):
        idx[-3:] - first.tolist()

    # the reflected form fails the same way
    rsub_msg = "cannot perform __rsub__ with this index type: MultiIndex"
    with pytest.raises(TypeError, match=rsub_msg):
        first.tolist() - idx[-3:]
def test_map(idx):
    # Mapping the identity callable round-trips the index, except that
    # UInt64 values are not re-inferred and come back as int64.
    index = idx
    if isinstance(index, pd.UInt64Index):
        expected = index.astype("int64")
    else:
        expected = index

    mapped = index.map(lambda value: value)
    tm.assert_index_equal(mapped, expected)
@pytest.mark.parametrize(
    "mapper",
    [
        lambda values, idx: {i: e for e, i in zip(values, idx)},
        lambda values, idx: pd.Series(values, idx),
    ],
)
def test_map_dictlike(idx, mapper):
    # Mapping through a dict-like (dict or Series) built from the index
    # itself reproduces the index.
    if isinstance(idx, (pd.CategoricalIndex, pd.IntervalIndex)):
        pytest.skip(f"skipping tests for {type(idx)}")

    identity = mapper(idx.values, idx)

    # a dict never re-infers to UInt64, so those indexes come back as int64
    if isinstance(idx, pd.UInt64Index) and isinstance(identity, dict):
        expected = idx.astype("int64")
    else:
        expected = idx
    tm.assert_index_equal(idx.map(identity), expected)

    # an all-NaN mappable maps every label to NaN
    expected = Index([np.nan] * len(idx))
    tm.assert_index_equal(idx.map(mapper(expected, idx)), expected)
@pytest.mark.parametrize(
    "func",
    [
        np.exp,
        np.exp2,
        np.expm1,
        np.log,
        np.log2,
        np.log10,
        np.log1p,
        np.sqrt,
        np.sin,
        np.cos,
        np.tan,
        np.arcsin,
        np.arccos,
        np.arctan,
        np.sinh,
        np.cosh,
        np.tanh,
        np.arcsinh,
        np.arccosh,
        np.arctanh,
        np.deg2rad,
        np.rad2deg,
    ],
    ids=lambda func: func.__name__,
)
def test_numpy_ufuncs(idx, func):
    # Element-wise numpy ufuncs cannot operate on tuple labels; see
    # https://numpy.org/doc/stable/reference/ufuncs.html
    msg = (
        "loop of ufunc does not support argument 0 of type tuple which "
        f"has no callable {func.__name__} method"
    )
    with pytest.raises(TypeError, match=msg):
        func(idx)
@pytest.mark.parametrize(
    "func",
    [np.isfinite, np.isinf, np.isnan, np.signbit],
    ids=lambda func: func.__name__,
)
def test_numpy_type_funcs(idx, func):
    # Type-inspection ufuncs cannot safely coerce tuple labels either.
    expected_msg = (
        f"ufunc '{func.__name__}' not supported for the input types, and the inputs "
        "could not be safely coerced to any supported types according to "
        "the casting rule ''safe''"
    )
    with pytest.raises(TypeError, match=expected_msg):
        func(idx)
|
datapythonista/pandas
|
pandas/tests/indexes/multi/test_analytics.py
|
Python
|
bsd-3-clause
| 6,843
|
from django.conf.urls import patterns, url
from views import google_get_state_token, google_login, google_logout

# URLconf for the Google-contacts flow. Uses the pre-Django-1.10
# ``patterns()`` helper and a Python-2-style implicit-relative ``views``
# import, so this module targets an old Django/Python stack.
urlpatterns = patterns('',
    # state token for a pending action, addressed by action type id + action id
    # (presumably part of the Google auth flow -- confirm in views)
    url(r'^get-state-token/(?P<action_type_id>\d+)/(?P<action_id>\d+)/$', google_get_state_token, name='google_contacts_get_state_token'),
    url(r'^login/$', google_login, name='google_contacts_login'),
    url(r'^logout/$', google_logout, name='google_contacts_logout'),
)
|
agiliq/fundraiser
|
contacts/urls.py
|
Python
|
bsd-3-clause
| 424
|
#!/usr/bin/env python
# Copyright (c) 2011 Fergus Gallagher <fergus.gallagher@citeulike.org>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. All advertising materials mentioning features or use of this software
# must display the following acknowledgement:
# This product includes software developed by
# CiteULike <http://www.citeulike.org> and its
# contributors.
# 4. Neither the name of CiteULike nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY CITEULIKE.ORG AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import atypon
import socket

# Cap all network operations so a stalled remote host cannot hang the run.
socket.setdefaulttimeout(15)

# Process www.tandfonline.com via the shared Atypon-platform driver;
# "TANDF" is the site tag passed through to atypon.process (see that
# module for its exact semantics).
atypon.process("www.tandfonline.com", "TANDF")
|
OAButton/tricorder
|
plugins/python/tandf.py
|
Python
|
bsd-3-clause
| 1,935
|
"""
======================================================
Classification of text documents using sparse features
======================================================
This is an example showing how scikit-learn can be used to classify documents
by topics using a bag-of-words approach. This example uses a scipy.sparse
matrix to store the features and demonstrates various classifiers that can
efficiently handle sparse matrices.
The dataset used in this example is the 20 newsgroups dataset. It will be
automatically downloaded, then cached.
The bar plot indicates the accuracy, training time (normalized) and test time
(normalized) of each classifier.
"""
# Author: Peter Prettenhofer <peter.prettenhofer@gmail.com>
# Olivier Grisel <olivier.grisel@ensta.org>
# Mathieu Blondel <mathieu@mblondel.org>
# Lars Buitinck <L.J.Buitinck@uva.nl>
# License: BSD 3 clause
from __future__ import print_function
import logging
import numpy as np
from optparse import OptionParser
import sys
from time import time
import matplotlib.pyplot as plt
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.feature_selection import SelectKBest, chi2
from sklearn.linear_model import RidgeClassifier
from sklearn.svm import LinearSVC
from sklearn.linear_model import SGDClassifier
from sklearn.linear_model import Perceptron
from sklearn.linear_model import PassiveAggressiveClassifier
from sklearn.naive_bayes import BernoulliNB, MultinomialNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import NearestCentroid
from sklearn.ensemble import RandomForestClassifier
from sklearn.utils.extmath import density
from sklearn import metrics
# Display progress logs on stdout
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(levelname)s %(message)s')

# parse commandline arguments
op = OptionParser()
op.add_option("--report",
              action="store_true", dest="print_report",
              help="Print a detailed classification report.")
op.add_option("--chi2_select",
              action="store", type="int", dest="select_chi2",
              help="Select some number of features using a chi-squared test")
op.add_option("--confusion_matrix",
              action="store_true", dest="print_cm",
              help="Print the confusion matrix.")
op.add_option("--top10",
              action="store_true", dest="print_top10",
              help="Print ten most discriminative terms per class"
                   " for every classifier.")
op.add_option("--all_categories",
              action="store_true", dest="all_categories",
              help="Whether to use all categories or not.")
op.add_option("--use_hashing",
              action="store_true",
              help="Use a hashing vectorizer.")
op.add_option("--n_features",
              action="store", type=int, default=2 ** 16,
              help="n_features when using the hashing vectorizer.")
op.add_option("--filtered",
              action="store_true",
              help="Remove newsgroup information that is easily overfit: "
                   "headers, signatures, and quoting.")

(opts, args) = op.parse_args()
if len(args) > 0:
    op.error("this script takes no arguments.")
    # optparse's error() already exits, so this line is belt-and-braces
    sys.exit(1)

# echo the module docstring and usage before running
print(__doc__)
op.print_help()
print()
###############################################################################
# Load some categories from the training set
if opts.all_categories:
    categories = None  # None => fetch_20newsgroups loads all 20 groups
else:
    # a small, contrasting subset keeps the default run fast
    categories = [
        'alt.atheism',
        'talk.religion.misc',
        'comp.graphics',
        'sci.space',
    ]

if opts.filtered:
    # strip metadata that makes the task artificially easy to overfit
    remove = ('headers', 'footers', 'quotes')
else:
    remove = ()

print("Loading 20 newsgroups dataset for categories:")
print(categories if categories else "all")

data_train = fetch_20newsgroups(subset='train', categories=categories,
                                shuffle=True, random_state=42,
                                remove=remove)
data_test = fetch_20newsgroups(subset='test', categories=categories,
                               shuffle=True, random_state=42,
                               remove=remove)
print('data loaded')

categories = data_train.target_names  # for case categories == None
def size_mb(docs):
    """Total UTF-8-encoded size of the documents, in megabytes."""
    total_bytes = sum(len(doc.encode('utf-8')) for doc in docs)
    return total_bytes / 1e6
data_train_size_mb = size_mb(data_train.data)
data_test_size_mb = size_mb(data_test.data)

print("%d documents - %0.3fMB (training set)" % (
    len(data_train.data), data_train_size_mb))
print("%d documents - %0.3fMB (test set)" % (
    len(data_test.data), data_test_size_mb))
print("%d categories" % len(categories))
print()

# split a training set and a test set
y_train, y_test = data_train.target, data_test.target

print("Extracting features from the training dataset using a sparse vectorizer")
t0 = time()
if opts.use_hashing:
    # stateless: no fit needed, but no way to map features back to terms
    vectorizer = HashingVectorizer(stop_words='english', non_negative=True,
                                   n_features=opts.n_features)
    X_train = vectorizer.transform(data_train.data)
else:
    vectorizer = TfidfVectorizer(sublinear_tf=True, max_df=0.5,
                                 stop_words='english')
    X_train = vectorizer.fit_transform(data_train.data)
duration = time() - t0
print("done in %fs at %0.3fMB/s" % (duration, data_train_size_mb / duration))
print("n_samples: %d, n_features: %d" % X_train.shape)
print()

print("Extracting features from the test dataset using the same vectorizer")
t0 = time()
# reuse the already-fitted vectorizer so train/test share a feature space
X_test = vectorizer.transform(data_test.data)
duration = time() - t0
print("done in %fs at %0.3fMB/s" % (duration, data_test_size_mb / duration))
print("n_samples: %d, n_features: %d" % X_test.shape)
print()

# mapping from integer feature name to original token string
if opts.use_hashing:
    feature_names = None  # hashing is one-way: no names recoverable
else:
    feature_names = vectorizer.get_feature_names()

if opts.select_chi2:
    print("Extracting %d best features by a chi-squared test" %
          opts.select_chi2)
    t0 = time()
    ch2 = SelectKBest(chi2, k=opts.select_chi2)
    X_train = ch2.fit_transform(X_train, y_train)
    X_test = ch2.transform(X_test)  # same selector applied to the test set
    if feature_names:
        # keep selected feature names
        feature_names = [feature_names[i] for i
                         in ch2.get_support(indices=True)]
    print("done in %fs" % (time() - t0))
    print()

if feature_names:
    feature_names = np.asarray(feature_names)
def trim(s):
    """Trim string to fit on terminal (assuming 80-column display)."""
    if len(s) <= 80:
        return s
    return s[:77] + "..."
###############################################################################
# Benchmark classifiers
def benchmark(clf):
    """Fit ``clf`` on the module-level train split, score on the test split.

    Prints timings, f1 score and, depending on the command-line flags,
    per-class top keywords, a classification report and a confusion matrix.

    Returns a ``(clf_descr, score, train_time, test_time)`` tuple: the
    estimator's class name, its f1 score, and wall-clock fit/predict
    times in seconds.
    """
    print('_' * 80)
    print("Training: ")
    print(clf)
    t0 = time()
    clf.fit(X_train, y_train)
    train_time = time() - t0
    print("train time: %0.3fs" % train_time)

    t0 = time()
    pred = clf.predict(X_test)
    test_time = time() - t0
    print("test time: %0.3fs" % test_time)

    # NOTE(review): multiclass f1_score without ``average=`` relies on an
    # old scikit-learn default; newer releases require it explicitly.
    score = metrics.f1_score(y_test, pred)
    print("f1-score: %0.3f" % score)

    if hasattr(clf, 'coef_'):
        print("dimensionality: %d" % clf.coef_.shape[1])
        print("density: %f" % density(clf.coef_))

        if opts.print_top10 and feature_names is not None:
            # largest per-class coefficients = most discriminative terms
            print("top 10 keywords per class:")
            for i, category in enumerate(categories):
                top10 = np.argsort(clf.coef_[i])[-10:]
                print(trim("%s: %s"
                           % (category, " ".join(feature_names[top10]))))
        print()

    if opts.print_report:
        print("classification report:")
        print(metrics.classification_report(y_test, pred,
                                            target_names=categories))

    if opts.print_cm:
        print("confusion matrix:")
        print(metrics.confusion_matrix(y_test, pred))

    print()
    clf_descr = str(clf).split('(')[0]  # estimator class name only
    return clf_descr, score, train_time, test_time
results = []

# NOTE(review): n_iter= and loss='l2' are pre-0.19 scikit-learn spellings
# (later max_iter= and loss='squared_hinge').
for clf, name in (
        (RidgeClassifier(tol=1e-2, solver="lsqr"), "Ridge Classifier"),
        (Perceptron(n_iter=50), "Perceptron"),
        (PassiveAggressiveClassifier(n_iter=50), "Passive-Aggressive"),
        (KNeighborsClassifier(n_neighbors=10), "kNN"),
        (RandomForestClassifier(n_estimators=100), "Random forest")):
    print('=' * 80)
    print(name)
    results.append(benchmark(clf))

for penalty in ["l2", "l1"]:
    print('=' * 80)
    print("%s penalty" % penalty.upper())
    # Train Liblinear model
    results.append(benchmark(LinearSVC(loss='l2', penalty=penalty,
                                       dual=False, tol=1e-3)))

    # Train SGD model
    results.append(benchmark(SGDClassifier(alpha=.0001, n_iter=50,
                                           penalty=penalty)))

# Train SGD with Elastic Net penalty
print('=' * 80)
print("Elastic-Net penalty")
results.append(benchmark(SGDClassifier(alpha=.0001, n_iter=50,
                                       penalty="elasticnet")))

# Train NearestCentroid without threshold
print('=' * 80)
print("NearestCentroid (aka Rocchio classifier)")
results.append(benchmark(NearestCentroid()))

# Train sparse Naive Bayes classifiers
print('=' * 80)
print("Naive Bayes")
results.append(benchmark(MultinomialNB(alpha=.01)))
results.append(benchmark(BernoulliNB(alpha=.01)))
class L1LinearSVC(LinearSVC):
    """LinearSVC with built-in L1-based feature selection.

    An inner L1-penalized LinearSVC selects features; the inherited
    (L2) LinearSVC then classifies on the reduced matrix.

    NOTE(review): relies on the long-removed ``LinearSVC.transform`` /
    ``fit_transform`` API of old scikit-learn releases.
    """

    def fit(self, X, y):
        # The smaller C, the stronger the regularization.
        # The more regularization, the more sparsity.
        self.transformer_ = LinearSVC(penalty="l1",
                                      dual=False, tol=1e-3)
        X = self.transformer_.fit_transform(X, y)
        return LinearSVC.fit(self, X, y)

    def predict(self, X):
        # Apply the same feature mask that was chosen during fit.
        X = self.transformer_.transform(X)
        return LinearSVC.predict(self, X)
print('=' * 80)
print("LinearSVC with L1-based feature selection")
results.append(benchmark(L1LinearSVC()))

# make some plots
indices = np.arange(len(results))

# transpose the list of result tuples into four parallel lists
results = [[x[i] for x in results] for i in range(4)]

clf_names, score, training_time, test_time = results
# normalize times to [0, 1] so they share the score axis
training_time = np.array(training_time) / np.max(training_time)
test_time = np.array(test_time) / np.max(test_time)

plt.figure(figsize=(12, 8))
plt.title("Score")
plt.barh(indices, score, .2, label="score", color='r')
plt.barh(indices + .3, training_time, .2, label="training time", color='g')
plt.barh(indices + .6, test_time, .2, label="test time", color='b')
plt.yticks(())
plt.legend(loc='best')
plt.subplots_adjust(left=.25)
plt.subplots_adjust(top=.95)
plt.subplots_adjust(bottom=.05)

# label each bar group with its classifier name, left of the axis
for i, c in zip(indices, clf_names):
    plt.text(-.3, i, c)

plt.show()
|
hitszxp/scikit-learn
|
examples/text/document_classification_20newsgroups.py
|
Python
|
bsd-3-clause
| 10,746
|
"""
Functions to operate on polynomials.
"""
__all__ = ['poly', 'roots', 'polyint', 'polyder', 'polyadd',
'polysub', 'polymul', 'polydiv', 'polyval', 'poly1d',
'polyfit', 'RankWarning']
import functools
import re
import warnings
import numpy.core.numeric as NX
from numpy.core import (isscalar, abs, finfo, atleast_1d, hstack, dot, array,
ones)
from numpy.core import overrides
from numpy.core.overrides import set_module
from numpy.lib.twodim_base import diag, vander
from numpy.lib.function_base import trim_zeros
from numpy.lib.type_check import iscomplex, real, imag, mintypecode
from numpy.linalg import eigvals, lstsq, inv
# All public functions below route through numpy's __array_function__
# override machinery; this partial just pins module='numpy'.
array_function_dispatch = functools.partial(
    overrides.array_function_dispatch, module='numpy')


@set_module('numpy')
class RankWarning(UserWarning):
    """
    Issued by `polyfit` when the Vandermonde matrix is rank deficient.

    For more information, a way to suppress the warning, and an example of
    `RankWarning` being issued, see `polyfit`.
    """
    pass
def _poly_dispatcher(seq_of_zeros):
    # __array_function__ dispatcher: the argument sequence itself supplies
    # the candidate array-likes for override resolution.
    return seq_of_zeros
@array_function_dispatch(_poly_dispatcher)
def poly(seq_of_zeros):
    """
    Find the coefficients of a polynomial with the given sequence of roots.

    .. note::
        This forms part of the old polynomial API. Since version 1.4, the
        new polynomial API defined in `numpy.polynomial` is preferred.
        A summary of the differences can be found in the
        :doc:`transition guide </reference/routines.polynomials>`.

    Returns the coefficients of the polynomial whose leading coefficient
    is one for the given sequence of zeros (multiple roots must be included
    in the sequence as many times as their multiplicity; see Examples).
    A square matrix (or array, which will be treated as a matrix) can also
    be given, in which case the coefficients of the characteristic polynomial
    of the matrix are returned.

    Parameters
    ----------
    seq_of_zeros : array_like, shape (N,) or (N, N)
        A sequence of polynomial roots, or a square array or matrix object.

    Returns
    -------
    c : ndarray
        1D array of polynomial coefficients from highest to lowest degree:

        ``c[0] * x**(N) + c[1] * x**(N-1) + ... + c[N-1] * x + c[N]``
        where c[0] always equals 1.

    Raises
    ------
    ValueError
        If input is the wrong shape (the input must be a 1-D or square
        2-D array).

    See Also
    --------
    polyval : Compute polynomial values.
    roots : Return the roots of a polynomial.
    polyfit : Least squares polynomial fit.
    poly1d : A one-dimensional polynomial class.

    Notes
    -----
    Specifying the roots of a polynomial still leaves one degree of
    freedom, typically represented by an undetermined leading
    coefficient. [1]_ In the case of this function, that coefficient -
    the first one in the returned array - is always taken as one. (If
    for some reason you have one other point, the only automatic way
    presently to leverage that information is to use ``polyfit``.)

    The characteristic polynomial, :math:`p_a(t)`, of an `n`-by-`n`
    matrix **A** is given by

        :math:`p_a(t) = \\mathrm{det}(t\\, \\mathbf{I} - \\mathbf{A})`,

    where **I** is the `n`-by-`n` identity matrix. [2]_

    References
    ----------
    .. [1] M. Sullivan and M. Sullivan, III, "Algebra and Trignometry,
       Enhanced With Graphing Utilities," Prentice-Hall, pg. 318, 1996.

    .. [2] G. Strang, "Linear Algebra and Its Applications, 2nd Edition,"
       Academic Press, pg. 182, 1980.

    Examples
    --------
    Given a sequence of a polynomial's zeros:

    >>> np.poly((0, 0, 0)) # Multiple root example
    array([1., 0., 0., 0.])

    The line above represents z**3 + 0*z**2 + 0*z + 0.

    >>> np.poly((-1./2, 0, 1./2))
    array([ 1.  ,  0.  , -0.25,  0.  ])

    The line above represents z**3 - z/4

    >>> np.poly((np.random.random(1)[0], 0, np.random.random(1)[0]))
    array([ 1.        , -0.77086955,  0.08618131,  0.        ]) # random

    Given a square array object:

    >>> P = np.array([[0, 1./3], [-1./2, 0]])
    >>> np.poly(P)
    array([1.        , 0.        , 0.16666667])

    Note how in all cases the leading coefficient is always 1.
    """
    seq_of_zeros = atleast_1d(seq_of_zeros)
    sh = seq_of_zeros.shape

    if len(sh) == 2 and sh[0] == sh[1] and sh[0] != 0:
        # square 2-D input: the eigenvalues become the roots, so the result
        # is the characteristic polynomial of the matrix
        seq_of_zeros = eigvals(seq_of_zeros)
    elif len(sh) == 1:
        dt = seq_of_zeros.dtype
        # Let object arrays slip through, e.g. for arbitrary precision
        if dt != object:
            seq_of_zeros = seq_of_zeros.astype(mintypecode(dt.char))
    else:
        raise ValueError("input must be 1d or non-empty square 2d array.")

    if len(seq_of_zeros) == 0:
        return 1.0
    dt = seq_of_zeros.dtype
    a = ones((1,), dtype=dt)
    for k in range(len(seq_of_zeros)):
        # multiply the running polynomial by the factor (x - root_k)
        a = NX.convolve(a, array([1, -seq_of_zeros[k]], dtype=dt),
                        mode='full')

    if issubclass(a.dtype.type, NX.complexfloating):
        # if complex roots are all complex conjugates, the roots are real.
        roots = NX.asarray(seq_of_zeros, complex)
        if NX.all(NX.sort(roots) == NX.sort(roots.conjugate())):
            a = a.real.copy()

    return a
def _roots_dispatcher(p):
    # __array_function__ dispatcher: ``p`` is the only array-like argument.
    return p
@array_function_dispatch(_roots_dispatcher)
def roots(p):
    """
    Return the roots of a polynomial with coefficients given in p.

    .. note::
        This forms part of the old polynomial API. Since version 1.4, the
        new polynomial API defined in `numpy.polynomial` is preferred.
        A summary of the differences can be found in the
        :doc:`transition guide </reference/routines.polynomials>`.

    The values in the rank-1 array `p` are coefficients of a polynomial.
    If the length of `p` is n+1 then the polynomial is described by::

      p[0] * x**n + p[1] * x**(n-1) + ... + p[n-1]*x + p[n]

    Parameters
    ----------
    p : array_like
        Rank-1 array of polynomial coefficients.

    Returns
    -------
    out : ndarray
        An array containing the roots of the polynomial.

    Raises
    ------
    ValueError
        When `p` cannot be converted to a rank-1 array.

    See also
    --------
    poly : Find the coefficients of a polynomial with a given sequence
           of roots.
    polyval : Compute polynomial values.
    polyfit : Least squares polynomial fit.
    poly1d : A one-dimensional polynomial class.

    Notes
    -----
    The algorithm relies on computing the eigenvalues of the
    companion matrix [1]_.

    References
    ----------
    .. [1] R. A. Horn & C. R. Johnson, *Matrix Analysis*.  Cambridge, UK:
        Cambridge University Press, 1999, pp. 146-7.

    Examples
    --------
    >>> coeff = [3.2, 2, 1]
    >>> np.roots(coeff)
    array([-0.3125+0.46351241j, -0.3125-0.46351241j])
    """
    # If input is scalar, this makes it an array
    p = atleast_1d(p)
    if p.ndim != 1:
        raise ValueError("Input must be a rank-1 array.")

    # find non-zero array entries
    non_zero = NX.nonzero(NX.ravel(p))[0]

    # Return an empty array if polynomial is all zeros
    if len(non_zero) == 0:
        return NX.array([])

    # find the number of trailing zeros -- this is the number of roots at 0.
    trailing_zeros = len(p) - non_zero[-1] - 1

    # strip leading and trailing zeros
    p = p[int(non_zero[0]):int(non_zero[-1])+1]

    # casting: if incoming array isn't floating point, make it floating point.
    if not issubclass(p.dtype.type, (NX.floating, NX.complexfloating)):
        p = p.astype(float)

    N = len(p)
    if N > 1:
        # build companion matrix and find its eigenvalues (the roots)
        A = diag(NX.ones((N-2,), p.dtype), -1)
        A[0,:] = -p[1:] / p[0]
        roots = eigvals(A)
    else:
        # degree-0 polynomial (a constant): no roots
        roots = NX.array([])

    # tack any zeros onto the back of the array
    roots = hstack((roots, NX.zeros(trailing_zeros, roots.dtype)))
    return roots
def _polyint_dispatcher(p, m=None, k=None):
    # __array_function__ dispatcher: only ``p`` participates in override
    # resolution; ``m`` and ``k`` are plain scalars.
    return (p,)
@array_function_dispatch(_polyint_dispatcher)
def polyint(p, m=1, k=None):
"""
Return an antiderivative (indefinite integral) of a polynomial.
.. note::
This forms part of the old polynomial API. Since version 1.4, the
new polynomial API defined in `numpy.polynomial` is preferred.
A summary of the differences can be found in the
:doc:`transition guide </reference/routines.polynomials>`.
The returned order `m` antiderivative `P` of polynomial `p` satisfies
:math:`\\frac{d^m}{dx^m}P(x) = p(x)` and is defined up to `m - 1`
integration constants `k`. The constants determine the low-order
polynomial part
.. math:: \\frac{k_{m-1}}{0!} x^0 + \\ldots + \\frac{k_0}{(m-1)!}x^{m-1}
of `P` so that :math:`P^{(j)}(0) = k_{m-j-1}`.
Parameters
----------
p : array_like or poly1d
Polynomial to integrate.
A sequence is interpreted as polynomial coefficients, see `poly1d`.
m : int, optional
Order of the antiderivative. (Default: 1)
k : list of `m` scalars or scalar, optional
Integration constants. They are given in the order of integration:
those corresponding to highest-order terms come first.
If ``None`` (default), all constants are assumed to be zero.
If `m = 1`, a single scalar can be given instead of a list.
See Also
--------
polyder : derivative of a polynomial
poly1d.integ : equivalent method
Examples
--------
The defining property of the antiderivative:
>>> p = np.poly1d([1,1,1])
>>> P = np.polyint(p)
>>> P
poly1d([ 0.33333333, 0.5 , 1. , 0. ]) # may vary
>>> np.polyder(P) == p
True
The integration constants default to zero, but can be specified:
>>> P = np.polyint(p, 3)
>>> P(0)
0.0
>>> np.polyder(P)(0)
0.0
>>> np.polyder(P, 2)(0)
0.0
>>> P = np.polyint(p, 3, k=[6,5,3])
>>> P
poly1d([ 0.01666667, 0.04166667, 0.16666667, 3. , 5. , 3. ]) # may vary
Note that 3 = 6 / 2!, and that the constants are given in the order of
integrations. Constant of the highest-order polynomial term comes first:
>>> np.polyder(P, 2)(0)
6.0
>>> np.polyder(P, 1)(0)
5.0
>>> P(0)
3.0
"""
m = int(m)
if m < 0:
raise ValueError("Order of integral must be positive (see polyder)")
if k is None:
k = NX.zeros(m, float)
k = atleast_1d(k)
if len(k) == 1 and m > 1:
k = k[0]*NX.ones(m, float)
if len(k) < m:
raise ValueError(
"k must be a scalar or a rank-1 array of length 1 or >m.")
truepoly = isinstance(p, poly1d)
p = NX.asarray(p)
if m == 0:
if truepoly:
return poly1d(p)
return p
else:
# Note: this must work also with object and integer arrays
y = NX.concatenate((p.__truediv__(NX.arange(len(p), 0, -1)), [k[0]]))
val = polyint(y, m - 1, k=k[1:])
if truepoly:
return poly1d(val)
return val
def _polyder_dispatcher(p, m=None):
    # Only `p` takes part in __array_function__ dispatch.
    return (p,)


@array_function_dispatch(_polyder_dispatcher)
def polyder(p, m=1):
    """
    Return the order-`m` derivative of the polynomial `p`.

    .. note::
       Part of the old polynomial API; since version 1.4 the API in
       `numpy.polynomial` is preferred.

    Parameters
    ----------
    p : poly1d or sequence
        Polynomial to differentiate, coefficients highest power first.
    m : int, optional
        Order of differentiation (default 1).

    Returns
    -------
    ndarray or poly1d
        The derivative; a poly1d is returned when `p` is one.

    See Also
    --------
    polyint : Anti-derivative of a polynomial.
    poly1d : Class for one-dimensional polynomials.
    """
    m = int(m)
    if m < 0:
        raise ValueError("Order of derivative must be positive (see polyint)")
    was_poly1d = isinstance(p, poly1d)
    p = NX.asarray(p)
    degree = len(p) - 1
    # Power rule: drop the constant term, multiply by the old exponents.
    first_deriv = p[:-1] * NX.arange(degree, 0, -1)
    if m == 0:
        result = p
    else:
        result = polyder(first_deriv, m - 1)
    if was_poly1d:
        result = poly1d(result)
    return result
def _polyfit_dispatcher(x, y, deg, rcond=None, full=None, w=None, cov=None):
    # x, y and w take part in __array_function__ dispatch.
    return (x, y, w)


@array_function_dispatch(_polyfit_dispatcher)
def polyfit(x, y, deg, rcond=None, full=False, w=None, cov=False):
    """
    Least squares polynomial fit.

    .. note::
       Part of the old polynomial API; since version 1.4 the API in
       `numpy.polynomial` is preferred.

    Fit ``p(x) = p[0] * x**deg + ... + p[deg]`` of degree `deg` to the
    points ``(x, y)`` and return the coefficients, highest power first.

    Parameters
    ----------
    x : array_like, shape (M,)
        x-coordinates of the M sample points.
    y : array_like, shape (M,) or (M, K)
        y-coordinates; a 2-D array fits one dataset per column.
    deg : int
        Degree of the fitting polynomial.
    rcond : float, optional
        Relative condition-number cutoff for the fit.  Defaults to
        ``len(x) * eps`` of the x dtype.
    full : bool, optional
        When True, also return diagnostics from the SVD instead of warning
        on rank deficiency.
    w : array_like, shape (M,), optional
        Weights applied to the y-coordinates (use 1/sigma for gaussian
        uncertainties, not 1/sigma**2).
    cov : bool or str, optional
        When truthy (and `full` is False), also return the covariance
        matrix of the coefficients, scaled by chi2/dof unless
        ``cov='unscaled'``.

    Returns
    -------
    p : ndarray, shape (deg + 1,) or (deg + 1, K)
        Fitted coefficients.  With ``full=True`` also returns
        ``(residuals, rank, singular_values, rcond)``; with ``cov``
        truthy also returns the covariance matrix.

    Warns
    -----
    RankWarning
        When the coefficient matrix is rank deficient and ``full=False``.

    See Also
    --------
    polyval : Compute polynomial values.
    linalg.lstsq : Computes a least-squares fit.
    """
    order = int(deg) + 1
    x = NX.asarray(x) + 0.0
    y = NX.asarray(y) + 0.0
    # Validate the inputs.
    if deg < 0:
        raise ValueError("expected deg >= 0")
    if x.ndim != 1:
        raise TypeError("expected 1D vector for x")
    if x.size == 0:
        raise TypeError("expected non-empty vector for x")
    if y.ndim < 1 or y.ndim > 2:
        raise TypeError("expected 1D or 2D array for y")
    if x.shape[0] != y.shape[0]:
        raise TypeError("expected x and y to have same length")
    if rcond is None:
        # Default cutoff scales with the sample count and float precision.
        rcond = len(x)*finfo(x.dtype).eps
    # Least-squares system: Vandermonde matrix of x against y.
    lhs = vander(x, order)
    rhs = y
    if w is not None:
        w = NX.asarray(w) + 0.0
        if w.ndim != 1:
            raise TypeError("expected a 1-d array for weights")
        if w.shape[0] != y.shape[0]:
            raise TypeError("expected w and y to have the same length")
        lhs *= w[:, NX.newaxis]
        if rhs.ndim == 2:
            rhs = rhs * w[:, NX.newaxis]
        else:
            rhs = rhs * w
    # Normalize columns to improve the condition number before solving.
    col_norm = NX.sqrt((lhs*lhs).sum(axis=0))
    lhs /= col_norm
    c, resids, rank, s = lstsq(lhs, rhs, rcond)
    c = (c.T/col_norm).T  # undo the column scaling on the solution
    # Rank reduction indicates an ill-conditioned matrix; warn unless the
    # caller asked for the full diagnostics anyway.
    if rank != order and not full:
        warnings.warn("Polyfit may be poorly conditioned", RankWarning,
                      stacklevel=4)
    if full:
        return c, resids, rank, s, rcond
    if cov:
        Vbase = inv(dot(lhs.T, lhs))
        Vbase /= NX.outer(col_norm, col_norm)
        if cov == "unscaled":
            fac = 1
        else:
            if len(x) <= order:
                raise ValueError("the number of data points must exceed order "
                                 "to scale the covariance matrix")
            # Scale by chi2/dof; the historical "- 2" term was dropped,
            # see gh-11196 and gh-11197.
            fac = resids / (len(x) - order)
        if y.ndim == 1:
            return c, Vbase * fac
        return c, Vbase[:, :, NX.newaxis] * fac
    return c
def _polyval_dispatcher(p, x):
    # Both arguments take part in __array_function__ dispatch.
    return (p, x)


@array_function_dispatch(_polyval_dispatcher)
def polyval(p, x):
    """
    Evaluate the polynomial `p` at `x` using Horner's scheme.

    .. note::
       Part of the old polynomial API; since version 1.4 the API in
       `numpy.polynomial` is preferred.

    Parameters
    ----------
    p : array_like or poly1d object
        1-D coefficients from highest degree to the constant term, or a
        poly1d instance.
    x : array_like or poly1d object
        Where to evaluate `p`.  A poly1d yields the composition
        ``p(x(t))``.

    Returns
    -------
    ndarray or poly1d
        Values of `p` at `x`; a poly1d when `x` is one.

    See Also
    --------
    poly1d: A polynomial class.
    """
    p = NX.asarray(p)
    if isinstance(x, poly1d):
        # Composition: accumulate starting from the zero polynomial.
        acc = 0
    else:
        x = NX.asanyarray(x)
        acc = NX.zeros_like(x)
    # Horner's scheme, highest-order coefficient first.
    for coefficient in p:
        acc = acc * x + coefficient
    return acc
def _binary_op_dispatcher(a1, a2):
    # Both operands take part in __array_function__ dispatch.
    return (a1, a2)


@array_function_dispatch(_binary_op_dispatcher)
def polyadd(a1, a2):
    """
    Return the sum of two polynomials.

    .. note::
       Part of the old polynomial API; since version 1.4 the API in
       `numpy.polynomial` is preferred.

    Parameters
    ----------
    a1, a2 : array_like or poly1d object
        Input polynomials, coefficients highest degree first.

    Returns
    -------
    ndarray or poly1d object
        The sum; a poly1d when either input is one.

    See Also
    --------
    poly1d : A one-dimensional polynomial class.
    """
    any_poly1d = isinstance(a1, poly1d) or isinstance(a2, poly1d)
    a1 = atleast_1d(a1)
    a2 = atleast_1d(a2)
    # Left-pad the shorter coefficient array with zeros so both align.
    pad = len(a2) - len(a1)
    if pad > 0:
        a1 = NX.concatenate((NX.zeros(pad, a1.dtype), a1))
    elif pad < 0:
        a2 = NX.concatenate((NX.zeros(-pad, a2.dtype), a2))
    total = a1 + a2
    return poly1d(total) if any_poly1d else total
@array_function_dispatch(_binary_op_dispatcher)
def polysub(a1, a2):
    """
    Return the difference ``a1 - a2`` of two polynomials.

    .. note::
       Part of the old polynomial API; since version 1.4 the API in
       `numpy.polynomial` is preferred.

    Parameters
    ----------
    a1, a2 : array_like or poly1d
        Minuend and subtrahend polynomials, coefficients highest degree
        first.

    Returns
    -------
    ndarray or poly1d
        The difference polynomial's coefficients; a poly1d when either
        input is one.

    See Also
    --------
    polyval, polydiv, polymul, polyadd
    """
    any_poly1d = isinstance(a1, poly1d) or isinstance(a2, poly1d)
    a1 = atleast_1d(a1)
    a2 = atleast_1d(a2)
    # Left-pad the shorter coefficient array with zeros so both align.
    pad = len(a2) - len(a1)
    if pad > 0:
        a1 = NX.concatenate((NX.zeros(pad, a1.dtype), a1))
    elif pad < 0:
        a2 = NX.concatenate((NX.zeros(-pad, a2.dtype), a2))
    difference = a1 - a2
    return poly1d(difference) if any_poly1d else difference
@array_function_dispatch(_binary_op_dispatcher)
def polymul(a1, a2):
    """
    Return the product of two polynomials.

    .. note::
       Part of the old polynomial API; since version 1.4 the API in
       `numpy.polynomial` is preferred.

    Parameters
    ----------
    a1, a2 : array_like or poly1d object
        Input polynomials, coefficients highest degree first.

    Returns
    -------
    ndarray or poly1d object
        The product; a poly1d when either input is one.

    See Also
    --------
    poly1d : A one-dimensional polynomial class.
    convolve : Array convolution; same output as polymul.
    """
    any_poly1d = isinstance(a1, poly1d) or isinstance(a2, poly1d)
    # Polynomial multiplication is convolution of the coefficient arrays;
    # poly1d() normalizes both operands first.
    product = NX.convolve(poly1d(a1), poly1d(a2))
    return poly1d(product) if any_poly1d else product
def _polydiv_dispatcher(u, v):
    # Both operands take part in __array_function__ dispatch.
    return (u, v)


@array_function_dispatch(_polydiv_dispatcher)
def polydiv(u, v):
    """
    Return the quotient and remainder of polynomial division ``u / v``.

    .. note::
       Part of the old polynomial API; since version 1.4 the API in
       `numpy.polynomial` is preferred.

    Parameters
    ----------
    u : array_like or poly1d
        Dividend polynomial's coefficients, highest degree first.
    v : array_like or poly1d
        Divisor polynomial's coefficients, highest degree first.

    Returns
    -------
    q : ndarray
        Quotient coefficients (poly1d when either input is one).
    r : ndarray
        Remainder coefficients (poly1d when either input is one).

    See Also
    --------
    poly, polyadd, polyder, polyfit, polyint, polymul, polysub, polyval
    """
    any_poly1d = isinstance(u, poly1d) or isinstance(v, poly1d)
    u = atleast_1d(u) + 0.0
    v = atleast_1d(v) + 0.0
    # Adding the leading coefficients yields the common result dtype.
    common = u[0] + v[0]
    deg_u = len(u) - 1
    deg_v = len(v) - 1
    inv_lead = 1. / v[0]
    quot = NX.zeros((max(deg_u - deg_v + 1, 1),), common.dtype)
    rem = u.astype(common.dtype)
    # Classic long division: peel one quotient coefficient per step.
    for j in range(deg_u - deg_v + 1):
        factor = inv_lead * rem[j]
        quot[j] = factor
        rem[j:j + deg_v + 1] -= factor * v
    # Strip (near-)zero leading coefficients from the remainder.
    while NX.allclose(rem[0], 0, rtol=1e-14) and (rem.shape[-1] > 1):
        rem = rem[1:]
    if any_poly1d:
        return poly1d(quot), poly1d(rem)
    return quot, rem
_poly_mat = re.compile(r"\*\*([0-9]*)")
def _raise_power(astr, wrap=70):
n = 0
line1 = ''
line2 = ''
output = ' '
while True:
mat = _poly_mat.search(astr, n)
if mat is None:
break
span = mat.span()
power = mat.groups()[0]
partstr = astr[n:span[0]]
n = span[1]
toadd2 = partstr + ' '*(len(power)-1)
toadd1 = ' '*(len(partstr)-1) + power
if ((len(line2) + len(toadd2) > wrap) or
(len(line1) + len(toadd1) > wrap)):
output += line1 + "\n" + line2 + "\n "
line1 = toadd1
line2 = toadd2
else:
line2 += partstr + ' '*(len(power)-1)
line1 += ' '*(len(partstr)-1) + power
output += line1 + "\n" + line2
return output + astr[n:]
@set_module('numpy')
class poly1d:
    """
    A one-dimensional polynomial class.
    .. note::
       This forms part of the old polynomial API. Since version 1.4, the
       new polynomial API defined in `numpy.polynomial` is preferred.
       A summary of the differences can be found in the
       :doc:`transition guide </reference/routines.polynomials>`.
    A convenience class, used to encapsulate "natural" operations on
    polynomials so that said operations may take on their customary
    form in code (see Examples).
    Parameters
    ----------
    c_or_r : array_like
        The polynomial's coefficients, in decreasing powers, or if
        the value of the second parameter is True, the polynomial's
        roots (values where the polynomial evaluates to 0). For example,
        ``poly1d([1, 2, 3])`` returns an object that represents
        :math:`x^2 + 2x + 3`, whereas ``poly1d([1, 2, 3], True)`` returns
        one that represents :math:`(x-1)(x-2)(x-3) = x^3 - 6x^2 + 11x -6`.
    r : bool, optional
        If True, `c_or_r` specifies the polynomial's roots; the default
        is False.
    variable : str, optional
        Changes the variable used when printing `p` from `x` to `variable`
        (see Examples).
    Examples
    --------
    Construct the polynomial :math:`x^2 + 2x + 3`:
    >>> p = np.poly1d([1, 2, 3])
    >>> print(np.poly1d(p))
       2
    1 x + 2 x + 3
    Evaluate the polynomial at :math:`x = 0.5`:
    >>> p(0.5)
    4.25
    Find the roots:
    >>> p.r
    array([-1.+1.41421356j, -1.-1.41421356j])
    >>> p(p.r)
    array([ -4.44089210e-16+0.j, -4.44089210e-16+0.j]) # may vary
    These numbers in the previous line represent (0, 0) to machine precision
    Show the coefficients:
    >>> p.c
    array([1, 2, 3])
    Display the order (the leading zero-coefficients are removed):
    >>> p.order
    2
    Show the coefficient of the k-th power in the polynomial
    (which is equivalent to ``p.c[-(i+1)]``):
    >>> p[1]
    2
    Polynomials can be added, subtracted, multiplied, and divided
    (returns quotient and remainder):
    >>> p * p
    poly1d([ 1, 4, 10, 12, 9])
    >>> (p**3 + 4) / p
    (poly1d([ 1., 4., 10., 12., 9.]), poly1d([4.]))
    ``asarray(p)`` gives the coefficient array, so polynomials can be
    used in all functions that accept arrays:
    >>> p**2 # square of polynomial
    poly1d([ 1, 4, 10, 12, 9])
    >>> np.square(p) # square of individual coefficients
    array([1, 4, 9])
    The variable used in the string representation of `p` can be modified,
    using the `variable` parameter:
    >>> p = np.poly1d([1,2,3], variable='z')
    >>> print(p)
       2
    1 z + 2 z + 3
    Construct a polynomial from its roots:
    >>> np.poly1d([1, 2], True)
    poly1d([ 1., -3., 2.])
    This is the same polynomial as obtained by:
    >>> np.poly1d([1, -1]) * np.poly1d([1, -2])
    poly1d([ 1, -3, 2])
    """
    # Instances are mutable (see __setitem__), so they must not be hashable.
    __hash__ = None
    @property
    def coeffs(self):
        """ The polynomial coefficients """
        return self._coeffs
    @coeffs.setter
    def coeffs(self, value):
        # allowing this makes p.coeffs *= 2 legal
        # (in-place ops pass the same array object back), while rebinding
        # to a different array is rejected.
        if value is not self._coeffs:
            raise AttributeError("Cannot set attribute")
    @property
    def variable(self):
        """ The name of the polynomial variable """
        return self._variable
    # calculated attributes
    @property
    def order(self):
        """ The order or degree of the polynomial """
        return len(self._coeffs) - 1
    @property
    def roots(self):
        """ The roots of the polynomial, where self(x) == 0 """
        return roots(self._coeffs)
    # our internal _coeffs property need to be backed by __dict__['coeffs'] for
    # scipy to work correctly.
    @property
    def _coeffs(self):
        return self.__dict__['coeffs']
    @_coeffs.setter
    def _coeffs(self, coeffs):
        self.__dict__['coeffs'] = coeffs
    # alias attributes
    # `r`, `c`/`coef`/`coefficients` and `o` rebind the property objects
    # defined above under their historical short names.
    r = roots
    c = coef = coefficients = coeffs
    o = order
    def __init__(self, c_or_r, r=False, variable=None):
        # Copy-construction from another poly1d: share coefficients and
        # variable, and (for now) any extra instance attributes.
        if isinstance(c_or_r, poly1d):
            self._variable = c_or_r._variable
            self._coeffs = c_or_r._coeffs
            if set(c_or_r.__dict__) - set(self.__dict__):
                msg = ("In the future extra properties will not be copied "
                       "across when constructing one poly1d from another")
                warnings.warn(msg, FutureWarning, stacklevel=2)
                self.__dict__.update(c_or_r.__dict__)
            if variable is not None:
                self._variable = variable
            return
        if r:
            # Input is a sequence of roots; convert to coefficients.
            c_or_r = poly(c_or_r)
        c_or_r = atleast_1d(c_or_r)
        if c_or_r.ndim > 1:
            raise ValueError("Polynomial must be 1d only.")
        # Drop leading (front) zero coefficients; keep at least one entry.
        c_or_r = trim_zeros(c_or_r, trim='f')
        if len(c_or_r) == 0:
            c_or_r = NX.array([0], dtype=c_or_r.dtype)
        self._coeffs = c_or_r
        if variable is None:
            variable = 'x'
        self._variable = variable
    def __array__(self, t=None):
        # Expose the coefficient array, optionally cast to dtype `t`.
        if t:
            return NX.asarray(self.coeffs, t)
        else:
            return NX.asarray(self.coeffs)
    def __repr__(self):
        vals = repr(self.coeffs)
        # Strip the surrounding "array(" and ")" from the ndarray repr.
        vals = vals[6:-1]
        return "poly1d(%s)" % vals
    def __len__(self):
        # len() gives the polynomial order, not the coefficient count.
        return self.order
    def __str__(self):
        # Build a human-readable form; _raise_power then lifts the '**n'
        # exponents onto their own line.
        thestr = "0"
        var = self.variable
        # Remove leading zeros
        coeffs = self.coeffs[NX.logical_or.accumulate(self.coeffs != 0)]
        N = len(coeffs)-1
        def fmt_float(q):
            # 4 significant digits, dropping a trailing '.0000'.
            s = '%.4g' % q
            if s.endswith('.0000'):
                s = s[:-5]
            return s
        for k in range(len(coeffs)):
            if not iscomplex(coeffs[k]):
                coefstr = fmt_float(real(coeffs[k]))
            elif real(coeffs[k]) == 0:
                coefstr = '%sj' % fmt_float(imag(coeffs[k]))
            else:
                coefstr = '(%s + %sj)' % (fmt_float(real(coeffs[k])),
                                          fmt_float(imag(coeffs[k])))
            power = (N-k)
            if power == 0:
                if coefstr != '0':
                    newstr = '%s' % (coefstr,)
                else:
                    if k == 0:
                        newstr = '0'
                    else:
                        newstr = ''
            elif power == 1:
                if coefstr == '0':
                    newstr = ''
                elif coefstr == 'b':
                    # NOTE(review): fmt_float never returns 'b'; this looks
                    # like dead legacy code - confirm before removing.
                    newstr = var
                else:
                    newstr = '%s %s' % (coefstr, var)
            else:
                if coefstr == '0':
                    newstr = ''
                elif coefstr == 'b':
                    newstr = '%s**%d' % (var, power,)
                else:
                    newstr = '%s %s**%d' % (coefstr, var, power)
            if k > 0:
                if newstr != '':
                    if newstr.startswith('-'):
                        thestr = "%s - %s" % (thestr, newstr[1:])
                    else:
                        thestr = "%s + %s" % (thestr, newstr)
            else:
                thestr = newstr
        return _raise_power(thestr)
    def __call__(self, val):
        # Evaluate the polynomial at `val` (scalar, array, or poly1d).
        return polyval(self.coeffs, val)
    def __neg__(self):
        return poly1d(-self.coeffs)
    def __pos__(self):
        return self
    def __mul__(self, other):
        # Scalar multiplication scales coefficients; otherwise do full
        # polynomial multiplication.
        if isscalar(other):
            return poly1d(self.coeffs * other)
        else:
            other = poly1d(other)
            return poly1d(polymul(self.coeffs, other.coeffs))
    def __rmul__(self, other):
        if isscalar(other):
            return poly1d(other * self.coeffs)
        else:
            other = poly1d(other)
            return poly1d(polymul(self.coeffs, other.coeffs))
    def __add__(self, other):
        other = poly1d(other)
        return poly1d(polyadd(self.coeffs, other.coeffs))
    def __radd__(self, other):
        other = poly1d(other)
        return poly1d(polyadd(self.coeffs, other.coeffs))
    def __pow__(self, val):
        if not isscalar(val) or int(val) != val or val < 0:
            raise ValueError("Power to non-negative integers only.")
        # Repeated polynomial multiplication; val == 0 yields poly1d([1]).
        res = [1]
        for _ in range(val):
            res = polymul(self.coeffs, res)
        return poly1d(res)
    def __sub__(self, other):
        other = poly1d(other)
        return poly1d(polysub(self.coeffs, other.coeffs))
    def __rsub__(self, other):
        other = poly1d(other)
        return poly1d(polysub(other.coeffs, self.coeffs))
    def __div__(self, other):
        # Scalar division scales coefficients; polynomial division returns
        # a (quotient, remainder) pair via polydiv.
        if isscalar(other):
            return poly1d(self.coeffs/other)
        else:
            other = poly1d(other)
            return polydiv(self, other)
    __truediv__ = __div__
    def __rdiv__(self, other):
        if isscalar(other):
            return poly1d(other/self.coeffs)
        else:
            other = poly1d(other)
            return polydiv(other, self)
    __rtruediv__ = __rdiv__
    def __eq__(self, other):
        if not isinstance(other, poly1d):
            return NotImplemented
        # Different coefficient counts mean different polynomials; the
        # shape check also keeps the elementwise compare well-defined.
        if self.coeffs.shape != other.coeffs.shape:
            return False
        return (self.coeffs == other.coeffs).all()
    def __ne__(self, other):
        if not isinstance(other, poly1d):
            return NotImplemented
        return not self.__eq__(other)
    def __getitem__(self, val):
        # p[k] is the coefficient of the k-th power; out-of-range powers
        # (and negative ones) read as 0.
        ind = self.order - val
        if val > self.order:
            return 0
        if val < 0:
            return 0
        return self.coeffs[ind]
    def __setitem__(self, key, val):
        ind = self.order - key
        if key < 0:
            raise ValueError("Does not support negative powers.")
        if key > self.order:
            # Grow the coefficient array with leading zeros so the new
            # power exists.
            zr = NX.zeros(key-self.order, self.coeffs.dtype)
            self._coeffs = NX.concatenate((zr, self.coeffs))
            ind = 0
        self._coeffs[ind] = val
        return
    def __iter__(self):
        # Iterate coefficients from highest power to the constant term.
        return iter(self.coeffs)
    def integ(self, m=1, k=0):
        """
        Return an antiderivative (indefinite integral) of this polynomial.
        Refer to `polyint` for full documentation.
        See Also
        --------
        polyint : equivalent function
        """
        return poly1d(polyint(self.coeffs, m=m, k=k))
    def deriv(self, m=1):
        """
        Return a derivative of this polynomial.
        Refer to `polyder` for full documentation.
        See Also
        --------
        polyder : equivalent function
        """
        return poly1d(polyder(self.coeffs, m=m))
# Stuff to do on module import
# Always show RankWarning (raised by polyfit on ill-conditioned fits), even
# when the default warning filters would report a given location only once.
warnings.simplefilter('always', RankWarning)
|
pbrod/numpy
|
numpy/lib/polynomial.py
|
Python
|
bsd-3-clause
| 43,813
|
##########################################################################
#
# Copyright (c) 2014, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferSceneUI
# Register the user-facing description shown for the crop window tool.
# Fix: "much be capable" -> "must be capable" in the description text.
Gaffer.Metadata.registerNodeDescription(
GafferSceneUI.CropWindowTool,
"""Tool for adjusting crop window for rendering.
The crop window is displayed as a masked area which can be adjusted using drag and drop.
Note that the view must be locked to a render camera for this tool to be used. Additionally, an upstream node must be capable of setting the crop window so that there is something to adjust - typically this will be a StandardOptions node. The name of the plug being manipulated is displayed underneath the cropped area - it can be used to verify that the expected node is being adjusted.
"""
)
|
cedriclaunay/gaffer
|
python/GafferSceneUI/CropWindowToolUI.py
|
Python
|
bsd-3-clause
| 2,447
|
import difflib
import json
import posixpath
import sys
import threading
import unittest
from collections import Counter
from contextlib import contextmanager
from copy import copy
from functools import wraps
from unittest.util import safe_repr
from urllib.parse import unquote, urljoin, urlparse, urlsplit
from urllib.request import url2pathname
from django.apps import apps
from django.conf import settings
from django.core import mail
from django.core.exceptions import ValidationError
from django.core.files import locks
from django.core.handlers.wsgi import WSGIHandler, get_path_info
from django.core.management import call_command
from django.core.management.color import no_style
from django.core.management.sql import emit_post_migrate_signal
from django.core.servers.basehttp import WSGIRequestHandler, WSGIServer
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
from django.forms.fields import CharField
from django.http import QueryDict
from django.http.request import split_domain_port, validate_host
from django.test.client import Client
from django.test.html import HTMLParseError, parse_html
from django.test.signals import setting_changed, template_rendered
from django.test.utils import (
CaptureQueriesContext, ContextList, compare_xml, modify_settings,
override_settings,
)
from django.utils.decorators import classproperty
from django.utils.encoding import force_text
from django.views.static import serve
# Explicit public API of this module (consumed by star-imports).
__all__ = ('TestCase', 'TransactionTestCase',
           'SimpleTestCase', 'skipIfDBFeature', 'skipUnlessDBFeature')
def to_list(value):
    """
    Normalize *value* to a list.

    None becomes an empty list, an existing list is returned unchanged,
    and any other value is wrapped in a single-element list.
    """
    if isinstance(value, list):
        return value
    return [] if value is None else [value]
def assert_and_parse_html(self, html, user_msg, msg):
    """
    Parse *html* into a DOM object, failing the given test case (*self*)
    with *msg* plus the parser error if the markup is not valid HTML.
    """
    try:
        return parse_html(html)
    except HTMLParseError as err:
        # self.fail() raises, so no value is returned on the error path.
        self.fail(self._formatMessage(user_msg, '%s\n%s' % (msg, err)))
class _AssertNumQueriesContext(CaptureQueriesContext):
    """
    Context manager that fails the owning test case unless exactly ``num``
    database queries were executed inside the ``with`` block.
    """

    def __init__(self, test_case, num, connection):
        self.test_case = test_case
        self.num = num
        super(_AssertNumQueriesContext, self).__init__(connection)

    def __exit__(self, exc_type, exc_value, traceback):
        super(_AssertNumQueriesContext, self).__exit__(exc_type, exc_value, traceback)
        # Don't mask an exception raised inside the block with our own failure.
        if exc_type is not None:
            return
        executed = len(self)
        captured_sql = '\n'.join(query['sql'] for query in self.captured_queries)
        self.test_case.assertEqual(
            executed, self.num,
            "%d queries executed, %d expected\nCaptured queries were:\n%s" % (
                executed, self.num, captured_sql
            )
        )
class _AssertTemplateUsedContext:
    """
    Context manager that records every template rendered while it is active
    (via the template_rendered signal) and fails the owning test case on
    exit if the expected template was not among them.
    """

    def __init__(self, test_case, template_name):
        self.test_case = test_case
        self.template_name = template_name
        self.rendered_templates = []
        self.rendered_template_names = []
        self.context = ContextList()

    def on_template_render(self, sender, signal, template, context, **kwargs):
        # Signal receiver: remember each rendered template and its context.
        self.rendered_templates.append(template)
        self.rendered_template_names.append(template.name)
        self.context.append(copy(context))

    def test(self):
        # Condition checked on exit; inverted by the "not used" subclass.
        return self.template_name in self.rendered_template_names

    def message(self):
        return '%s was not rendered.' % self.template_name

    def __enter__(self):
        template_rendered.connect(self.on_template_render)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        template_rendered.disconnect(self.on_template_render)
        # Let an exception raised inside the block propagate untouched.
        if exc_type is not None:
            return
        if self.test():
            return
        message = self.message()
        if self.rendered_templates:
            message += ' Following templates were rendered: %s' % (
                ', '.join(self.rendered_template_names))
        else:
            message += ' No template was rendered.'
        self.test_case.fail(message)
class _AssertTemplateNotUsedContext(_AssertTemplateUsedContext):
    """Inverse of _AssertTemplateUsedContext: fail if the template WAS rendered."""

    def test(self):
        return not (self.template_name in self.rendered_template_names)

    def message(self):
        return '%s was rendered.' % self.template_name
class _CursorFailure:
def __init__(self, cls_name, wrapped):
self.cls_name = cls_name
self.wrapped = wrapped
def __call__(self):
raise AssertionError(
"Database queries aren't allowed in SimpleTestCase. "
"Either use TestCase or TransactionTestCase to ensure proper test isolation or "
"set %s.allow_database_queries to True to silence this failure." % self.cls_name
)
class SimpleTestCase(unittest.TestCase):
    """
    Django's base test case. Extends unittest.TestCase with a test client,
    settings()/modify_settings() context managers, and assertion helpers for
    redirects, response content, forms, templates, HTML, JSON and XML.

    No database isolation is provided; queries are blocked unless
    ``allow_database_queries`` is set to True on a subclass.
    """
    # The class we'll use for the test client self.client.
    # Can be overridden in derived classes.
    client_class = Client
    _overridden_settings = None
    _modified_settings = None
    # Tests shouldn't be allowed to query the database since
    # this base class doesn't enforce any isolation.
    allow_database_queries = False
    @classmethod
    def setUpClass(cls):
        # Apply class-level setting overrides and, unless queries are
        # explicitly allowed, replace every connection's cursor factories
        # with _CursorFailure so any query raises AssertionError.
        super(SimpleTestCase, cls).setUpClass()
        if cls._overridden_settings:
            cls._cls_overridden_context = override_settings(**cls._overridden_settings)
            cls._cls_overridden_context.enable()
        if cls._modified_settings:
            cls._cls_modified_context = modify_settings(cls._modified_settings)
            cls._cls_modified_context.enable()
        if not cls.allow_database_queries:
            for alias in connections:
                connection = connections[alias]
                connection.cursor = _CursorFailure(cls.__name__, connection.cursor)
                connection.chunked_cursor = _CursorFailure(cls.__name__, connection.chunked_cursor)
    @classmethod
    def tearDownClass(cls):
        # Undo everything setUpClass did: restore the wrapped cursor
        # factories and disable any class-level settings overrides.
        if not cls.allow_database_queries:
            for alias in connections:
                connection = connections[alias]
                connection.cursor = connection.cursor.wrapped
                connection.chunked_cursor = connection.chunked_cursor.wrapped
        if hasattr(cls, '_cls_modified_context'):
            cls._cls_modified_context.disable()
            delattr(cls, '_cls_modified_context')
        if hasattr(cls, '_cls_overridden_context'):
            cls._cls_overridden_context.disable()
            delattr(cls, '_cls_overridden_context')
        super(SimpleTestCase, cls).tearDownClass()
    def __call__(self, result=None):
        """
        Wrapper around default __call__ method to perform common Django test
        set up. This means that user-defined Test Cases aren't required to
        include a call to super().setUp().
        """
        testMethod = getattr(self, self._testMethodName)
        skipped = (
            getattr(self.__class__, "__unittest_skip__", False) or
            getattr(testMethod, "__unittest_skip__", False)
        )
        if not skipped:
            try:
                self._pre_setup()
            except Exception:
                result.addError(self, sys.exc_info())
                return
        super(SimpleTestCase, self).__call__(result)
        if not skipped:
            try:
                self._post_teardown()
            except Exception:
                result.addError(self, sys.exc_info())
                return
    def _pre_setup(self):
        """Performs any pre-test setup. This includes:
        * Creating a test client.
        * Clearing the mail test outbox.
        """
        self.client = self.client_class()
        mail.outbox = []
    def _post_teardown(self):
        """Perform any post-test things."""
        pass
    def settings(self, **kwargs):
        """
        A context manager that temporarily sets a setting and reverts to the original value when exiting the context.
        """
        return override_settings(**kwargs)
    def modify_settings(self, **kwargs):
        """
        A context manager that temporarily applies changes a list setting and
        reverts back to the original value when exiting the context.
        """
        return modify_settings(**kwargs)
    def assertRedirects(self, response, expected_url, status_code=302,
                        target_status_code=200, msg_prefix='',
                        fetch_redirect_response=True):
        """Asserts that a response redirected to a specific URL, and that the
        redirect URL can be loaded.

        Note that assertRedirects won't work for external links since it uses
        TestClient to do a request (use fetch_redirect_response=False to check
        such links without fetching them).
        """
        if msg_prefix:
            msg_prefix += ": "
        if hasattr(response, 'redirect_chain'):
            # The request was a followed redirect
            self.assertTrue(
                len(response.redirect_chain) > 0,
                msg_prefix + "Response didn't redirect as expected: Response code was %d (expected %d)"
                % (response.status_code, status_code)
            )
            self.assertEqual(
                response.redirect_chain[0][1], status_code,
                msg_prefix + "Initial response didn't redirect as expected: Response code was %d (expected %d)"
                % (response.redirect_chain[0][1], status_code)
            )
            url, status_code = response.redirect_chain[-1]
            scheme, netloc, path, query, fragment = urlsplit(url)
            self.assertEqual(
                response.status_code, target_status_code,
                msg_prefix + "Response didn't redirect as expected: Final Response code was %d (expected %d)"
                % (response.status_code, target_status_code)
            )
        else:
            # Not a followed redirect
            self.assertEqual(
                response.status_code, status_code,
                msg_prefix + "Response didn't redirect as expected: Response code was %d (expected %d)"
                % (response.status_code, status_code)
            )
            url = response.url
            scheme, netloc, path, query, fragment = urlsplit(url)
            # Prepend the request path to handle relative path redirects.
            if not path.startswith('/'):
                url = urljoin(response.request['PATH_INFO'], url)
                path = urljoin(response.request['PATH_INFO'], path)
            if fetch_redirect_response:
                # netloc might be empty, or in cases where Django tests the
                # HTTP scheme, the convention is for netloc to be 'testserver'.
                # Trust both as "internal" URLs here.
                domain, port = split_domain_port(netloc)
                if domain and not validate_host(domain, settings.ALLOWED_HOSTS):
                    raise ValueError(
                        "The test client is unable to fetch remote URLs (got %s). "
                        "If the host is served by Django, add '%s' to ALLOWED_HOSTS. "
                        "Otherwise, use assertRedirects(..., fetch_redirect_response=False)."
                        % (url, domain)
                    )
                redirect_response = response.client.get(path, QueryDict(query), secure=(scheme == 'https'))
                # Get the redirection page, using the same client that was used
                # to obtain the original response.
                self.assertEqual(
                    redirect_response.status_code, target_status_code,
                    msg_prefix + "Couldn't retrieve redirection page '%s': response code was %d (expected %d)"
                    % (path, redirect_response.status_code, target_status_code)
                )
        self.assertEqual(
            url, expected_url,
            msg_prefix + "Response redirected to '%s', expected '%s'" % (url, expected_url)
        )
    def _assert_contains(self, response, text, status_code, msg_prefix, html):
        """
        Shared implementation for assertContains()/assertNotContains().
        Checks the status code, then returns (text_repr, real_count,
        msg_prefix) where real_count is how often *text* occurs in the
        response content (as parsed HTML when html=True).
        """
        # If the response supports deferred rendering and hasn't been rendered
        # yet, then ensure that it does get rendered before proceeding further.
        if hasattr(response, 'render') and callable(response.render) and not response.is_rendered:
            response.render()
        if msg_prefix:
            msg_prefix += ": "
        self.assertEqual(
            response.status_code, status_code,
            msg_prefix + "Couldn't retrieve content: Response code was %d"
            " (expected %d)" % (response.status_code, status_code)
        )
        if response.streaming:
            content = b''.join(response.streaming_content)
        else:
            content = response.content
        if not isinstance(text, bytes) or html:
            text = force_text(text, encoding=response.charset)
            content = content.decode(response.charset)
            text_repr = "'%s'" % text
        else:
            text_repr = repr(text)
        if html:
            content = assert_and_parse_html(self, content, None, "Response's content is not valid HTML:")
            text = assert_and_parse_html(self, text, None, "Second argument is not valid HTML:")
        real_count = content.count(text)
        return (text_repr, real_count, msg_prefix)
    def assertContains(self, response, text, count=None, status_code=200, msg_prefix='', html=False):
        """
        Asserts that a response indicates that some content was retrieved
        successfully, (i.e., the HTTP status code was as expected), and that
        ``text`` occurs ``count`` times in the content of the response.
        If ``count`` is None, the count doesn't matter - the assertion is true
        if the text occurs at least once in the response.
        """
        text_repr, real_count, msg_prefix = self._assert_contains(
            response, text, status_code, msg_prefix, html)
        if count is not None:
            self.assertEqual(
                real_count, count,
                msg_prefix + "Found %d instances of %s in response (expected %d)" % (real_count, text_repr, count)
            )
        else:
            self.assertTrue(real_count != 0, msg_prefix + "Couldn't find %s in response" % text_repr)
    def assertNotContains(self, response, text, status_code=200, msg_prefix='', html=False):
        """
        Asserts that a response indicates that some content was retrieved
        successfully, (i.e., the HTTP status code was as expected), and that
        ``text`` doesn't occur in the content of the response.
        """
        text_repr, real_count, msg_prefix = self._assert_contains(
            response, text, status_code, msg_prefix, html)
        self.assertEqual(real_count, 0, msg_prefix + "Response should not contain %s" % text_repr)
    def assertFormError(self, response, form, field, errors, msg_prefix=''):
        """
        Asserts that a form used to render the response has a specific field
        error.
        """
        if msg_prefix:
            msg_prefix += ": "
        # Put context(s) into a list to simplify processing.
        contexts = to_list(response.context)
        if not contexts:
            self.fail(msg_prefix + "Response did not use any contexts to render the response")
        # Put error(s) into a list to simplify processing.
        errors = to_list(errors)
        # Search all contexts for the error.
        found_form = False
        for i, context in enumerate(contexts):
            if form not in context:
                continue
            found_form = True
            for err in errors:
                if field:
                    if field in context[form].errors:
                        field_errors = context[form].errors[field]
                        self.assertTrue(
                            err in field_errors,
                            msg_prefix + "The field '%s' on form '%s' in"
                            " context %d does not contain the error '%s'"
                            " (actual errors: %s)" %
                            (field, form, i, err, repr(field_errors))
                        )
                    elif field in context[form].fields:
                        self.fail(
                            msg_prefix + "The field '%s' on form '%s' in context %d contains no errors" %
                            (field, form, i)
                        )
                    else:
                        self.fail(
                            msg_prefix + "The form '%s' in context %d does not contain the field '%s'" %
                            (form, i, field)
                        )
                else:
                    # field is falsy: check the form's non-field errors.
                    non_field_errors = context[form].non_field_errors()
                    self.assertTrue(
                        err in non_field_errors,
                        msg_prefix + "The form '%s' in context %d does not"
                        " contain the non-field error '%s'"
                        " (actual errors: %s)" %
                        (form, i, err, non_field_errors)
                    )
        if not found_form:
            self.fail(msg_prefix + "The form '%s' was not used to render the response" % form)
    def assertFormsetError(self, response, formset, form_index, field, errors,
                           msg_prefix=''):
        """
        Asserts that a formset used to render the response has a specific error.
        For field errors, specify the ``form_index`` and the ``field``.
        For non-field errors, specify the ``form_index`` and the ``field`` as
        None.
        For non-form errors, specify ``form_index`` as None and the ``field``
        as None.
        """
        # Add punctuation to msg_prefix
        if msg_prefix:
            msg_prefix += ": "
        # Put context(s) into a list to simplify processing.
        contexts = to_list(response.context)
        if not contexts:
            self.fail(msg_prefix + 'Response did not use any contexts to '
                      'render the response')
        # Put error(s) into a list to simplify processing.
        errors = to_list(errors)
        # Search all contexts for the error.
        found_formset = False
        for i, context in enumerate(contexts):
            if formset not in context:
                continue
            found_formset = True
            for err in errors:
                if field is not None:
                    if field in context[formset].forms[form_index].errors:
                        field_errors = context[formset].forms[form_index].errors[field]
                        self.assertTrue(
                            err in field_errors,
                            msg_prefix + "The field '%s' on formset '%s', "
                            "form %d in context %d does not contain the "
                            "error '%s' (actual errors: %s)" %
                            (field, formset, form_index, i, err, repr(field_errors))
                        )
                    elif field in context[formset].forms[form_index].fields:
                        self.fail(
                            msg_prefix + "The field '%s' on formset '%s', form %d in context %d contains no errors"
                            % (field, formset, form_index, i)
                        )
                    else:
                        self.fail(
                            msg_prefix + "The formset '%s', form %d in context %d does not contain the field '%s'"
                            % (formset, form_index, i, field)
                        )
                elif form_index is not None:
                    # field is None but form_index is given: non-field errors.
                    non_field_errors = context[formset].forms[form_index].non_field_errors()
                    self.assertFalse(
                        len(non_field_errors) == 0,
                        msg_prefix + "The formset '%s', form %d in context %d "
                        "does not contain any non-field errors." % (formset, form_index, i)
                    )
                    self.assertTrue(
                        err in non_field_errors,
                        msg_prefix + "The formset '%s', form %d in context %d "
                        "does not contain the non-field error '%s' (actual errors: %s)"
                        % (formset, form_index, i, err, repr(non_field_errors))
                    )
                else:
                    # Both field and form_index are None: non-form errors.
                    non_form_errors = context[formset].non_form_errors()
                    self.assertFalse(
                        len(non_form_errors) == 0,
                        msg_prefix + "The formset '%s' in context %d does not "
                        "contain any non-form errors." % (formset, i)
                    )
                    self.assertTrue(
                        err in non_form_errors,
                        msg_prefix + "The formset '%s' in context %d does not "
                        "contain the non-form error '%s' (actual errors: %s)"
                        % (formset, i, err, repr(non_form_errors))
                    )
        if not found_formset:
            self.fail(msg_prefix + "The formset '%s' was not used to render the response" % formset)
    def _assert_template_used(self, response, template_name, msg_prefix):
        """
        Shared argument handling for assertTemplateUsed/assertTemplateNotUsed.
        Returns (context_mgr_template, template_names, msg_prefix): a template
        name for context-manager use, or the list of template names rendered
        by the given test-client response.
        """
        if response is None and template_name is None:
            raise TypeError('response and/or template_name argument must be provided')
        if msg_prefix:
            msg_prefix += ": "
        if template_name is not None and response is not None and not hasattr(response, 'templates'):
            raise ValueError(
                "assertTemplateUsed() and assertTemplateNotUsed() are only "
                "usable on responses fetched using the Django test Client."
            )
        if not hasattr(response, 'templates') or (response is None and template_name):
            if response:
                # A bare template name was passed as the first positional arg.
                template_name = response
                response = None
            # use this template with context manager
            return template_name, None, msg_prefix
        template_names = [t.name for t in response.templates if t.name is not None]
        return None, template_names, msg_prefix
    def assertTemplateUsed(self, response=None, template_name=None, msg_prefix='', count=None):
        """
        Asserts that the template with the provided name was used in rendering
        the response. Also usable as context manager.
        """
        context_mgr_template, template_names, msg_prefix = self._assert_template_used(
            response, template_name, msg_prefix)
        if context_mgr_template:
            # Use assertTemplateUsed as context manager.
            return _AssertTemplateUsedContext(self, context_mgr_template)
        if not template_names:
            self.fail(msg_prefix + "No templates used to render the response")
        self.assertTrue(
            template_name in template_names,
            msg_prefix + "Template '%s' was not a template used to render"
            " the response. Actual template(s) used: %s"
            % (template_name, ', '.join(template_names))
        )
        if count is not None:
            self.assertEqual(
                template_names.count(template_name), count,
                msg_prefix + "Template '%s' was expected to be rendered %d "
                "time(s) but was actually rendered %d time(s)."
                % (template_name, count, template_names.count(template_name))
            )
    def assertTemplateNotUsed(self, response=None, template_name=None, msg_prefix=''):
        """
        Asserts that the template with the provided name was NOT used in
        rendering the response. Also usable as context manager.
        """
        context_mgr_template, template_names, msg_prefix = self._assert_template_used(
            response, template_name, msg_prefix
        )
        if context_mgr_template:
            # Use assertTemplateNotUsed as context manager.
            return _AssertTemplateNotUsedContext(self, context_mgr_template)
        self.assertFalse(
            template_name in template_names,
            msg_prefix + "Template '%s' was used unexpectedly in rendering the response" % template_name
        )
    @contextmanager
    def _assert_raises_message_cm(self, expected_exception, expected_message):
        # Context-manager form of assertRaisesMessage: delegate exception
        # capture to assertRaises, then check the message on exit.
        with self.assertRaises(expected_exception) as cm:
            yield cm
        self.assertIn(expected_message, str(cm.exception))
    def assertRaisesMessage(self, expected_exception, expected_message, *args, **kwargs):
        """
        Asserts that expected_message is found in the message of a raised
        exception.
        Args:
            expected_exception: Exception class expected to be raised.
            expected_message: expected error message string value.
            args: Function to be called and extra positional args.
            kwargs: Extra kwargs.
        """
        callable_obj = None
        if len(args):
            callable_obj = args[0]
            args = args[1:]
        cm = self._assert_raises_message_cm(expected_exception, expected_message)
        # Assertion used in context manager fashion.
        if callable_obj is None:
            return cm
        # Assertion was passed a callable.
        with cm:
            callable_obj(*args, **kwargs)
    def assertFieldOutput(self, fieldclass, valid, invalid, field_args=None,
                          field_kwargs=None, empty_value=''):
        """
        Asserts that a form field behaves correctly with various inputs.
        Args:
            fieldclass: the class of the field to be tested.
            valid: a dictionary mapping valid inputs to their expected
                cleaned values.
            invalid: a dictionary mapping invalid inputs to one or more
                raised error messages.
            field_args: the args passed to instantiate the field
            field_kwargs: the kwargs passed to instantiate the field
            empty_value: the expected clean output for inputs in empty_values
        """
        if field_args is None:
            field_args = []
        if field_kwargs is None:
            field_kwargs = {}
        required = fieldclass(*field_args, **field_kwargs)
        optional = fieldclass(*field_args, **dict(field_kwargs, required=False))
        # test valid inputs
        for input, output in valid.items():
            self.assertEqual(required.clean(input), output)
            self.assertEqual(optional.clean(input), output)
        # test invalid inputs
        for input, errors in invalid.items():
            with self.assertRaises(ValidationError) as context_manager:
                required.clean(input)
            self.assertEqual(context_manager.exception.messages, errors)
            with self.assertRaises(ValidationError) as context_manager:
                optional.clean(input)
            self.assertEqual(context_manager.exception.messages, errors)
        # test required inputs
        error_required = [force_text(required.error_messages['required'])]
        for e in required.empty_values:
            with self.assertRaises(ValidationError) as context_manager:
                required.clean(e)
            self.assertEqual(context_manager.exception.messages, error_required)
            self.assertEqual(optional.clean(e), empty_value)
        # test that max_length and min_length are always accepted
        if issubclass(fieldclass, CharField):
            field_kwargs.update({'min_length': 2, 'max_length': 20})
            self.assertIsInstance(fieldclass(*field_args, **field_kwargs), fieldclass)
    def assertHTMLEqual(self, html1, html2, msg=None):
        """
        Asserts that two HTML snippets are semantically the same.
        Whitespace in most cases is ignored, and attribute ordering is not
        significant. The passed-in arguments must be valid HTML.
        """
        dom1 = assert_and_parse_html(self, html1, msg, 'First argument is not valid HTML:')
        dom2 = assert_and_parse_html(self, html2, msg, 'Second argument is not valid HTML:')
        if dom1 != dom2:
            standardMsg = '%s != %s' % (
                safe_repr(dom1, True), safe_repr(dom2, True))
            diff = ('\n' + '\n'.join(difflib.ndiff(
                str(dom1).splitlines(), str(dom2).splitlines(),
            )))
            standardMsg = self._truncateMessage(standardMsg, diff)
            self.fail(self._formatMessage(msg, standardMsg))
    def assertHTMLNotEqual(self, html1, html2, msg=None):
        """Asserts that two HTML snippets are not semantically equivalent."""
        dom1 = assert_and_parse_html(self, html1, msg, 'First argument is not valid HTML:')
        dom2 = assert_and_parse_html(self, html2, msg, 'Second argument is not valid HTML:')
        if dom1 == dom2:
            standardMsg = '%s == %s' % (
                safe_repr(dom1, True), safe_repr(dom2, True))
            self.fail(self._formatMessage(msg, standardMsg))
    def assertInHTML(self, needle, haystack, count=None, msg_prefix=''):
        """
        Asserts that the HTML fragment ``needle`` occurs in ``haystack``
        (``count`` times when given, at least once otherwise), comparing
        parsed HTML rather than raw strings.
        """
        needle = assert_and_parse_html(self, needle, None, 'First argument is not valid HTML:')
        haystack = assert_and_parse_html(self, haystack, None, 'Second argument is not valid HTML:')
        real_count = haystack.count(needle)
        if count is not None:
            self.assertEqual(
                real_count, count,
                msg_prefix + "Found %d instances of '%s' in response (expected %d)" % (real_count, needle, count)
            )
        else:
            self.assertTrue(real_count != 0, msg_prefix + "Couldn't find '%s' in response" % needle)
    def assertJSONEqual(self, raw, expected_data, msg=None):
        """
        Asserts that the JSON fragments raw and expected_data are equal.
        Usual JSON non-significant whitespace rules apply as the heavy
        lifting is delegated to the json library.
        """
        try:
            data = json.loads(raw)
        except ValueError:
            self.fail("First argument is not valid JSON: %r" % raw)
        if isinstance(expected_data, str):
            try:
                expected_data = json.loads(expected_data)
            except ValueError:
                self.fail("Second argument is not valid JSON: %r" % expected_data)
        self.assertEqual(data, expected_data, msg=msg)
    def assertJSONNotEqual(self, raw, expected_data, msg=None):
        """
        Asserts that the JSON fragments raw and expected_data are not equal.
        Usual JSON non-significant whitespace rules apply as the heavy
        lifting is delegated to the json library.
        """
        try:
            data = json.loads(raw)
        except ValueError:
            self.fail("First argument is not valid JSON: %r" % raw)
        if isinstance(expected_data, str):
            try:
                expected_data = json.loads(expected_data)
            except ValueError:
                self.fail("Second argument is not valid JSON: %r" % expected_data)
        self.assertNotEqual(data, expected_data, msg=msg)
    def assertXMLEqual(self, xml1, xml2, msg=None):
        """
        Asserts that two XML snippets are semantically the same.
        Whitespace in most cases is ignored, and attribute ordering is not
        significant. The passed-in arguments must be valid XML.
        """
        try:
            result = compare_xml(xml1, xml2)
        except Exception as e:
            standardMsg = 'First or second argument is not valid XML\n%s' % e
            self.fail(self._formatMessage(msg, standardMsg))
        else:
            if not result:
                standardMsg = '%s != %s' % (safe_repr(xml1, True), safe_repr(xml2, True))
                diff = ('\n' + '\n'.join(
                    difflib.ndiff(xml1.splitlines(), xml2.splitlines())
                ))
                standardMsg = self._truncateMessage(standardMsg, diff)
                self.fail(self._formatMessage(msg, standardMsg))
    def assertXMLNotEqual(self, xml1, xml2, msg=None):
        """
        Asserts that two XML snippets are not semantically equivalent.
        Whitespace in most cases is ignored, and attribute ordering is not
        significant. The passed-in arguments must be valid XML.
        """
        try:
            result = compare_xml(xml1, xml2)
        except Exception as e:
            standardMsg = 'First or second argument is not valid XML\n%s' % e
            self.fail(self._formatMessage(msg, standardMsg))
        else:
            if result:
                standardMsg = '%s == %s' % (safe_repr(xml1, True), safe_repr(xml2, True))
                self.fail(self._formatMessage(msg, standardMsg))
class TransactionTestCase(SimpleTestCase):
    """
    Test case with database access that resets state by flushing the
    database (and optionally reloading fixtures / serialized migration
    data) after every test, instead of rolling back a transaction.
    """
    # Subclasses can ask for resetting of auto increment sequence before each
    # test case
    reset_sequences = False
    # Subclasses can enable only a subset of apps for faster tests
    available_apps = None
    # Subclasses can define fixtures which will be automatically installed.
    fixtures = None
    # If transactions aren't available, Django will serialize the database
    # contents into a fixture during setup and flush and reload them
    # during teardown (as flush does not restore data from migrations).
    # This can be slow; this flag allows enabling on a per-case basis.
    serialized_rollback = False
    # Since tests will be wrapped in a transaction, or serialized if they
    # are not available, we allow queries to be run.
    allow_database_queries = True
    def _pre_setup(self):
        """Performs any pre-test setup. This includes:
        * If the class has an 'available_apps' attribute, restricting the app
          registry to these applications, then firing post_migrate -- it must
          run with the correct set of applications for the test case.
        * If the class has a 'fixtures' attribute, installing these fixtures.
        """
        super(TransactionTestCase, self)._pre_setup()
        if self.available_apps is not None:
            apps.set_available_apps(self.available_apps)
            setting_changed.send(
                sender=settings._wrapped.__class__,
                setting='INSTALLED_APPS',
                value=self.available_apps,
                enter=True,
            )
            for db_name in self._databases_names(include_mirrors=False):
                emit_post_migrate_signal(verbosity=0, interactive=False, db=db_name)
        try:
            self._fixture_setup()
        except Exception:
            # Restore the full app registry before propagating the failure.
            if self.available_apps is not None:
                apps.unset_available_apps()
                setting_changed.send(
                    sender=settings._wrapped.__class__,
                    setting='INSTALLED_APPS',
                    value=settings.INSTALLED_APPS,
                    enter=False,
                )
            raise
    @classmethod
    def _databases_names(cls, include_mirrors=True):
        # If the test case has a multi_db=True flag, act on all databases,
        # including mirrors or not. Otherwise, just on the default DB.
        if getattr(cls, 'multi_db', False):
            return [
                alias for alias in connections
                if include_mirrors or not connections[alias].settings_dict['TEST']['MIRROR']
            ]
        else:
            return [DEFAULT_DB_ALIAS]
    def _reset_sequences(self, db_name):
        # Reset auto-increment sequences on the given database, if the
        # backend supports it.
        conn = connections[db_name]
        if conn.features.supports_sequence_reset:
            sql_list = conn.ops.sequence_reset_by_name_sql(
                no_style(), conn.introspection.sequence_list())
            if sql_list:
                with transaction.atomic(using=db_name):
                    cursor = conn.cursor()
                    for sql in sql_list:
                        cursor.execute(sql)
    def _fixture_setup(self):
        for db_name in self._databases_names(include_mirrors=False):
            # Reset sequences
            if self.reset_sequences:
                self._reset_sequences(db_name)
            # If we need to provide replica initial data from migrated apps,
            # then do so.
            if self.serialized_rollback and hasattr(connections[db_name], "_test_serialized_contents"):
                if self.available_apps is not None:
                    apps.unset_available_apps()
                connections[db_name].creation.deserialize_db_from_string(
                    connections[db_name]._test_serialized_contents
                )
                if self.available_apps is not None:
                    apps.set_available_apps(self.available_apps)
            if self.fixtures:
                # We have to use this slightly awkward syntax due to the fact
                # that we're using *args and **kwargs together.
                call_command('loaddata', *self.fixtures,
                             **{'verbosity': 0, 'database': db_name})
    def _should_reload_connections(self):
        # Overridden by TestCase to avoid reconnecting when transactions
        # already provide isolation.
        return True
    def _post_teardown(self):
        """Performs any post-test things. This includes:
        * Flushing the contents of the database, to leave a clean slate. If
          the class has an 'available_apps' attribute, post_migrate isn't fired.
        * Force-closing the connection, so the next test gets a clean cursor.
        """
        try:
            self._fixture_teardown()
            super(TransactionTestCase, self)._post_teardown()
            if self._should_reload_connections():
                # Some DB cursors include SQL statements as part of cursor
                # creation. If you have a test that does a rollback, the effect
                # of these statements is lost, which can affect the operation of
                # tests (e.g., losing a timezone setting causing objects to be
                # created with the wrong time). To make sure this doesn't
                # happen, get a clean connection at the start of every test.
                for conn in connections.all():
                    conn.close()
        finally:
            if self.available_apps is not None:
                apps.unset_available_apps()
                setting_changed.send(sender=settings._wrapped.__class__,
                                     setting='INSTALLED_APPS',
                                     value=settings.INSTALLED_APPS,
                                     enter=False)
    def _fixture_teardown(self):
        # Allow TRUNCATE ... CASCADE and don't emit the post_migrate signal
        # when flushing only a subset of the apps
        for db_name in self._databases_names(include_mirrors=False):
            # Flush the database
            inhibit_post_migrate = (
                self.available_apps is not None or
                (   # Inhibit the post_migrate signal when using serialized
                    # rollback to avoid trying to recreate the serialized data.
                    self.serialized_rollback and
                    hasattr(connections[db_name], '_test_serialized_contents')
                )
            )
            call_command('flush', verbosity=0, interactive=False,
                         database=db_name, reset_sequences=False,
                         allow_cascade=self.available_apps is not None,
                         inhibit_post_migrate=inhibit_post_migrate)
    def assertQuerysetEqual(self, qs, values, transform=repr, ordered=True, msg=None):
        """
        Asserts that the queryset ``qs``, mapped through ``transform``,
        matches ``values`` (order-insensitively when ordered=False).
        """
        items = map(transform, qs)
        if not ordered:
            return self.assertEqual(Counter(items), Counter(values), msg=msg)
        values = list(values)
        # For example qs.iterator() could be passed as qs, but it does not
        # have 'ordered' attribute.
        if len(values) > 1 and hasattr(qs, 'ordered') and not qs.ordered:
            raise ValueError("Trying to compare non-ordered queryset "
                             "against more than one ordered values")
        return self.assertEqual(list(items), values, msg=msg)
    def assertNumQueries(self, num, func=None, *args, **kwargs):
        """
        Asserts that calling ``func`` executes exactly ``num`` queries on
        the database named by the ``using`` kwarg. Without ``func``, returns
        a context manager performing the same check.
        """
        using = kwargs.pop("using", DEFAULT_DB_ALIAS)
        conn = connections[using]
        context = _AssertNumQueriesContext(self, num, conn)
        if func is None:
            return context
        with context:
            func(*args, **kwargs)
def connections_support_transactions():
    """Return True if every configured database connection supports transactions."""
    for conn in connections.all():
        if not conn.features.supports_transactions:
            return False
    return True
class TestCase(TransactionTestCase):
    """
    Similar to TransactionTestCase, but uses `transaction.atomic()` to achieve
    test isolation.
    In most situations, TestCase should be preferred to TransactionTestCase as
    it allows faster execution. However, there are some situations where using
    TransactionTestCase might be necessary (e.g. testing some transactional
    behavior).
    On database backends with no transaction support, TestCase behaves as
    TransactionTestCase.
    """
    @classmethod
    def _enter_atomics(cls):
        """Helper method to open atomic blocks for multiple databases"""
        atomics = {}
        for db_name in cls._databases_names():
            atomics[db_name] = transaction.atomic(using=db_name)
            atomics[db_name].__enter__()
        return atomics
    @classmethod
    def _rollback_atomics(cls, atomics):
        """Rollback atomic blocks opened through the previous method"""
        # Exit in reverse order of entry so nesting unwinds correctly.
        for db_name in reversed(cls._databases_names()):
            transaction.set_rollback(True, using=db_name)
            atomics[db_name].__exit__(None, None, None)
    @classmethod
    def setUpClass(cls):
        # Open class-wide atomic blocks, then load fixtures and class-level
        # test data inside them so _rollback_atomics() can undo everything.
        super(TestCase, cls).setUpClass()
        if not connections_support_transactions():
            return
        cls.cls_atomics = cls._enter_atomics()
        if cls.fixtures:
            for db_name in cls._databases_names(include_mirrors=False):
                try:
                    call_command('loaddata', *cls.fixtures, **{
                        'verbosity': 0,
                        'commit': False,
                        'database': db_name,
                    })
                except Exception:
                    cls._rollback_atomics(cls.cls_atomics)
                    raise
        try:
            cls.setUpTestData()
        except Exception:
            cls._rollback_atomics(cls.cls_atomics)
            raise
    @classmethod
    def tearDownClass(cls):
        if connections_support_transactions():
            cls._rollback_atomics(cls.cls_atomics)
            for conn in connections.all():
                conn.close()
        super(TestCase, cls).tearDownClass()
    @classmethod
    def setUpTestData(cls):
        """Load initial data for the TestCase"""
        pass
    def _should_reload_connections(self):
        # Transactions provide isolation, so no reconnect is needed.
        if connections_support_transactions():
            return False
        return super(TestCase, self)._should_reload_connections()
    def _fixture_setup(self):
        if not connections_support_transactions():
            # If the backend does not support transactions, we should reload
            # class data before each test
            self.setUpTestData()
            return super(TestCase, self)._fixture_setup()
        assert not self.reset_sequences, 'reset_sequences cannot be used on TestCase instances'
        self.atomics = self._enter_atomics()
    def _fixture_teardown(self):
        if not connections_support_transactions():
            return super(TestCase, self)._fixture_teardown()
        try:
            for db_name in reversed(self._databases_names()):
                if self._should_check_constraints(connections[db_name]):
                    connections[db_name].check_constraints()
        finally:
            self._rollback_atomics(self.atomics)
    def _should_check_constraints(self, connection):
        # Constraint checks are only meaningful when deferred checks are
        # supported and the connection is still healthy.
        return (
            connection.features.can_defer_constraint_checks and
            not connection.needs_rollback and connection.is_usable()
        )
class CheckCondition:
    """
    Descriptor that defers skip-condition evaluation until the attribute is
    actually read on a test class (as ``__unittest_skip__``).
    """
    def __init__(self, *conditions):
        self.conditions = conditions
    def add_condition(self, condition, reason):
        # Instances are treated as immutable; extending returns a new one.
        return self.__class__(*self.conditions + ((condition, reason),))
    def __get__(self, instance, cls=None):
        # Reading the attribute on each base may itself trigger a deferred
        # check there; any base that ends up skipped skips this class too.
        for base in cls.__bases__:
            if getattr(base, '__unittest_skip__', False):
                return True
        for condition, reason in self.conditions:
            if not condition():
                continue
            # Override this descriptor's value on the class and record why.
            cls.__unittest_skip__ = True
            cls.__unittest_skip_why__ = reason
            return True
        return False
def _deferredSkip(condition, reason):
    """
    Build a skip decorator whose ``condition`` is evaluated lazily: at call
    time for plain test functions, at class access time (via CheckCondition)
    for TestCase subclasses.
    """
    def decorator(test_func):
        is_test_class = isinstance(test_func, type) and issubclass(test_func, unittest.TestCase)
        if is_test_class:
            # Assume a class is decorated
            test_item = test_func
        else:
            @wraps(test_func)
            def skip_wrapper(*args, **kwargs):
                if condition():
                    raise unittest.SkipTest(reason)
                return test_func(*args, **kwargs)
            test_item = skip_wrapper
        # Retrieve the possibly existing value from the class's dict to
        # avoid triggering the descriptor.
        skip = test_func.__dict__.get('__unittest_skip__')
        if isinstance(skip, CheckCondition):
            test_item.__unittest_skip__ = skip.add_condition(condition, reason)
        elif skip is not True:
            test_item.__unittest_skip__ = CheckCondition((condition, reason))
        return test_item
    return decorator
def skipIfDBFeature(*features):
    """
    Skip a test if a database has at least one of the named features.
    """
    def db_has_any_feature():
        return any(getattr(connection.features, feature, False) for feature in features)
    return _deferredSkip(db_has_any_feature,
                         "Database has feature(s) %s" % ", ".join(features))
def skipUnlessDBFeature(*features):
    """
    Skip a test unless a database has all the named features.
    """
    def db_missing_a_feature():
        return not all(getattr(connection.features, feature, False) for feature in features)
    return _deferredSkip(db_missing_a_feature,
                         "Database doesn't support feature(s): %s" % ", ".join(features))
def skipUnlessAnyDBFeature(*features):
    """
    Skip a test unless a database has any of the named features.
    """
    def db_has_no_feature():
        return not any(getattr(connection.features, feature, False) for feature in features)
    return _deferredSkip(db_has_no_feature,
                         "Database doesn't support any of the feature(s): %s" % ", ".join(features))
class QuietWSGIRequestHandler(WSGIRequestHandler):
    """
    A WSGIRequestHandler that stays silent: per-request logging to standard
    output is suppressed so test results aren't cluttered.
    """
    def log_message(*args):
        # Intentionally a no-op; everything (including self) arrives in *args.
        return None
class FSFilesHandler(WSGIHandler):
    """
    WSGI middleware that serves files from a filesystem directory (one of the
    *_ROOT settings) whenever the requested path falls under the matching
    *_URL prefix; other requests pass through to the wrapped application.
    """
    def __init__(self, application):
        self.application = application
        self.base_url = urlparse(self.get_base_url())
        super(FSFilesHandler, self).__init__()
    def _should_handle(self, path):
        """
        Decide whether this handler serves ``path``: the configured base URL
        must be host-less and the path must live under its prefix.
        """
        if self.base_url[1]:
            # A host in the base URL means the files are hosted elsewhere.
            return False
        return path.startswith(self.base_url[2])
    def file_path(self, url):
        """
        Returns the relative path to the file on disk for the given URL.
        """
        prefix_length = len(self.base_url[2])
        return url2pathname(url[prefix_length:])
    def get_response(self, request):
        from django.http import Http404
        if self._should_handle(request.path):
            try:
                return self.serve(request)
            except Http404:
                # Not found on disk: fall through to the wrapped application.
                pass
        return super(FSFilesHandler, self).get_response(request)
    def serve(self, request):
        """Serve the file the request's path maps to under the base dir."""
        rel_path = posixpath.normpath(unquote(self.file_path(request.path)))
        # Emulate behavior of django.contrib.staticfiles.views.serve() when it
        # invokes staticfiles' finders functionality.
        # TODO: Modify if/when that internal API is refactored
        normalized = rel_path.replace('\\', '/').lstrip('/')
        return serve(request, normalized, document_root=self.get_base_dir())
    def __call__(self, environ, start_response):
        if self._should_handle(get_path_info(environ)):
            return super(FSFilesHandler, self).__call__(environ, start_response)
        return self.application(environ, start_response)
class _StaticFilesHandler(FSFilesHandler):
    """
    Serves files from settings.STATIC_ROOT under settings.STATIC_URL.
    A private convenience class used solely by LiveServerThread.
    """
    def get_base_dir(self):
        return settings.STATIC_ROOT
    def get_base_url(self):
        return settings.STATIC_URL
class _MediaFilesHandler(FSFilesHandler):
    """
    Serves files from settings.MEDIA_ROOT under settings.MEDIA_URL.
    A private convenience class used solely by LiveServerThread.
    """
    def get_base_dir(self):
        return settings.MEDIA_ROOT
    def get_base_url(self):
        return settings.MEDIA_URL
class LiveServerThread(threading.Thread):
    """
    Background thread that runs a live HTTP server while the tests execute.
    """
    def __init__(self, host, static_handler, connections_override=None):
        super(LiveServerThread, self).__init__()
        self.host = host
        self.port = None                    # filled in once the server binds
        self.is_ready = threading.Event()   # set when serving (or failed)
        self.error = None                   # holds any startup exception
        self.static_handler = static_handler
        self.connections_override = connections_override
    def run(self):
        """
        Sets up the live server and databases, and then loops over handling
        http requests.
        """
        if self.connections_override:
            # Override this thread's database connections with the ones
            # provided by the main thread.
            for alias, conn in self.connections_override.items():
                connections[alias] = conn
        try:
            # Create the handler for serving static and media files
            handler = self.static_handler(_MediaFilesHandler(WSGIHandler()))
            self.httpd = self._create_server(0)  # port 0: let the OS choose
            self.port = self.httpd.server_address[1]
            self.httpd.set_app(handler)
            self.is_ready.set()
            self.httpd.serve_forever()
        except Exception as e:
            # Record the failure and unblock the main thread waiting on us.
            self.error = e
            self.is_ready.set()
        finally:
            connections.close_all()
    def _create_server(self, port):
        return WSGIServer((self.host, port), QuietWSGIRequestHandler,
                          allow_reuse_address=False)
    def terminate(self):
        if hasattr(self, 'httpd'):
            # Stop the WSGI server
            self.httpd.shutdown()
            self.httpd.server_close()
        self.join()
class LiveServerTestCase(TransactionTestCase):
    """
    Does basically the same as TransactionTestCase but also launches a live
    http server in a separate thread so that the tests may use another testing
    framework, such as Selenium for example, instead of the built-in dummy
    client.
    Note that it inherits from TransactionTestCase instead of TestCase because
    the threads do not share the same transactions (unless if using in-memory
    sqlite) and each thread needs to commit all their transactions so that the
    other thread can see the changes.
    """
    host = 'localhost'
    server_thread_class = LiveServerThread
    static_handler = _StaticFilesHandler
    @classproperty
    def live_server_url(cls):
        # The port is assigned by the OS when the server thread binds.
        return 'http://%s:%s' % (cls.host, cls.server_thread.port)
    @classmethod
    def setUpClass(cls):
        super(LiveServerTestCase, cls).setUpClass()
        connections_override = {}
        for conn in connections.all():
            # If using in-memory sqlite databases, pass the connections to
            # the server thread.
            if conn.vendor == 'sqlite' and conn.is_in_memory_db():
                # Explicitly enable thread-shareability for this connection
                conn.allow_thread_sharing = True
                connections_override[conn.alias] = conn
        # The live server's host must be an allowed host for test requests.
        cls._live_server_modified_settings = modify_settings(
            ALLOWED_HOSTS={'append': cls.host},
        )
        cls._live_server_modified_settings.enable()
        cls.server_thread = cls._create_server_thread(connections_override)
        cls.server_thread.daemon = True
        cls.server_thread.start()
        # Wait for the live server to be ready
        cls.server_thread.is_ready.wait()
        if cls.server_thread.error:
            # Clean up behind ourselves, since tearDownClass won't get called in
            # case of errors.
            cls._tearDownClassInternal()
            raise cls.server_thread.error
    @classmethod
    def _create_server_thread(cls, connections_override):
        # Factory hook: subclasses can swap server_thread_class.
        return cls.server_thread_class(
            cls.host,
            cls.static_handler,
            connections_override=connections_override,
        )
    @classmethod
    def _tearDownClassInternal(cls):
        # There may not be a 'server_thread' attribute if setUpClass() for some
        # reasons has raised an exception.
        if hasattr(cls, 'server_thread'):
            # Terminate the live server's thread
            cls.server_thread.terminate()
        # Restore sqlite in-memory database connections' non-shareability
        for conn in connections.all():
            if conn.vendor == 'sqlite' and conn.is_in_memory_db():
                conn.allow_thread_sharing = False
    @classmethod
    def tearDownClass(cls):
        cls._tearDownClassInternal()
        cls._live_server_modified_settings.disable()
        super(LiveServerTestCase, cls).tearDownClass()
class SerializeMixin:
    """
    Mixin to enforce serialization of TestCases that share a common resource.
    Define a common 'lockfile' for each set of TestCases to serialize. This
    file must exist on the filesystem.
    Place it early in the MRO in order to isolate setUpClass / tearDownClass.
    """
    lockfile = None
    @classmethod
    def setUpClass(cls):
        if cls.lockfile is None:
            message = ("{}.lockfile isn't set. Set it to a unique value "
                       "in the base class.".format(cls.__name__))
            raise ValueError(message)
        # Hold an exclusive lock on the shared file for the whole class run;
        # released implicitly when the file object is closed.
        cls._lockfile = open(cls.lockfile)
        locks.lock(cls._lockfile, locks.LOCK_EX)
        super(SerializeMixin, cls).setUpClass()
    @classmethod
    def tearDownClass(cls):
        super(SerializeMixin, cls).tearDownClass()
        cls._lockfile.close()
|
twz915/django
|
django/test/testcases.py
|
Python
|
bsd-3-clause
| 55,344
|
from babelsubs.generators.base import BaseGenerator, register
class TXTGenerator(BaseGenerator):
    """Render a subtitle set as plain text, one stripped subtitle per block."""
    file_type = 'txt'
    MAPPINGS = dict(linebreaks="\n")
    def __init__(self, subtitle_set, line_delimiter=u'\n\n', language=None):
        """
        Generator is list of {'text': 'text', 'start': 'seconds', 'end': 'seconds'}
        """
        self.subtitle_set = subtitle_set
        self.line_delimiter = line_delimiter
        self.language = language
    def __unicode__(self):
        # Only the content column of each subtitle item is emitted; empty
        # entries are dropped entirely.
        items = self.subtitle_set.subtitle_items(mappings=self.MAPPINGS)
        lines = [content.strip() for _, _, content, _ in items if content]
        return self.line_delimiter.join(lines)
register(TXTGenerator)
|
revdotcom/babelsubs
|
babelsubs/generators/txt.py
|
Python
|
bsd-3-clause
| 769
|
'''
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
from ..document import Document
class Application(object):
    ''' An Application is a factory for Document instances: each new document
    is passed through the configured handler pipeline.
    '''
    def __init__(self, *handlers):
        self._handlers = list(handlers)
    # TODO (havocp) should this potentially create multiple documents?
    # or does multiple docs mean multiple Application?
    def create_document(self):
        ''' Loads a new document using the Application's handlers to fill it in. '''
        doc = Document()
        for handler in self._handlers:
            # TODO (havocp) we need to check the 'failed' flag on each handler
            # and build a composite error display.
            handler.modify_document(doc)
            if handler.failed:
                log.error("Error running application handler %r: %s %s ", handler, handler.error, handler.error_detail)
        # A future server setting could make it configurable whether to do this,
        # since it has some performance impact probably. Let's see if we need to.
        doc.validate()
        return doc
    def add(self, handler):
        ''' Add a handler to the pipeline used to initialize new documents.
        Args:
            handler (Handler) : a handler to process this Application
        '''
        self._handlers.append(handler)
    @property
    def handlers(self):
        ''' The current handler pipeline, as an immutable tuple. '''
        return tuple(self._handlers)
|
htygithub/bokeh
|
bokeh/application/application.py
|
Python
|
bsd-3-clause
| 1,422
|
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
class DmozSpider(CrawlSpider):
    """Follow categories and extract links."""
    name = 'dmoz'
    allowed_domains = ['dmoz-odp.org']
    start_urls = ['http://www.dmoz-odp.org/']
    rules = [
        Rule(LinkExtractor(
            restrict_css=('.top-cat', '.sub-cat', '.cat-item')
        ), callback='parse_directory', follow=True),
    ]
    def parse_directory(self, response):
        """Yield one item (name, description, link) per site listing on a
        directory page."""
        for div in response.css('.title-and-desc'):
            yield {
                'name': div.css('.site-title::text').extract_first(),
                # default='' guards listings without a description:
                # extract_first() returns None when nothing matches, and
                # None.strip() would raise AttributeError.
                'description': div.css('.site-descr::text').extract_first(default='').strip(),
                'link': div.css('a::attr(href)').extract_first(),
            }
|
darkrho/scrapy-redis
|
example-project/example/spiders/dmoz.py
|
Python
|
bsd-3-clause
| 790
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests that relate to fitting models with quantity parameters
"""
from __future__ import (absolute_import, unicode_literals, division,
print_function)
import numpy as np
from ..models import Gaussian1D
from ... import units as u
from ...units import UnitsError
from ...tests.helper import pytest, assert_quantity_allclose
from ...utils import NumpyRNGContext
from .. import fitting
try:
from scipy import optimize
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
# Fitting should be as intuitive as possible to the user. Essentially, models
# and fitting should work without units, but if one has units, the other should
# have units too, and the resulting fitted parameters will also have units.
def _fake_gaussian_data():
    """Return (x, y) samples of a noisy Gaussian with units attached."""
    # Fixed seed keeps the fake data reproducible across test runs.
    with NumpyRNGContext(12345):
        x = np.linspace(-5., 5., 2000)
        y = 3 * np.exp(-0.5 * (x - 1.3)**2 / 0.8**2)
        y += np.random.normal(0., 0.2, x.shape)
    # Attach units to data
    return x * u.m, y * u.Jy
@pytest.mark.skipif('not HAS_SCIPY')
def test_fitting_simple():
    """Fit unit-ful data starting from a default (unit-less) Gaussian1D."""
    x, y = _fake_gaussian_data()
    # Fit the data using a Gaussian with units
    fitter = fitting.LevMarLSQFitter()
    fitted = fitter(Gaussian1D(), x, y)
    # TODO: update actual numerical results once implemented, but these should
    # be close to the values below.
    assert_quantity_allclose(fitted.amplitude, 3 * u.Jy, rtol=0.05)
    assert_quantity_allclose(fitted.mean, 1.3 * u.m, rtol=0.05)
    assert_quantity_allclose(fitted.stddev, 0.8 * u.m, rtol=0.05)
@pytest.mark.skipif('not HAS_SCIPY')
def test_fitting_with_initial_values():
    """Fit unit-ful data starting from a model with unit-ful initial values."""
    x, y = _fake_gaussian_data()
    # Fit the data using a Gaussian with units
    model = Gaussian1D(amplitude=1. * u.mJy, mean=3 * u.cm, stddev=2 * u.mm)
    fitter = fitting.LevMarLSQFitter()
    fitted = fitter(model, x, y)
    # TODO: update actual numerical results once implemented, but these should
    # be close to the values below.
    assert_quantity_allclose(fitted.amplitude, 3 * u.Jy, rtol=0.05)
    assert_quantity_allclose(fitted.mean, 1.3 * u.m, rtol=0.05)
    assert_quantity_allclose(fitted.stddev, 0.8 * u.m, rtol=0.05)
@pytest.mark.skipif('not HAS_SCIPY')
def test_fitting_missing_data_units():
    """
    Raise an error if the model has units but the data doesn't
    """
    model = Gaussian1D(amplitude=1. * u.mJy, mean=3 * u.cm, stddev=2 * u.mm)
    fitter = fitting.LevMarLSQFitter()
    # Both x and y unit-less: the mean's unit cannot be reconciled with x.
    with pytest.raises(UnitsError) as exc:
        fitter(model, [1, 2, 3], [4, 5, 6])
    assert exc.value.args[0] == ("'cm' (length) and '' (dimensionless) are not "
                                 "convertible")
    # x has units but y doesn't: the amplitude's unit cannot match y.
    with pytest.raises(UnitsError) as exc:
        fitter(model, [1, 2, 3] * u.m, [4, 5, 6])
    assert exc.value.args[0] == ("'mJy' (spectral flux density) and '' "
                                 "(dimensionless) are not convertible")
@pytest.mark.skipif('not HAS_SCIPY')
def test_fitting_missing_model_units():
    """
    Proceed if the data has units but the model doesn't
    """
    x, y = _fake_gaussian_data()
    # Fully unit-less initial model.
    fitter = fitting.LevMarLSQFitter()
    fitted = fitter(Gaussian1D(amplitude=1., mean=3, stddev=2), x, y)
    assert_quantity_allclose(fitted.amplitude, 3 * u.Jy, rtol=0.05)
    assert_quantity_allclose(fitted.mean, 1.3 * u.m, rtol=0.05)
    assert_quantity_allclose(fitted.stddev, 0.8 * u.m, rtol=0.05)
    # Mixed case: only mean/stddev carry units.
    fitter = fitting.LevMarLSQFitter()
    fitted = fitter(Gaussian1D(amplitude=1., mean=3 * u.m, stddev=2 * u.m), x, y)
    assert_quantity_allclose(fitted.amplitude, 3 * u.Jy, rtol=0.05)
    assert_quantity_allclose(fitted.mean, 1.3 * u.m, rtol=0.05)
    assert_quantity_allclose(fitted.stddev, 0.8 * u.m, rtol=0.05)
@pytest.mark.skipif('not HAS_SCIPY')
def test_fitting_incompatible_units():
    """
    Raise an error if the data and model have incompatible units
    """
    model = Gaussian1D(amplitude=1. * u.Jy, mean=3 * u.m, stddev=2 * u.cm)
    fitter = fitting.LevMarLSQFitter()
    # x is in frequency while the model's mean is a length.
    with pytest.raises(UnitsError) as exc:
        fitter(model, [1, 2, 3] * u.Hz, [4, 5, 6] * u.Jy)
    assert exc.value.args[0] == ("'Hz' (frequency) and 'm' (length) are not convertible")
|
kelle/astropy
|
astropy/modeling/tests/test_quantities_fitting.py
|
Python
|
bsd-3-clause
| 4,268
|
import os
from random import randint
from datetime import date, datetime, timedelta
from struct import unpack
from stdnet import SessionNotAvailable, CommitException
from stdnet.utils import test, encoders, populate, ispy3k, iteritems
from stdnet.apps.columnts import ColumnTS, as_dict
from stdnet.backends import redisb
from tests.all.structures.base import StructMixin
# Shared test helpers: NaN sentinel, this test's directory, and byte decoding.
nan = float('nan')
this_path = os.path.split(os.path.abspath(__file__))[0]
def bin_to_float(f):
    """Decode a big-endian 8-byte string into a Python float."""
    return unpack('>d', f)[0]
if ispy3k:   # pragma nocover
    def bitflag(value):
        # Python 3: indexing bytes already yields an int.
        return value
else:   # pragma nocover
    # Python 2: indexing a str yields a 1-char string; convert to its ordinal.
    bitflag = ord
class timeseries_test1(redisb.RedisScript):
    # Server-side lua test script: the shared table/columnts lua libraries
    # followed by a local 'test1' lua file loaded from this test directory.
    script = (redisb.read_lua_file('tabletools'),
              redisb.read_lua_file('columnts.columnts'),
              redisb.read_lua_file('test1',this_path))
class ColumnData(test.DataGenerator):
    """Test-data generator building several random multi-field timeseries
    (three six-field series, one with missing values, and two single-field
    multiplier series)."""
    sizes = {'tiny': 100,
             'small': 300,
             'normal': 2000,
             'big': 10000,
             'huge': 1000000}
    def generate(self):
        # Note: field 'e' is deliberately absent from the shared field set.
        fields = ('a', 'b', 'c', 'd', 'f', 'g')
        self.data1 = tsdata(self, fields)
        self.data2 = tsdata(self, fields)
        self.data3 = tsdata(self, fields)
        self.missing = tsdata(self, fields, missing=True)
        self.data_mul1 = tsdata(self, ('eurusd',))
        self.data_mul2 = tsdata(self, ('gbpusd',))
class tsdata(object):
    # Random timeseries fixture: per-field value lists keyed by (possibly
    # duplicated) random dates, plus a date-sorted, de-duplicated view.
    def __init__(self, g, fields, start=None, end=None, missing=False):
        # g is the driving data generator; it supplies size and populate().
        end = end or date.today()
        if not start:
            start = end - timedelta(days=g.size)
        # random dates
        self.dates = g.populate('date', start=start, end=end)
        self.unique_dates = set(self.dates)
        self.fields = {}
        self.sorted_fields = {}
        for field in fields:
            vals = g.populate('float')
            if missing:
                # Inject NaNs at random positions (up to half the values).
                N = len(vals)
                for num in range(randint(0, N//2)):
                    index = randint(0, N-1)
                    vals[index] = nan
            self.fields[field] = vals
            self.sorted_fields[field] = []
        self.values = []
        date_dict = {}
        for i,dt in enumerate(self.dates):
            vals = dict(((f,v[i]) for f,v in iteritems(self.fields)))
            self.values.append((dt,vals))
            # Later duplicates of a date overwrite earlier ones here.
            date_dict[dt] = vals
        sdates = []
        for i,dt in enumerate(sorted(date_dict)):
            sdates.append(dt)
            # NOTE: this rebinds the 'fields' parameter to the per-date dict.
            fields = date_dict[dt]
            for field in fields:
                self.sorted_fields[field].append(fields[field])
        self.sorted_values = (sdates,self.sorted_fields)
        self.length = len(sdates)
    def create(self, test, id=None):
        '''Create one ColumnTS with six fields and cls.size dates'''
        models = test.mapper
        ts = models.register(test.structure())
        models.session().add(ts)
        with ts.session.begin() as t:
            t.add(ts)
            ts.update(self.values)
        # Generator-style async protocol: yield the commit, then the series.
        yield t.on_result
        yield ts
class ColumnMixin(object):
    '''Used by all tests on ColumnTS'''
    structure = ColumnTS
    name = 'columnts'
    data_cls = ColumnData
    def create_one(self):
        # Build a small 3-date, 4-field (OHLC) series entirely client-side.
        ts = self.structure()
        d1 = date(2012,1,23)
        data = {d1: {'open':586, 'high':588.66,
                     'low':583.16, 'close':585.52},
                date(2012,1,20): {'open':590.53, 'high':591,
                                  'low':581.7, 'close':585.99},
                date(2012,1,19): {'open':640.99, 'high':640.99,
                                  'low':631.46, 'close':639.57}}
        ts.add(d1, data[d1])
        self.data = data
        # Add the remaining two dates via update() rather than add().
        data = self.data.copy()
        data.pop(d1)
        data = tuple(data.items())
        ts.update(data)
        # test bad add
        self.assertRaises(TypeError, ts.add, date(2012,1,20), 1, 2, 3)
        return ts
    def empty(self):
        # Register and session-bind a fresh, empty ColumnTS.
        models = self.mapper
        l = models.register(self.structure())
        self.assertTrue(l.id)
        models.session().add(l)
        self.assertTrue(l.session is not None)
        return l
    def check_stats(self, stat_field, data):
        # Recompute expected statistics in Python, skipping NaNs
        # (d==d is False only for NaN).
        N = len(data)
        cdata = list((d for d in data if d==d))
        cdata2 = list((d*d for d in cdata))
        dd = list((a-b for a,b in zip(cdata[1:],cdata[:-1])))
        dd2 = list((d*d for d in dd))
        NC = len(cdata)
        self.assertEqual(stat_field['N'],NC)
        self.assertAlmostEqual(stat_field['min'], min(cdata))
        self.assertAlmostEqual(stat_field['max'], max(cdata))
        # NOTE(review): 'sum'/'sum2'/'dsum'/'dsum2' are compared against
        # averaged values (divided by NC or NC-1) — presumably the backend
        # reports means rather than raw sums; confirm against the lua script.
        self.assertAlmostEqual(stat_field['sum'], sum(cdata)/NC)
        self.assertAlmostEqual(stat_field['sum2'], sum(cdata2)/NC)
        self.assertAlmostEqual(stat_field['dsum'], sum(dd)/(NC-1))
        self.assertAlmostEqual(stat_field['dsum2'], sum(dd2)/(NC-1))
    def as_dict(self, serie):
        # Async helper: yields the deferred range, then the date->values dict.
        times, fields = yield serie.irange()
        yield as_dict(times, fields)
    def makeGoogle(self):
        # Persist the series built by create_one() and verify round-tripping.
        ts = self.mapper.register(self.create_one())
        # NOTE(review): assertTrue's second positional arg is the failure
        # message, not an expected value — these were probably meant to be
        # assertEqual; confirm before changing.
        self.assertTrue(len(ts.cache.fields['open']), 2)
        self.assertTrue(len(ts.cache.fields), 4)
        yield self.mapper.session().add(ts)
        yield self.async.assertEqual(ts.size(), 3)
        dates, fields = yield ts.irange()
        self.assertEqual(len(fields), 4)
        self.assertEqual(len(dates), 3)
        for field in fields:
            values = fields[field]
            self.assertEqual(len(values), 3)
            for dt, v in zip(dates, values):
                v2 = self.data[dt.date()][field]
                self.assertAlmostEqual(v, v2)
        yield ts
class TestTimeSeries(ColumnMixin, StructMixin, test.TestCase):
    # End-to-end ColumnTS tests against a live backend; most methods are
    # generator-based (yield) for the async test runner.
    def testLuaClass(self):
        # Execute the registered server-side test script against the key.
        ts = self.empty()
        backend = ts.backend_structure()
        self.assertEqual(backend.instance, ts)
        c = backend.client
        r = yield c.execute_script('timeseries_test1', (backend.id,))
        self.assertEqual(r, b'OK')
    def testEmpty2(self):
        '''Check an empty timeseries'''
        ts = self.empty()
        yield self.async.assertEqual(ts.numfields(), 0)
        yield self.async.assertEqual(ts.fields(), ())
    def testFrontBack(self):
        # front()/back() are None when empty and the min/max entries after adds.
        models = self.mapper
        ts = models.register(ColumnTS(pickler=encoders.DateConverter()))
        models.session().add(ts)
        yield self.async.assertEqual(ts.front(), None)
        yield self.async.assertEqual(ts.back(), None)
        d2 = date.today()
        d1 = d2 - timedelta(days=2)
        with ts.session.begin() as t:
            ts.add(d2,'foo',-5.2)
            ts.add(d1,'foo',789.3)
        yield t.on_result
        yield self.async.assertEqual(ts.size(),2)
        yield self.async.assertEqual(ts.front(), (d1, {'foo':789.3}))
        yield self.async.assertEqual(ts.back(), (d2, {'foo':-5.2}))
    def test_ddd_simple(self):
        ts = self.empty()
        with ts.session.begin() as t:
            ts.add(date.today(), 'pv', 56)
            self.assertTrue(ts.cache.fields)
            ts.add(date.today()-timedelta(days=2), 'pv', 53.8)
            # NOTE(review): assertTrue's second arg is a message — probably
            # meant assertEqual; confirm.
            self.assertTrue(len(ts.cache.fields['pv']), 2)
        yield t.on_result
        yield self.async.assertEqual(ts.fields(), ('pv',))
        yield self.async.assertEqual(ts.numfields(), 1)
        yield self.async.assertEqual(ts.size(), 2)
        #
        # Check that a string is available at the field key
        bts = ts.backend_structure()
        keys = yield bts.allkeys()
        keys = tuple((b.decode('utf-8') for b in keys))
        self.assertEqual(len(keys), 3)
        self.assertTrue(bts.id in keys)
        self.assertTrue(bts.fieldsid in keys)
        self.assertTrue(bts.fieldid('pv') in keys)
        raw_data = bts.field('pv')
        self.assertTrue(raw_data)
        # Two values stored as 9 bytes each: a 1-byte flag + 8-byte float.
        self.assertEqual(len(raw_data),18)
        a1 = raw_data[:9]
        a2 = raw_data[9:]
        n = bitflag(a1[0])
        self.assertEqual(n, bitflag(a2[0]))
        self.assertEqual(n, 2)
        # Values come back date-sorted: the older 53.8 first, then 56.
        self.assertEqual(bin_to_float(a1[1:]), 53.8)
        self.assertEqual(bin_to_float(a2[1:]), 56)
        #
        data = ts.irange()
        self.assertEqual(len(data),2)
        dt,fields = data
        self.assertEqual(len(dt),2)
        self.assertTrue('pv' in fields)
        for v, t in zip(fields['pv'],[53.8, 56]):
            self.assertAlmostEqual(v, t)
    def test_add_nil(self):
        # NaN values are stored and round-trip back as NaN (NaN != NaN).
        ts = self.empty()
        with ts.session.begin() as t:
            ts.add(date.today(), 'pv', 56)
            ts.add(date.today()-timedelta(days=2), 'pv', nan)
        yield t.on_result
        yield self.async.assertEqual(ts.size(), 2)
        dt, fields = yield ts.irange()
        self.assertEqual(len(dt), 2)
        self.assertTrue('pv' in fields)
        n = fields['pv'][0]
        self.assertNotEqual(n, n)
    def testGoogleDrop(self):
        ts = yield self.makeGoogle()
        # Fields are reported alphabetically sorted.
        yield self.async.assertEqual(ts.fields(), ('close','high','low','open'))
        yield self.async.assertEqual(ts.numfields(), 4)
        yield self.async.assertEqual(ts.size(), 3)
    def testRange(self):
        ts = yield self.makeGoogle()
        data = ts.irange()
        self.assertEqual(len(data),2)
        dt,fields = data
        self.assertEqual(len(fields),4)
        high = list(zip(dt,fields['high']))
        self.assertEqual(high[0],(datetime(2012,1,19),640.99))
        self.assertEqual(high[1],(datetime(2012,1,20),591))
        self.assertEqual(high[2],(datetime(2012,1,23),588.66))
    def testRangeField(self):
        # Unknown field names ('badone') are silently ignored by irange().
        ts = yield self.makeGoogle()
        data = ts.irange(fields=('low','high','badone'))
        self.assertEqual(len(data),2)
        dt,fields = data
        self.assertEqual(len(fields),2)
        low = list(zip(dt,fields['low']))
        high = list(zip(dt,fields['high']))
        self.assertEqual(high[0],(datetime(2012,1,19),640.99))
        self.assertEqual(high[1],(datetime(2012,1,20),591))
        self.assertEqual(high[2],(datetime(2012,1,23),588.66))
    def testRaises(self):
        ts = yield self.makeGoogle()
        self.assertRaises(TypeError, ts.merge, 5)
        self.assertRaises(ValueError, ts.merge, (5,))
        # merge() needs a bound session.
        ts.session = None
        self.assertRaises(SessionNotAvailable, ts.merge, (5, ts))
    def testUpdateDict(self):
        '''Test updating via a dictionary.'''
        ts = yield self.makeGoogle()
        # One overlapping date (Jan 23) plus two new ones: size goes 3 -> 5.
        data = {date(2012,1,23):{'open':586.00, 'high':588.66,
                                 'low':583.16, 'close':585.52},
                date(2012,1,25):{'open':586.32, 'high':687.68,
                                 'low':578, 'close':580.93},
                date(2012,1,24):{'open':586.32, 'high':687.68,
                                 'low':578, 'close':580.93}}
        ts.update(data)
        self.assertEqual(ts.size(), 5)
        dates, fields = ts.range(date(2012,1,23), date(2012,1,25))
        self.assertEqual(len(dates),3)
        self.assertEqual(dates[0].date(),date(2012,1,23))
        self.assertEqual(dates[1].date(),date(2012,1,24))
        self.assertEqual(dates[2].date(),date(2012,1,25))
        for field in fields:
            for d, v1 in zip(dates, fields[field]):
                v2 = data[d.date()][field]
                self.assertAlmostEqual(v1, v2)
    def __testBadQuery(self):
        # Disabled (double-underscore prefix, so not collected as a test):
        # corrupts the backend key on purpose to exercise error paths.
        ts = yield self.makeGoogle()
        # get the backend id and override it
        id = ts.dbid()
        client = ts.session.backend.client
        client.delete(id)
        client.rpush(id, 'bla')
        client.rpush(id, 'foo')
        self.assertEqual(client.llen(id), 2)
        self.assertRaises(redisb.ScriptError, ts.add,
                          date(2012,1,23), {'open':586})
        self.assertRaises(redisb.ScriptError, ts.irange)
        self.assertRaises(redisb.RedisInvalidResponse, ts.size)
    def test_get(self):
        ts = yield self.makeGoogle()
        v = yield ts.get(date(2012,1,23))
        self.assertTrue(v)
        self.assertEqual(len(v),4)
        v2 = ts[date(2012,1,23)]
        self.assertEqual(v,v2)
        # get() on a missing date returns None; [] raises KeyError.
        self.assertEqual(ts.get(date(2014,1,1)),None)
        self.assertRaises(KeyError, lambda: ts[date(2014,1,1)])
    def testSet(self):
        ts = yield self.makeGoogle()
        ts[date(2012,1,27)] = {'open': 600}
        self.assertEqual(len(ts), 4)
        res = ts[date(2012,1,27)]
        self.assertEqual(len(res),4)
        self.assertEqual(res['open'], 600)
        # Fields not supplied for the new date come back as NaN (NaN != NaN).
        self.assertNotEqual(res['close'],res['close'])
        self.assertNotEqual(res['high'],res['high'])
        self.assertNotEqual(res['low'],res['low'])
    def test_times(self):
        ts = yield self.makeGoogle()
        dates = yield ts.itimes()
        self.assertTrue(dates)
        self.assertEqual(len(dates), 3)
        for dt in dates:
            self.assertIsInstance(dt, datetime)
class TestOperations(ColumnMixin, test.TestCase):
@classmethod
def after_setup(cls):
cls.ts1 = yield cls.data.data1.create(cls)
cls.ts2 = yield cls.data.data2.create(cls)
cls.ts3 = yield cls.data.data3.create(cls)
cls.mul1 = yield cls.data.data_mul1.create(cls)
cls.mul2 = yield cls.data.data_mul2.create(cls)
def test_merge2series(self):
data = self.data
ts1, ts2 = self.ts1, self.ts2
yield self.async.assertEqual(ts1.size(), data.data1.length)
yield self.async.assertEqual(ts1.numfields(), 6)
yield self.async.assertEqual(ts2.size(), data.data2.length)
yield self.async.assertEqual(ts2.numfields(), 6)
ts3 = self.mapper.register(self.structure())
session = self.mapper.session()
with session.begin() as t:
t.add(ts3)
# merge ts1 with weight -1 and ts2 with weight 2
ts3.merge((-1, ts1), (2, ts2))
yield t.on_result
yield self.async.assertTrue(ts3.size())
yield self.async.assertEqual(ts3.numfields(), 6)
times, fields = ts3.irange()
for i,dt in enumerate(times):
dt = dt.date()
v1 = ts1.get(dt)
v2 = ts2.get(dt)
if dt in data.data1.unique_dates and dt in data.data2.unique_dates:
for field, values in fields.items():
res = 2*v2[field] - v1[field]
self.assertAlmostEqual(values[i],res)
else:
self.assertTrue(v1 is None or v2 is None)
for values in fields.values():
v = values[i]
self.assertNotEqual(v,v)
def test_merge3series(self):
data = self.data
ts1, ts2, ts3 = self.ts1, self.ts2, self.ts3
ts4 = self.mapper.register(self.structure())
session = self.mapper.session()
yield self.async.assertEqual(ts1.size(), data.data1.length)
yield self.async.assertEqual(ts2.size(), data.data2.length)
yield self.async.assertEqual(ts3.size(), data.data3.length)
with session.begin() as t:
t.add(ts4)
# merge ts1 with weight -1 and ts2 with weight 2
ts4.merge((0.5, ts1), (1.3, ts2), (-2.65, ts3))
self.assertEqual(ts4.session, session)
yield t.on_result
length = yield ts4.size()
self.assertTrue(length >= max(data.data1.length, data.data2.length,
data.data3.length))
yield self.async.assertEqual(ts2.numfields(), 6)
#
results = yield self.as_dict(ts4)
d1 = yield self.as_dict(ts1)
d2 = yield self.as_dict(ts2)
d3 = yield self.as_dict(ts3)
#
for dt in results:
v1 = d1.get(dt)
v2 = d2.get(dt)
v3 = d3.get(dt)
result = results[dt]
if v1 is not None and v2 is not None and v3 is not None:
for field in result:
vc = result[field]
res = 0.5*v1[field] + 1.3*v2[field] - 2.65*v3[field]
self.assertAlmostEqual(vc, res)
else:
for v in result.values():
self.assertNotEqual(v, v)
def test_add_multiply1(self):
data = self.data
ts1, ts2, mul1 = self.ts1, self.ts2, self.mul1
ts = self.mapper.register(self.structure())
session = self.mapper.session()
with session.begin() as t:
t.add(ts)
ts.merge((1.5, mul1, ts1), (-1.2, ts2))
self.assertTrue(ts.cache.merged_series)
self.assertEqual(ts.session, session)
yield t.on_result
length = yield ts.size()
self.assertTrue(length >= max(data.data1.length, data.data2.length))
yield self.async.assertEqual(ts.numfields(), 6)
results = yield self.as_dict(ts)
mul1 = yield self.as_dict(mul1)
d1 = yield self.as_dict(ts1)
d2 = yield self.as_dict(ts2)
for dt in results:
v1 = d1.get(dt)
v2 = d2.get(dt)
m1 = mul1.get(dt)
result = results[dt]
if v1 is not None and v2 is not None and m1 is not None:
m1 = m1['eurusd']
for field in result:
vc = result[field]
res = 1.5*m1*v1[field] - 1.2*v2[field]
self.assertAlmostEqual(vc, res)
else:
for v in result.values():
self.assertNotEqual(v,v)
def test_add_multiply2(self):
data = self.data
ts1, ts2, mul1, mul2 = self.ts1, self.ts2, self.mul1, self.mul2
ts = self.mapper.register(self.structure())
session = self.mapper.session()
with session.begin() as t:
t.add(ts)
ts.merge((1.5, mul1, ts1), (-1.2, mul2, ts2))
self.assertEqual(ts.session, session)
yield t.on_result
length = yield ts.size()
self.assertTrue(length >= max(data.data1.length, data.data2.length))
yield self.async.assertEqual(ts.numfields(), 6)
times, fields = ts.irange()
for i,dt in enumerate(times):
dt = dt.date()
v1 = ts1.get(dt)
v2 = ts2.get(dt)
m1 = mul1.get(dt)
m2 = mul2.get(dt)
if v1 is not None and v2 is not None and m1 is not None\
and m2 is not None:
m1 = m1['eurusd']
m2 = m2['gbpusd']
for field,values in fields.items():
res = 1.5*m1*v1[field] - 1.2*m2*v2[field]
self.assertAlmostEqual(values[i],res)
else:
for values in fields.values():
v = values[i]
self.assertNotEqual(v,v)
def test_multiply_no_store(self):
data = self.data
ts1, ts2 = self.ts1, self.ts2
times, fields = yield self.structure.merged_series((1.5, ts1),
(-1.2, ts2))
for i,dt in enumerate(times):
dt = dt.date()
v1 = ts1.get(dt)
v2 = ts2.get(dt)
if v1 is not None and v2 is not None:
for field,values in fields.items():
res = 1.5*v1[field] - 1.2*v2[field]
self.assertAlmostEqual(values[i],res)
else:
for values in fields.values():
v = values[i]
self.assertNotEqual(v,v)
    def test_merge_fields(self):
        # Merge restricted to an explicit field subset; the unknown name
        # 'badone' must be silently dropped from the result.
        data = self.data
        ts1, ts2, mul1, mul2 = self.ts1, self.ts2, self.mul1, self.mul2
        ts = self.mapper.register(self.structure())
        session = self.mapper.session()
        with session.begin() as t:
            t.add(ts)
            ts.merge((1.5, mul1, ts1), (-1.2, mul2, ts2),
                     fields=('a','b','c','badone'))
            self.assertEqual(ts.session,session)
        yield t.on_result
        length = yield ts.size()
        self.assertTrue(length >= max(data.data1.length, data.data2.length))
        # only the three valid fields survive the merge
        yield self.async.assertEqual(ts.numfields(), 3)
        yield self.async.assertEqual(ts.fields(), ('a','b','c'))
        times, fields = yield ts.irange()
        for i,dt in enumerate(times):
            dt = dt.date()
            v1 = ts1.get(dt)
            v2 = ts2.get(dt)
            m1 = mul1.get(dt)
            m2 = mul2.get(dt)
            if v1 is not None and v2 is not None and m1 is not None\
                    and m2 is not None:
                # multiplier entries are keyed by currency pair
                m1 = m1['eurusd']
                m2 = m2['gbpusd']
                for field,values in fields.items():
                    res = 1.5*m1*v1[field] - 1.2*m2*v2[field]
                    self.assertAlmostEqual(values[i],res)
            else:
                # NaN != NaN marks dates with any missing input
                for values in fields.values():
                    v = values[i]
                    self.assertNotEqual(v,v)
# NOTE(review): this test case is deliberately disabled -- the original
# class line (kept commented below) was replaced with a throwaway name so
# the test runner does not collect it. Presumably kept for re-enabling
# once missing-value handling is fixed; confirm before deleting.
class a:
#class TestMissingValues(TestOperations):
    @classmethod
    def after_setup(cls):
        # build the fixture time series containing missing values
        cls.ts1 = yield cls.data.missing.create(cls)
    def test_missing(self):
        # istats over the full range must report one entry per field (6)
        result = self.ts1.istats(0, -1)
        stats = result['stats']
        self.assertEqual(len(stats), 6)
        for stat in stats:
            self.check_stats(stats[stat],self.fields[stat])
|
lsbardel/python-stdnet
|
tests/all/apps/columnts/main.py
|
Python
|
bsd-3-clause
| 21,184
|
# Copyright (c) 2005-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
from m5.SimObject import SimObject
from m5.params import *
from m5.proxy import *
from Device import BasicPioDevice, DmaDevice, PioDevice
class PciConfigAll(BasicPioDevice):
    """PIO device exposing the configuration space of one PCI bus as a
    single memory-mapped region."""
    type = 'PciConfigAll'
    cxx_header = "dev/pciconfigall.hh"
    platform = Param.Platform(Parent.any, "Platform this device is part of.")
    bus = Param.UInt8(0x00, "PCI bus to act as config space for")
    size = Param.MemorySize32('16MB', "Size of config space")
    # plain-value overrides of parameters declared on BasicPioDevice
    pio_latency = '30ns'
    pio_addr = 0 # will be overridden by platform-based calculation
class PciDevice(DmaDevice):
    """Abstract base for PCI devices: a configuration-space port plus the
    standard PCI configuration header registers, whose reset values are
    filled in by concrete subclasses."""
    type = 'PciDevice'
    cxx_class = 'PciDevice'
    cxx_header = "dev/pcidev.hh"
    abstract = True
    platform = Param.Platform(Parent.any, "Platform this device is part of.")
    config = SlavePort("PCI configuration space port")
    # Geographical addressing: bus/device/function locate this device.
    pci_bus = Param.Int("PCI bus")
    pci_dev = Param.Int("PCI device number")
    pci_func = Param.Int("PCI function code")
    pio_latency = Param.Latency('30ns', "Programmed IO latency")
    config_latency = Param.Latency('20ns', "Config read or write latency")
    # Standard configuration header fields (reset values).
    VendorID = Param.UInt16("Vendor ID")
    DeviceID = Param.UInt16("Device ID")
    Command = Param.UInt16(0, "Command")
    Status = Param.UInt16(0, "Status")
    Revision = Param.UInt8(0, "Device")
    ProgIF = Param.UInt8(0, "Programming Interface")
    SubClassCode = Param.UInt8(0, "Sub-Class Code")
    ClassCode = Param.UInt8(0, "Class Code")
    CacheLineSize = Param.UInt8(0, "System Cacheline Size")
    LatencyTimer = Param.UInt8(0, "PCI Latency Timer")
    HeaderType = Param.UInt8(0, "PCI Header Type")
    BIST = Param.UInt8(0, "Built In Self Test")
    # Base Address Registers: raw reset values, decoded region sizes, and
    # flags marking a BAR as hardwired legacy I/O.
    BAR0 = Param.UInt32(0x00, "Base Address Register 0")
    BAR1 = Param.UInt32(0x00, "Base Address Register 1")
    BAR2 = Param.UInt32(0x00, "Base Address Register 2")
    BAR3 = Param.UInt32(0x00, "Base Address Register 3")
    BAR4 = Param.UInt32(0x00, "Base Address Register 4")
    BAR5 = Param.UInt32(0x00, "Base Address Register 5")
    BAR0Size = Param.MemorySize32('0B', "Base Address Register 0 Size")
    BAR1Size = Param.MemorySize32('0B', "Base Address Register 1 Size")
    BAR2Size = Param.MemorySize32('0B', "Base Address Register 2 Size")
    BAR3Size = Param.MemorySize32('0B', "Base Address Register 3 Size")
    BAR4Size = Param.MemorySize32('0B', "Base Address Register 4 Size")
    BAR5Size = Param.MemorySize32('0B', "Base Address Register 5 Size")
    BAR0LegacyIO = Param.Bool(False, "Whether BAR0 is hardwired legacy IO")
    BAR1LegacyIO = Param.Bool(False, "Whether BAR1 is hardwired legacy IO")
    BAR2LegacyIO = Param.Bool(False, "Whether BAR2 is hardwired legacy IO")
    BAR3LegacyIO = Param.Bool(False, "Whether BAR3 is hardwired legacy IO")
    BAR4LegacyIO = Param.Bool(False, "Whether BAR4 is hardwired legacy IO")
    BAR5LegacyIO = Param.Bool(False, "Whether BAR5 is hardwired legacy IO")
    # Remaining header fields.
    CardbusCIS = Param.UInt32(0x00, "Cardbus Card Information Structure")
    SubsystemID = Param.UInt16(0x00, "Subsystem ID")
    SubsystemVendorID = Param.UInt16(0x00, "Subsystem Vendor ID")
    ExpansionROM = Param.UInt32(0x00, "Expansion ROM Base Address")
    InterruptLine = Param.UInt8(0x00, "Interrupt Line")
    InterruptPin = Param.UInt8(0x00, "Interrupt Pin")
    MaximumLatency = Param.UInt8(0x00, "Maximum Latency")
    MinimumGrant = Param.UInt8(0x00, "Minimum Grant")
|
prodromou87/gem5
|
src/dev/Pci.py
|
Python
|
bsd-3-clause
| 4,938
|
# -*- coding: utf-8 -*-
"""
Tests for TimedeltaIndex methods behaving like their Timedelta counterparts
"""
import numpy as np
import pytest
import pandas as pd
from pandas import Index, Series, Timedelta, TimedeltaIndex, timedelta_range
import pandas.util.testing as tm
class TestVectorizedTimedelta(object):
    """Vectorized Timedelta operations on TimedeltaIndex and Series.dt."""

    def test_tdi_total_seconds(self):
        # GH#10939
        # Index case: two timedeltas one second apart.
        index = timedelta_range('1 days, 10:11:12.100123456', periods=2,
                                freq='s')
        expected = [1 * 86400 + 10 * 3600 + 11 * 60 + 12 + 100123456. / 1e9,
                    1 * 86400 + 10 * 3600 + 11 * 60 + 13 + 100123456. / 1e9]
        tm.assert_almost_equal(index.total_seconds(), Index(expected))

        # Series case via the .dt accessor.
        wrapped = Series(index)
        tm.assert_series_equal(wrapped.dt.total_seconds(),
                               Series(expected, index=[0, 1]))

        # One NaT: the missing slot becomes NaN.
        wrapped[1] = np.nan
        tm.assert_series_equal(wrapped.dt.total_seconds(),
                               Series([expected[0], np.nan], index=[0, 1]))

        # All NaT: everything becomes NaN.
        wrapped = Series([np.nan, np.nan], dtype='timedelta64[ns]')
        tm.assert_series_equal(wrapped.dt.total_seconds(),
                               Series([np.nan, np.nan], index=[0, 1]))

    def test_tdi_round(self):
        tdi = pd.timedelta_range(start='16801 days', periods=5, freq='30Min')
        expected_rng = TimedeltaIndex([Timedelta('16801 days 00:00:00'),
                                       Timedelta('16801 days 00:00:00'),
                                       Timedelta('16801 days 01:00:00'),
                                       Timedelta('16801 days 02:00:00'),
                                       Timedelta('16801 days 02:00:00')])
        # Rounding works the same on the index and on a scalar element.
        tm.assert_index_equal(tdi.round(freq='H'), expected_rng)
        assert tdi[1].round(freq='H') == expected_rng[1]

        # Unknown frequency strings raise with the standard message.
        msg = pd._libs.tslibs.frequencies.INVALID_FREQ_ERR_MSG
        for bad in (tdi, tdi[1]):
            with pytest.raises(ValueError, match=msg):
                bad.round(freq='foo')

        # Non-fixed frequencies cannot be used for rounding.
        msg = "<MonthEnd> is a non-fixed frequency"
        for bad in (tdi, tdi[1]):
            with pytest.raises(ValueError, match=msg):
                bad.round(freq='M')
|
GuessWhoSamFoo/pandas
|
pandas/tests/indexes/timedeltas/test_scalar_compat.py
|
Python
|
bsd-3-clause
| 2,423
|
import json
from django import forms
from django.test.utils import override_settings
from django_webtest import WebTest
from . import build_test_urls
class TextareaForm(forms.Form):
    # Single CharField rendered as a <textarea>; the length bounds drive
    # the validation tests and the extra attr is asserted in rendering.
    test_field = forms.CharField(
        min_length=5,
        max_length=20,
        widget=forms.Textarea(attrs={'data-test': 'Test Attr'}))
@override_settings(ROOT_URLCONF=__name__)
class Test(WebTest):
    """Rendering and validation tests for the Textarea widget.

    ``build_test_urls`` (bottom of module) exposes each ``test_*`` method
    at its own URL (``self.test_xxx.url``); a method may attach a custom
    ``template`` attribute which the view renders instead of the default
    form markup.

    Fix: replaced the deprecated ``assertEquals`` alias with
    ``assertEqual``.
    """
    default_form = TextareaForm

    def test_default_usecase(self):
        # Default rendering: container id, field id, maxlength and the
        # custom data attribute from the widget attrs.
        page = self.app.get(self.test_default_usecase.url)
        self.assertIn('id="id_test_field_container"', page.body.decode('utf-8'))
        self.assertIn('id="id_test_field"', page.body.decode('utf-8'))
        self.assertIn('maxlength="20"', page.body.decode('utf-8'))
        self.assertIn('data-test="Test Attr"', page.body.decode('utf-8'))
        # Submitting valid data must echo it back in cleaned_data.
        form = page.form
        self.assertIn('test_field', form.fields)
        form['test_field'] = 'TEST CONTENT'
        response = json.loads(form.submit().body.decode('utf-8'))
        self.assertIn('cleaned_data', response)
        self.assertIn('test_field', response['cleaned_data'])
        self.assertEqual('TEST CONTENT', response['cleaned_data']['test_field'])

    def test_missing_value_error(self):
        # An empty required field renders the error state and message.
        form = self.app.get(self.test_missing_value_error.url).form
        response = form.submit()
        self.assertIn('has-error', response.body.decode('utf-8'))
        self.assertIn('This field is required.', response.body.decode('utf-8'))

    def test_render_with_value(self):
        # Over-long input is re-rendered with the submitted value plus a
        # max_length validation error.
        form = self.app.get(self.test_render_with_value.url).form
        form['test_field'] = 'a'*21
        response = form.submit()
        self.assertIn('>{}<'.format('a'*21), response.body.decode('utf-8'))
        self.assertIn('Ensure this value has at most 20 characters', response.body.decode('utf-8'))

    def test_part_group_class(self):
        # {% part ... group_class %} replaces the wrapper css classes.
        page = self.app.get(self.test_part_group_class.url)
        self.assertIn('class="input-field col s12 yellow"', page.body.decode('utf-8'))

    test_part_group_class.template = '''
    {% form %}
    {% part form.test_field group_class %}input-field col s12 yellow{% endpart %}
    {% endform %}
    '''

    def test_part_add_group_class(self):
        # add_group_class appends to the default wrapper classes.
        page = self.app.get(self.test_part_add_group_class.url)
        self.assertIn('class="input-field col s12 required deep-purple lighten-5"', page.body.decode('utf-8'))

    test_part_add_group_class.template = '''
    {% form %}
    {% part form.test_field add_group_class %}deep-purple lighten-5{% endpart %}
    {% endform %}
    '''

    def test_part_prefix(self):
        # prefix injects markup before the control.
        response = self.app.get(self.test_part_prefix.url)
        self.assertIn('<i class="mdi-communication-email prefix"></i>', response.body.decode('utf-8'))

    test_part_prefix.template = '''
    {% form %}
    {% part form.test_field prefix %}<i class="mdi-communication-email prefix"></i>{% endpart %}
    {% endform %}
    '''

    def test_part_add_control_class(self):
        # add_control_class appends to the control's own css classes.
        response = self.app.get(self.test_part_add_control_class.url)
        self.assertIn('class="materialize-textarea orange"', response.body.decode('utf-8'))

    test_part_add_control_class.template = '''
    {% form %}
    {% part form.test_field add_control_class %}orange{% endpart %}
    {% endform %}
    '''

    def test_part_label(self):
        # label replaces the generated <label> element entirely.
        response = self.app.get(self.test_part_label.url)
        self.assertIn('<label for="id_test_field">My label</label>', response.body.decode('utf-8'))

    test_part_label.template = '''
    {% form %}
    {% part form.test_field label %}<label for="id_test_field">My label</label>{% endpart %}
    {% endform %}
    '''

    def test_part_add_label_class(self):
        # add_label_class appends css classes to the generated label.
        response = self.app.get(self.test_part_add_label_class.url)
        self.assertIn('<label for="id_test_field" class="green-text">Test field</label>', response.body.decode('utf-8'))

    test_part_add_label_class.template = '''
    {% form %}
    {% part form.test_field add_label_class %}green-text{% endpart %}
    {% endform %}
    '''

    def test_part_help_text(self):
        # help_text replaces the help block markup.
        response = self.app.get(self.test_part_help_text.url)
        self.assertIn('<small class="help-block">My help</small>', response.body.decode('utf-8'))

    test_part_help_text.template = '''
    {% form %}
    {% part form.test_field help_text %}<small class="help-block">My help</small>{% endpart %}
    {% endform %}
    '''

    def test_part_errors(self):
        # errors replaces the error-list markup.
        response = self.app.get(self.test_part_errors.url)
        self.assertIn('<div class="errors"><small class="error">My Error</small></div>', response.body.decode('utf-8'))

    test_part_errors.template = '''
    {% form %}
    {% part form.test_field errors%}<div class="errors"><small class="error">My Error</small></div>{% endpart %}
    {% endform %}
    '''
# Expose one URL per test method of Test (helper convention used by this
# test suite; referenced via ROOT_URLCONF=__name__ above).
urlpatterns = build_test_urls(Test)
|
2947721120/django-material
|
tests/test_widget_textarea.py
|
Python
|
bsd-3-clause
| 5,002
|
"""
Utility functions for handling network ports
"""
import socket
def find_port() -> int:
    """Return a TCP port number that was free on localhost at call time.

    Binding to port 0 lets the OS pick an ephemeral free port.

    Fix: the probe socket is now closed (context manager) before
    returning; the original leaked the socket and left the returned port
    bound until garbage collection, so callers could not immediately
    bind it themselves.

    Note there is an inherent race: the port may be taken between this
    call and the caller's own bind.
    """
    with socket.socket() as sock:
        sock.bind(("localhost", 0))
        _host, port = sock.getsockname()
    return port
def is_port_open(ip: str, port: int) -> bool:
    """Check whether a TCP server is accepting connections at ``ip:port``.

    Performs a real connection attempt with a 1 second timeout.

    Fix: catch ``OSError`` instead of only ``ConnectionRefusedError`` --
    a filtered/unreachable host raises ``socket.timeout`` (an ``OSError``
    subclass), which previously escaped and crashed the caller instead of
    reporting the port as closed.

    Parameters
    ----------
    ip : str
        Host name or IP address to probe.
    port : int
        TCP port number (coerced with ``int`` to tolerate string input).

    Returns
    -------
    bool
        True if the connection succeeded, False otherwise.
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.settimeout(1)
        try:
            s.connect((ip, int(port)))
            s.shutdown(socket.SHUT_RDWR)
            return True
        except OSError:
            # covers ConnectionRefusedError, socket.timeout, DNS errors
            return False
|
psi4/DatenQM
|
qcfractal/port_util.py
|
Python
|
bsd-3-clause
| 523
|
""" test positional based indexing with iloc """
import pytest
from warnings import catch_warnings, filterwarnings, simplefilter
import numpy as np
import pandas as pd
from pandas.compat import lrange, lmap
from pandas import Series, DataFrame, date_range, concat, isna
from pandas.util import testing as tm
from pandas.tests.indexing.common import Base
from pandas.api.types import is_scalar
class TestiLoc(Base):
    """Positional (.iloc) indexing: getitem/setitem with ints, lists,
    slices, arrays and boolean masks, plus out-of-bounds behaviour.

    Relies on fixtures/helpers from pandas.tests.indexing.common.Base
    (``check_result``, ``frame_ints``, ...).
    """
    def test_iloc_exceeds_bounds(self):
        # GH6296
        # iloc should allow indexers that exceed the bounds
        df = DataFrame(np.random.random_sample((20, 5)), columns=list('ABCDE'))
        # NOTE(review): this assignment is dead -- 'expected' is reassigned
        # before its first use below.
        expected = df
        # lists of positions should raise IndexError!
        with tm.assert_raises_regex(IndexError,
                                    'positional indexers '
                                    'are out-of-bounds'):
            df.iloc[:, [0, 1, 2, 3, 4, 5]]
        pytest.raises(IndexError, lambda: df.iloc[[1, 30]])
        pytest.raises(IndexError, lambda: df.iloc[[1, -30]])
        pytest.raises(IndexError, lambda: df.iloc[[100]])
        s = df['A']
        pytest.raises(IndexError, lambda: s.iloc[[100]])
        pytest.raises(IndexError, lambda: s.iloc[[-100]])
        # still raise on a single indexer
        msg = 'single positional indexer is out-of-bounds'
        with tm.assert_raises_regex(IndexError, msg):
            df.iloc[30]
        pytest.raises(IndexError, lambda: df.iloc[-30])
        # GH10779
        # single positive/negative indexer exceeding Series bounds should raise
        # an IndexError
        with tm.assert_raises_regex(IndexError, msg):
            s.iloc[30]
        pytest.raises(IndexError, lambda: s.iloc[-30])
        # slices are ok (out-of-bounds slice ends are clipped, like lists)
        result = df.iloc[:, 4:10]  # 0 < start < len < stop
        expected = df.iloc[:, 4:]
        tm.assert_frame_equal(result, expected)
        result = df.iloc[:, -4:-10]  # stop < 0 < start < len
        expected = df.iloc[:, :0]
        tm.assert_frame_equal(result, expected)
        result = df.iloc[:, 10:4:-1]  # 0 < stop < len < start (down)
        expected = df.iloc[:, :4:-1]
        tm.assert_frame_equal(result, expected)
        result = df.iloc[:, 4:-10:-1]  # stop < 0 < start < len (down)
        expected = df.iloc[:, 4::-1]
        tm.assert_frame_equal(result, expected)
        result = df.iloc[:, -10:4]  # start < 0 < stop < len
        expected = df.iloc[:, :4]
        tm.assert_frame_equal(result, expected)
        result = df.iloc[:, 10:4]  # 0 < stop < len < start
        expected = df.iloc[:, :0]
        tm.assert_frame_equal(result, expected)
        result = df.iloc[:, -10:-11:-1]  # stop < start < 0 < len (down)
        expected = df.iloc[:, :0]
        tm.assert_frame_equal(result, expected)
        result = df.iloc[:, 10:11]  # 0 < len < start < stop
        expected = df.iloc[:, :0]
        tm.assert_frame_equal(result, expected)
        # slice bounds exceeding is ok
        result = s.iloc[18:30]
        expected = s.iloc[18:]
        tm.assert_series_equal(result, expected)
        result = s.iloc[30:]
        expected = s.iloc[:0]
        tm.assert_series_equal(result, expected)
        result = s.iloc[30::-1]
        expected = s.iloc[::-1]
        tm.assert_series_equal(result, expected)
        # doc example
        def check(result, expected):
            # str()/dtypes force full materialisation before comparing
            str(result)
            result.dtypes
            tm.assert_frame_equal(result, expected)
        dfl = DataFrame(np.random.randn(5, 2), columns=list('AB'))
        check(dfl.iloc[:, 2:3], DataFrame(index=dfl.index))
        check(dfl.iloc[:, 1:3], dfl.iloc[:, [1]])
        check(dfl.iloc[4:6], dfl.iloc[[4]])
        pytest.raises(IndexError, lambda: dfl.iloc[[4, 5, 6]])
        pytest.raises(IndexError, lambda: dfl.iloc[:, 4])
    def test_iloc_getitem_int(self):
        # integer
        self.check_result('integer', 'iloc', 2, 'ix',
                          {0: 4, 1: 6, 2: 8}, typs=['ints', 'uints'])
        self.check_result('integer', 'iloc', 2, 'indexer', 2,
                          typs=['labels', 'mixed', 'ts', 'floats', 'empty'],
                          fails=IndexError)
    def test_iloc_getitem_neg_int(self):
        # neg integer
        self.check_result('neg int', 'iloc', -1, 'ix',
                          {0: 6, 1: 9, 2: 12}, typs=['ints', 'uints'])
        self.check_result('neg int', 'iloc', -1, 'indexer', -1,
                          typs=['labels', 'mixed', 'ts', 'floats', 'empty'],
                          fails=IndexError)
    @pytest.mark.parametrize('dims', [1, 2])
    def test_iloc_getitem_invalid_scalar(self, dims):
        # GH 21982
        # a string scalar is never a valid positional indexer
        if dims == 1:
            s = Series(np.arange(10))
        else:
            s = DataFrame(np.arange(100).reshape(10, 10))
        tm.assert_raises_regex(TypeError, 'Cannot index by location index',
                               lambda: s.iloc['a'])
    def test_iloc_array_not_mutating_negative_indices(self):
        # GH 21867
        # iloc must not normalise negative positions in the caller's array
        array_with_neg_numbers = np.array([1, 2, -1])
        array_copy = array_with_neg_numbers.copy()
        df = pd.DataFrame({
            'A': [100, 101, 102],
            'B': [103, 104, 105],
            'C': [106, 107, 108]},
            index=[1, 2, 3])
        df.iloc[array_with_neg_numbers]
        tm.assert_numpy_array_equal(array_with_neg_numbers, array_copy)
        df.iloc[:, array_with_neg_numbers]
        tm.assert_numpy_array_equal(array_with_neg_numbers, array_copy)
    def test_iloc_getitem_list_int(self):
        # list of ints
        self.check_result('list int', 'iloc', [0, 1, 2], 'ix',
                          {0: [0, 2, 4], 1: [0, 3, 6], 2: [0, 4, 8]},
                          typs=['ints', 'uints'])
        self.check_result('list int', 'iloc', [2], 'ix',
                          {0: [4], 1: [6], 2: [8]}, typs=['ints', 'uints'])
        self.check_result('list int', 'iloc', [0, 1, 2], 'indexer', [0, 1, 2],
                          typs=['labels', 'mixed', 'ts', 'floats', 'empty'],
                          fails=IndexError)
        # array of ints (GH5006), make sure that a single indexer is returning
        # the correct type
        self.check_result('array int', 'iloc', np.array([0, 1, 2]), 'ix',
                          {0: [0, 2, 4],
                           1: [0, 3, 6],
                           2: [0, 4, 8]}, typs=['ints', 'uints'])
        self.check_result('array int', 'iloc', np.array([2]), 'ix',
                          {0: [4], 1: [6], 2: [8]}, typs=['ints', 'uints'])
        self.check_result('array int', 'iloc', np.array([0, 1, 2]), 'indexer',
                          [0, 1, 2],
                          typs=['labels', 'mixed', 'ts', 'floats', 'empty'],
                          fails=IndexError)
    def test_iloc_getitem_neg_int_can_reach_first_index(self):
        # GH10547 and GH10779
        # negative integers should be able to reach index 0
        df = DataFrame({'A': [2, 3, 5], 'B': [7, 11, 13]})
        s = df['A']
        expected = df.iloc[0]
        result = df.iloc[-3]
        tm.assert_series_equal(result, expected)
        expected = df.iloc[[0]]
        result = df.iloc[[-3]]
        tm.assert_frame_equal(result, expected)
        expected = s.iloc[0]
        result = s.iloc[-3]
        assert result == expected
        expected = s.iloc[[0]]
        result = s.iloc[[-3]]
        tm.assert_series_equal(result, expected)
        # check the length 1 Series case highlighted in GH10547
        expected = Series(['a'], index=['A'])
        result = expected.iloc[[-1]]
        tm.assert_series_equal(result, expected)
    def test_iloc_getitem_dups(self):
        # no dups in panel (bug?)
        self.check_result('list int (dups)', 'iloc', [0, 1, 1, 3], 'ix',
                          {0: [0, 2, 2, 6], 1: [0, 3, 3, 9]},
                          objs=['series', 'frame'], typs=['ints', 'uints'])
        # GH 6766
        # duplicate column labels across concatenated frames
        df1 = DataFrame([{'A': None, 'B': 1}, {'A': 2, 'B': 2}])
        df2 = DataFrame([{'A': 3, 'B': 3}, {'A': 4, 'B': 4}])
        df = concat([df1, df2], axis=1)
        # cross-sectional indexing
        result = df.iloc[0, 0]
        assert isna(result)
        result = df.iloc[0, :]
        expected = Series([np.nan, 1, 3, 3], index=['A', 'B', 'A', 'B'],
                          name=0)
        tm.assert_series_equal(result, expected)
    def test_iloc_getitem_array(self):
        # array like
        s = Series(index=lrange(1, 4))
        self.check_result('array like', 'iloc', s.index, 'ix',
                          {0: [2, 4, 6], 1: [3, 6, 9], 2: [4, 8, 12]},
                          typs=['ints', 'uints'])
    def test_iloc_getitem_bool(self):
        # boolean indexers
        b = [True, False, True, False, ]
        self.check_result('bool', 'iloc', b, 'ix', b, typs=['ints', 'uints'])
        self.check_result('bool', 'iloc', b, 'ix', b,
                          typs=['labels', 'mixed', 'ts', 'floats', 'empty'],
                          fails=IndexError)
    def test_iloc_getitem_slice(self):
        # slices
        self.check_result('slice', 'iloc', slice(1, 3), 'ix',
                          {0: [2, 4], 1: [3, 6], 2: [4, 8]},
                          typs=['ints', 'uints'])
        self.check_result('slice', 'iloc', slice(1, 3), 'indexer',
                          slice(1, 3),
                          typs=['labels', 'mixed', 'ts', 'floats', 'empty'],
                          fails=IndexError)
    def test_iloc_getitem_slice_dups(self):
        # positional slicing with duplicate column labels
        df1 = DataFrame(np.random.randn(10, 4), columns=['A', 'A', 'B', 'B'])
        df2 = DataFrame(np.random.randint(0, 10, size=20).reshape(10, 2),
                        columns=['A', 'C'])
        # axis=1
        df = concat([df1, df2], axis=1)
        tm.assert_frame_equal(df.iloc[:, :4], df1)
        tm.assert_frame_equal(df.iloc[:, 4:], df2)
        df = concat([df2, df1], axis=1)
        tm.assert_frame_equal(df.iloc[:, :2], df2)
        tm.assert_frame_equal(df.iloc[:, 2:], df1)
        exp = concat([df2, df1.iloc[:, [0]]], axis=1)
        tm.assert_frame_equal(df.iloc[:, 0:3], exp)
        # axis=0
        df = concat([df, df], axis=0)
        tm.assert_frame_equal(df.iloc[0:10, :2], df2)
        tm.assert_frame_equal(df.iloc[0:10, 2:], df1)
        tm.assert_frame_equal(df.iloc[10:, :2], df2)
        tm.assert_frame_equal(df.iloc[10:, 2:], df1)
    def test_iloc_setitem(self):
        # scalar and slice assignment through iloc
        df = self.frame_ints
        df.iloc[1, 1] = 1
        result = df.iloc[1, 1]
        assert result == 1
        df.iloc[:, 2:3] = 0
        expected = df.iloc[:, 2:3]
        result = df.iloc[:, 2:3]
        tm.assert_frame_equal(result, expected)
        # GH5771
        s = Series(0, index=[4, 5, 6])
        s.iloc[1:2] += 1
        expected = Series([0, 1, 0], index=[4, 5, 6])
        tm.assert_series_equal(s, expected)
    @pytest.mark.parametrize(
        'data, indexes, values, expected_k', [
            # test without indexer value in first level of MultiIndex
            ([[2, 22, 5], [2, 33, 6]], [0, -1, 1], [2, 3, 1], [7, 10]),
            # test like code sample 1 in the issue
            ([[1, 22, 555], [1, 33, 666]], [0, -1, 1], [200, 300, 100],
             [755, 1066]),
            # test like code sample 2 in the issue
            ([[1, 3, 7], [2, 4, 8]], [0, -1, 1], [10, 10, 1000], [17, 1018]),
            # test like code sample 3 in the issue
            ([[1, 11, 4], [2, 22, 5], [3, 33, 6]], [0, -1, 1], [4, 7, 10],
             [8, 15, 13])
        ])
    def test_iloc_setitem_int_multiindex_series(
            self, data, indexes, values, expected_k):
        # GH17148
        df = DataFrame(data=data, columns=['i', 'j', 'k'])
        df = df.set_index(['i', 'j'])
        series = df.k.copy()
        for i, v in zip(indexes, values):
            series.iloc[i] += v
        df['k'] = expected_k
        expected = df.k
        tm.assert_series_equal(series, expected)
    def test_iloc_setitem_list(self):
        # setitem with an iloc list
        df = DataFrame(np.arange(9).reshape((3, 3)), index=["A", "B", "C"],
                       columns=["A", "B", "C"])
        df.iloc[[0, 1], [1, 2]]
        df.iloc[[0, 1], [1, 2]] += 100
        expected = DataFrame(
            np.array([0, 101, 102, 3, 104, 105, 6, 7, 8]).reshape((3, 3)),
            index=["A", "B", "C"], columns=["A", "B", "C"])
        tm.assert_frame_equal(df, expected)
    def test_iloc_setitem_pandas_object(self):
        # GH 17193, affecting old numpy (1.7 and 1.8)
        s_orig = Series([0, 1, 2, 3])
        expected = Series([0, -1, -2, 3])
        s = s_orig.copy()
        s.iloc[Series([1, 2])] = [-1, -2]
        tm.assert_series_equal(s, expected)
        s = s_orig.copy()
        s.iloc[pd.Index([1, 2])] = [-1, -2]
        tm.assert_series_equal(s, expected)
    def test_iloc_setitem_dups(self):
        # GH 6766
        # iloc with a mask aligning from another iloc
        df1 = DataFrame([{'A': None, 'B': 1}, {'A': 2, 'B': 2}])
        df2 = DataFrame([{'A': 3, 'B': 3}, {'A': 4, 'B': 4}])
        df = concat([df1, df2], axis=1)
        expected = df.fillna(3)
        expected['A'] = expected['A'].astype('float64')
        inds = np.isnan(df.iloc[:, 0])
        mask = inds[inds].index
        df.iloc[mask, 0] = df.iloc[mask, 2]
        tm.assert_frame_equal(df, expected)
        # del a dup column across blocks
        expected = DataFrame({0: [1, 2], 1: [3, 4]})
        expected.columns = ['B', 'B']
        del df['A']
        tm.assert_frame_equal(df, expected)
        # assign back to self
        df.iloc[[0, 1], [0, 1]] = df.iloc[[0, 1], [0, 1]]
        tm.assert_frame_equal(df, expected)
        # reversed x 2
        df.iloc[[1, 0], [0, 1]] = df.iloc[[1, 0], [0, 1]].reset_index(
            drop=True)
        df.iloc[[1, 0], [0, 1]] = df.iloc[[1, 0], [0, 1]].reset_index(
            drop=True)
        tm.assert_frame_equal(df, expected)
    def test_iloc_getitem_frame(self):
        # iloc results must match the equivalent (deprecated) .ix lookups
        df = DataFrame(np.random.randn(10, 4), index=lrange(0, 20, 2),
                       columns=lrange(0, 8, 2))
        result = df.iloc[2]
        with catch_warnings(record=True):
            filterwarnings("ignore", "\\n.ix", DeprecationWarning)
            exp = df.ix[4]
        tm.assert_series_equal(result, exp)
        result = df.iloc[2, 2]
        with catch_warnings(record=True):
            filterwarnings("ignore", "\\n.ix", DeprecationWarning)
            exp = df.ix[4, 4]
        assert result == exp
        # slice
        result = df.iloc[4:8]
        with catch_warnings(record=True):
            filterwarnings("ignore", "\\n.ix", DeprecationWarning)
            expected = df.ix[8:14]
        tm.assert_frame_equal(result, expected)
        result = df.iloc[:, 2:3]
        with catch_warnings(record=True):
            filterwarnings("ignore", "\\n.ix", DeprecationWarning)
            expected = df.ix[:, 4:5]
        tm.assert_frame_equal(result, expected)
        # list of integers
        result = df.iloc[[0, 1, 3]]
        with catch_warnings(record=True):
            filterwarnings("ignore", "\\n.ix", DeprecationWarning)
            expected = df.ix[[0, 2, 6]]
        tm.assert_frame_equal(result, expected)
        result = df.iloc[[0, 1, 3], [0, 1]]
        with catch_warnings(record=True):
            filterwarnings("ignore", "\\n.ix", DeprecationWarning)
            expected = df.ix[[0, 2, 6], [0, 2]]
        tm.assert_frame_equal(result, expected)
        # neg indices
        result = df.iloc[[-1, 1, 3], [-1, 1]]
        with catch_warnings(record=True):
            filterwarnings("ignore", "\\n.ix", DeprecationWarning)
            expected = df.ix[[18, 2, 6], [6, 2]]
        tm.assert_frame_equal(result, expected)
        # dups indices
        result = df.iloc[[-1, -1, 1, 3], [-1, 1]]
        with catch_warnings(record=True):
            filterwarnings("ignore", "\\n.ix", DeprecationWarning)
            expected = df.ix[[18, 18, 2, 6], [6, 2]]
        tm.assert_frame_equal(result, expected)
        # with index-like
        s = Series(index=lrange(1, 5))
        result = df.iloc[s.index]
        with catch_warnings(record=True):
            filterwarnings("ignore", "\\n.ix", DeprecationWarning)
            expected = df.ix[[2, 4, 6, 8]]
        tm.assert_frame_equal(result, expected)
    def test_iloc_getitem_labelled_frame(self):
        # try with labelled frame
        df = DataFrame(np.random.randn(10, 4),
                       index=list('abcdefghij'), columns=list('ABCD'))
        result = df.iloc[1, 1]
        exp = df.loc['b', 'B']
        assert result == exp
        result = df.iloc[:, 2:3]
        expected = df.loc[:, ['C']]
        tm.assert_frame_equal(result, expected)
        # negative indexing
        result = df.iloc[-1, -1]
        exp = df.loc['j', 'D']
        assert result == exp
        # out-of-bounds exception
        pytest.raises(IndexError, df.iloc.__getitem__, tuple([10, 5]))
        # trying to use a label
        pytest.raises(ValueError, df.iloc.__getitem__, tuple(['j', 'D']))
    def test_iloc_getitem_doc_issue(self):
        # multi axis slicing issue with single block
        # surfaced in GH 6059
        arr = np.random.randn(6, 4)
        index = date_range('20130101', periods=6)
        columns = list('ABCD')
        df = DataFrame(arr, index=index, columns=columns)
        # defines ref_locs
        df.describe()
        result = df.iloc[3:5, 0:2]
        str(result)
        result.dtypes
        expected = DataFrame(arr[3:5, 0:2], index=index[3:5],
                             columns=columns[0:2])
        tm.assert_frame_equal(result, expected)
        # for dups
        df.columns = list('aaaa')
        result = df.iloc[3:5, 0:2]
        str(result)
        result.dtypes
        expected = DataFrame(arr[3:5, 0:2], index=index[3:5],
                             columns=list('aa'))
        tm.assert_frame_equal(result, expected)
        # related
        arr = np.random.randn(6, 4)
        index = list(range(0, 12, 2))
        columns = list(range(0, 8, 2))
        df = DataFrame(arr, index=index, columns=columns)
        df._data.blocks[0].mgr_locs
        result = df.iloc[1:5, 2:4]
        str(result)
        result.dtypes
        expected = DataFrame(arr[1:5, 2:4], index=index[1:5],
                             columns=columns[2:4])
        tm.assert_frame_equal(result, expected)
    def test_iloc_setitem_series(self):
        # scalar/slice assignment on labelled frames and Series
        df = DataFrame(np.random.randn(10, 4), index=list('abcdefghij'),
                       columns=list('ABCD'))
        df.iloc[1, 1] = 1
        result = df.iloc[1, 1]
        assert result == 1
        df.iloc[:, 2:3] = 0
        expected = df.iloc[:, 2:3]
        result = df.iloc[:, 2:3]
        tm.assert_frame_equal(result, expected)
        s = Series(np.random.randn(10), index=lrange(0, 20, 2))
        s.iloc[1] = 1
        result = s.iloc[1]
        assert result == 1
        s.iloc[:4] = 0
        expected = s.iloc[:4]
        result = s.iloc[:4]
        tm.assert_series_equal(result, expected)
        # interleaved stepped assignment
        s = Series([-1] * 6)
        s.iloc[0::2] = [0, 2, 4]
        s.iloc[1::2] = [1, 3, 5]
        result = s
        expected = Series([0, 1, 2, 3, 4, 5])
        tm.assert_series_equal(result, expected)
    def test_iloc_setitem_list_of_lists(self):
        # GH 7551
        # list-of-list is set incorrectly in mixed vs. single dtyped frames
        df = DataFrame(dict(A=np.arange(5, dtype='int64'),
                            B=np.arange(5, 10, dtype='int64')))
        df.iloc[2:4] = [[10, 11], [12, 13]]
        expected = DataFrame(dict(A=[0, 1, 10, 12, 4], B=[5, 6, 11, 13, 9]))
        tm.assert_frame_equal(df, expected)
        df = DataFrame(
            dict(A=list('abcde'), B=np.arange(5, 10, dtype='int64')))
        df.iloc[2:4] = [['x', 11], ['y', 13]]
        expected = DataFrame(dict(A=['a', 'b', 'x', 'y', 'e'],
                                  B=[5, 6, 11, 13, 9]))
        tm.assert_frame_equal(df, expected)
    @pytest.mark.parametrize(
        'indexer', [[0], slice(None, 1, None), np.array([0])])
    @pytest.mark.parametrize(
        'value', [['Z'], np.array(['Z'])])
    def test_iloc_setitem_with_scalar_index(self, indexer, value):
        # GH #19474
        # assigning like "df.iloc[0, [0]] = ['Z']" should be evaluated
        # elementwisely, not using "setter('A', ['Z'])".
        df = pd.DataFrame([[1, 2], [3, 4]], columns=['A', 'B'])
        df.iloc[0, indexer] = value
        result = df.iloc[0, 0]
        assert is_scalar(result) and result == 'Z'
    def test_iloc_mask(self):
        # GH 3631, iloc with a mask (of a series) should raise
        df = DataFrame(lrange(5), list('ABCDE'), columns=['a'])
        mask = (df.a % 2 == 0)
        pytest.raises(ValueError, df.iloc.__getitem__, tuple([mask]))
        mask.index = lrange(len(mask))
        pytest.raises(NotImplementedError, df.iloc.__getitem__,
                      tuple([mask]))
        # ndarray ok
        result = df.iloc[np.array([True] * len(mask), dtype=bool)]
        tm.assert_frame_equal(result, df)
        # the possibilities: for each mask index type and each accessor, the
        # expected value is either a bit pattern (success) or an error
        # message fragment (failure)
        locs = np.arange(4)
        nums = 2 ** locs
        reps = lmap(bin, nums)
        df = DataFrame({'locs': locs, 'nums': nums}, reps)
        expected = {
            (None, ''): '0b1100',
            (None, '.loc'): '0b1100',
            (None, '.iloc'): '0b1100',
            ('index', ''): '0b11',
            ('index', '.loc'): '0b11',
            ('index', '.iloc'): ('iLocation based boolean indexing '
                                 'cannot use an indexable as a mask'),
            ('locs', ''): 'Unalignable boolean Series provided as indexer '
                          '(index of the boolean Series and of the indexed '
                          'object do not match',
            ('locs', '.loc'): 'Unalignable boolean Series provided as indexer '
                              '(index of the boolean Series and of the '
                              'indexed object do not match',
            ('locs', '.iloc'): ('iLocation based boolean indexing on an '
                                'integer type is not available'),
        }
        # UserWarnings from reindex of a boolean mask
        with catch_warnings(record=True):
            simplefilter("ignore", UserWarning)
            result = dict()
            for idx in [None, 'index', 'locs']:
                mask = (df.nums > 2).values
                if idx:
                    mask = Series(mask, list(reversed(getattr(df, idx))))
                for method in ['', '.loc', '.iloc']:
                    try:
                        if method:
                            accessor = getattr(df, method[1:])
                        else:
                            accessor = df
                        ans = str(bin(accessor[mask]['nums'].sum()))
                    except Exception as e:
                        ans = str(e)
                    key = tuple([idx, method])
                    r = expected.get(key)
                    if r != ans:
                        raise AssertionError(
                            "[%s] does not match [%s], received [%s]"
                            % (key, ans, r))
    def test_iloc_non_unique_indexing(self):
        # GH 4017, non-unique indexing (on the axis)
        df = DataFrame({'A': [0.1] * 3000, 'B': [1] * 3000})
        idx = np.array(lrange(30)) * 99
        expected = df.iloc[idx]
        df3 = concat([df, 2 * df, 3 * df])
        result = df3.iloc[idx]
        tm.assert_frame_equal(result, expected)
        df2 = DataFrame({'A': [0.1] * 1000, 'B': [1] * 1000})
        df2 = concat([df2, 2 * df2, 3 * df2])
        sidx = df2.index.to_series()
        expected = df2.iloc[idx[idx <= sidx.max()]]
        new_list = []
        for r, s in expected.iterrows():
            new_list.append(s)
            new_list.append(s * 2)
            new_list.append(s * 3)
        expected = DataFrame(new_list)
        expected = concat([expected, DataFrame(index=idx[idx > sidx.max()])],
                          sort=True)
        with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
            result = df2.loc[idx]
        tm.assert_frame_equal(result, expected, check_index_type=False)
    def test_iloc_empty_list_indexer_is_ok(self):
        from pandas.util.testing import makeCustomDataframe as mkdf
        df = mkdf(5, 2)
        # vertical empty
        tm.assert_frame_equal(df.iloc[:, []], df.iloc[:, :0],
                              check_index_type=True, check_column_type=True)
        # horizontal empty
        tm.assert_frame_equal(df.iloc[[], :], df.iloc[:0, :],
                              check_index_type=True, check_column_type=True)
        # horizontal empty
        tm.assert_frame_equal(df.iloc[[]], df.iloc[:0, :],
                              check_index_type=True,
                              check_column_type=True)
    def test_identity_slice_returns_new_object(self):
        # GH13873
        original_df = DataFrame({'a': [1, 2, 3]})
        sliced_df = original_df.iloc[:]
        assert sliced_df is not original_df
        # should be a shallow copy
        original_df['a'] = [4, 4, 4]
        assert (sliced_df['a'] == 4).all()
        original_series = Series([1, 2, 3, 4, 5, 6])
        sliced_series = original_series.iloc[:]
        assert sliced_series is not original_series
        # should also be a shallow copy
        original_series[:3] = [7, 8, 9]
        assert all(sliced_series[:3] == [7, 8, 9])
|
cython-testbed/pandas
|
pandas/tests/indexing/test_iloc.py
|
Python
|
bsd-3-clause
| 25,666
|
from unittest import skipUnless
from django.db import connection
from django.test import TestCase
from .models import Article, ArticleTranslation, IndexTogetherSingleList
class SchemaIndexesTests(TestCase):
    """
    Test index handling by the db.backends.schema infrastructure.
    """
    def test_index_name_hash(self):
        """
        Index names should be deterministic.
        """
        with connection.schema_editor() as editor:
            index_name = editor._create_index_name(
                model=Article,
                column_names=("c1", "c2", "c3"),
                suffix="123",
            )
        self.assertEqual(index_name, "indexes_article_c1_7ce4cc86123")
    def test_index_together(self):
        editor = connection.schema_editor()
        index_sql = editor._model_indexes_sql(Article)
        self.assertEqual(len(index_sql), 1)
        # Ensure the index name is properly quoted
        self.assertIn(
            connection.ops.quote_name(
                editor._create_index_name(Article, ['headline', 'pub_date'], suffix='_idx')
            ),
            index_sql[0]
        )
    def test_index_together_single_list(self):
        # Test for using index_together with a single list (#22172)
        index_sql = connection.schema_editor()._model_indexes_sql(IndexTogetherSingleList)
        self.assertEqual(len(index_sql), 1)
    @skipUnless(connection.vendor == 'postgresql', "This is a postgresql-specific issue")
    def test_postgresql_text_indexes(self):
        """Test creation of PostgreSQL-specific text indexes (#12234)"""
        from .models import IndexedArticle
        index_sql = connection.schema_editor()._model_indexes_sql(IndexedArticle)
        self.assertEqual(len(index_sql), 5)
        self.assertIn('("headline" varchar_pattern_ops)', index_sql[1])
        self.assertIn('("body" text_pattern_ops)', index_sql[3])
        # unique=True and db_index=True should only create the varchar-specific
        # index (#19441).
        self.assertIn('("slug" varchar_pattern_ops)', index_sql[4])
    @skipUnless(connection.vendor == 'postgresql', "This is a postgresql-specific issue")
    def test_postgresql_virtual_relation_indexes(self):
        """Test indexes are not created for related objects"""
        index_sql = connection.schema_editor()._model_indexes_sql(Article)
        self.assertEqual(len(index_sql), 1)
    @skipUnless(connection.vendor == 'mysql', "This is a mysql-specific issue")
    def test_no_index_for_foreignkey(self):
        """
        MySQL on InnoDB already creates indexes automatically for foreign keys.
        (#14180). An index should be created if db_constraint=False (#26171).
        """
        storage = connection.introspection.get_storage_engine(
            connection.cursor(), ArticleTranslation._meta.db_table
        )
        if storage != "InnoDB":
            # BUG FIX: TestCase has no ``skip`` method; calling ``self.skip``
            # raised AttributeError whenever the storage engine was not
            # InnoDB.  ``skipTest`` is the unittest API for this.
            self.skipTest("This test only applies to the InnoDB storage engine")
        index_sql = connection.schema_editor()._model_indexes_sql(ArticleTranslation)
        self.assertEqual(index_sql, [
            'CREATE INDEX `indexes_articletranslation_99fb53c2` '
            'ON `indexes_articletranslation` (`article_no_constraint_id`)'
        ])
|
sgzsh269/django
|
tests/indexes/tests.py
|
Python
|
bsd-3-clause
| 3,231
|
from .forest import RandomForestRegressor
from .forest import ExtraTreesRegressor
from .mondrian import MondrianForestClassifier
from .mondrian import MondrianForestRegressor
from .mondrian import MondrianTreeClassifier
from .mondrian import MondrianTreeRegressor
from .quantile import DecisionTreeQuantileRegressor
from .quantile import ExtraTreeQuantileRegressor
from .quantile import ExtraTreesQuantileRegressor
from .quantile import RandomForestQuantileRegressor
# Package version string.
__version__ = "0.1.2"
# Names re-exported as the public package API; mirrors the imports above.
__all__ = [
    "MondrianTreeClassifier",
    "MondrianTreeRegressor",
    "MondrianForestClassifier",
    "MondrianForestRegressor",
    "DecisionTreeQuantileRegressor",
    "ExtraTreesRegressor",
    "ExtraTreeQuantileRegressor",
    "ExtraTreesQuantileRegressor",
    "RandomForestRegressor",
    "RandomForestQuantileRegressor"]
|
MechCoder/scikit-garden
|
skgarden/__init__.py
|
Python
|
bsd-3-clause
| 824
|
"""
Plugin for probing emc
"""
from framework.dependency_management.dependency_resolver import ServiceLocator
DESCRIPTION = " EMC Probing "
def run(PluginInfo):
    """Run every configured EMC probing command and dump its output."""
    resources = ServiceLocator.get_component("resource").GetResources('EmcProbeMethods')
    plugin_helper = ServiceLocator.get_component("plugin_helper")
    return plugin_helper.CommandDump('Test Command', 'Output', resources, PluginInfo, [])
|
DarKnight24/owtf
|
plugins/network/active/ppp@PTES-005.py
|
Python
|
bsd-3-clause
| 375
|
"""
Initializer for the queue_handler folder
"""
from .adapters import build_queue_adapter
from .handlers import QueueManagerHandler, ServiceQueueHandler, TaskQueueHandler, ComputeManagerHandler
from .managers import QueueManager
|
psi4/DatenQM
|
qcfractal/queue/__init__.py
|
Python
|
bsd-3-clause
| 231
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the ``allow_promos`` flag to Project (sponsor ads, default on)."""
    dependencies = [
        ('projects', '0014_add-state-tracking'),
    ]
    operations = [
        migrations.AddField(
            model_name='project',
            name='allow_promos',
            field=models.BooleanField(default=True, help_text='Allow sponsor advertisements on my project documentation', verbose_name='Sponsor advertisements'),
        ),
    ]
|
espdev/readthedocs.org
|
readthedocs/projects/migrations/0015_add_project_allow_promos.py
|
Python
|
mit
| 519
|
import json
from flask import Flask
from flask import request, abort, redirect, url_for
app = Flask(__name__)
clients = {}
def getRequest(request):
    """Return the parameter container for *request*.

    POST requests carry parameters in ``request.form``; any other method
    (GET) uses the query string in ``request.args``.
    """
    return request.form if request.method == 'POST' else request.args
@app.route('/')
def index():
    """Placeholder landing page."""
    return 'Index Page'
@app.route('/amiloggedin', methods=['GET', 'POST'])
def hello():
    """Answer 'Yes' when the named user currently has a session, else 'No'."""
    params = getRequest(request)
    return 'Yes' if params.get('username') in clients else 'No'
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Create a server-side record for the user; echo the username back.

    Returns '' when no username was supplied or it is already logged in.
    """
    username = getRequest(request).get('username')
    if not username or username in clients:
        return ''
    clients[username] = {}
    return username
@app.route('/logout', methods=['GET', 'POST'])
def logout():
    """Drop the user's server-side state, if any; always returns ''."""
    username = getRequest(request).get('username')
    if username in clients:
        del clients[username]
    return ''
@app.route('/update', methods=['GET', 'POST'])
def update():
    """Record the client's latest coordinates and echo them back as JSON.

    Only fields present (and truthy) in the request are updated; an
    unknown or missing username yields an empty response.
    """
    params = getRequest(request)
    username = params.get('username')
    if not (username and username in clients):
        return ''
    record = clients[username]
    for field_name in ('latitude', 'longitude', 'direction'):
        value = params.get(field_name)
        if value:
            record[field_name] = value
    return json.dumps(record)
@app.route('/getallclients')
def getallclients():
    """Serialize every known client's data as one JSON object."""
    return json.dumps(clients)
@app.route('/getclient', methods=['GET', 'POST'])
def getclient():
    """Return the named client's data as JSON, or '' when unknown."""
    username = getRequest(request).get('username')
    if username in clients:
        return json.dumps(clients[username])
    return ''
if __name__ == "__main__":
    # app.run()
    # Bind on all interfaces so other devices can reach the server.
    # NOTE(review): debug=True enables the interactive debugger — not safe
    # for production deployments.
    app.run(debug=True, host='0.0.0.0')
|
ayebear/FriendFinder
|
server.py
|
Python
|
mit
| 2,060
|
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import os.path
from pyglet.gl import *
from pyglet.image import *
from pyglet.image.codecs import *
try:
import Image
except ImportError:
from PIL import Image
class PILImageDecoder(ImageDecoder):
    """Image decoder backed by PIL / Pillow."""
    def get_file_extensions(self):
        # Only most common ones shown here
        return ['.bmp', '.cur', '.gif', '.ico', '.jpg', '.jpeg', '.pcx', '.png',
                '.tga', '.tif', '.tiff', '.xbm', '.xpm']
    def decode(self, file, filename):
        """Decode *file* with PIL and return a pyglet ``ImageData``.

        :raises ImageDecodeException: if PIL cannot read or transpose the
            image, or the decoded mode is unsupported.
        """
        try:
            image = Image.open(file)
        except Exception as e:
            raise ImageDecodeException(
                'PIL cannot read %r: %s' % (filename or file, e))
        try:
            # Flip to pyglet's bottom-up row order.
            image = image.transpose(Image.FLIP_TOP_BOTTOM)
        except Exception as e:
            raise ImageDecodeException(
                'PIL failed to transpose %r: %s' % (filename or file, e))
        # Convert bitmap and palette images to component
        if image.mode in ('1', 'P'):
            image = image.convert()
        if image.mode not in ('L', 'LA', 'RGB', 'RGBA'):
            raise ImageDecodeException('Unsupported mode "%s"' % image.mode)
        width, height = image.size
        # BUG FIX: Pillow renamed PIL's tostring() to tobytes() and removed
        # tostring() entirely in Pillow 3.0.  The previous form
        # ``getattr(image, "tobytes", getattr(image, "tostring"))`` evaluated
        # its default argument eagerly, so it raised AttributeError on modern
        # Pillow even though tobytes() exists.  Probe lazily instead.
        if hasattr(image, "tobytes"):
            image_bytes = image.tobytes()
        else:
            image_bytes = image.tostring()
        return ImageData(width, height, image.mode, image_bytes)
class PILImageEncoder(ImageEncoder):
    """Image encoder backed by PIL / Pillow."""
    def get_file_extensions(self):
        # Most common only
        return ['.bmp', '.eps', '.gif', '.jpg', '.jpeg',
                '.pcx', '.png', '.ppm', '.tiff', '.xbm']
    def encode(self, image, file, filename):
        """Encode *image* into *file*.

        The output format is guessed from the filename extension and
        defaults to PNG.

        :raises ImageEncodeException: if PIL fails to save the image.
        """
        pil_format = (filename and os.path.splitext(filename)[1][1:]) or 'png'
        if pil_format.lower() == 'jpg':
            pil_format = 'JPEG'
        image = image.get_image_data()
        format = image.format
        if format != 'RGB':
            # Only save in RGB or RGBA formats.
            format = 'RGBA'
        # Negative pitch: pyglet stores rows bottom-up, PIL wants top-down.
        pitch = -(image.width * len(format))
        # BUG FIX: Pillow renamed PIL's fromstring() to frombytes() and
        # removed fromstring() in Pillow 3.0.  The previous form
        # ``getattr(Image, "frombytes", getattr(Image, "fromstring"))``
        # evaluated its default eagerly and raised AttributeError on modern
        # Pillow.  Probe lazily instead.
        image_from_fn = getattr(Image, "frombytes", None)
        if image_from_fn is None:
            image_from_fn = Image.fromstring
        pil_image = image_from_fn(
            format, (image.width, image.height), image.get_data(format, pitch))
        try:
            pil_image.save(file, pil_format)
        except Exception as e:
            raise ImageEncodeException(e)
def get_decoders():
    # Entry point used by pyglet's codec registry.
    return [PILImageDecoder()]
def get_encoders():
    # Entry point used by pyglet's codec registry.
    return [PILImageEncoder()]
|
AustinRoy7/Pomodoro-timer
|
venv/Lib/site-packages/pyglet/image/codecs/pil.py
|
Python
|
mit
| 4,522
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from functools import wraps
import json
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
import requests
from requests_toolbelt import MultipartEncoder
API_TEMPLATE = 'https://pcs.baidu.com/rest/2.0/pcs/{0}'
class InvalidToken(Exception):
    """Raised when the access token is invalid or has expired."""
    pass
def check_token(func):
    """Decorator: raise ``InvalidToken`` when the API answers HTTP 401."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        response = func(*args, **kwargs)
        if response.status_code == 401:
            raise InvalidToken('Access token invalid or no longer valid')
        return response
    return wrapper
class BaseClass(object):
    """Shared plumbing for PCS API clients: holds the token and builds
    authenticated GET/POST requests against the API endpoint template."""
    def __init__(self, access_token, api_template=API_TEMPLATE):
        self.access_token = access_token
        self.api_template = api_template
    def _remove_empty_items(self, data):
        # Drop keys whose value is None, in place.  Iterate over a copy so
        # popping from ``data`` is safe while iterating.
        for k, v in data.copy().items():
            if v is None:
                data.pop(k)
    @check_token
    def _request(self, uri, method, url=None, extra_params=None,
                 data=None, files=None, **kwargs):
        """Issue one API call and return the raw ``requests.Response``.

        GET is used for pure queries; POST when form ``data`` or ``files``
        are supplied (files are streamed via MultipartEncoder).  The
        ``method`` here is the PCS API method name, not the HTTP verb.
        """
        params = {
            'method': method,
            'access_token': self.access_token
        }
        if extra_params:
            params.update(extra_params)
        self._remove_empty_items(params)
        if not url:
            url = self.api_template.format(uri)
        api = url
        if data or files:
            # POST: query params go on the URL, payload in the body.
            api = '%s?%s' % (url, urlencode(params))
            if data:
                self._remove_empty_items(data)
            else:
                self._remove_empty_items(files)
                data = MultipartEncoder(files)
                # MultipartEncoder picks the multipart boundary, so its
                # content type must be forwarded in the headers.
                if kwargs.get('headers'):
                    kwargs['headers']['Content-Type'] = data.content_type
                else:
                    kwargs['headers'] = {'Content-Type': data.content_type}
            response = requests.post(api, data=data, **kwargs)
        else:
            response = requests.get(api, params=params, **kwargs)
        return response
class PCS(BaseClass):
"""百度个人云存储(PCS)Python SDK.
所有 api 方法的返回值均为 ``requests.Response`` 对象::
>>> pcs = PCS('access_token')
>>> response = pcs.info()
>>> response
<Response [200]>
>>> response.ok # 状态码是否是 200
True
>>> response.status_code # 状态码
200
>>> response.content # 原始内容(二进制/json 字符串)
'{"quota":6442450944,"used":5138887,"request_id":1216061570}'
>>>
>>> response.json() # 将 json 字符串转换为 python dict
{u'used': 5138887, u'quota': 6442450944L, u'request_id': 1216061570}
"""
def info(self, **kwargs):
"""获取当前用户空间配额信息.
:return: Response 对象
"""
return self._request('quota', 'info', **kwargs)
def upload(self, remote_path, file_content, ondup=None, **kwargs):
"""上传单个文件(<2G).
| 百度PCS服务目前支持最大2G的单个文件上传。
| 如需支持超大文件(>2G)的断点续传,请参考下面的“分片文件上传”方法。
:param remote_path: 网盘中文件的保存路径(包含文件名)。
必须以 /apps/ 开头。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:param file_content: 上传文件的内容/文件对象 。
(e.g. ``open('foobar', 'rb')`` )
:param ondup: (可选)
* 'overwrite':表示覆盖同名文件;
* 'newcopy':表示生成文件副本并进行重命名,命名规则为“
文件名_日期.后缀”。
:return: Response 对象
"""
params = {
'path': remote_path,
'ondup': ondup
}
files = {'file': ('file', file_content, '')}
url = 'https://c.pcs.baidu.com/rest/2.0/pcs/file'
return self._request('file', 'upload', url=url, extra_params=params,
files=files, **kwargs)
def upload_tmpfile(self, file_content, **kwargs):
"""分片上传—文件分片及上传.
百度 PCS 服务支持每次直接上传最大2G的单个文件。
如需支持上传超大文件(>2G),则可以通过组合调用分片文件上传的
``upload_tmpfile`` 方法和 ``upload_superfile`` 方法实现:
1. 首先,将超大文件分割为2G以内的单文件,并调用 ``upload_tmpfile``
将分片文件依次上传;
2. 其次,调用 ``upload_superfile`` ,完成分片文件的重组。
除此之外,如果应用中需要支持断点续传的功能,
也可以通过分片上传文件并调用 ``upload_superfile`` 接口的方式实现。
:param file_content: 上传文件的内容/文件对象
(e.g. ``open('foobar', 'rb')`` )
:return: Response 对象
"""
params = {
'type': 'tmpfile'
}
files = {'file': ('file', file_content, '')}
url = 'https://c.pcs.baidu.com/rest/2.0/pcs/file'
return self._request('file', 'upload', url=url, extra_params=params,
files=files, **kwargs)
def upload_superfile(self, remote_path, block_list, ondup=None, **kwargs):
"""分片上传—合并分片文件.
与分片文件上传的 ``upload_tmpfile`` 方法配合使用,
可实现超大文件(>2G)上传,同时也可用于断点续传的场景。
:param remote_path: 网盘中文件的保存路径(包含文件名)。
必须以 /apps/ 开头。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:param block_list: 子文件内容的 MD5 值列表;子文件至少两个,最多1024个。
:type block_list: list
:param ondup: (可选)
* 'overwrite':表示覆盖同名文件;
* 'newcopy':表示生成文件副本并进行重命名,命名规则为“
文件名_日期.后缀”。
:return: Response 对象
"""
params = {
'path': remote_path,
'ondup': ondup
}
data = {
'param': json.dumps({'block_list': block_list}),
}
return self._request('file', 'createsuperfile', extra_params=params,
data=data, **kwargs)
def download(self, remote_path, **kwargs):
"""下载单个文件。
download 接口支持HTTP协议标准range定义,通过指定range的取值可以实现
断点下载功能。 例如:如果在request消息中指定“Range: bytes=0-99”,
那么响应消息中会返回该文件的前100个字节的内容;
继续指定“Range: bytes=100-199”,
那么响应消息中会返回该文件的第二个100字节内容::
>>> headers = {'Range': 'bytes=0-99'}
>>> pcs = PCS('token')
>>> pcs.download('/apps/test_sdk/test.txt', headers=headers)
:param remote_path: 网盘中文件的路径(包含文件名)。
必须以 /apps/ 开头。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:return: Response 对象
"""
params = {
'path': remote_path,
}
url = 'https://d.pcs.baidu.com/rest/2.0/pcs/file'
return self._request('file', 'download', url=url,
extra_params=params, **kwargs)
def mkdir(self, remote_path, **kwargs):
"""为当前用户创建一个目录.
:param remote_path: 网盘中目录的路径,必须以 /apps/ 开头。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:return: Response 对象
"""
data = {
'path': remote_path
}
return self._request('file', 'mkdir', data=data, **kwargs)
def meta(self, remote_path, **kwargs):
"""获取单个文件或目录的元信息.
:param remote_path: 网盘中文件/目录的路径,必须以 /apps/ 开头。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:return: Response 对象
"""
params = {
'path': remote_path
}
return self._request('file', 'meta', extra_params=params, **kwargs)
def multi_meta(self, path_list, **kwargs):
"""批量获取文件或目录的元信息.
:param path_list: 网盘中文件/目录的路径列表,路径必须以 /apps/ 开头。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:type path_list: list
:return: Response 对象
"""
data = {
'param': json.dumps({
'list': [{'path': path} for path in path_list]
}),
}
return self._request('file', 'meta', data=data, **kwargs)
def list_files(self, remote_path, by=None, order=None,
limit=None, **kwargs):
"""获取目录下的文件列表.
:param remote_path: 网盘中目录的路径,必须以 /apps/ 开头。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:param by: 排序字段,缺省根据文件类型排序:
* time(修改时间)
* name(文件名)
* size(大小,注意目录无大小)
:param order: “asc”或“desc”,缺省采用降序排序。
* asc(升序)
* desc(降序)
:param limit: 返回条目控制,参数格式为:n1-n2。
返回结果集的[n1, n2)之间的条目,缺省返回所有条目;
n1从0开始。
:return: Response 对象
"""
params = {
'path': remote_path,
'by': by,
'order': order,
'limit': limit
}
return self._request('file', 'list', extra_params=params, **kwargs)
def move(self, from_path, to_path, **kwargs):
"""移动单个文件或目录.
:param from_path: 源文件/目录在网盘中的路径(包括文件名)。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:param to_path: 目标文件/目录在网盘中的路径(包括文件名)。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:return: Response 对象
"""
data = {
'from': from_path,
'to': to_path,
}
return self._request('file', 'move', data=data, **kwargs)
def multi_move(self, path_list, **kwargs):
"""批量移动文件或目录.
:param path_list: 源文件地址和目标文件地址对列表:
>>> path_list = [
... ('/apps/test_sdk/test.txt', # 源文件
... '/apps/test_sdk/testmkdir/b.txt' # 目标文件
... ),
... ('/apps/test_sdk/test.txt', # 源文件
... '/apps/test_sdk/testmkdir/b.txt' # 目标文件
... ),
... ]
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:type path_list: list
:return: Response 对象
"""
data = {
'param': json.dumps({
'list': [{'from': x[0], 'to': x[1]} for x in path_list]
}),
}
return self._request('file', 'move', data=data, **kwargs)
def copy(self, from_path, to_path, **kwargs):
"""拷贝文件或目录.
:param from_path: 源文件/目录在网盘中的路径(包括文件名)。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:param to_path: 目标文件/目录在网盘中的路径(包括文件名)。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:return: Response 对象
.. warning::
``move`` 操作后,源文件被移动至目标地址;
``copy`` 操作则会保留原文件。
"""
data = {
'from': from_path,
'to': to_path,
}
return self._request('file', 'copy', data=data, **kwargs)
def multi_copy(self, path_list, **kwargs):
"""批量拷贝文件或目录.
:param path_list: 源文件地址和目标文件地址对的列表:
>>> path_list = [
... ('/apps/test_sdk/test.txt', # 源文件
... '/apps/test_sdk/testmkdir/b.txt' # 目标文件
... ),
... ('/apps/test_sdk/test.txt', # 源文件
... '/apps/test_sdk/testmkdir/b.txt' # 目标文件
... ),
... ]
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:type path_list: list
:return: Response 对象
"""
data = {
'param': json.dumps({
'list': [{'from': x[0], 'to': x[1]} for x in path_list]
}),
}
return self._request('file', 'copy', data=data, **kwargs)
def delete(self, remote_path, **kwargs):
"""删除单个文件或目录.
.. warning::
* 文件/目录删除后默认临时存放在回收站内,删除文件或目录的临时存放
不占用用户的空间配额;
* 存放有效期为10天,10天内可还原回原路径下,10天后则永久删除。
:param remote_path: 网盘中文件/目录的路径,路径必须以 /apps/ 开头。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:type remote_path: str
:return: Response 对象
"""
data = {
'path': remote_path
}
return self._request('file', 'delete', data=data, **kwargs)
def multi_delete(self, path_list, **kwargs):
"""批量删除文件或目录.
.. warning::
* 文件/目录删除后默认临时存放在回收站内,删除文件或目录的临时存放
不占用用户的空间配额;
* 存放有效期为10天,10天内可还原回原路径下,10天后则永久删除。
:param path_list: 网盘中文件/目录的路径列表,路径必须以 /apps/ 开头。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:type path_list: list
:return: Response 对象
"""
data = {
'param': json.dumps({
'list': [{'path': path} for path in path_list]
}),
}
return self._request('file', 'delete', data=data, **kwargs)
def search(self, remote_path, keyword, recurrent='0', **kwargs):
"""按文件名搜索文件(不支持查找目录).
:param remote_path: 需要检索的目录路径,路径必须以 /apps/ 开头。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:type remote_path: str
:param keyword: 关键词
:type keyword: str
:param recurrent: 是否递归。
* "0"表示不递归
* "1"表示递归
:type recurrent: str
:return: Response 对象
"""
params = {
'path': remote_path,
'wd': keyword,
're': recurrent,
}
return self._request('file', 'search', extra_params=params, **kwargs)
def thumbnail(self, remote_path, height, width, quality=100, **kwargs):
"""获取指定图片文件的缩略图.
:param remote_path: 源图片的路径,路径必须以 /apps/ 开头。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:param height: 指定缩略图的高度,取值范围为(0,1600]。
:type height: int
:param width: 指定缩略图的宽度,取值范围为(0,1600]。
:type width: int
:param quality: 缩略图的质量,默认为100,取值范围(0,100]。
:type quality: int
:return: Response 对象
.. warning::
有以下限制条件:
* 原图大小(0, 10M];
* 原图类型: jpg、jpeg、bmp、gif、png;
* 目标图类型:和原图的类型有关;例如:原图是gif图片,
则缩略后也为gif图片。
"""
params = {
'path': remote_path,
'height': height,
'width': width,
'quality': quality,
}
return self._request('thumbnail', 'generate', extra_params=params,
**kwargs)
def diff(self, cursor='null', **kwargs):
"""文件增量更新操作查询接口.
本接口有数秒延迟,但保证返回结果为最终一致.
:param cursor: 用于标记更新断点。
* 首次调用cursor=null;
* 非首次调用,使用最后一次调用diff接口的返回结果
中的cursor。
:type cursor: str
:return: Response 对象
"""
params = {
'cursor': cursor,
}
return self._request('file', 'diff', extra_params=params, **kwargs)
def video_convert(self, remote_path, video_type, **kwargs):
"""对视频文件进行转码,实现实时观看视频功能.
可下载支持 HLS/M3U8 的 `媒体云播放器 SDK <HLSSDK_>`__ 配合使用.
.. _HLSSDK:
http://developer.baidu.com/wiki/index.php?title=docs/cplat/media/sdk
:param remote_path: 需要下载的视频文件路径,以/开头的绝对路径,
需含源文件的文件名。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:type remote_path: str
:param video_type: 目前支持以下格式:
M3U8_320_240、M3U8_480_224、M3U8_480_360、
M3U8_640_480和M3U8_854_480
:type video_type: str
:return: Response 对象
.. warning::
目前这个接口支持的源文件格式如下:
+--------------------------+------------+--------------------------+
|格式名称 |扩展名 |备注 |
+==========================+============+==========================+
|Apple HTTP Live Streaming |m3u8/m3u |iOS支持的视频格式 |
+--------------------------+------------+--------------------------+
|ASF |asf |视频格式 |
+--------------------------+------------+--------------------------+
|AVI |avi |视频格式 |
+--------------------------+------------+--------------------------+
|Flash Video (FLV) |flv |Macromedia Flash视频格式 |
+--------------------------+------------+--------------------------+
|GIF Animation |gif |视频格式 |
+--------------------------+------------+--------------------------+
|Matroska |mkv |Matroska/WebM视频格式 |
+--------------------------+------------+--------------------------+
|MOV/QuickTime/MP4 |mov/mp4/m4a/|支持3GP、3GP2、PSP、iPod |
| |3gp/3g2/mj2 |之类视频格式 |
+--------------------------+------------+--------------------------+
|MPEG-PS (program stream) |mpeg |也就是VOB文件/SVCD/DVD格式|
+--------------------------+------------+--------------------------+
|MPEG-TS (transport stream)|ts | 即DVB传输流 |
+--------------------------+------------+--------------------------+
|RealMedia |rm/rmvb | Real视频格式 |
+--------------------------+------------+--------------------------+
|WebM |webm | Html视频格式 |
+--------------------------+------------+--------------------------+
"""
params = {
'path': remote_path,
'type': video_type,
}
return self._request('file', 'streaming', extra_params=params,
**kwargs)
def list_streams(self, file_type, start=0, limit=100,
filter_path=None, **kwargs):
"""以视频、音频、图片及文档四种类型的视图获取所创建应用程序下的
文件列表.
:param file_type: 类型分为video、audio、image及doc四种。
:param start: 返回条目控制起始值,缺省值为0。
:param limit: 返回条目控制长度,缺省为1000,可配置。
:param filter_path: 需要过滤的前缀路径,如:/apps/album
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:return: Response 对象
"""
params = {
'type': file_type,
'start': start,
'limit': limit,
'filter_path': filter_path,
}
return self._request('stream', 'list', extra_params=params,
**kwargs)
def download_stream(self, remote_path, **kwargs):
"""为当前用户下载一个流式文件.其参数和返回结果与下载单个文件的相同.
:param remote_path: 需要下载的文件路径,以/开头的绝对路径,含文件名。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:return: Response 对象
"""
params = {
'path': remote_path,
}
url = 'https://d.pcs.baidu.com/rest/2.0/pcs/file'
return self._request('stream', 'download', url=url,
extra_params=params, **kwargs)
def rapid_upload(self, remote_path, content_length, content_md5,
content_crc32, slice_md5, ondup=None, **kwargs):
"""秒传一个文件.
.. warning::
* 被秒传文件必须大于256KB(即 256*1024 B)。
* 校验段为文件的前256KB,秒传接口需要提供校验段的MD5。
(非强一致接口,上传后请等待1秒后再读取)
:param remote_path: 上传文件的全路径名。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:param content_length: 待秒传文件的长度。
:param content_md5: 待秒传文件的MD5。
:param content_crc32: 待秒传文件的CRC32。
:param slice_md5: 待秒传文件校验段的MD5。
:param ondup: (可选)
* 'overwrite':表示覆盖同名文件;
* 'newcopy':表示生成文件副本并进行重命名,命名规则为“
文件名_日期.后缀”。
:return: Response 对象
"""
data = {
'path': remote_path,
'content-length': content_length,
'content-md5': content_md5,
'content-crc32': content_crc32,
'slice-md5': slice_md5,
'ondup': ondup,
}
return self._request('file', 'rapidupload', data=data, **kwargs)
def add_download_task(self, source_url, remote_path,
rate_limit=None, timeout=60 * 60,
expires=None, callback='', **kwargs):
"""添加离线下载任务,实现单个文件离线下载.
:param source_url: 源文件的URL。
:param remote_path: 下载后的文件保存路径。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:param rate_limit: 下载限速,默认不限速。
:type rate_limit: int or long
:param timeout: 下载超时时间,默认3600秒。
:param expires: 请求失效时间,如果有,则会校验。
:type expires: int
:param callback: 下载完毕后的回调,默认为空。
:type callback: str
:return: Response 对象
"""
data = {
'source_url': source_url,
'save_path': remote_path,
'expires': expires,
'rate_limit': rate_limit,
'timeout': timeout,
'callback': callback,
}
return self._request('services/cloud_dl', 'add_task',
data=data, **kwargs)
def query_download_tasks(self, task_ids, operate_type=1,
expires=None, **kwargs):
"""根据任务ID号,查询离线下载任务信息及进度信息。
:param task_ids: 要查询的任务ID列表
:type task_ids: list or tuple
:param operate_type:
* 0:查任务信息
* 1:查进度信息,默认为1
:param expires: 请求失效时间,如果有,则会校验。
:type expires: int
:return: Response 对象
"""
params = {
'task_ids': ','.join(map(str, task_ids)),
'op_type': operate_type,
'expires': expires,
}
return self._request('services/cloud_dl', 'query_task',
extra_params=params, **kwargs)
def list_download_tasks(self, need_task_info=1, start=0, limit=10, asc=0,
create_time=None, status=None, source_url=None,
remote_path=None, expires=None, **kwargs):
"""查询离线下载任务ID列表及任务信息.
:param need_task_info: 是否需要返回任务信息:
* 0:不需要
* 1:需要,默认为1
:param start: 查询任务起始位置,默认为0。
:param limit: 设定返回任务数量,默认为10。
:param asc:
* 0:降序,默认值
* 1:升序
:param create_time: 任务创建时间,默认为空。
:type create_time: int
:param status: 任务状态,默认为空。
0:下载成功,1:下载进行中 2:系统错误,3:资源不存在,
4:下载超时,5:资源存在但下载失败, 6:存储空间不足,
7:目标地址数据已存在, 8:任务取消.
:type status: int
:param source_url: 源地址URL,默认为空。
:param remote_path: 文件保存路径,默认为空。
.. warning::
* 路径长度限制为1000;
* 径中不能包含以下字符:``\\\\ ? | " > < : *``;
* 文件名或路径名开头结尾不能是 ``.``
或空白字符,空白字符包括:
``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。
:param expires: 请求失效时间,如果有,则会校验。
:type expires: int
:return: Response 对象
"""
data = {
'expires': expires,
'start': start,
'limit': limit,
'asc': asc,
'source_url': source_url,
'save_path': remote_path,
'create_time': create_time,
'status': status,
'need_task_info': need_task_info,
}
return self._request('services/cloud_dl', 'list_task',
data=data, **kwargs)
def cancel_download_task(self, task_id, expires=None, **kwargs):
"""取消离线下载任务.
:param task_id: 要取消的任务ID号。
:type task_id: str
:param expires: 请求失效时间,如果有,则会校验。
:type expires: int
:return: Response 对象
"""
data = {
'expires': expires,
'task_id': task_id,
}
return self._request('services/cloud_dl', 'cancle_task',
data=data, **kwargs)
def list_recycle_bin(self, start=0, limit=1000, **kwargs):
"""获取回收站中的文件及目录列表.
:param start: 返回条目的起始值,缺省值为0
:param limit: 返回条目的长度,缺省值为1000
:return: Response 对象
"""
params = {
'start': start,
'limit': limit,
}
return self._request('file', 'listrecycle',
extra_params=params, **kwargs)
def restore_recycle_bin(self, fs_id, **kwargs):
"""还原单个文件或目录(非强一致接口,调用后请sleep 1秒读取).
:param fs_id: 所还原的文件或目录在PCS的临时唯一标识ID。
:type fs_id: str
:return: Response 对象
"""
data = {
'fs_id': fs_id,
}
return self._request('file', 'restore', data=data, **kwargs)
def multi_restore_recycle_bin(self, fs_ids, **kwargs):
"""批量还原文件或目录(非强一致接口,调用后请sleep1秒 ).
:param fs_ids: 所还原的文件或目录在 PCS 的临时唯一标识 ID 的列表。
:type fs_ids: list or tuple
:return: Response 对象
"""
data = {
'param': json.dumps({
'list': [{'fs_id': fs_id} for fs_id in fs_ids]
}),
}
return self._request('file', 'restore', data=data, **kwargs)
def clean_recycle_bin(self, **kwargs):
"""清空回收站.
:return: Response 对象
"""
data = {
'type': 'recycle',
}
return self._request('file', 'delete', data=data, **kwargs)
|
matrixorz/justpic
|
justpic/vendor/baidupcs/api.py
|
Python
|
mit
| 38,434
|
# encoding: utf-8
"""
Test suite for the docx.blkcntnr (block item container) module
"""
from __future__ import absolute_import, print_function, unicode_literals
import pytest
from docx.blkcntnr import BlockItemContainer
from docx.table import Table
from docx.text import Paragraph
from .unitutil.cxml import element, xml
class DescribeBlockItemContainer(object):
    """Behavioral tests for the ``BlockItemContainer`` base class."""

    def it_can_add_a_paragraph(self, add_paragraph_fixture):
        blkcntnr, text, style, expected_xml = add_paragraph_fixture
        paragraph = blkcntnr.add_paragraph(text, style)
        assert blkcntnr._element.xml == expected_xml
        assert isinstance(paragraph, Paragraph)

    def it_can_add_a_table(self, add_table_fixture):
        blkcntnr, rows, cols, expected_xml = add_table_fixture
        table = blkcntnr.add_table(rows, cols)
        assert blkcntnr._element.xml == expected_xml
        assert isinstance(table, Table)

    def it_provides_access_to_the_paragraphs_it_contains(
            self, paragraphs_fixture):
        # exercises len(), iteration, and indexed access in a single pass
        blkcntnr, expected_count = paragraphs_fixture
        paragraphs = blkcntnr.paragraphs
        assert len(paragraphs) == expected_count
        seen = 0
        for position, paragraph in enumerate(paragraphs):
            assert isinstance(paragraph, Paragraph)
            assert paragraphs[position] is paragraph
            seen += 1
        assert seen == expected_count

    def it_provides_access_to_the_tables_it_contains(self, tables_fixture):
        # exercises len(), iteration, and indexed access in a single pass
        blkcntnr, expected_count = tables_fixture
        tables = blkcntnr.tables
        assert len(tables) == expected_count
        seen = 0
        for position, table in enumerate(tables):
            assert isinstance(table, Table)
            assert tables[position] is table
            seen += 1
        assert seen == expected_count

    # fixtures -------------------------------------------------------

    @pytest.fixture(params=[
        ('w:body', '', None,
         'w:body/w:p'),
        ('w:body', 'foobar', None,
         'w:body/w:p/w:r/w:t"foobar"'),
        ('w:body', '', 'Heading1',
         'w:body/w:p/w:pPr/w:pStyle{w:val=Heading1}'),
        ('w:body', 'barfoo', 'BodyText',
         'w:body/w:p/(w:pPr/w:pStyle{w:val=BodyText},w:r/w:t"barfoo")'),
    ])
    def add_paragraph_fixture(self, request):
        cxml_before, text, style, cxml_after = request.param
        blkcntnr = BlockItemContainer(element(cxml_before), None)
        return blkcntnr, text, style, xml(cxml_after)

    @pytest.fixture(params=[
        ('w:body', 0, 0, 'w:body/w:tbl/(w:tblPr/w:tblW{w:type=auto,w:w=0},w:'
         'tblGrid)'),
        ('w:body', 1, 0, 'w:body/w:tbl/(w:tblPr/w:tblW{w:type=auto,w:w=0},w:'
         'tblGrid,w:tr)'),
        ('w:body', 0, 1, 'w:body/w:tbl/(w:tblPr/w:tblW{w:type=auto,w:w=0},w:'
         'tblGrid/w:gridCol)'),
        ('w:body', 1, 1, 'w:body/w:tbl/(w:tblPr/w:tblW{w:type=auto,w:w=0},w:'
         'tblGrid/w:gridCol,w:tr/w:tc/w:p)'),
    ])
    def add_table_fixture(self, request):
        cxml_before, rows, cols, cxml_after = request.param
        blkcntnr = BlockItemContainer(element(cxml_before), None)
        return blkcntnr, rows, cols, xml(cxml_after)

    @pytest.fixture(params=[
        ('w:body', 0),
        ('w:body/w:p', 1),
        ('w:body/(w:p,w:p)', 2),
        ('w:body/(w:p,w:tbl)', 1),
        ('w:body/(w:p,w:tbl,w:p)', 2),
    ])
    def paragraphs_fixture(self, request):
        blkcntnr_cxml, expected_count = request.param
        container = BlockItemContainer(element(blkcntnr_cxml), None)
        return container, expected_count

    @pytest.fixture(params=[
        ('w:body', 0),
        ('w:body/w:tbl', 1),
        ('w:body/(w:tbl,w:tbl)', 2),
        ('w:body/(w:p,w:tbl)', 1),
        ('w:body/(w:tbl,w:tbl,w:p)', 2),
    ])
    def tables_fixture(self, request):
        blkcntnr_cxml, expected_count = request.param
        container = BlockItemContainer(element(blkcntnr_cxml), None)
        return container, expected_count
|
holli-holzer/python-docx
|
tests/test_blkcntnr.py
|
Python
|
mit
| 4,231
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import models, migrations
def seed_sections(apps, schema_editor):
    """Create one blog ``Section`` row per entry in PINAX_BLOG_SECTIONS.

    Each settings entry is indexed as ``(slug, name, ...)``; rows are
    written to whichever database this migration is being applied to.
    """
    Section = apps.get_model("blog", "Section")
    alias = schema_editor.connection.alias
    for entry in settings.PINAX_BLOG_SECTIONS:
        Section.objects.using(alias).create(slug=entry[0], name=entry[1])
class Migration(migrations.Migration):
    """Create the ``Section`` model, seed it, and repoint Post.section."""

    dependencies = [
        ('blog', '0002_post_state'),
    ]

    operations = [
        # 1) table for the new Section model
        migrations.CreateModel(
            name='Section',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=150)),
                ('slug', models.SlugField(unique=True)),
                ('enabled', models.BooleanField(default=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # 2) populate the table from settings.PINAX_BLOG_SECTIONS
        # NOTE(review): no reverse_code is supplied, so this migration is
        # irreversible -- consider RunPython.noop if rollback matters.
        migrations.RunPython(
            code=seed_sections,
        ),
        # 3) make Post.section a foreign key to the new concrete model
        migrations.AlterField(
            model_name='post',
            name='section',
            field=models.ForeignKey(to='blog.Section'),
            preserve_default=True,
        ),
    ]
|
easton402/pinax-blog
|
pinax/blog/migrations/0003_auto_20150529_0405.py
|
Python
|
mit
| 1,277
|
import json
import unreal_engine as ue
from unreal_engine.classes import Skeleton, AnimSequence, SkeletalMesh, Material, MorphTarget, AnimSequence, AnimSequenceFactory
from unreal_engine import FTransform, FVector, FRotator, FQuat, FSoftSkinVertex, FMorphTargetDelta, FRawAnimSequenceTrack
from unreal_engine.structs import SkeletalMaterial, MeshUVChannelInfo, FloatCurve, RichCurve, RichCurveKey, SmartName, RawCurveTracks
from collections import OrderedDict
import numpy
class ThreeJSLoader:
    """Import a three.js JSON model (format 3) as UE4 assets.

    Builds a Skeleton, a SkeletalMesh (including morph targets) and an
    AnimSequence from the file, converting coordinates from the
    three.js/OpenGL axis convention to Unreal's left-handed convention.
    """

    def __init__(self, filename, scale=1.0):
        """Parse *filename* and build every asset, scaling positions by *scale*."""
        # we need ordered json dictionaries
        with open(filename) as json_file:
            self.model = json.load(json_file, object_pairs_hook=OrderedDict)
        self.scale = scale
        # ask the user where to generate the new assets
        new_path = ue.create_modal_save_asset_dialog('Choose destination path')
        package_name = ue.object_path_to_package_name(new_path)
        object_name = ue.get_base_filename(new_path)
        self.skeleton = self.build_skeleton(package_name, object_name)
        # this is the list of soft skin vertices (they contains mesh data as well as bone influences)
        self.vertices = []
        # this contain mapping between the soft skin vertices and the json file vertex index (this is required for building morph targets)
        self.vertex_map = []
        self.mesh = self.build_mesh(package_name, object_name)
        self.curves = self.build_morph_targets()
        self.animation = self.build_animation(package_name, object_name)

    def build_skeleton(self, pkg_name, obj_name):
        """Create and save a Skeleton asset from the json 'bones' list."""
        pkg = ue.get_or_create_package('{0}_Skeleton'.format(pkg_name))
        skel = Skeleton('{0}_Skeleton'.format(obj_name), pkg)
        # add a root bone from which all of the others will descend
        # (this trick will avoid generating an invalid skeleton [and a crash], as in UE4 only one root can exist)
        skel.skeleton_add_bone('root', -1, FTransform())
        # iterate bones in the json file, note that we move from opengl axis to UE4
        # (y on top, x right, z forward right-handed) to (y right, x forward left-handed, z on top)
        for bone in self.model['bones']:
            # assume no rotation
            quat = FQuat()
            # give priority to quaternions
            # remember to negate x and y axis, as we invert z on position
            if 'rotq' in bone:
                quat = FQuat(bone['rotq'][2], bone['rotq'][0] * -1,
                             bone['rotq'][1] * -1, bone['rotq'][3])
            elif 'rot' in bone:
                # NOTE(review): euler path subtracts 180 degrees from two
                # components -- presumably an axis-convention fixup; confirm
                # against a model that uses 'rot' rather than 'rotq'.
                quat = FRotator(bone['rot'][2], bone['rot'][0] - 180
                                , bone['rot'][1] - 180).quaternion()
            pos = FVector(bone['pos'][2] * -1, bone['pos'][0],
                          bone['pos'][1]) * self.scale
            # always set parent+1 as we added the root bone before
            skel.skeleton_add_bone(
                bone['name'], bone['parent'] + 1, FTransform(pos, quat))
        skel.save_package()
        return skel

    def build_soft_vertex(self, index):
        """Return (FSoftSkinVertex, json_vertex_index) for face slot *index*."""
        # create a new soft skin vertex, holding tangents, normal, uvs, influences...
        v = FSoftSkinVertex()
        v_index = self.model['faces'][index] * 3
        # here we assume 2 bone influences, technically we should honour what the json influencesPerVertex field exposes
        b_index = self.model['faces'][index] * 2
        v.position = FVector(self.model['vertices'][v_index + 2] * -1, self.model['vertices']
                             [v_index], self.model['vertices'][v_index + 1]) * self.scale
        v.influence_weights = (
            self.model['skinWeights'][b_index], self.model['skinWeights'][b_index + 1])
        # +1 on bone indices accounts for the artificial root bone
        v.influence_bones = (
            self.model['skinIndices'][b_index] + 1, self.model['skinIndices'][b_index + 1] + 1)
        # return the json index too, as we will need it later for computing morph targets
        return (v, v_index)

    def get_normals(self, index):
        """Return the normal referenced at face slot *index*, axis-converted."""
        n_index = self.model['faces'][index] * 3
        return FVector(self.model['normals'][n_index + 2] * -1, self.model['normals'][n_index], self.model['normals'][n_index + 1])

    def build_mesh(self, pkg_name, obj_name):
        """Walk the format-3 'faces' bitmask stream and build the SkeletalMesh.

        Each face starts with a bitmask: bit 0 = quad, bits 1..7 flag the
        optional per-face/per-vertex payloads that follow (material, uvs,
        normals, colors -- see the three.js format-3 spec).
        """
        index = 0
        # this supports only format 3 (now deprecated, https://github.com/mrdoob/three.js/wiki/JSON-Model-format-3)
        while index < len(self.model['faces']):
            face = self.model['faces'][index]
            index += 1
            points = 3
            v0 = v1 = v2 = v3 = None
            if face & 1 == 0:
                # triangle
                v0, v0_index = self.build_soft_vertex(index)
                v1, v1_index = self.build_soft_vertex(index + 1)
                v2, v2_index = self.build_soft_vertex(index + 2)
            else:
                # quad
                v0, v0_index = self.build_soft_vertex(index)
                v1, v1_index = self.build_soft_vertex(index + 1)
                v2, v2_index = self.build_soft_vertex(index + 2)
                v3, v3_index = self.build_soft_vertex(index + 3)
                if v3:
                    points = 4
            index += points
            # bit 1: face material index (skipped)
            if face & 2:
                index += 1
            # bit 2: face uv (skipped)
            if face & 4:
                index += 1
            # bit 3: per-vertex uvs (skipped)
            if face & 8:
                index += points
            # bit 4: face normal (skipped)
            if face & 16:
                index += 1
            # bit 5: per-vertex normals -> consumed as tangent_z
            if face & 32:
                v0.tangent_z = self.get_normals(index)
                v1.tangent_z = self.get_normals(index + 1)
                v2.tangent_z = self.get_normals(index + 2)
                if v3:
                    v3.tangent_z = self.get_normals(index + 3)
                index += points
            # bit 6: face color (skipped)
            if face & 64:
                index += 1
            # bit 7: per-vertex colors (skipped)
            if face & 128:
                index += points
            if points == 3:
                # we need to fix winding, from OpenGL (counterwise) to UE4 (clockwise)
                self.vertices.append(v2)
                self.vertex_map.append(v2_index)
                self.vertices.append(v0)
                self.vertex_map.append(v0_index)
                self.vertices.append(v1)
                self.vertex_map.append(v1_index)
            else:
                # we have a quad, generate two triangles
                # we need to fix winding, from OpenGL (counterwise) to UE4 (clockwise)
                self.vertices.append(v3)
                self.vertex_map.append(v3_index)
                self.vertices.append(v0)
                self.vertex_map.append(v0_index)
                self.vertices.append(v1)
                self.vertex_map.append(v1_index)
                self.vertices.append(v2)
                self.vertex_map.append(v2_index)
                self.vertices.append(v3)
                self.vertex_map.append(v3_index)
                self.vertices.append(v1)
                self.vertex_map.append(v1_index)
        pkg = ue.get_or_create_package(pkg_name)
        sm = SkeletalMesh(obj_name, pkg)
        sm.skeletal_mesh_set_skeleton(self.skeleton)
        # generate the LOD from the list of soft skin vertices
        sm.skeletal_mesh_build_lod(self.vertices)
        sm.save_package()
        return sm

    def build_morph_targets(self):
        """Register one MorphTarget per json 'morphTargets' entry.

        Returns the list of FloatCurves (one per registered target) used
        later to attach curve data to the animation.
        """
        # when we build the skeletal mesh LOD by passing soft skin vertices
        # UE4 will internally optimize the vertices to reduce duplicates
        # for this reason the vertex index we built is different from the one stored into UE4
        # the skeletal_mesh_to_import_vertex_map() returns the original mapping given the new one
        import_map = self.mesh.skeletal_mesh_to_import_vertex_map()
        # we will fill animation curves for later usage
        curves = []
        for morph_item in self.model['morphTargets']:
            # ensure the MorphTarget has the SkeletalMesh as outer
            morph = MorphTarget('', self.mesh)
            deltas = []
            for idx, import_index in enumerate(import_map):
                # get the original json vertex index
                vertex_index = self.vertex_map[import_index]
                # get the original soft skin vertex
                vdata = self.vertices[import_index]
                x = morph_item['vertices'][vertex_index + 2] * -1
                y = morph_item['vertices'][vertex_index]
                z = morph_item['vertices'][vertex_index + 1]
                delta = FMorphTargetDelta()
                delta.source_idx = idx
                # store the difference between original vertex position and the morph target one
                delta.position_delta = (
                    FVector(x, y, z) * self.scale) - vdata.position
                deltas.append(delta)
            # check for the return value, as sometimes morph targets
            # in json files do not generate any kind of modification
            # so unreal will skip it
            if morph.morph_target_populate_deltas(deltas):
                # register the morph target
                self.mesh.skeletal_mesh_register_morph_target(morph)
                # add curve, not required, we can use it later for skeletal-based animations
                curves.append(FloatCurve(Name=SmartName(DisplayName=morph.get_name()), FloatCurve=RichCurve(
                    Keys=[RichCurveKey(Time=0.0, Value=0.0), RichCurveKey(Time=1.0, Value=1.0)])))
        self.mesh.save_package()
        return curves

    def build_animation(self, pkg_name, obj_name):
        """Create an AnimSequence from the json 'animation' section.

        Keyframes are resampled at the file's fps by linear interpolation
        of positions and (component-wise) quaternions.
        """
        factory = AnimSequenceFactory()
        factory.TargetSkeleton = self.skeleton
        new_anim = factory.factory_create_new('{0}_Animation'.format(pkg_name))
        # NOTE(review): length * fps may be a float; UE4 expects an integer
        # frame count -- confirm rounding behaviour for non-integral values.
        new_anim.NumFrames = self.model['animation']['length'] * \
            self.model['animation']['fps']
        new_anim.SequenceLength = self.model['animation']['length']
        # each bone maps to a track in UE4 animations
        for bone_index, track in enumerate(self.model['animation']['hierarchy']):
            # retrieve the bone/track name from the index (remember to add 1 as we have the additional root bone)
            bone_name = self.skeleton.skeleton_get_bone_name(bone_index + 1)
            positions = []
            rotations = []
            scales = []
            for key in track['keys']:
                t = key['time']
                if 'pos' in key:
                    positions.append(
                        (t, FVector(key['pos'][2] * -1, key['pos'][0], key['pos'][1]) * 100))
                if 'rotq' in key:
                    rotations.append((t, FQuat(
                        key['rotq'][2], key['rotq'][0] * -1, key['rotq'][1] * -1, key['rotq'][3])))
                elif 'rot' in key:
                    # is it a quaternion ?
                    if len(key['rot']) == 4:
                        rotations.append(
                            (t, FQuat(key['rot'][2], key['rot'][0] * -1, key['rot'][1] * -1, key['rot'][3])))
                    else:
                        rotations.append(
                            (t, FRotator(key['rot'][2], key['rot'][0] - 180, key['rot'][1] - 180).quaternion()))
            pos_keys = []
            rot_keys = []
            # generate the right number of frames
            for t in numpy.arange(0, self.model['animation']['length'], 1.0 / self.model['animation']['fps']):
                pos_keys.append(self.interpolate_vector(positions, t))
                rot_keys.append(self.interpolate_quaternion(
                    rotations, t).get_normalized())
            track_data = FRawAnimSequenceTrack()
            track_data.pos_keys = pos_keys
            track_data.rot_keys = rot_keys
            new_anim.add_new_raw_track(bone_name, track_data)
        # if we have curves, just add them to the animation
        if self.curves:
            new_anim.RawCurveData = RawCurveTracks(FloatCurves=self.curves)
        new_anim.save_package()
        return new_anim

    def interpolate_vector(self, timeline, t):
        """Linearly interpolate a (time, FVector) timeline at time *t*."""
        keys = []
        x_values = []
        y_values = []
        z_values = []
        for key, value in timeline:
            keys.append(key)
            x_values.append(value[0])
            y_values.append(value[1])
            z_values.append(value[2])
        x = numpy.interp(t, keys, x_values)
        y = numpy.interp(t, keys, y_values)
        z = numpy.interp(t, keys, z_values)
        return FVector(x, y, z)

    def interpolate_quaternion(self, timeline, t):
        """Component-wise interpolate a (time, FQuat) timeline at time *t*.

        NOTE(review): this is per-component lerp, not slerp; the caller
        re-normalizes the result, which is adequate for close keyframes.
        """
        keys = []
        x_values = []
        y_values = []
        z_values = []
        w_values = []
        for key, value in timeline:
            keys.append(key)
            x_values.append(value[0])
            y_values.append(value[1])
            z_values.append(value[2])
            w_values.append(value[3])
        x = numpy.interp(t, keys, x_values)
        y = numpy.interp(t, keys, y_values)
        z = numpy.interp(t, keys, z_values)
        w = numpy.interp(t, keys, w_values)
        return FQuat(x, y, z, w)
# Entry point: ask for a three.js JSON model, import it at 100x scale
# (three.js units are meters, UE4 units are centimeters) and open the
# generated animation asset in the editor.
filename = ue.open_file_dialog('Choose a three.js file')[0]
threejs = ThreeJSLoader(filename, 100)
ue.open_editor_for_asset(threejs.animation)
|
kitelightning/UnrealEnginePython
|
tutorials/SnippetsForStaticAndSkeletalMeshes_Assets/threejs_importer.py
|
Python
|
mit
| 13,106
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import warnings
from scipy import fftpack
def get_image_quadrants(IM, reorient=True, symmetry_axis=None,
                        use_quadrants=(True, True, True, True),
                        symmetrize_method="average"):
    """
    Given an image (m,n) return its 4 quadrants Q0, Q1, Q2, Q3
    as defined below.

    Parameters
    ----------
    IM : 2D np.array
        Image data shape (rows, cols)
    reorient : boolean
        Reorient quadrants to match the orientation of Q0 (top-right)
    symmetry_axis : int or tuple
        can have values of ``None``, ``0``, ``1``, or ``(0,1)`` and specifies
        no symmetry, vertical symmetry axis, horizontal symmetry axis, and
        both vertical and horizontal symmetry axes. Quadrants are added.
        See Note.
    use_quadrants : boolean tuple
        Include quadrant (Q0, Q1, Q2, Q3) in the symmetry combination(s)
        and final image
    symmetrize_method: str
        Method used for symmetrizing the image.

        average: Simply average the quadrants.

        fourier: Axial symmetry implies that the Fourier components of the
        2-D projection should be real. Removing the imaginary components in
        reciprocal space leaves a symmetric projection.
        ref: Overstreet, K., et al. "Multiple scattering and the density
        distribution of a Cs MOT." Optics express 13.24 (2005): 9672-9682.
        http://dx.doi.org/10.1364/OPEX.13.009672

    Returns
    -------
    Q0, Q1, Q2, Q3 : tuple of 2D np.arrays
        shape: (``rows//2+rows%2, cols//2+cols%2``)
        all oriented in the same direction as Q0 if ``reorient=True``

    Notes
    -----
    The symmetry_axis keyword averages quadrants like this: ::

        +--------+--------+
        | Q1   * | *   Q0 |
        |   *    |    *   |
        |  *     |     *  |               cQ1 | cQ0
        +--------o--------+  --(output) -> ----o----
        |  *     |     *  |               cQ2 | cQ3
        |   *    |    *   |
        | Q2  *  | *   Q3 |          cQi == combined quadrants
        +--------+--------+

        symmetry_axis = None - individual quadrants
        symmetry_axis = 0 (vertical) - average Q0+Q1, and Q2+Q3
        symmetry_axis = 1 (horizontal) - average Q1+Q2, and Q0+Q3
        symmetry_axis = (0, 1) (both) - combine and average all 4 quadrants

    The end results look like this: ::

        (0) symmetry_axis = None
            returned image   Q1 | Q0
                             ----o----
                             Q2 | Q3

        (1) symmetry_axis = 0
            Combine:  Q01 = Q0 + Q1, Q23 = Q2 + Q3
            returned image   Q01 | Q01
                             -----o-----
                             Q23 | Q23

        (2) symmetry_axis = 1
            Combine: Q12 = Q1 + Q2, Q03 = Q0 + Q3
            returned image   Q12 | Q03
                             -----o-----
                             Q12 | Q03

        (3) symmetry_axis = (0, 1)
            Combine all quadrants: Q = Q0 + Q1 + Q2 + Q3
            returned image   Q | Q
                             ---o---  all quadrants equivalent
                             Q | Q
    """
    IM = np.atleast_2d(IM)

    if not isinstance(symmetry_axis, (list, tuple)):
        # if the user supplies an int, make it into a 1-element list:
        symmetry_axis = [symmetry_axis]

    # Reject any symmetry_axis/use_quadrants combination that would leave
    # an output quadrant with no contributing data.
    if ((symmetry_axis == [None] and (use_quadrants[0]==False
                                      or use_quadrants[1]==False
                                      or use_quadrants[2]==False
                                      or use_quadrants[3]==False)) or
        # at least one empty
        (symmetry_axis == [0] and use_quadrants[0]==False and
         use_quadrants[1]==False) or # top empty
        (symmetry_axis == [0] and use_quadrants[2]==False and
         use_quadrants[3]==False) or # bot empty
        (symmetry_axis == [1] and use_quadrants[1]==False and
         use_quadrants[2]==False) or # left empty
        (symmetry_axis == [1] and use_quadrants[0]==False and
         use_quadrants[3]==False) # right empty
        or
        not np.any(use_quadrants)
        ):
        raise ValueError('At least one quadrant would be empty.'
                         ' Please check symmetry_axis and use_quadrant'
                         ' values to ensure that all quadrants will have a'
                         ' defined value.')
    n, m = IM.shape

    # odd size increased by 1
    n_c = n // 2 + n % 2
    m_c = m // 2 + m % 2

    # NOTE(review): symmetry_axis is a list here unless the caller passed a
    # tuple, so this guard only fires for tuple input -- confirm intended.
    if isinstance(symmetry_axis, tuple) and not reorient:
        raise ValueError(
            'In order to add quadrants (i.e., to apply horizontal or \
             vertical symmetry), you must reorient the image.')

    if symmetrize_method == "fourier":
        if np.sum(use_quadrants)<4:
            warnings.warn("Using Fourier transformation to symmetrize the"
                          " data will use all 4 quadrants!!")
        # drop imaginary Fourier components along the requested axis; the
        # remaining real part corresponds to a symmetric projection
        if 0 in symmetry_axis:
            IM = fftpack.ifft(fftpack.fft(IM).real).real
        if 1 in symmetry_axis:
            IM = fftpack.ifft(fftpack.fft(IM.T).real).T.real

    # define 4 quadrants of the image
    # see definition above
    Q0 = IM[:n_c, -m_c:]*use_quadrants[0]
    Q1 = IM[:n_c, :m_c]*use_quadrants[1]
    Q2 = IM[-n_c:, :m_c]*use_quadrants[2]
    Q3 = IM[-n_c:, -m_c:]*use_quadrants[3]

    if reorient:
        # flip Q1/Q2/Q3 so every quadrant has Q0's orientation
        Q1 = np.fliplr(Q1)
        Q3 = np.flipud(Q3)
        Q2 = np.fliplr(np.flipud(Q2))

    if symmetrize_method == "fourier":
        return Q0, Q1, Q2, Q3
    elif symmetrize_method == "average":
        # NOTE(review): this equality only matches a *tuple* (0, 1); a list
        # [0, 1] instead takes both single-axis branches below, which is
        # not numerically identical -- confirm whether that is intended.
        if symmetry_axis==(0, 1):
            Q = (Q0 + Q1 + Q2 + Q3)/np.sum(use_quadrants)
            return Q, Q, Q, Q
        if 0 in symmetry_axis:   # vertical axis image symmetry
            Q0 = Q1 = (Q0 + Q1)/(use_quadrants[0] + use_quadrants[1])
            Q2 = Q3 = (Q2 + Q3)/(use_quadrants[2] + use_quadrants[3])
        if 1 in symmetry_axis:   # horizontal axis image symmetry
            Q1 = Q2 = (Q1 + Q2)/(use_quadrants[1] + use_quadrants[2])
            Q0 = Q3 = (Q0 + Q3)/(use_quadrants[0] + use_quadrants[3])
        return Q0, Q1, Q2, Q3
    else:
        raise ValueError("Invalid method for symmetrizing the image!!")
def put_image_quadrants(Q, original_image_shape, symmetry_axis=None):
    """
    Reassemble an image from its four quadrants ``Q = (Q0, Q1, Q2, Q3)``.

    This is the reverse of ``get_image_quadrants(reorient=True)``, so every
    quadrant must be oriented like Q0 (the upper-right quadrant).

    Parameters
    ----------
    Q: tuple of np.array (Q0, Q1, Q2, Q3)
        Image quadrants, all oriented as Q0,
        shape (``rows//2+rows%2, cols//2+cols%2``) ::

            +--------+--------+
            | Q1   * | *   Q0 |
            |   *    |    *   |
            |  *     |     *  |
            +--------o--------+
            |  *     |     *  |
            |   *    |    *   |
            | Q2  *  | *   Q3 |
            +--------+--------+

    original_image_shape: tuple
        (rows, cols) of the full image; for odd axis sizes this removes
        the padding added by ``get_image_quadrants()``: an odd row count
        drops one row from Q0/Q1, an odd column count drops one column
        from Q1/Q2.

    symmetry_axis : int or tuple
        impose image symmetry:
        ``symmetry_axis = 0 (vertical)   - Q0 == Q1 and Q3 == Q2``
        ``symmetry_axis = 1 (horizontal) - Q2 == Q1 and Q3 == Q0``

    Returns
    -------
    IM : np.array
        Reassembled image of shape (rows, cols).
    """
    Q0, Q1, Q2, Q3 = Q

    # normalize a scalar (or None) axis spec to a 1-element list so the
    # membership tests below work uniformly
    if not isinstance(symmetry_axis, (list, tuple)):
        symmetry_axis = [symmetry_axis]

    if 0 in symmetry_axis:
        # vertical mirror: the left quadrants determine the right ones
        Q0, Q3 = Q1, Q2
    if 1 in symmetry_axis:
        # horizontal mirror: the top quadrants determine the bottom ones
        Q2, Q3 = Q1, Q0

    rows, cols = original_image_shape
    if rows % 2:
        # odd row count => the centre row is duplicated; drop one copy
        Q0 = Q0[:-1, :]
        Q1 = Q1[:-1, :]
    if cols % 2:
        # odd column count => the centre column is duplicated; drop one copy
        Q1 = Q1[:, 1:]
        Q2 = Q2[:, 1:]

    top_half = np.concatenate((np.fliplr(Q1), Q0), axis=1)
    bottom_half = np.flipud(np.concatenate((np.fliplr(Q2), Q3), axis=1))
    return np.concatenate((top_half, bottom_half), axis=0)
# def center_image(data, center, n, ndim=2):
# """
# This centers the image at the given center and makes it of size n by n
#
# THIS FUNCTION IS DEPRECATED.
# All centering functions should be moved to abel.tools.center
# """
#
# Nh, Nw = data.shape
# n_2 = n//2
# if ndim == 1:
# cx = int(center)
# im = np.zeros((1, 2*n))
# im[0, n-cx:n-cx+Nw] = data
# im = im[:, n_2:n+n_2]
# # This is really not efficient
# # Processing 2D image with identical rows while we just want a
# # 1D slice
# im = np.repeat(im, n, axis=0)
#
# elif ndim == 2:
# cx, cy = np.asarray(center, dtype='int')
#
# # Make an array of zeros that is large enough for cropping or padding:
# sz = 2*np.round(n + np.max((Nw, Nh)))
# im = np.zeros((sz, sz))
#
# # Set center of "zeros image" to be the data
# im[sz//2-cy:sz//2-cy+Nh, sz//2-cx:sz//2-cx+Nw] = data
#
# # Crop padded image to size n
# # note the n%2 which return the appropriate image size for both
# # odd and even images
# im = im[sz//2-n_2:n_2+sz//2+n % 2, sz//2-n_2:n_2+sz//2+n % 2]
#
# else:
# raise ValueError
#
# return im
#
#
# def center_image_asym(data, center_column, n_vert, n_horz, verbose=False):
# """
# This centers a (rectangular) image at the given center_column
# and makes it of size n_vert by n_horz
#
# THIS FUNCTION IS DEPRECATED.
# All centering functions should be moved to abel.tools.center
# """
#
# if data.ndim > 2:
# raise ValueError("Array to be centered must be 1- or 2-dimensional")
#
# c_im = np.copy(data) # make a copy of the original data for manipulation
# data_vert, data_horz = c_im.shape
# pad_mode = str("constant")
#
# if data_horz % 2 == 0:
# # Add column of zeros to the extreme right
# # to give data array odd columns
# c_im = np.pad(c_im, ((0, 0), (0, 1)), pad_mode, constant_values=0)
# data_vert, data_horz = c_im.shape # update data dimensions
#
# delta_h = int(center_column - data_horz//2)
# if delta_h != 0:
# if delta_h < 0:
# # Specified center is to the left of nominal center
# # Add compensating zeroes on the left edge
# c_im = np.pad(c_im, ((0, 0), (2*np.abs(delta_h), 0)), pad_mode,
# constant_values=0)
# data_vert, data_horz = c_im.shape
# else:
# # Specified center is to the right of nominal center
# # Add compensating zeros on the right edge
# c_im = np.pad(c_im, ((0, 0), (0, 2*delta_h)), pad_mode,
# constant_values=0)
# data_vert, data_horz = c_im.shape
#
# if n_vert >= data_vert and n_horz >= data_horz:
# pad_up = (n_vert - data_vert)//2
# pad_down = n_vert - data_vert - pad_up
# pad_left = (n_horz - data_horz)//2
# pad_right = n_horz - data_horz - pad_left
#
# c_im = np.pad(
# c_im, ((pad_up, pad_down), (pad_left, pad_right)),
# pad_mode, constant_values=0)
#
# elif n_vert >= data_vert and n_horz < data_horz:
# pad_up = (n_vert - data_vert)//2
# pad_down = n_vert - data_vert - pad_up
# crop_left = (data_horz - n_horz)//2
# crop_right = data_horz - n_horz - crop_left
# if verbose:
# print("Warning: cropping %d pixels from the sides \
# of the image" % crop_left)
# c_im = np.pad(
# c_im[:, crop_left:-crop_right], ((pad_up, pad_down), (0, 0)),
# pad_mode, constant_values=0)
#
# elif n_vert < data_vert and n_horz >= data_horz:
# crop_up = (data_vert - n_vert)//2
# crop_down = data_vert - n_vert - crop_up
# pad_left = (n_horz - data_horz)//2
# pad_right = n_horz - data_horz - pad_left
# if verbose:
# print("Warning: cropping %d pixels from top and bottom \
# of the image" % crop_up)
# c_im = np.pad(
# c_im[crop_up:-crop_down], ((0, 0), (pad_left, pad_right)),
# pad_mode, constant_values=0)
#
# elif n_vert < data_vert and n_horz < data_horz:
# crop_up = (data_vert - n_vert)//2
# crop_down = data_vert - n_vert - crop_up
# crop_left = (data_horz - n_horz)//2
# crop_right = data_horz - n_horz - crop_left
# if verbose:
# print("Warning: cropping %d pixels from top and bottom \
# and %d pixels from the sides of the image " % (
# crop_up, crop_left))
# c_im = c_im[crop_up:-crop_down, crop_left:-crop_right]
#
# else:
# raise ValueError('Input data dimensions incompatible \
# with chosen basis set.')
#
# return c_im
|
rth/PyAbel
|
abel/tools/symmetry.py
|
Python
|
mit
| 14,042
|
"""Django app configuration for the Gold Membership app."""
from __future__ import absolute_import
from django.apps import AppConfig
class GoldAppConfig(AppConfig):
    """AppConfig for the Gold (paid membership) app."""

    # Dotted Python path Django uses to register the app.
    name = 'readthedocs.gold'
    # Human-readable label (shown e.g. in the admin).
    verbose_name = 'Gold'

    def ready(self):
        """Hook run once the app registry is fully populated."""
        # Imported for its side effect: registers the app's signal receivers.
        import readthedocs.gold.signals  # noqa
|
safwanrahman/readthedocs.org
|
readthedocs/gold/apps.py
|
Python
|
mit
| 294
|
# -*- coding: UTF-8 -*-
# by Mafarricos
# email: MafaStudios@gmail.com
# This program is free software: GNU General Public License
import os,urllib
import links,search
from resources.libs import basic
def createstrm(name,imdbid,year,url):
    """Write a .strm launcher file for a movie playable via the YIFY addon.

    :param name: movie title, used for searching and the playback label.
    :param imdbid: IMDB identifier forwarded to the search helper.
    :param year: release year (string) used to narrow the search.
    :param url: unused -- it is overwritten by the search result below.
    """
    addon_id = links.link().yify_id
    addon_path = os.path.join(links.link().installfolder,addon_id)
    addon_getsettings = links.link().getSetting("yify_enabled")
    addon_pos = links.link().getSetting("yify_pos")
    addonplay = links.link().yify_play
    # zero-pad the position so generated .strm files sort correctly
    if len(addon_pos) == 1: addon_pos = '0'+addon_pos
    srtmBasePath = links.link().strmPath
    # disable this source for future runs when the addon isn't installed.
    # NOTE(review): addon_getsettings was read *before* this reset, so on
    # this run the stale 'true' still lets the block below execute --
    # confirm that one last write is intended.
    if not os.path.exists(addon_path) and addon_getsettings == 'true': links.link().setSetting("yify_enabled",'false')
    if addon_getsettings == 'true':
        strmPath = os.path.join(srtmBasePath,addon_pos+'.'+addon_id+'.strm')
        # regex template matched against the site's JSON-like search response
        searchresponse = '"title":"%s","link":"(.+?)","post_content":".+?","image":".+?","year":"%s"' % (name,year)
        url = search.basic_search(links.link().yify_search,name,imdbid,year,searchresponse,'Name')
        if url:
            playurl = addonplay % (urllib.quote_plus(name+' ('+year+')'),urllib.quote_plus(url))
            basic.writefile(strmPath,'w',playurl)
|
dannyperry571/theapprentice
|
script.module.addonsresolver/resources/libs/parsers/yify.py
|
Python
|
gpl-2.0
| 1,140
|
# test for xml.dom.minidom
import copy
import pickle
from test import support
import unittest
import xml.dom.minidom
from xml.dom.minidom import parse, Node, Document, parseString
from xml.dom.minidom import getDOMImplementation
tstfile = support.findfile("test.xml", subdir="xmltestdata")
sample = ("<?xml version='1.0' encoding='us-ascii'?>\n"
"<!DOCTYPE doc PUBLIC 'http://xml.python.org/public'"
" 'http://xml.python.org/system' [\n"
" <!ELEMENT e EMPTY>\n"
" <!ENTITY ent SYSTEM 'http://xml.python.org/entity'>\n"
"]><doc attr='value'> text\n"
"<?pi sample?> <!-- comment --> <e/> </doc>")
# The tests of DocumentType importing use these helpers to construct
# the documents to work with, since not all DOM builders actually
# create the DocumentType nodes.
def create_doc_without_doctype(doctype=None):
    """Return a fresh minidom Document with root element ``<doc>``.

    The optional *doctype* is attached as-is; passing ``None`` yields a
    document with no DocumentType node at all.
    """
    impl = getDOMImplementation()
    return impl.createDocument(None, "doc", doctype)
def create_nonempty_doctype():
    """Build a DocumentType carrying one notation and one entity.

    minidom's DOMImplementation offers no public API to populate
    notations/entities, so the private ``_seq`` lists are filled by hand.
    """
    doctype = getDOMImplementation().createDocumentType("doc", None, None)
    doctype.entities._seq = []
    doctype.notations._seq = []

    notation = xml.dom.minidom.Notation(
        "my-notation", None, "http://xml.python.org/notations/my")
    doctype.notations._seq.append(notation)

    entity = xml.dom.minidom.Entity(
        "my-entity", None, "http://xml.python.org/entities/my",
        "my-notation")
    entity.version = "1.0"
    entity.encoding = "utf-8"
    entity.actualEncoding = "us-ascii"
    doctype.entities._seq.append(entity)
    return doctype
def create_doc_with_doctype():
    """Return a document whose DOCTYPE members are re-owned by it.

    The entity and notation built by ``create_nonempty_doctype`` have
    their ``ownerDocument`` pointed at the new document, mirroring what
    a real DOM builder would do.
    """
    dtype = create_nonempty_doctype()
    document = create_doc_without_doctype(dtype)
    dtype.entities.item(0).ownerDocument = document
    dtype.notations.item(0).ownerDocument = document
    return document
class MinidomTest(unittest.TestCase):
def confirm(self, test, testname = "Test"):
self.assertTrue(test, testname)
def checkWholeText(self, node, s):
t = node.wholeText
self.confirm(t == s, "looking for %r, found %r" % (s, t))
    def testDocumentAsyncAttr(self):
        # the legacy `async_` attribute must default to false on instances
        # and on the Document class itself
        doc = Document()
        self.assertFalse(doc.async_)
        self.assertFalse(Document.async_)
    def testParseFromBinaryFile(self):
        # parse() must accept a file object opened in binary mode
        with open(tstfile, 'rb') as file:
            dom = parse(file)
            dom.unlink()
            self.confirm(isinstance(dom, Document))
    def testParseFromTextFile(self):
        # parse() must also accept a text-mode file with an explicit encoding
        with open(tstfile, 'r', encoding='iso-8859-1') as file:
            dom = parse(file)
            dom.unlink()
            self.confirm(isinstance(dom, Document))
    def testGetElementsByTagName(self):
        # the document-level lookup must equal the lookup rooted at the
        # document element
        dom = parse(tstfile)
        self.confirm(dom.getElementsByTagName("LI") == \
                dom.documentElement.getElementsByTagName("LI"))
        dom.unlink()
    def testInsertBefore(self):
        """insertBefore() places nodes correctly whether the reference node
        is an existing child, None (append), or a later insertion point."""
        dom = parseString("<doc><foo/></doc>")
        root = dom.documentElement
        elem = root.childNodes[0]
        nelem = dom.createElement("element")
        # insert ahead of an existing child
        root.insertBefore(nelem, elem)
        self.confirm(len(root.childNodes) == 2
                and root.childNodes.length == 2
                and root.childNodes[0] is nelem
                and root.childNodes.item(0) is nelem
                and root.childNodes[1] is elem
                and root.childNodes.item(1) is elem
                and root.firstChild is nelem
                and root.lastChild is elem
                and root.toxml() == "<doc><element/><foo/></doc>"
                , "testInsertBefore -- node properly placed in tree")
        nelem = dom.createElement("element")
        # refChild=None must behave like appendChild
        root.insertBefore(nelem, None)
        self.confirm(len(root.childNodes) == 3
                and root.childNodes.length == 3
                and root.childNodes[1] is elem
                and root.childNodes.item(1) is elem
                and root.childNodes[2] is nelem
                and root.childNodes.item(2) is nelem
                and root.lastChild is nelem
                and nelem.previousSibling is elem
                and root.toxml() == "<doc><element/><foo/><element/></doc>"
                , "testInsertBefore -- node properly placed in tree")
        nelem2 = dom.createElement("bar")
        # insert between two existing children and check sibling links
        root.insertBefore(nelem2, nelem)
        self.confirm(len(root.childNodes) == 4
                and root.childNodes.length == 4
                and root.childNodes[2] is nelem2
                and root.childNodes.item(2) is nelem2
                and root.childNodes[3] is nelem
                and root.childNodes.item(3) is nelem
                and nelem2.nextSibling is nelem
                and nelem.previousSibling is nelem2
                and root.toxml() ==
                "<doc><element/><foo/><bar/><element/></doc>"
                , "testInsertBefore -- node properly placed in tree")
        dom.unlink()
def _create_fragment_test_nodes(self):
dom = parseString("<doc/>")
orig = dom.createTextNode("original")
c1 = dom.createTextNode("foo")
c2 = dom.createTextNode("bar")
c3 = dom.createTextNode("bat")
dom.documentElement.appendChild(orig)
frag = dom.createDocumentFragment()
frag.appendChild(c1)
frag.appendChild(c2)
frag.appendChild(c3)
return dom, orig, c1, c2, c3, frag
    def testInsertBeforeFragment(self):
        # fragment inserted with refChild=None appends its children in order
        dom, orig, c1, c2, c3, frag = self._create_fragment_test_nodes()
        dom.documentElement.insertBefore(frag, None)
        self.confirm(tuple(dom.documentElement.childNodes) ==
                     (orig, c1, c2, c3),
                     "insertBefore(<fragment>, None)")
        frag.unlink()
        dom.unlink()
        # fragment inserted before an existing child splices in ahead of it
        dom, orig, c1, c2, c3, frag = self._create_fragment_test_nodes()
        dom.documentElement.insertBefore(frag, orig)
        self.confirm(tuple(dom.documentElement.childNodes) ==
                     (c1, c2, c3, orig),
                     "insertBefore(<fragment>, orig)")
        frag.unlink()
        dom.unlink()
    def testAppendChild(self):
        # an appended comment becomes the last child and keeps its data
        dom = parse(tstfile)
        dom.documentElement.appendChild(dom.createComment("Hello"))
        self.confirm(dom.documentElement.childNodes[-1].nodeName == "#comment")
        self.confirm(dom.documentElement.childNodes[-1].data == "Hello")
        dom.unlink()
    def testAppendChildFragment(self):
        """appendChild() with a DocumentFragment appends all of the
        fragment's children in order."""
        dom, orig, c1, c2, c3, frag = self._create_fragment_test_nodes()
        dom.documentElement.appendChild(frag)
        self.confirm(tuple(dom.documentElement.childNodes) ==
                     (orig, c1, c2, c3),
                     "appendChild(<fragment>)")
        frag.unlink()
        dom.unlink()
    def testReplaceChildFragment(self):
        """replaceChild() with a DocumentFragment replaces one child with
        all of the fragment's children."""
        dom, orig, c1, c2, c3, frag = self._create_fragment_test_nodes()
        dom.documentElement.replaceChild(frag, orig)
        orig.unlink()
        self.confirm(tuple(dom.documentElement.childNodes) == (c1, c2, c3),
                     "replaceChild(<fragment>)")
        frag.unlink()
        dom.unlink()
    def testLegalChildren(self):
        """A Document and a NamedNodeMap reject text-node children with
        HierarchyRequestErr; an Element accepts them."""
        dom = Document()
        elem = dom.createElement('element')
        text = dom.createTextNode('text')
        # A text node may not be a direct child of the document.
        self.assertRaises(xml.dom.HierarchyRequestErr, dom.appendChild, text)
        dom.appendChild(elem)
        self.assertRaises(xml.dom.HierarchyRequestErr, dom.insertBefore, text,
                          elem)
        self.assertRaises(xml.dom.HierarchyRequestErr, dom.replaceChild, text,
                          elem)
        # Attribute maps only hold Attr nodes, never text nodes.
        nodemap = elem.attributes
        self.assertRaises(xml.dom.HierarchyRequestErr, nodemap.setNamedItem,
                          text)
        self.assertRaises(xml.dom.HierarchyRequestErr, nodemap.setNamedItemNS,
                          text)
        elem.appendChild(text)
        dom.unlink()
    def testNamedNodeMapSetItem(self):
        """NamedNodeMap.__setitem__ creates an Attr with the right owner
        document, owner element, value and nodeValue."""
        dom = Document()
        elem = dom.createElement('element')
        attrs = elem.attributes
        attrs["foo"] = "bar"
        a = attrs.item(0)
        self.confirm(a.ownerDocument is dom,
                "NamedNodeMap.__setitem__() sets ownerDocument")
        self.confirm(a.ownerElement is elem,
                "NamedNodeMap.__setitem__() sets ownerElement")
        self.confirm(a.value == "bar",
                "NamedNodeMap.__setitem__() sets value")
        self.confirm(a.nodeValue == "bar",
                "NamedNodeMap.__setitem__() sets nodeValue")
        elem.unlink()
        dom.unlink()
    def testNonZero(self):
        """A document is truthy; a childless node is falsy via its
        (empty) childNodes list."""
        dom = parse(tstfile)
        self.confirm(dom)# should not be zero
        dom.appendChild(dom.createComment("foo"))
        self.confirm(not dom.childNodes[-1].childNodes)
        dom.unlink()
    def testUnlink(self):
        """unlink() breaks the internal references, emptying childNodes."""
        dom = parse(tstfile)
        self.assertTrue(dom.childNodes)
        dom.unlink()
        self.assertFalse(dom.childNodes)
    def testContext(self):
        """Using a document as a context manager unlinks it on exit."""
        with parse(tstfile) as dom:
            self.assertTrue(dom.childNodes)
        self.assertFalse(dom.childNodes)
def testElement(self):
dom = Document()
dom.appendChild(dom.createElement("abc"))
self.confirm(dom.documentElement)
dom.unlink()
    def testAAA(self):
        """setAttribute() serializes correctly and the resulting Attr node
        carries the proper ownerDocument/ownerElement."""
        dom = parseString("<abc/>")
        el = dom.documentElement
        el.setAttribute("spam", "jam2")
        self.confirm(el.toxml() == '<abc spam="jam2"/>', "testAAA")
        a = el.getAttributeNode("spam")
        self.confirm(a.ownerDocument is dom,
                     "setAttribute() sets ownerDocument")
        self.confirm(a.ownerElement is dom.documentElement,
                     "setAttribute() sets ownerElement")
        dom.unlink()
def testAAB(self):
dom = parseString("<abc/>")
el = dom.documentElement
el.setAttribute("spam", "jam")
el.setAttribute("spam", "jam2")
self.confirm(el.toxml() == '<abc spam="jam2"/>', "testAAB")
dom.unlink()
    def testAddAttr(self):
        """Adding and overwriting attributes updates getAttribute(),
        the attributes map, and its length consistently."""
        dom = Document()
        child = dom.appendChild(dom.createElement("abc"))
        child.setAttribute("def", "ghi")
        self.confirm(child.getAttribute("def") == "ghi")
        self.confirm(child.attributes["def"].value == "ghi")
        child.setAttribute("jkl", "mno")
        self.confirm(child.getAttribute("jkl") == "mno")
        self.confirm(child.attributes["jkl"].value == "mno")
        self.confirm(len(child.attributes) == 2)
        # Overwriting an existing attribute must not grow the map.
        child.setAttribute("def", "newval")
        self.confirm(child.getAttribute("def") == "newval")
        self.confirm(child.attributes["def"].value == "newval")
        self.confirm(len(child.attributes) == 2)
        dom.unlink()
def testDeleteAttr(self):
dom = Document()
child = dom.appendChild(dom.createElement("abc"))
self.confirm(len(child.attributes) == 0)
child.setAttribute("def", "ghi")
self.confirm(len(child.attributes) == 1)
del child.attributes["def"]
self.confirm(len(child.attributes) == 0)
dom.unlink()
    def testRemoveAttr(self):
        """removeAttribute() deletes an existing attribute and raises
        NotFoundErr for a missing one."""
        dom = Document()
        child = dom.appendChild(dom.createElement("abc"))
        child.setAttribute("def", "ghi")
        self.confirm(len(child.attributes) == 1)
        self.assertRaises(xml.dom.NotFoundErr, child.removeAttribute, "foo")
        child.removeAttribute("def")
        self.confirm(len(child.attributes) == 0)
        dom.unlink()
    def testRemoveAttrNS(self):
        """removeAttributeNS() matches on (namespaceURI, localName) and
        raises NotFoundErr otherwise."""
        dom = Document()
        child = dom.appendChild(
                dom.createElementNS("http://www.python.org", "python:abc"))
        child.setAttributeNS("http://www.w3.org", "xmlns:python",
                                                "http://www.python.org")
        child.setAttributeNS("http://www.python.org", "python:abcattr", "foo")
        # Wrong (namespace, localname) pairing must not match.
        self.assertRaises(xml.dom.NotFoundErr, child.removeAttributeNS,
            "foo", "http://www.python.org")
        self.confirm(len(child.attributes) == 2)
        child.removeAttributeNS("http://www.python.org", "abcattr")
        self.confirm(len(child.attributes) == 1)
        dom.unlink()
    def testRemoveAttributeNode(self):
        """removeAttributeNode() detaches the given Attr node; passing
        None or a node not on the element raises NotFoundErr."""
        dom = Document()
        child = dom.appendChild(dom.createElement("foo"))
        child.setAttribute("spam", "jam")
        self.confirm(len(child.attributes) == 1)
        node = child.getAttributeNode("spam")
        self.assertRaises(xml.dom.NotFoundErr, child.removeAttributeNode,
                          None)
        child.removeAttributeNode(node)
        self.confirm(len(child.attributes) == 0
                and child.getAttributeNode("spam") is None)
        # getAttributeNode() on an attribute-less element returns None,
        # and removing that None must also raise NotFoundErr.
        dom2 = Document()
        child2 = dom2.appendChild(dom2.createElement("foo"))
        node2 = child2.getAttributeNode("spam")
        self.assertRaises(xml.dom.NotFoundErr, child2.removeAttributeNode,
                          node2)
        dom.unlink()
def testHasAttribute(self):
dom = Document()
child = dom.appendChild(dom.createElement("foo"))
child.setAttribute("spam", "jam")
self.confirm(child.hasAttribute("spam"))
    def testChangeAttr(self):
        """Changing an attribute's value via setAttribute() or map
        assignment keeps value/nodeValue/getAttribute() in sync and
        preserves the isId flag set by setIdAttribute()."""
        dom = parseString("<abc/>")
        el = dom.documentElement
        el.setAttribute("spam", "jam")
        self.confirm(len(el.attributes) == 1)
        el.setAttribute("spam", "bam")
        # Set this attribute to be an ID and make sure that doesn't change
        # when changing the value:
        el.setIdAttribute("spam")
        self.confirm(len(el.attributes) == 1
                and el.attributes["spam"].value == "bam"
                and el.attributes["spam"].nodeValue == "bam"
                and el.getAttribute("spam") == "bam"
                and el.getAttributeNode("spam").isId)
        el.attributes["spam"] = "ham"
        self.confirm(len(el.attributes) == 1
                and el.attributes["spam"].value == "ham"
                and el.attributes["spam"].nodeValue == "ham"
                and el.getAttribute("spam") == "ham"
                and el.attributes["spam"].isId)
        # Adding a second attribute must leave the first untouched.
        el.setAttribute("spam2", "bam")
        self.confirm(len(el.attributes) == 2
                and el.attributes["spam"].value == "ham"
                and el.attributes["spam"].nodeValue == "ham"
                and el.getAttribute("spam") == "ham"
                and el.attributes["spam2"].value == "bam"
                and el.attributes["spam2"].nodeValue == "bam"
                and el.getAttribute("spam2") == "bam")
        el.attributes["spam2"] = "bam2"
        self.confirm(len(el.attributes) == 2
                and el.attributes["spam"].value == "ham"
                and el.attributes["spam"].nodeValue == "ham"
                and el.getAttribute("spam") == "ham"
                and el.attributes["spam2"].value == "bam2"
                and el.attributes["spam2"].nodeValue == "bam2"
                and el.getAttribute("spam2") == "bam2")
        dom.unlink()
    # Placeholder tests kept for API coverage; no assertions yet.
    def testGetAttrList(self):
        pass
    def testGetAttrValues(self):
        pass
    def testGetAttrLength(self):
        pass
    def testGetAttribute(self):
        """getAttribute() returns the empty string for a missing name."""
        dom = Document()
        child = dom.appendChild(
            dom.createElementNS("http://www.python.org", "python:abc"))
        self.assertEqual(child.getAttribute('missing'), '')
    def testGetAttributeNS(self):
        """getAttributeNS() returns the attribute value for a matching
        (namespace, localname) pair and '' when there is no match."""
        dom = Document()
        child = dom.appendChild(
                dom.createElementNS("http://www.python.org", "python:abc"))
        child.setAttributeNS("http://www.w3.org", "xmlns:python",
                                                "http://www.python.org")
        self.assertEqual(child.getAttributeNS("http://www.w3.org", "python"),
            'http://www.python.org')
        self.assertEqual(child.getAttributeNS("http://www.w3.org", "other"),
            '')
        # An element with no attributes at all behaves the same way.
        child2 = child.appendChild(dom.createElement('abc'))
        self.assertEqual(child2.getAttributeNS("http://www.python.org", "missing"),
                         '')
def testGetAttributeNode(self): pass
    def testGetElementsByTagNameNS(self):
        """getElementsByTagNameNS() finds a prefixed element and exposes
        consistent namespaceURI/localName/prefix/tagName/nodeName."""
        d="""<foo xmlns:minidom='http://pyxml.sf.net/minidom'>
        <minidom:myelem/>
        </foo>"""
        dom = parseString(d)
        elems = dom.getElementsByTagNameNS("http://pyxml.sf.net/minidom",
                                           "myelem")
        self.confirm(len(elems) == 1
                and elems[0].namespaceURI == "http://pyxml.sf.net/minidom"
                and elems[0].localName == "myelem"
                and elems[0].prefix == "minidom"
                and elems[0].tagName == "minidom:myelem"
                and elems[0].nodeName == "minidom:myelem")
        dom.unlink()
def get_empty_nodelist_from_elements_by_tagName_ns_helper(self, doc, nsuri,
lname):
nodelist = doc.getElementsByTagNameNS(nsuri, lname)
self.confirm(len(nodelist) == 0)
def testGetEmptyNodeListFromElementsByTagNameNS(self):
doc = parseString('<doc/>')
self.get_empty_nodelist_from_elements_by_tagName_ns_helper(
doc, 'http://xml.python.org/namespaces/a', 'localname')
self.get_empty_nodelist_from_elements_by_tagName_ns_helper(
doc, '*', 'splat')
self.get_empty_nodelist_from_elements_by_tagName_ns_helper(
doc, 'http://xml.python.org/namespaces/a', '*')
doc = parseString('<doc xmlns="http://xml.python.org/splat"><e/></doc>')
self.get_empty_nodelist_from_elements_by_tagName_ns_helper(
doc, "http://xml.python.org/splat", "not-there")
self.get_empty_nodelist_from_elements_by_tagName_ns_helper(
doc, "*", "not-there")
self.get_empty_nodelist_from_elements_by_tagName_ns_helper(
doc, "http://somewhere.else.net/not-there", "e")
def testElementReprAndStr(self):
dom = Document()
el = dom.appendChild(dom.createElement("abc"))
string1 = repr(el)
string2 = str(el)
self.confirm(string1 == string2)
dom.unlink()
    def testElementReprAndStrUnicode(self):
        """repr() and str() agree for an element.

        NOTE(review): identical to testElementReprAndStr since str is
        always unicode on Python 3 -- presumably a Python 2 era split.
        """
        dom = Document()
        el = dom.appendChild(dom.createElement("abc"))
        string1 = repr(el)
        string2 = str(el)
        self.confirm(string1 == string2)
        dom.unlink()
    def testElementReprAndStrUnicodeNS(self):
        """repr()/str() agree for a namespaced element and include the
        prefixed tag name."""
        dom = Document()
        el = dom.appendChild(
            dom.createElementNS("http://www.slashdot.org", "slash:abc"))
        string1 = repr(el)
        string2 = str(el)
        self.confirm(string1 == string2)
        self.confirm("slash:abc" in string1)
        dom.unlink()
def testAttributeRepr(self):
dom = Document()
el = dom.appendChild(dom.createElement("abc"))
node = el.setAttribute("abc", "def")
self.confirm(str(node) == repr(node))
dom.unlink()
def testTextNodeRepr(self): pass
def testWriteXML(self):
str = '<?xml version="1.0" ?><a b="c"/>'
dom = parseString(str)
domstr = dom.toxml()
dom.unlink()
self.confirm(str == domstr)
def testAltNewline(self):
str = '<?xml version="1.0" ?>\n<a b="c"/>\n'
dom = parseString(str)
domstr = dom.toprettyxml(newl="\r\n")
dom.unlink()
self.confirm(domstr == str.replace("\n", "\r\n"))
    def test_toprettyxml_with_text_nodes(self):
        """toprettyxml() indents text nodes that have element siblings
        but leaves an only-text child inline (issue #4147)."""
        # see issue #4147, text nodes are not indented
        decl = '<?xml version="1.0" ?>\n'
        self.assertEqual(parseString('<B>A</B>').toprettyxml(),
                         decl + '<B>A</B>\n')
        self.assertEqual(parseString('<C>A<B>A</B></C>').toprettyxml(),
                         decl + '<C>\n\tA\n\t<B>A</B>\n</C>\n')
        self.assertEqual(parseString('<C><B>A</B>A</C>').toprettyxml(),
                         decl + '<C>\n\t<B>A</B>\n\tA\n</C>\n')
        self.assertEqual(parseString('<C><B>A</B><B>A</B></C>').toprettyxml(),
                         decl + '<C>\n\t<B>A</B>\n\t<B>A</B>\n</C>\n')
        self.assertEqual(parseString('<C><B>A</B>A<B>A</B></C>').toprettyxml(),
                         decl + '<C>\n\t<B>A</B>\n\tA\n\t<B>A</B>\n</C>\n')
    def test_toprettyxml_with_adjacent_text_nodes(self):
        """Adjacent (un-normalized) text nodes each get their own
        indented line (issue #4147)."""
        # see issue #4147, adjacent text nodes are indented normally
        dom = Document()
        elem = dom.createElement('elem')
        elem.appendChild(dom.createTextNode('TEXT'))
        elem.appendChild(dom.createTextNode('TEXT'))
        dom.appendChild(elem)
        decl = '<?xml version="1.0" ?>\n'
        self.assertEqual(dom.toprettyxml(),
                         decl + '<elem>\n\tTEXT\n\tTEXT\n</elem>\n')
def test_toprettyxml_preserves_content_of_text_node(self):
# see issue #4147
for str in ('<B>A</B>', '<A><B>C</B></A>'):
dom = parseString(str)
dom2 = parseString(dom.toprettyxml())
self.assertEqual(
dom.getElementsByTagName('B')[0].childNodes[0].toxml(),
dom2.getElementsByTagName('B')[0].childNodes[0].toxml())
    def testProcessingInstruction(self):
        """A parsed PI exposes target/data and the standard leaf-node
        properties (no attributes, no children, no localName)."""
        dom = parseString('<e><?mypi \t\n data \t\n ?></e>')
        pi = dom.documentElement.firstChild
        self.confirm(pi.target == "mypi"
                and pi.data == "data \t\n "
                and pi.nodeName == "mypi"
                and pi.nodeType == Node.PROCESSING_INSTRUCTION_NODE
                and pi.attributes is None
                and not pi.hasChildNodes()
                and len(pi.childNodes) == 0
                and pi.firstChild is None
                and pi.lastChild is None
                and pi.localName is None
                and pi.namespaceURI == xml.dom.EMPTY_NAMESPACE)
def testProcessingInstructionRepr(self): pass
def testTextRepr(self): pass
def testWriteText(self): pass
def testDocumentElement(self): pass
    def testTooManyDocumentElements(self):
        """A Document may hold only one document element; appending a
        second raises HierarchyRequestErr."""
        doc = parseString("<doc/>")
        elem = doc.createElement("extra")
        # Should raise an exception when adding an extra document element.
        self.assertRaises(xml.dom.HierarchyRequestErr, doc.appendChild, elem)
        elem.unlink()
        doc.unlink()
def testCreateElementNS(self): pass
def testCreateAttributeNS(self): pass
def testParse(self): pass
def testParseString(self): pass
def testComment(self): pass
def testAttrListItem(self): pass
def testAttrListItems(self): pass
def testAttrListItemNS(self): pass
def testAttrListKeys(self): pass
def testAttrListKeysNS(self): pass
    def testRemoveNamedItem(self):
        """removeNamedItem() returns the removed Attr node and raises
        NotFoundErr on a second removal."""
        doc = parseString("<doc a=''/>")
        e = doc.documentElement
        attrs = e.attributes
        a1 = e.getAttributeNode("a")
        a2 = attrs.removeNamedItem("a")
        self.confirm(a1.isSameNode(a2))
        self.assertRaises(xml.dom.NotFoundErr, attrs.removeNamedItem, "a")
    def testRemoveNamedItemNS(self):
        """removeNamedItemNS() returns the removed Attr node and raises
        NotFoundErr on a second removal."""
        doc = parseString("<doc xmlns:a='http://xml.python.org/' a:b=''/>")
        e = doc.documentElement
        attrs = e.attributes
        a1 = e.getAttributeNodeNS("http://xml.python.org/", "b")
        a2 = attrs.removeNamedItemNS("http://xml.python.org/", "b")
        self.confirm(a1.isSameNode(a2))
        self.assertRaises(xml.dom.NotFoundErr, attrs.removeNamedItemNS,
                          "http://xml.python.org/", "b")
def testAttrListValues(self): pass
def testAttrListLength(self): pass
def testAttrList__getitem__(self): pass
def testAttrList__setitem__(self): pass
def testSetAttrValueandNodeValue(self): pass
def testParseElement(self): pass
def testParseAttributes(self): pass
def testParseElementNamespaces(self): pass
def testParseAttributeNamespaces(self): pass
def testParseProcessingInstructions(self): pass
def testChildNodes(self): pass
def testFirstChild(self): pass
def testHasChildNodes(self):
dom = parseString("<doc><foo/></doc>")
doc = dom.documentElement
self.assertTrue(doc.hasChildNodes())
dom2 = parseString("<doc/>")
doc2 = dom2.documentElement
self.assertFalse(doc2.hasChildNodes())
    def _testCloneElementCopiesAttributes(self, e1, e2, test):
        """Helper: assert that e2 is a clone of e1 attribute-wise --
        same keys, equal but distinct Attr nodes, and e2 owns its copies."""
        attrs1 = e1.attributes
        attrs2 = e2.attributes
        keys1 = list(attrs1.keys())
        keys2 = list(attrs2.keys())
        keys1.sort()
        keys2.sort()
        self.confirm(keys1 == keys2, "clone of element has same attribute keys")
        for i in range(len(keys1)):
            a1 = attrs1.item(i)
            a2 = attrs2.item(i)
            self.confirm(a1 is not a2
                    and a1.value == a2.value
                    and a1.nodeValue == a2.nodeValue
                    and a1.namespaceURI == a2.namespaceURI
                    and a1.localName == a2.localName
                    , "clone of attribute node has proper attribute values")
            self.confirm(a2.ownerElement is e2,
                    "clone of attribute node correctly owned")
    def _setupCloneElement(self, deep):
        """Helper: clone the document element (shallow or deep), check the
        attribute copy, then mutate the original so any accidental data
        sharing between original and clone shows up in later assertions."""
        dom = parseString("<doc attr='value'><foo/></doc>")
        root = dom.documentElement
        clone = root.cloneNode(deep)
        self._testCloneElementCopiesAttributes(
            root, clone, "testCloneElement" + (deep and "Deep" or "Shallow"))
        # mutilate the original so shared data is detected
        root.tagName = root.nodeName = "MODIFIED"
        root.setAttribute("attr", "NEW VALUE")
        root.setAttribute("added", "VALUE")
        return dom, clone
    def testCloneElementShallow(self):
        """A shallow element clone copies attributes but no children and
        has no parent."""
        dom, clone = self._setupCloneElement(0)
        self.confirm(len(clone.childNodes) == 0
                and clone.childNodes.length == 0
                and clone.parentNode is None
                and clone.toxml() == '<doc attr="value"/>'
                , "testCloneElementShallow")
        dom.unlink()
    def testCloneElementDeep(self):
        """A deep element clone copies attributes and children and has
        no parent."""
        dom, clone = self._setupCloneElement(1)
        self.confirm(len(clone.childNodes) == 1
                and clone.childNodes.length == 1
                and clone.parentNode is None
                and clone.toxml() == '<doc attr="value"><foo/></doc>'
                , "testCloneElementDeep")
        dom.unlink()
    def testCloneDocumentShallow(self):
        """Shallow-cloning a Document returns None (minidom refuses)."""
        doc = parseString("<?xml version='1.0'?>\n"
                    "<!-- comment -->"
                    "<!DOCTYPE doc [\n"
                    "<!NOTATION notation SYSTEM 'http://xml.python.org/'>\n"
                    "]>\n"
                    "<doc attr='value'/>")
        doc2 = doc.cloneNode(0)
        self.confirm(doc2 is None,
                "testCloneDocumentShallow:"
                " shallow cloning of documents makes no sense!")
    def testCloneDocumentDeep(self):
        """Deep-cloning a Document yields a distinct document whose
        children (including the doctype, if kept) are owned by the clone
        and not shared with the original."""
        doc = parseString("<?xml version='1.0'?>\n"
                    "<!-- comment -->"
                    "<!DOCTYPE doc [\n"
                    "<!NOTATION notation SYSTEM 'http://xml.python.org/'>\n"
                    "]>\n"
                    "<doc attr='value'/>")
        doc2 = doc.cloneNode(1)
        self.confirm(not (doc.isSameNode(doc2) or doc2.isSameNode(doc)),
                "testCloneDocumentDeep: document objects not distinct")
        self.confirm(len(doc.childNodes) == len(doc2.childNodes),
                "testCloneDocumentDeep: wrong number of Document children")
        self.confirm(doc2.documentElement.nodeType == Node.ELEMENT_NODE,
                "testCloneDocumentDeep: documentElement not an ELEMENT_NODE")
        self.confirm(doc2.documentElement.ownerDocument.isSameNode(doc2),
            "testCloneDocumentDeep: documentElement owner is not new document")
        self.confirm(not doc.documentElement.isSameNode(doc2.documentElement),
                "testCloneDocumentDeep: documentElement should not be shared")
        if doc.doctype is not None:
            # check the doctype iff the original DOM maintained it
            self.confirm(doc2.doctype.nodeType == Node.DOCUMENT_TYPE_NODE,
                    "testCloneDocumentDeep: doctype not a DOCUMENT_TYPE_NODE")
            self.confirm(doc2.doctype.ownerDocument.isSameNode(doc2))
            self.confirm(not doc.doctype.isSameNode(doc2.doctype))
    def testCloneDocumentTypeDeepOk(self):
        """Deep-cloning an unattached DocumentType copies its identity
        fields plus distinct copies of every entity and notation."""
        doctype = create_nonempty_doctype()
        clone = doctype.cloneNode(1)
        self.confirm(clone is not None
                and clone.nodeName == doctype.nodeName
                and clone.name == doctype.name
                and clone.publicId == doctype.publicId
                and clone.systemId == doctype.systemId
                and len(clone.entities) == len(doctype.entities)
                and clone.entities.item(len(clone.entities)) is None
                and len(clone.notations) == len(doctype.notations)
                and clone.notations.item(len(clone.notations)) is None
                and len(clone.childNodes) == 0)
        # Entities must be copies (distinct nodes) with equal metadata.
        for i in range(len(doctype.entities)):
            se = doctype.entities.item(i)
            ce = clone.entities.item(i)
            self.confirm((not se.isSameNode(ce))
                    and (not ce.isSameNode(se))
                    and ce.nodeName == se.nodeName
                    and ce.notationName == se.notationName
                    and ce.publicId == se.publicId
                    and ce.systemId == se.systemId
                    and ce.encoding == se.encoding
                    and ce.actualEncoding == se.actualEncoding
                    and ce.version == se.version)
        # Same for notations.
        for i in range(len(doctype.notations)):
            sn = doctype.notations.item(i)
            cn = clone.notations.item(i)
            self.confirm((not sn.isSameNode(cn))
                    and (not cn.isSameNode(sn))
                    and cn.nodeName == sn.nodeName
                    and cn.publicId == sn.publicId
                    and cn.systemId == sn.systemId)
def testCloneDocumentTypeDeepNotOk(self):
doc = create_doc_with_doctype()
clone = doc.doctype.cloneNode(1)
self.confirm(clone is None, "testCloneDocumentTypeDeepNotOk")
    def testCloneDocumentTypeShallowOk(self):
        """Shallow-cloning an unattached DocumentType copies identity
        fields but leaves entities/notations empty."""
        doctype = create_nonempty_doctype()
        clone = doctype.cloneNode(0)
        self.confirm(clone is not None
                and clone.nodeName == doctype.nodeName
                and clone.name == doctype.name
                and clone.publicId == doctype.publicId
                and clone.systemId == doctype.systemId
                and len(clone.entities) == 0
                and clone.entities.item(0) is None
                and len(clone.notations) == 0
                and clone.notations.item(0) is None
                and len(clone.childNodes) == 0)
def testCloneDocumentTypeShallowNotOk(self):
doc = create_doc_with_doctype()
clone = doc.doctype.cloneNode(0)
self.confirm(clone is None, "testCloneDocumentTypeShallowNotOk")
    def check_import_document(self, deep, testName):
        """Helper: importNode() of a whole Document raises NotSupportedErr
        regardless of *deep*.  (*testName* is currently unused.)"""
        doc1 = parseString("<doc/>")
        doc2 = parseString("<doc/>")
        self.assertRaises(xml.dom.NotSupportedErr, doc1.importNode, doc2, deep)
    # Importing a Document is unsupported for both shallow and deep.
    def testImportDocumentShallow(self):
        self.check_import_document(0, "testImportDocumentShallow")
    def testImportDocumentDeep(self):
        self.check_import_document(1, "testImportDocumentDeep")
    # Importing a DocumentType is likewise unsupported, shallow or deep.
    def testImportDocumentTypeShallow(self):
        src = create_doc_with_doctype()
        target = create_doc_without_doctype()
        self.assertRaises(xml.dom.NotSupportedErr, target.importNode,
                          src.doctype, 0)
    def testImportDocumentTypeDeep(self):
        src = create_doc_with_doctype()
        target = create_doc_without_doctype()
        self.assertRaises(xml.dom.NotSupportedErr, target.importNode,
                          src.doctype, 1)
    # Testing attribute clones uses a helper, and should always be deep,
    # even if the argument to cloneNode is false.
    def check_clone_attribute(self, deep, testName):
        """Helper: a cloned Attr is a distinct node, owned by no element
        but by the same document, with specified == True."""
        doc = parseString("<doc attr='value'/>")
        attr = doc.documentElement.getAttributeNode("attr")
        self.assertNotEqual(attr, None)
        clone = attr.cloneNode(deep)
        self.confirm(not clone.isSameNode(attr))
        self.confirm(not attr.isSameNode(clone))
        self.confirm(clone.ownerElement is None,
                testName + ": ownerElement should be None")
        self.confirm(clone.ownerDocument.isSameNode(attr.ownerDocument),
                testName + ": ownerDocument does not match")
        self.confirm(clone.specified,
                testName + ": cloned attribute must have specified == True")
    # Attribute cloning behaves the same for shallow and deep.
    def testCloneAttributeShallow(self):
        self.check_clone_attribute(0, "testCloneAttributeShallow")
    def testCloneAttributeDeep(self):
        self.check_clone_attribute(1, "testCloneAttributeDeep")
    def check_clone_pi(self, deep, testName):
        """Helper: a cloned processing instruction keeps target and data.
        (*testName* is currently unused.)"""
        doc = parseString("<?target data?><doc/>")
        pi = doc.firstChild
        self.assertEqual(pi.nodeType, Node.PROCESSING_INSTRUCTION_NODE)
        clone = pi.cloneNode(deep)
        self.confirm(clone.target == pi.target
                and clone.data == pi.data)
    # PI cloning behaves the same for shallow and deep.
    def testClonePIShallow(self):
        self.check_clone_pi(0, "testClonePIShallow")
    def testClonePIDeep(self):
        self.check_clone_pi(1, "testClonePIDeep")
    def check_clone_node_entity(self, clone_document):
        """Helper for bpo-35052: cloning a document (or its detached
        doctype) must invoke a user data handler registered on an entity,
        with NODE_IMPORTED (document clone) or NODE_CLONED (doctype
        clone) as the operation."""
        # bpo-35052: Test user data handler in cloneNode() on a document with
        # an entity
        document = xml.dom.minidom.parseString("""
            <?xml version="1.0" ?>
            <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
                "http://www.w3.org/TR/html4/strict.dtd"
                [ <!ENTITY smile "☺"> ]
            >
            <doc>Don't let entities make you frown ⌣</doc>
        """.strip())

        class Handler:
            # Records the arguments of the last handle() call for the
            # assertions below.
            def handle(self, operation, key, data, src, dst):
                self.operation = operation
                self.key = key
                self.data = data
                self.src = src
                self.dst = dst

        handler = Handler()
        doctype = document.doctype
        entity = doctype.entities['smile']
        entity.setUserData("key", "data", handler)

        if clone_document:
            # clone Document
            clone = document.cloneNode(deep=True)

            self.assertEqual(clone.documentElement.firstChild.wholeText,
                             "Don't let entities make you frown ☺")
            operation = xml.dom.UserDataHandler.NODE_IMPORTED
            dst = clone.doctype.entities['smile']
        else:
            # clone DocumentType
            with support.swap_attr(doctype, 'ownerDocument', None):
                clone = doctype.cloneNode(deep=True)

            operation = xml.dom.UserDataHandler.NODE_CLONED
            dst = clone.entities['smile']

        self.assertEqual(handler.operation, operation)
        self.assertEqual(handler.key, "key")
        self.assertEqual(handler.data, "data")
        self.assertIs(handler.src, entity)
        self.assertIs(handler.dst, dst)
def testCloneNodeEntity(self):
self.check_clone_node_entity(False)
self.check_clone_node_entity(True)
    def testNormalize(self):
        """normalize() merges adjacent text nodes and removes empty
        text nodes."""
        doc = parseString("<doc/>")
        root = doc.documentElement
        root.appendChild(doc.createTextNode("first"))
        root.appendChild(doc.createTextNode("second"))
        self.confirm(len(root.childNodes) == 2
                and root.childNodes.length == 2,
                "testNormalize -- preparation")
        doc.normalize()
        self.confirm(len(root.childNodes) == 1
                and root.childNodes.length == 1
                and root.firstChild is root.lastChild
                and root.firstChild.data == "firstsecond"
                , "testNormalize -- result")
        doc.unlink()

        doc = parseString("<doc/>")
        root = doc.documentElement
        root.appendChild(doc.createTextNode(""))
        doc.normalize()
        self.confirm(len(root.childNodes) == 0
                and root.childNodes.length == 0,
                "testNormalize -- single empty node removed")
        doc.unlink()
    def testNormalizeCombineAndNextSibling(self):
        """normalize() merging text before an element keeps the sibling
        links between the merged text and that element correct."""
        doc = parseString("<doc/>")
        root = doc.documentElement
        root.appendChild(doc.createTextNode("first"))
        root.appendChild(doc.createTextNode("second"))
        root.appendChild(doc.createElement("i"))
        self.confirm(len(root.childNodes) == 3
                and root.childNodes.length == 3,
                "testNormalizeCombineAndNextSibling -- preparation")
        doc.normalize()
        self.confirm(len(root.childNodes) == 2
                and root.childNodes.length == 2
                and root.firstChild.data == "firstsecond"
                and root.firstChild is not root.lastChild
                and root.firstChild.nextSibling is root.lastChild
                and root.firstChild.previousSibling is None
                and root.lastChild.previousSibling is root.firstChild
                and root.lastChild.nextSibling is None
                , "testNormalizeCombinedAndNextSibling -- result")
        doc.unlink()
    def testNormalizeDeleteWithPrevSibling(self):
        """normalize() removing an empty text node fixes up the sibling
        links of the text node before it."""
        doc = parseString("<doc/>")
        root = doc.documentElement
        root.appendChild(doc.createTextNode("first"))
        root.appendChild(doc.createTextNode(""))
        self.confirm(len(root.childNodes) == 2
                and root.childNodes.length == 2,
                "testNormalizeDeleteWithPrevSibling -- preparation")
        doc.normalize()
        self.confirm(len(root.childNodes) == 1
                and root.childNodes.length == 1
                and root.firstChild.data == "first"
                and root.firstChild is root.lastChild
                and root.firstChild.nextSibling is None
                and root.firstChild.previousSibling is None
                , "testNormalizeDeleteWithPrevSibling -- result")
        doc.unlink()
    def testNormalizeDeleteWithNextSibling(self):
        """normalize() removing an empty text node fixes up the sibling
        links of the text node after it."""
        doc = parseString("<doc/>")
        root = doc.documentElement
        root.appendChild(doc.createTextNode(""))
        root.appendChild(doc.createTextNode("second"))
        self.confirm(len(root.childNodes) == 2
                and root.childNodes.length == 2,
                "testNormalizeDeleteWithNextSibling -- preparation")
        doc.normalize()
        self.confirm(len(root.childNodes) == 1
                and root.childNodes.length == 1
                and root.firstChild.data == "second"
                and root.firstChild is root.lastChild
                and root.firstChild.nextSibling is None
                and root.firstChild.previousSibling is None
                , "testNormalizeDeleteWithNextSibling -- result")
        doc.unlink()
    def testNormalizeDeleteWithTwoNonTextSiblings(self):
        """normalize() removing an empty text node between two elements
        links those elements together as siblings."""
        doc = parseString("<doc/>")
        root = doc.documentElement
        root.appendChild(doc.createElement("i"))
        root.appendChild(doc.createTextNode(""))
        root.appendChild(doc.createElement("i"))
        self.confirm(len(root.childNodes) == 3
                and root.childNodes.length == 3,
                "testNormalizeDeleteWithTwoSiblings -- preparation")
        doc.normalize()
        self.confirm(len(root.childNodes) == 2
                and root.childNodes.length == 2
                and root.firstChild is not root.lastChild
                and root.firstChild.nextSibling is root.lastChild
                and root.firstChild.previousSibling is None
                and root.lastChild.previousSibling is root.firstChild
                and root.lastChild.nextSibling is None
                , "testNormalizeDeleteWithTwoSiblings -- result")
        doc.unlink()
    def testNormalizeDeleteAndCombine(self):
        """normalize() both drops the empty text nodes and merges the
        non-empty ones into a single node."""
        doc = parseString("<doc/>")
        root = doc.documentElement
        root.appendChild(doc.createTextNode(""))
        root.appendChild(doc.createTextNode("second"))
        root.appendChild(doc.createTextNode(""))
        root.appendChild(doc.createTextNode("fourth"))
        root.appendChild(doc.createTextNode(""))
        self.confirm(len(root.childNodes) == 5
                and root.childNodes.length == 5,
                "testNormalizeDeleteAndCombine -- preparation")
        doc.normalize()
        self.confirm(len(root.childNodes) == 1
                and root.childNodes.length == 1
                and root.firstChild is root.lastChild
                and root.firstChild.data == "secondfourth"
                and root.firstChild.previousSibling is None
                and root.firstChild.nextSibling is None
                , "testNormalizeDeleteAndCombine -- result")
        doc.unlink()
    def testNormalizeRecursion(self):
        """normalize() recurses into nested elements, merging and
        deleting text nodes at every level and fixing sibling links.

        The inline comments below mark where extra text nodes ("", "x",
        "x2", "x3") are appended programmatically before normalizing.
        """
        doc = parseString("<doc>"
                            "<o>"
                              "<i/>"
                              "t"
                              # empty text node appended here
                              # "x" appended here
                            "</o>"
                            "<o>"
                              "<o>"
                                "t2"
                                # "x2" appended here
                              "</o>"
                              "t3"
                              # "x3" appended here
                            "</o>"
                            # empty text node appended here
                          "</doc>")
        root = doc.documentElement
        root.childNodes[0].appendChild(doc.createTextNode(""))
        root.childNodes[0].appendChild(doc.createTextNode("x"))
        root.childNodes[1].childNodes[0].appendChild(doc.createTextNode("x2"))
        root.childNodes[1].appendChild(doc.createTextNode("x3"))
        root.appendChild(doc.createTextNode(""))
        self.confirm(len(root.childNodes) == 3
                and root.childNodes.length == 3
                and len(root.childNodes[0].childNodes) == 4
                and root.childNodes[0].childNodes.length == 4
                and len(root.childNodes[1].childNodes) == 3
                and root.childNodes[1].childNodes.length == 3
                and len(root.childNodes[1].childNodes[0].childNodes) == 2
                and root.childNodes[1].childNodes[0].childNodes.length == 2
                , "testNormalize2 -- preparation")
        doc.normalize()
        self.confirm(len(root.childNodes) == 2
                and root.childNodes.length == 2
                and len(root.childNodes[0].childNodes) == 2
                and root.childNodes[0].childNodes.length == 2
                and len(root.childNodes[1].childNodes) == 2
                and root.childNodes[1].childNodes.length == 2
                and len(root.childNodes[1].childNodes[0].childNodes) == 1
                and root.childNodes[1].childNodes[0].childNodes.length == 1
                , "testNormalize2 -- childNodes lengths")
        self.confirm(root.childNodes[0].childNodes[1].data == "tx"
                and root.childNodes[1].childNodes[0].childNodes[0].data == "t2x2"
                and root.childNodes[1].childNodes[1].data == "t3x3"
                , "testNormalize2 -- joined text fields")
        self.confirm(root.childNodes[0].childNodes[1].nextSibling is None
                and root.childNodes[0].childNodes[1].previousSibling
                        is root.childNodes[0].childNodes[0]
                and root.childNodes[0].childNodes[0].previousSibling is None
                and root.childNodes[0].childNodes[0].nextSibling
                        is root.childNodes[0].childNodes[1]
                and root.childNodes[1].childNodes[1].nextSibling is None
                and root.childNodes[1].childNodes[1].previousSibling
                        is root.childNodes[1].childNodes[0]
                and root.childNodes[1].childNodes[0].previousSibling is None
                and root.childNodes[1].childNodes[0].nextSibling
                        is root.childNodes[1].childNodes[1]
                , "testNormalize2 -- sibling pointers")
        doc.unlink()
    def testBug0777884(self):
        """Text.normalize() on a lone text node must be a silent no-op
        (bug 777884)."""
        doc = parseString("<o>text</o>")
        text = doc.documentElement.childNodes[0]
        self.assertEqual(text.nodeType, Node.TEXT_NODE)
        # Should run quietly, doing nothing.
        text.normalize()
        doc.unlink()
    def testBug1433694(self):
        """normalize() removing the last child must clear the previous
        sibling's nextSibling link (bug 1433694)."""
        doc = parseString("<o><i/>t</o>")
        node = doc.documentElement
        node.childNodes[1].nodeValue = ""
        node.normalize()
        self.confirm(node.childNodes[-1].nextSibling is None,
                     "Final child's .nextSibling should be None")
    def testSiblings(self):
        """nextSibling/previousSibling chain correctly across mixed
        PI, text and element children."""
        doc = parseString("<doc><?pi?>text?<elm/></doc>")
        root = doc.documentElement
        (pi, text, elm) = root.childNodes
        self.confirm(pi.nextSibling is text and
                pi.previousSibling is None and
                text.nextSibling is elm and
                text.previousSibling is pi and
                elm.nextSibling is None and
                elm.previousSibling is text, "testSiblings")
        doc.unlink()
    def testParents(self):
        """parentNode points to the correct parent at every level of a
        nested tree."""
        doc = parseString(
            "<doc><elm1><elm2/><elm2><elm3/></elm2></elm1></doc>")
        root = doc.documentElement
        elm1 = root.childNodes[0]
        (elm2a, elm2b) = elm1.childNodes
        elm3 = elm2b.childNodes[0]
        self.confirm(root.parentNode is doc and
                elm1.parentNode is root and
                elm2a.parentNode is elm1 and
                elm2b.parentNode is elm1 and
                elm3.parentNode is elm2b, "testParents")
        doc.unlink()
    def testNodeListItem(self):
        """NodeList.item(i) mirrors indexing and returns None when out
        of range."""
        doc = parseString("<doc><e/><e/></doc>")
        children = doc.childNodes
        docelem = children[0]
        self.confirm(children[0] is children.item(0)
                and children.item(1) is None
                and docelem.childNodes.item(0) is docelem.childNodes[0]
                and docelem.childNodes.item(1) is docelem.childNodes[1]
                and docelem.childNodes.item(0).childNodes.item(0) is None,
                "test NodeList.item()")
        doc.unlink()
    def testEncodings(self):
        """toxml(encoding) returns bytes in that encoding (with
        character references where the codepoint is unrepresentable),
        and undecodable input raises UnicodeDecodeError."""
        doc = parseString('<foo>&#x20ac;</foo>')
        self.assertEqual(doc.toxml(),
                         '<?xml version="1.0" ?><foo>\u20ac</foo>')
        self.assertEqual(doc.toxml('utf-8'),
            b'<?xml version="1.0" encoding="utf-8"?><foo>\xe2\x82\xac</foo>')
        self.assertEqual(doc.toxml('iso-8859-15'),
            b'<?xml version="1.0" encoding="iso-8859-15"?><foo>\xa4</foo>')
        self.assertEqual(doc.toxml('us-ascii'),
            b'<?xml version="1.0" encoding="us-ascii"?><foo>&#8364;</foo>')
        self.assertEqual(doc.toxml('utf-16'),
            '<?xml version="1.0" encoding="utf-16"?>'
            '<foo>\u20ac</foo>'.encode('utf-16'))

        # Verify that character decoding errors raise exceptions instead
        # of crashing
        self.assertRaises(UnicodeDecodeError, parseString,
                b'<fran\xe7ais>Comment \xe7a va ? Tr\xe8s bien ?</fran\xe7ais>')

        doc.unlink()
    class UserDataHandler:
        """Test handler: on clone, stores data + 1 on the destination
        node, clears the key on the source, and records that it ran."""
        called = 0
        def handle(self, operation, key, data, src, dst):
            dst.setUserData(key, data + 1, self)
            src.setUserData(key, None, None)
            self.called = 1
    def testUserData(self):
        """setUserData/getUserData round-trip values, clear on None, and
        invoke the registered handler during cloneNode()."""
        dom = Document()
        n = dom.createElement('e')
        self.confirm(n.getUserData("foo") is None)
        n.setUserData("foo", None, None)
        self.confirm(n.getUserData("foo") is None)
        n.setUserData("foo", 12, 12)
        n.setUserData("bar", 13, 13)
        self.confirm(n.getUserData("foo") == 12)
        self.confirm(n.getUserData("bar") == 13)
        # Storing None removes the entry without touching other keys.
        n.setUserData("foo", None, None)
        self.confirm(n.getUserData("foo") is None)
        self.confirm(n.getUserData("bar") == 13)

        handler = self.UserDataHandler()
        n.setUserData("bar", 12, handler)
        c = n.cloneNode(1)
        # Handler clears "bar" on the source and stores 12 + 1 on the clone.
        self.confirm(handler.called
                and n.getUserData("bar") is None
                and c.getUserData("bar") == 13)
        n.unlink()
        c.unlink()
        dom.unlink()
def checkRenameNodeSharedConstraints(self, doc, node):
# Make sure illegal NS usage is detected:
self.assertRaises(xml.dom.NamespaceErr, doc.renameNode, node,
"http://xml.python.org/ns", "xmlns:foo")
doc2 = parseString("<doc/>")
self.assertRaises(xml.dom.WrongDocumentErr, doc2.renameNode, node,
xml.dom.EMPTY_NAMESPACE, "foo")
    def testRenameAttribute(self):
        """renameNode() on an attribute: each rename must update name,
        localName, namespaceURI and prefix, keep the value, and keep the
        element's attribute map consistent (old names gone, new name
        mapped to the same node)."""
        doc = parseString("<doc a='v'/>")
        elem = doc.documentElement
        attrmap = elem.attributes
        attr = elem.attributes['a']
        # Simple renaming
        attr = doc.renameNode(attr, xml.dom.EMPTY_NAMESPACE, "b")
        self.confirm(attr.name == "b"
                and attr.nodeName == "b"
                and attr.localName is None
                and attr.namespaceURI == xml.dom.EMPTY_NAMESPACE
                and attr.prefix is None
                and attr.value == "v"
                and elem.getAttributeNode("a") is None
                and elem.getAttributeNode("b").isSameNode(attr)
                and attrmap["b"].isSameNode(attr)
                and attr.ownerDocument.isSameNode(doc)
                and attr.ownerElement.isSameNode(elem))
        # Rename to have a namespace, no prefix
        attr = doc.renameNode(attr, "http://xml.python.org/ns", "c")
        self.confirm(attr.name == "c"
                and attr.nodeName == "c"
                and attr.localName == "c"
                and attr.namespaceURI == "http://xml.python.org/ns"
                and attr.prefix is None
                and attr.value == "v"
                and elem.getAttributeNode("a") is None
                and elem.getAttributeNode("b") is None
                and elem.getAttributeNode("c").isSameNode(attr)
                and elem.getAttributeNodeNS(
                    "http://xml.python.org/ns", "c").isSameNode(attr)
                and attrmap["c"].isSameNode(attr)
                and attrmap[("http://xml.python.org/ns", "c")].isSameNode(attr))
        # Rename to have a namespace, with prefix
        attr = doc.renameNode(attr, "http://xml.python.org/ns2", "p:d")
        self.confirm(attr.name == "p:d"
                and attr.nodeName == "p:d"
                and attr.localName == "d"
                and attr.namespaceURI == "http://xml.python.org/ns2"
                and attr.prefix == "p"
                and attr.value == "v"
                and elem.getAttributeNode("a") is None
                and elem.getAttributeNode("b") is None
                and elem.getAttributeNode("c") is None
                and elem.getAttributeNodeNS(
                    "http://xml.python.org/ns", "c") is None
                and elem.getAttributeNode("p:d").isSameNode(attr)
                and elem.getAttributeNodeNS(
                    "http://xml.python.org/ns2", "d").isSameNode(attr)
                and attrmap["p:d"].isSameNode(attr)
                and attrmap[("http://xml.python.org/ns2", "d")].isSameNode(attr))
        # Rename back to a simple non-NS node
        attr = doc.renameNode(attr, xml.dom.EMPTY_NAMESPACE, "e")
        self.confirm(attr.name == "e"
                and attr.nodeName == "e"
                and attr.localName is None
                and attr.namespaceURI == xml.dom.EMPTY_NAMESPACE
                and attr.prefix is None
                and attr.value == "v"
                and elem.getAttributeNode("a") is None
                and elem.getAttributeNode("b") is None
                and elem.getAttributeNode("c") is None
                and elem.getAttributeNode("p:d") is None
                and elem.getAttributeNodeNS(
                    "http://xml.python.org/ns", "c") is None
                and elem.getAttributeNode("e").isSameNode(attr)
                and attrmap["e"].isSameNode(attr))
        # Renaming an attribute onto the "xmlns" name is illegal.
        self.assertRaises(xml.dom.NamespaceErr, doc.renameNode, attr,
                          "http://xml.python.org/ns", "xmlns")
        self.checkRenameNodeSharedConstraints(doc, attr)
        doc.unlink()
    def testRenameElement(self):
        """renameNode() on an element: tagName/nodeName/localName/
        namespaceURI/prefix must track each rename; ownership must stay
        with the original document."""
        doc = parseString("<doc/>")
        elem = doc.documentElement
        # Simple renaming
        elem = doc.renameNode(elem, xml.dom.EMPTY_NAMESPACE, "a")
        self.confirm(elem.tagName == "a"
                and elem.nodeName == "a"
                and elem.localName is None
                and elem.namespaceURI == xml.dom.EMPTY_NAMESPACE
                and elem.prefix is None
                and elem.ownerDocument.isSameNode(doc))
        # Rename to have a namespace, no prefix
        elem = doc.renameNode(elem, "http://xml.python.org/ns", "b")
        self.confirm(elem.tagName == "b"
                and elem.nodeName == "b"
                and elem.localName == "b"
                and elem.namespaceURI == "http://xml.python.org/ns"
                and elem.prefix is None
                and elem.ownerDocument.isSameNode(doc))
        # Rename to have a namespace, with prefix
        elem = doc.renameNode(elem, "http://xml.python.org/ns2", "p:c")
        self.confirm(elem.tagName == "p:c"
                and elem.nodeName == "p:c"
                and elem.localName == "c"
                and elem.namespaceURI == "http://xml.python.org/ns2"
                and elem.prefix == "p"
                and elem.ownerDocument.isSameNode(doc))
        # Rename back to a simple non-NS node
        elem = doc.renameNode(elem, xml.dom.EMPTY_NAMESPACE, "d")
        self.confirm(elem.tagName == "d"
                and elem.nodeName == "d"
                and elem.localName is None
                and elem.namespaceURI == xml.dom.EMPTY_NAMESPACE
                and elem.prefix is None
                and elem.ownerDocument.isSameNode(doc))
        self.checkRenameNodeSharedConstraints(doc, elem)
        doc.unlink()
def testRenameOther(self):
# We have to create a comment node explicitly since not all DOM
# builders used with minidom add comments to the DOM.
doc = xml.dom.minidom.getDOMImplementation().createDocument(
xml.dom.EMPTY_NAMESPACE, "e", None)
node = doc.createComment("comment")
self.assertRaises(xml.dom.NotSupportedErr, doc.renameNode, node,
xml.dom.EMPTY_NAMESPACE, "foo")
doc.unlink()
    def testWholeText(self):
        """Text.wholeText spans adjacent text/CDATA siblings but stops at
        any non-text node (comment, element, PI)."""
        doc = parseString("<doc>a</doc>")
        elem = doc.documentElement
        text = elem.childNodes[0]
        self.assertEqual(text.nodeType, Node.TEXT_NODE)
        self.checkWholeText(text, "a")
        # Adjacent text and CDATA nodes are merged into wholeText.
        elem.appendChild(doc.createTextNode("b"))
        self.checkWholeText(text, "ab")
        elem.insertBefore(doc.createCDATASection("c"), text)
        self.checkWholeText(text, "cab")
        # make sure we don't cross other nodes
        splitter = doc.createComment("comment")
        elem.appendChild(splitter)
        text2 = doc.createTextNode("d")
        elem.appendChild(text2)
        self.checkWholeText(text, "cab")
        self.checkWholeText(text2, "d")
        # An element splits the runs just like a comment does...
        x = doc.createElement("x")
        elem.replaceChild(x, splitter)
        splitter = x
        self.checkWholeText(text, "cab")
        self.checkWholeText(text2, "d")
        # ...and so does a processing instruction.
        x = doc.createProcessingInstruction("y", "z")
        elem.replaceChild(x, splitter)
        splitter = x
        self.checkWholeText(text, "cab")
        self.checkWholeText(text2, "d")
        # Removing the splitter joins the two runs into one.
        elem.removeChild(splitter)
        self.checkWholeText(text, "cabd")
        self.checkWholeText(text2, "cabd")
def testPatch1094164(self):
doc = parseString("<doc><e/></doc>")
elem = doc.documentElement
e = elem.firstChild
self.confirm(e.parentNode is elem, "Before replaceChild()")
# Check that replacing a child with itself leaves the tree unchanged
elem.replaceChild(e, e)
self.confirm(e.parentNode is elem, "After replaceChild()")
    def testReplaceWholeText(self):
        """Text.replaceWholeText() replaces an entire run of adjacent
        text/CDATA nodes with one node; an empty string removes the run
        and returns None."""
        def setup():
            # Build <doc>[CDATA c][text a][text b]<e/>[text d]</doc>:
            # "cab" is one contiguous text run, "d" a separate one.
            doc = parseString("<doc>a<e/>d</doc>")
            elem = doc.documentElement
            text1 = elem.firstChild
            text2 = elem.lastChild
            splitter = text1.nextSibling
            elem.insertBefore(doc.createTextNode("b"), splitter)
            elem.insertBefore(doc.createCDATASection("c"), text1)
            return doc, elem, text1, splitter, text2
        doc, elem, text1, splitter, text2 = setup()
        # Replacing the first run collapses its three nodes into one.
        text = text1.replaceWholeText("new content")
        self.checkWholeText(text, "new content")
        self.checkWholeText(text2, "d")
        self.confirm(len(elem.childNodes) == 3)
        doc, elem, text1, splitter, text2 = setup()
        # Replacing the second run leaves the first run untouched.
        text = text2.replaceWholeText("new content")
        self.checkWholeText(text, "new content")
        self.checkWholeText(text1, "cab")
        self.confirm(len(elem.childNodes) == 5)
        doc, elem, text1, splitter, text2 = setup()
        # Replacing with "" deletes the run entirely and returns None.
        text = text1.replaceWholeText("")
        self.checkWholeText(text2, "d")
        self.confirm(text is None
                and len(elem.childNodes) == 2)
    def testSchemaType(self):
        """schemaType is exposed for the element and each DTD-declared
        attribute; only its presence and namespace are asserted since the
        reported names vary by DOM loader."""
        doc = parseString(
            "<!DOCTYPE doc [\n"
            "  <!ENTITY e1 SYSTEM 'http://xml.python.org/e1'>\n"
            "  <!ENTITY e2 SYSTEM 'http://xml.python.org/e2'>\n"
            "  <!ATTLIST doc id   ID       #IMPLIED \n"
            "                ref  IDREF    #IMPLIED \n"
            "                refs IDREFS   #IMPLIED \n"
            "                enum (a|b)    #IMPLIED \n"
            "                ent  ENTITY   #IMPLIED \n"
            "                ents ENTITIES #IMPLIED \n"
            "                nm   NMTOKEN  #IMPLIED \n"
            "                nms  NMTOKENS #IMPLIED \n"
            "                text CDATA    #IMPLIED \n"
            "    >\n"
            "]><doc id='name' notid='name' text='splat!' enum='b'"
            "        ref='name' refs='name name' ent='e1' ents='e1 e2'"
            "        nm='123' nms='123 abc' />")
        elem = doc.documentElement
        # We don't want to rely on any specific loader at this point, so
        # just make sure we can get to all the names, and that the
        # DTD-based namespace is right.  The names can vary by loader
        # since each supports a different level of DTD information.
        t = elem.schemaType
        self.confirm(t.name is None
                and t.namespace == xml.dom.EMPTY_NAMESPACE)
        names = "id notid text enum ref refs ent ents nm nms".split()
        for name in names:
            a = elem.getAttributeNode(name)
            t = a.schemaType
            self.confirm(hasattr(t, "name")
                    and t.namespace == xml.dom.EMPTY_NAMESPACE)
    def testSetIdAttribute(self):
        """setIdAttribute() makes an attribute an ID for getElementById();
        replacing the attribute node drops ID-ness, renaming keeps it."""
        doc = parseString("<doc a1='v' a2='w'/>")
        e = doc.documentElement
        a1 = e.getAttributeNode("a1")
        a2 = e.getAttributeNode("a2")
        # No attribute is an ID until explicitly marked.
        self.confirm(doc.getElementById("v") is None
                and not a1.isId
                and not a2.isId)
        e.setIdAttribute("a1")
        self.confirm(e.isSameNode(doc.getElementById("v"))
                and a1.isId
                and not a2.isId)
        # Multiple attributes of one element may be IDs simultaneously.
        e.setIdAttribute("a2")
        self.confirm(e.isSameNode(doc.getElementById("v"))
                and e.isSameNode(doc.getElementById("w"))
                and a1.isId
                and a2.isId)
        # replace the a1 node; the new node should *not* be an ID
        a3 = doc.createAttribute("a1")
        a3.value = "v"
        e.setAttributeNode(a3)
        self.confirm(doc.getElementById("v") is None
                and e.isSameNode(doc.getElementById("w"))
                and not a1.isId
                and a2.isId
                and not a3.isId)
        # renaming an attribute should not affect its ID-ness:
        doc.renameNode(a2, xml.dom.EMPTY_NAMESPACE, "an")
        self.confirm(e.isSameNode(doc.getElementById("w"))
                and a2.isId)
    def testSetIdAttributeNS(self):
        """Namespace-aware variant of testSetIdAttribute, exercising
        setIdAttributeNS()."""
        NS1 = "http://xml.python.org/ns1"
        NS2 = "http://xml.python.org/ns2"
        doc = parseString("<doc"
                          " xmlns:ns1='" + NS1 + "'"
                          " xmlns:ns2='" + NS2 + "'"
                          " ns1:a1='v' ns2:a2='w'/>")
        e = doc.documentElement
        a1 = e.getAttributeNodeNS(NS1, "a1")
        a2 = e.getAttributeNodeNS(NS2, "a2")
        self.confirm(doc.getElementById("v") is None
                and not a1.isId
                and not a2.isId)
        e.setIdAttributeNS(NS1, "a1")
        self.confirm(e.isSameNode(doc.getElementById("v"))
                and a1.isId
                and not a2.isId)
        e.setIdAttributeNS(NS2, "a2")
        self.confirm(e.isSameNode(doc.getElementById("v"))
                and e.isSameNode(doc.getElementById("w"))
                and a1.isId
                and a2.isId)
        # replace the a1 node; the new node should *not* be an ID
        a3 = doc.createAttributeNS(NS1, "a1")
        a3.value = "v"
        e.setAttributeNode(a3)
        self.confirm(e.isSameNode(doc.getElementById("w")))
        self.confirm(not a1.isId)
        self.confirm(a2.isId)
        self.confirm(not a3.isId)
        self.confirm(doc.getElementById("v") is None)
        # renaming an attribute should not affect its ID-ness:
        doc.renameNode(a2, xml.dom.EMPTY_NAMESPACE, "an")
        self.confirm(e.isSameNode(doc.getElementById("w"))
                and a2.isId)
    def testSetIdAttributeNode(self):
        """Node-object variant of testSetIdAttribute, exercising
        setIdAttributeNode()."""
        NS1 = "http://xml.python.org/ns1"
        NS2 = "http://xml.python.org/ns2"
        doc = parseString("<doc"
                          " xmlns:ns1='" + NS1 + "'"
                          " xmlns:ns2='" + NS2 + "'"
                          " ns1:a1='v' ns2:a2='w'/>")
        e = doc.documentElement
        a1 = e.getAttributeNodeNS(NS1, "a1")
        a2 = e.getAttributeNodeNS(NS2, "a2")
        self.confirm(doc.getElementById("v") is None
                and not a1.isId
                and not a2.isId)
        e.setIdAttributeNode(a1)
        self.confirm(e.isSameNode(doc.getElementById("v"))
                and a1.isId
                and not a2.isId)
        e.setIdAttributeNode(a2)
        self.confirm(e.isSameNode(doc.getElementById("v"))
                and e.isSameNode(doc.getElementById("w"))
                and a1.isId
                and a2.isId)
        # replace the a1 node; the new node should *not* be an ID
        a3 = doc.createAttributeNS(NS1, "a1")
        a3.value = "v"
        e.setAttributeNode(a3)
        self.confirm(e.isSameNode(doc.getElementById("w")))
        self.confirm(not a1.isId)
        self.confirm(a2.isId)
        self.confirm(not a3.isId)
        self.confirm(doc.getElementById("v") is None)
        # renaming an attribute should not affect its ID-ness:
        doc.renameNode(a2, xml.dom.EMPTY_NAMESPACE, "an")
        self.confirm(e.isSameNode(doc.getElementById("w"))
                and a2.isId)
def assert_recursive_equal(self, doc, doc2):
stack = [(doc, doc2)]
while stack:
n1, n2 = stack.pop()
self.assertEqual(n1.nodeType, n2.nodeType)
self.assertEqual(len(n1.childNodes), len(n2.childNodes))
self.assertEqual(n1.nodeName, n2.nodeName)
self.assertFalse(n1.isSameNode(n2))
self.assertFalse(n2.isSameNode(n1))
if n1.nodeType == Node.DOCUMENT_TYPE_NODE:
len(n1.entities)
len(n2.entities)
len(n1.notations)
len(n2.notations)
self.assertEqual(len(n1.entities), len(n2.entities))
self.assertEqual(len(n1.notations), len(n2.notations))
for i in range(len(n1.notations)):
# XXX this loop body doesn't seem to be executed?
no1 = n1.notations.item(i)
no2 = n1.notations.item(i)
self.assertEqual(no1.name, no2.name)
self.assertEqual(no1.publicId, no2.publicId)
self.assertEqual(no1.systemId, no2.systemId)
stack.append((no1, no2))
for i in range(len(n1.entities)):
e1 = n1.entities.item(i)
e2 = n2.entities.item(i)
self.assertEqual(e1.notationName, e2.notationName)
self.assertEqual(e1.publicId, e2.publicId)
self.assertEqual(e1.systemId, e2.systemId)
stack.append((e1, e2))
if n1.nodeType != Node.DOCUMENT_NODE:
self.assertTrue(n1.ownerDocument.isSameNode(doc))
self.assertTrue(n2.ownerDocument.isSameNode(doc2))
for i in range(len(n1.childNodes)):
stack.append((n1.childNodes[i], n2.childNodes[i]))
    def testPickledDocument(self):
        """A parsed document survives a pickle round-trip structurally
        intact, for every protocol from 2 upward."""
        doc = parseString(sample)
        for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
            s = pickle.dumps(doc, proto)
            doc2 = pickle.loads(s)
            self.assert_recursive_equal(doc, doc2)
def testDeepcopiedDocument(self):
doc = parseString(sample)
doc2 = copy.deepcopy(doc)
self.assert_recursive_equal(doc, doc2)
def testSerializeCommentNodeWithDoubleHyphen(self):
doc = create_doc_without_doctype()
doc.appendChild(doc.createComment("foo--bar"))
self.assertRaises(ValueError, doc.toxml)
def testEmptyXMLNSValue(self):
doc = parseString("<element xmlns=''>\n"
"<foo/>\n</element>")
doc2 = parseString(doc.toxml())
self.confirm(doc2.namespaceURI == xml.dom.EMPTY_NAMESPACE)
def testExceptionOnSpacesInXMLNSValue(self):
with self.assertRaisesRegex(ValueError, 'Unsupported syntax'):
parseString('<element xmlns:abc="http:abc.com/de f g/hi/j k"><abc:foo /></element>')
    def testDocRemoveChild(self):
        """Document.removeChild() raises NotFoundErr for non-children and
        removes direct children."""
        doc = parse(tstfile)
        title_tag = doc.documentElement.getElementsByTagName("TITLE")[0]
        # TITLE is a grandchild, not a direct child of the document.
        self.assertRaises( xml.dom.NotFoundErr, doc.removeChild, title_tag)
        num_children_before = len(doc.childNodes)
        doc.removeChild(doc.childNodes[0])
        num_children_after = len(doc.childNodes)
        self.assertTrue(num_children_after == num_children_before - 1)
    def testProcessingInstructionNameError(self):
        """Regression test: assigning ProcessingInstruction.nodeValue must
        not blow up (it only passes if no exception is raised)."""
        # wrong variable in .nodeValue property will
        # lead to "NameError: name 'data' is not defined"
        doc = parse(tstfile)
        pi = doc.createProcessingInstruction("y", "z")
        pi.nodeValue = "crash"
# Allow running this test file directly.
if __name__ == "__main__":
    unittest.main()
|
FFMG/myoddweb.piger
|
monitor/api/python/Python-3.7.2/Lib/test/test_minidom.py
|
Python
|
gpl-2.0
| 66,881
|
"""Tests for base_events.py"""
import errno
import logging
import math
import os
import socket
import sys
import threading
import time
import unittest
from unittest import mock
import asyncio
from asyncio import base_events
from asyncio import constants
from asyncio import events
from test.test_asyncio import utils as test_utils
from test import support
from test.support.script_helper import assert_python_ok
# Shorthand used in assert_called_with() expectations below.
MOCK_ANY = mock.ANY
# Used where behaviour differs from 3.4 on (see test_default_exc_handler_coro).
PY34 = sys.version_info >= (3, 4)
def mock_socket_module():
    """Return a MagicMock standing in for the ``socket`` module.

    The common address-family/type/level constants and ``inet_pton`` are
    copied from the real module when the platform provides them, and
    deleted from the mock otherwise so ``hasattr`` checks behave the
    same as on the real module.
    """
    m_socket = mock.MagicMock(spec=socket)
    mirrored = (
        'AF_INET', 'AF_INET6', 'AF_UNSPEC', 'IPPROTO_TCP', 'IPPROTO_UDP',
        'SOCK_STREAM', 'SOCK_DGRAM', 'SOL_SOCKET', 'SO_REUSEADDR',
        'inet_pton',
    )
    for attr in mirrored:
        try:
            setattr(m_socket, attr, getattr(socket, attr))
        except AttributeError:
            delattr(m_socket, attr)
    m_socket.socket = mock.MagicMock()
    m_socket.socket.return_value = test_utils.mock_nonblocking_socket()
    # getaddrinfo must not look like a coroutine to the event loop.
    m_socket.getaddrinfo._is_coroutine = False
    return m_socket
def patch_socket(f):
    """Decorator replacing ``asyncio.base_events.socket`` with a mock module."""
    patcher = mock.patch('asyncio.base_events.socket',
                         new_callable=mock_socket_module)
    return patcher(f)
class BaseEventTests(test_utils.TestCase):
    """Tests for the module-level helpers of asyncio.base_events."""
    def test_ipaddr_info(self):
        """_ipaddr_info() resolves numeric host strings locally, inferring
        family/proto where possible and returning None when it cannot."""
        UNSPEC = socket.AF_UNSPEC
        INET = socket.AF_INET
        INET6 = socket.AF_INET6
        STREAM = socket.SOCK_STREAM
        DGRAM = socket.SOCK_DGRAM
        TCP = socket.IPPROTO_TCP
        UDP = socket.IPPROTO_UDP
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', 1, INET, STREAM, TCP))
        # bytes hosts are accepted too.
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info(b'1.2.3.4', 1, INET, STREAM, TCP))
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', 1, UNSPEC, STREAM, TCP))
        self.assertEqual(
            (INET, DGRAM, UDP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', 1, UNSPEC, DGRAM, UDP))
        # Socket type STREAM implies TCP protocol.
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', 1, UNSPEC, STREAM, 0))
        # Socket type DGRAM implies UDP protocol.
        self.assertEqual(
            (INET, DGRAM, UDP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', 1, UNSPEC, DGRAM, 0))
        # No socket type.
        self.assertIsNone(
            base_events._ipaddr_info('1.2.3.4', 1, UNSPEC, 0, 0))
        # IPv4 address with family IPv6.
        self.assertIsNone(
            base_events._ipaddr_info('1.2.3.4', 1, INET6, STREAM, TCP))
        self.assertEqual(
            (INET6, STREAM, TCP, '', ('::3', 1, 0, 0)),
            base_events._ipaddr_info('::3', 1, INET6, STREAM, TCP))
        self.assertEqual(
            (INET6, STREAM, TCP, '', ('::3', 1, 0, 0)),
            base_events._ipaddr_info('::3', 1, UNSPEC, STREAM, TCP))
        # IPv6 address with family IPv4.
        self.assertIsNone(
            base_events._ipaddr_info('::3', 1, INET, STREAM, TCP))
        # IPv6 address with zone index.
        self.assertIsNone(
            base_events._ipaddr_info('::3%lo0', 1, INET6, STREAM, TCP))
    def test_port_parameter_types(self):
        # Test obscure kinds of arguments for "port".
        # None and empty str/bytes all mean "port 0"; numeric strings parse.
        INET = socket.AF_INET
        STREAM = socket.SOCK_STREAM
        TCP = socket.IPPROTO_TCP
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 0)),
            base_events._ipaddr_info('1.2.3.4', None, INET, STREAM, TCP))
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 0)),
            base_events._ipaddr_info('1.2.3.4', b'', INET, STREAM, TCP))
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 0)),
            base_events._ipaddr_info('1.2.3.4', '', INET, STREAM, TCP))
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', '1', INET, STREAM, TCP))
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', b'1', INET, STREAM, TCP))
    @patch_socket
    def test_ipaddr_info_no_inet_pton(self, m_socket):
        """Without socket.inet_pton, _ipaddr_info() cannot resolve and
        must return None."""
        del m_socket.inet_pton
        self.assertIsNone(base_events._ipaddr_info('1.2.3.4', 1,
                                                   socket.AF_INET,
                                                   socket.SOCK_STREAM,
                                                   socket.IPPROTO_TCP))
class BaseEventLoopTests(test_utils.TestCase):
    def setUp(self):
        """Create a bare BaseEventLoop with a mocked selector that never
        reports ready events."""
        super().setUp()
        self.loop = base_events.BaseEventLoop()
        self.loop._selector = mock.Mock()
        self.loop._selector.select.return_value = ()
        self.set_event_loop(self.loop)
    def test_not_implemented(self):
        """All transport factories of the abstract BaseEventLoop raise
        NotImplementedError."""
        m = mock.Mock()
        self.assertRaises(
            NotImplementedError,
            self.loop._make_socket_transport, m, m)
        self.assertRaises(
            NotImplementedError,
            self.loop._make_ssl_transport, m, m, m, m)
        self.assertRaises(
            NotImplementedError,
            self.loop._make_datagram_transport, m, m)
        self.assertRaises(
            NotImplementedError, self.loop._process_events, [])
        self.assertRaises(
            NotImplementedError, self.loop._write_to_self)
        self.assertRaises(
            NotImplementedError,
            self.loop._make_read_pipe_transport, m, m)
        self.assertRaises(
            NotImplementedError,
            self.loop._make_write_pipe_transport, m, m)
        # _make_subprocess_transport is a coroutine: it only raises once
        # iteration starts.
        gen = self.loop._make_subprocess_transport(m, m, m, m, m, m, m)
        with self.assertRaises(NotImplementedError):
            gen.send(None)
    def test_close(self):
        """close() is idempotent, and running a closed loop raises."""
        self.assertFalse(self.loop.is_closed())
        self.loop.close()
        self.assertTrue(self.loop.is_closed())
        # it should be possible to call close() more than once
        self.loop.close()
        self.loop.close()
        # operation blocked when the loop is closed
        f = asyncio.Future(loop=self.loop)
        self.assertRaises(RuntimeError, self.loop.run_forever)
        self.assertRaises(RuntimeError, self.loop.run_until_complete, f)
    def test__add_callback_handle(self):
        """_add_callback() queues a plain Handle into _ready."""
        h = asyncio.Handle(lambda: False, (), self.loop, None)
        self.loop._add_callback(h)
        self.assertFalse(self.loop._scheduled)
        self.assertIn(h, self.loop._ready)
    def test__add_callback_cancelled_handle(self):
        """_add_callback() drops handles that are already cancelled."""
        h = asyncio.Handle(lambda: False, (), self.loop, None)
        h.cancel()
        self.loop._add_callback(h)
        self.assertFalse(self.loop._scheduled)
        self.assertFalse(self.loop._ready)
def test_set_default_executor(self):
executor = mock.Mock()
self.loop.set_default_executor(executor)
self.assertIs(executor, self.loop._default_executor)
    def test_call_soon(self):
        """call_soon() returns a Handle wrapping the callback, queued in
        _ready."""
        def cb():
            pass
        h = self.loop.call_soon(cb)
        self.assertEqual(h._callback, cb)
        self.assertIsInstance(h, asyncio.Handle)
        self.assertIn(h, self.loop._ready)
    def test_call_soon_non_callable(self):
        """In debug mode, call_soon() rejects non-callables with TypeError."""
        self.loop.set_debug(True)
        with self.assertRaisesRegex(TypeError, 'a callable object'):
            self.loop.call_soon(1)
    def test_call_later(self):
        """call_later() returns a TimerHandle queued in _scheduled, not
        in _ready."""
        def cb():
            pass
        h = self.loop.call_later(10.0, cb)
        self.assertIsInstance(h, asyncio.TimerHandle)
        self.assertIn(h, self.loop._scheduled)
        self.assertNotIn(h, self.loop._ready)
    def test_call_later_negative_delays(self):
        """Negative delays run immediately, ordered by their (negative)
        deadline: the more negative one fires first."""
        calls = []
        def cb(arg):
            calls.append(arg)
        self.loop._process_events = mock.Mock()
        self.loop.call_later(-1, cb, 'a')
        self.loop.call_later(-2, cb, 'b')
        test_utils.run_briefly(self.loop)
        self.assertEqual(calls, ['b', 'a'])
    def test_time_and_call_at(self):
        """call_at() fires no earlier than the requested loop time
        (within the loop's granularity)."""
        def cb():
            self.loop.stop()
        self.loop._process_events = mock.Mock()
        delay = 0.1
        when = self.loop.time() + delay
        self.loop.call_at(when, cb)
        t0 = self.loop.time()
        self.loop.run_forever()
        dt = self.loop.time() - t0
        # 50 ms: maximum granularity of the event loop
        self.assertGreaterEqual(dt, delay - 0.050, dt)
        # tolerate a difference of +800 ms because some Python buildbots
        # are really slow
        self.assertLessEqual(dt, 0.9, dt)
    def check_thread(self, loop, debug):
        """Helper for test_check_thread: when *debug* is true, scheduling
        from a foreign thread must raise RuntimeError; otherwise the
        thread-safety check is disabled."""
        def cb():
            pass
        loop.set_debug(debug)
        if debug:
            msg = ("Non-thread-safe operation invoked on an event loop other "
                   "than the current one")
            with self.assertRaisesRegex(RuntimeError, msg):
                loop.call_soon(cb)
            with self.assertRaisesRegex(RuntimeError, msg):
                loop.call_later(60, cb)
            with self.assertRaisesRegex(RuntimeError, msg):
                loop.call_at(loop.time() + 60, cb)
        else:
            loop.call_soon(cb)
            loop.call_later(60, cb)
            loop.call_at(loop.time() + 60, cb)
    def test_check_thread(self):
        """Drive check_thread() from a second thread, with and without
        debug mode, and with or without an event loop of its own."""
        def check_in_thread(loop, event, debug, create_loop, fut):
            # wait until the event loop is running
            event.wait()
            try:
                if create_loop:
                    loop2 = base_events.BaseEventLoop()
                    try:
                        asyncio.set_event_loop(loop2)
                        self.check_thread(loop, debug)
                    finally:
                        asyncio.set_event_loop(None)
                        loop2.close()
                else:
                    self.check_thread(loop, debug)
            except Exception as exc:
                loop.call_soon_threadsafe(fut.set_exception, exc)
            else:
                loop.call_soon_threadsafe(fut.set_result, None)
        def test_thread(loop, debug, create_loop=False):
            event = threading.Event()
            fut = asyncio.Future(loop=loop)
            loop.call_soon(event.set)
            args = (loop, event, debug, create_loop, fut)
            thread = threading.Thread(target=check_in_thread, args=args)
            thread.start()
            loop.run_until_complete(fut)
            thread.join()
        self.loop._process_events = mock.Mock()
        self.loop._write_to_self = mock.Mock()
        # raise RuntimeError if the thread has no event loop
        test_thread(self.loop, True)
        # check disabled if debug mode is disabled
        test_thread(self.loop, False)
        # raise RuntimeError if the event loop of the thread is not the called
        # event loop
        test_thread(self.loop, True, create_loop=True)
        # check disabled if debug mode is disabled
        test_thread(self.loop, False, create_loop=True)
    def test__run_once(self):
        """_run_once() drops cancelled timers and selects with a timeout
        matching the earliest remaining deadline (~10s here)."""
        h1 = asyncio.TimerHandle(time.monotonic() + 5.0, lambda: True, (),
                                 self.loop, None)
        h2 = asyncio.TimerHandle(time.monotonic() + 10.0, lambda: True, (),
                                 self.loop, None)
        h1.cancel()
        self.loop._process_events = mock.Mock()
        self.loop._scheduled.append(h1)
        self.loop._scheduled.append(h2)
        self.loop._run_once()
        t = self.loop._selector.select.call_args[0][0]
        self.assertTrue(9.5 < t < 10.5, t)
        self.assertEqual([h2], self.loop._scheduled)
        self.assertTrue(self.loop._process_events.called)
def test_set_debug(self):
self.loop.set_debug(True)
self.assertTrue(self.loop.get_debug())
self.loop.set_debug(False)
self.assertFalse(self.loop.get_debug())
    @mock.patch('asyncio.base_events.logger')
    def test__run_once_logging(self, m_logger):
        """In debug mode, a poll taking >1s is logged at INFO, a fast
        poll at DEBUG."""
        def slow_select(timeout):
            # Sleep a bit longer than a second to avoid timer resolution
            # issues.
            time.sleep(1.1)
            return []
        # logging needs debug flag
        self.loop.set_debug(True)
        # Log to INFO level if timeout > 1.0 sec.
        self.loop._selector.select = slow_select
        self.loop._process_events = mock.Mock()
        self.loop._run_once()
        self.assertEqual(logging.INFO, m_logger.log.call_args[0][0])
        def fast_select(timeout):
            time.sleep(0.001)
            return []
        self.loop._selector.select = fast_select
        self.loop._run_once()
        self.assertEqual(logging.DEBUG, m_logger.log.call_args[0][0])
    def test__run_once_schedule_handle(self):
        """A due timer callback runs during _run_once(), and anything it
        schedules with call_soon() lands in _ready."""
        handle = None
        processed = False
        def cb(loop):
            nonlocal processed, handle
            processed = True
            handle = loop.call_soon(lambda: True)
        # Deadline one second in the past: due immediately.
        h = asyncio.TimerHandle(time.monotonic() - 1, cb, (self.loop,),
                                self.loop, None)
        self.loop._process_events = mock.Mock()
        self.loop._scheduled.append(h)
        self.loop._run_once()
        self.assertTrue(processed)
        self.assertEqual([handle], list(self.loop._ready))
    def test__run_once_cancelled_event_cleanup(self):
        """_run_once() removes cancelled timers from the head of the
        queue immediately, and sweeps the whole queue only once the
        cancelled fraction crosses the configured threshold."""
        self.loop._process_events = mock.Mock()
        self.assertTrue(
            0 < base_events._MIN_CANCELLED_TIMER_HANDLES_FRACTION < 1.0)
        def cb():
            pass
        # Set up one "blocking" event that will not be cancelled to
        # ensure later cancelled events do not make it to the head
        # of the queue and get cleaned.
        not_cancelled_count = 1
        self.loop.call_later(3000, cb)
        # Add less than threshold (base_events._MIN_SCHEDULED_TIMER_HANDLES)
        # cancelled handles, ensure they aren't removed
        cancelled_count = 2
        for x in range(2):
            h = self.loop.call_later(3600, cb)
            h.cancel()
        # Add some cancelled events that will be at head and removed
        cancelled_count += 2
        for x in range(2):
            h = self.loop.call_later(100, cb)
            h.cancel()
        # This test is invalid if _MIN_SCHEDULED_TIMER_HANDLES is too low
        self.assertLessEqual(cancelled_count + not_cancelled_count,
            base_events._MIN_SCHEDULED_TIMER_HANDLES)
        self.assertEqual(self.loop._timer_cancelled_count, cancelled_count)
        self.loop._run_once()
        # Only the two head (100s) cancelled handles were popped.
        cancelled_count -= 2
        self.assertEqual(self.loop._timer_cancelled_count, cancelled_count)
        self.assertEqual(len(self.loop._scheduled),
            cancelled_count + not_cancelled_count)
        # Need enough events to pass _MIN_CANCELLED_TIMER_HANDLES_FRACTION
        # so that deletion of cancelled events will occur on next _run_once
        add_cancel_count = int(math.ceil(
            base_events._MIN_SCHEDULED_TIMER_HANDLES *
            base_events._MIN_CANCELLED_TIMER_HANDLES_FRACTION)) + 1
        add_not_cancel_count = max(base_events._MIN_SCHEDULED_TIMER_HANDLES -
            add_cancel_count, 0)
        # Add some events that will not be cancelled
        not_cancelled_count += add_not_cancel_count
        for x in range(add_not_cancel_count):
            self.loop.call_later(3600, cb)
        # Add enough cancelled events
        cancelled_count += add_cancel_count
        for x in range(add_cancel_count):
            h = self.loop.call_later(3600, cb)
            h.cancel()
        # Ensure all handles are still scheduled
        self.assertEqual(len(self.loop._scheduled),
            cancelled_count + not_cancelled_count)
        self.loop._run_once()
        # Ensure cancelled events were removed
        self.assertEqual(len(self.loop._scheduled), not_cancelled_count)
        # Ensure only uncancelled events remain scheduled
        self.assertTrue(all([not x._cancelled for x in self.loop._scheduled]))
def test_run_until_complete_type_error(self):
self.assertRaises(TypeError,
self.loop.run_until_complete, 'blah')
    def test_run_until_complete_loop(self):
        """A future bound to one loop cannot be run on another loop."""
        task = asyncio.Future(loop=self.loop)
        other_loop = self.new_test_loop()
        self.addCleanup(other_loop.close)
        self.assertRaises(ValueError,
            other_loop.run_until_complete, task)
    def test_run_until_complete_loop_orphan_future_close_loop(self):
        """If run_until_complete() is aborted by a BaseException, its
        internal done-callback must still be removed so the loop can run
        another future afterwards."""
        class ShowStopper(BaseException):
            pass
        async def foo(delay):
            await asyncio.sleep(delay, loop=self.loop)
        def throw():
            raise ShowStopper
        self.loop._process_events = mock.Mock()
        self.loop.call_soon(throw)
        try:
            self.loop.run_until_complete(foo(0.1))
        except ShowStopper:
            pass
        # This call fails if run_until_complete does not clean up
        # done-callback for the previous future.
        self.loop.run_until_complete(foo(0.2))
    def test_subprocess_exec_invalid_args(self):
        """subprocess_exec() validates its program arguments and rejects
        Popen keywords it does not support."""
        args = [sys.executable, '-c', 'pass']
        # missing program parameter (empty args)
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_exec,
            asyncio.SubprocessProtocol)
        # expected multiple arguments, not a list
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_exec,
            asyncio.SubprocessProtocol, args)
        # program arguments must be strings, not int
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_exec,
            asyncio.SubprocessProtocol, sys.executable, 123)
        # universal_newlines, shell, bufsize must not be set
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_exec,
            asyncio.SubprocessProtocol, *args, universal_newlines=True)
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_exec,
            asyncio.SubprocessProtocol, *args, shell=True)
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_exec,
            asyncio.SubprocessProtocol, *args, bufsize=4096)
    def test_subprocess_shell_invalid_args(self):
        """subprocess_shell() requires a single command string and rejects
        unsupported Popen keywords."""
        # expected a string, not an int or a list
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_shell,
            asyncio.SubprocessProtocol, 123)
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_shell,
            asyncio.SubprocessProtocol, [sys.executable, '-c', 'pass'])
        # universal_newlines, shell, bufsize must not be set
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_shell,
            asyncio.SubprocessProtocol, 'exit 0', universal_newlines=True)
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_shell,
            asyncio.SubprocessProtocol, 'exit 0', shell=True)
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_shell,
            asyncio.SubprocessProtocol, 'exit 0', bufsize=4096)
    def test_default_exc_handler_callback(self):
        """Exceptions escaping callbacks are logged by the default
        exception handler, for both call_soon and call_later handles."""
        self.loop._process_events = mock.Mock()
        def zero_error(fut):
            fut.set_result(True)
            1/0
        # Test call_soon (events.Handle)
        with mock.patch('asyncio.base_events.logger') as log:
            fut = asyncio.Future(loop=self.loop)
            self.loop.call_soon(zero_error, fut)
            fut.add_done_callback(lambda fut: self.loop.stop())
            self.loop.run_forever()
            log.error.assert_called_with(
                test_utils.MockPattern('Exception in callback.*zero'),
                exc_info=(ZeroDivisionError, MOCK_ANY, MOCK_ANY))
        # Test call_later (events.TimerHandle)
        with mock.patch('asyncio.base_events.logger') as log:
            fut = asyncio.Future(loop=self.loop)
            self.loop.call_later(0.01, zero_error, fut)
            fut.add_done_callback(lambda fut: self.loop.stop())
            self.loop.run_forever()
            log.error.assert_called_with(
                test_utils.MockPattern('Exception in callback.*zero'),
                exc_info=(ZeroDivisionError, MOCK_ANY, MOCK_ANY))
    def test_default_exc_handler_coro(self):
        """An exception left unretrieved on a task is logged when the
        future is garbage-collected."""
        self.loop._process_events = mock.Mock()
        @asyncio.coroutine
        def zero_error_coro():
            yield from asyncio.sleep(0.01, loop=self.loop)
            1/0
        # Test Future.__del__
        with mock.patch('asyncio.base_events.logger') as log:
            fut = asyncio.ensure_future(zero_error_coro(), loop=self.loop)
            fut.add_done_callback(lambda *args: self.loop.stop())
            self.loop.run_forever()
            fut = None  # Trigger Future.__del__ or futures._TracebackLogger
            support.gc_collect()
            if PY34:
                # Future.__del__ in Python 3.4 logs error with
                # an actual exception context
                log.error.assert_called_with(
                    test_utils.MockPattern('.*exception was never retrieved'),
                    exc_info=(ZeroDivisionError, MOCK_ANY, MOCK_ANY))
            else:
                # futures._TracebackLogger logs only textual traceback
                log.error.assert_called_with(
                    test_utils.MockPattern(
                        '.*exception was never retrieved.*ZeroDiv'),
                    exc_info=False)
def test_set_exc_handler_invalid(self):
with self.assertRaisesRegex(TypeError, 'A callable object or None'):
self.loop.set_exception_handler('spam')
    def test_set_exc_handler_custom(self):
        """A custom exception handler receives the error context; resetting
        it to None restores default logging."""
        def zero_error():
            1/0
        def run_loop():
            handle = self.loop.call_soon(zero_error)
            self.loop._run_once()
            return handle
        self.loop.set_debug(True)
        self.loop._process_events = mock.Mock()
        self.assertIsNone(self.loop.get_exception_handler())
        mock_handler = mock.Mock()
        self.loop.set_exception_handler(mock_handler)
        self.assertIs(self.loop.get_exception_handler(), mock_handler)
        handle = run_loop()
        mock_handler.assert_called_with(self.loop, {
            'exception': MOCK_ANY,
            'message': test_utils.MockPattern(
                'Exception in callback.*zero_error'),
            'handle': handle,
            'source_traceback': handle._source_traceback,
        })
        mock_handler.reset_mock()
        # Back to the default handler: errors go to the logger again.
        self.loop.set_exception_handler(None)
        with mock.patch('asyncio.base_events.logger') as log:
            run_loop()
            log.error.assert_called_with(
                test_utils.MockPattern(
                    'Exception in callback.*zero'),
                exc_info=(ZeroDivisionError, MOCK_ANY, MOCK_ANY))
        assert not mock_handler.called
    def test_set_exc_handler_broken(self):
        """If the custom exception handler itself raises, the loop logs
        'Unhandled error in exception handler' instead of crashing."""
        def run_loop():
            def zero_error():
                1/0
            self.loop.call_soon(zero_error)
            self.loop._run_once()
        def handler(loop, context):
            raise AttributeError('spam')
        self.loop._process_events = mock.Mock()
        self.loop.set_exception_handler(handler)
        with mock.patch('asyncio.base_events.logger') as log:
            run_loop()
            log.error.assert_called_with(
                test_utils.MockPattern(
                    'Unhandled error in exception handler'),
                exc_info=(AttributeError, MOCK_ANY, MOCK_ANY))
    def test_default_exc_handler_broken(self):
        """A buggy default_exception_handler override must be caught and
        logged, both with and without a custom handler installed."""
        _context = None
        class Loop(base_events.BaseEventLoop):
            _selector = mock.Mock()
            _process_events = mock.Mock()
            def default_exception_handler(self, context):
                nonlocal _context
                _context = context
                # Simulates custom buggy "default_exception_handler"
                raise ValueError('spam')
        loop = Loop()
        self.addCleanup(loop.close)
        asyncio.set_event_loop(loop)
        def run_loop():
            def zero_error():
                1/0
            loop.call_soon(zero_error)
            loop._run_once()
        with mock.patch('asyncio.base_events.logger') as log:
            run_loop()
            log.error.assert_called_with(
                'Exception in default exception handler',
                exc_info=True)
        def custom_handler(loop, context):
            raise ValueError('ham')
        _context = None
        loop.set_exception_handler(custom_handler)
        with mock.patch('asyncio.base_events.logger') as log:
            run_loop()
            log.error.assert_called_with(
                test_utils.MockPattern('Exception in default exception.*'
                                       'while handling.*in custom'),
                exc_info=True)
            # Check that original context was passed to default
            # exception handler.
            self.assertIn('context', _context)
            self.assertIs(type(_context['context']['exception']),
                          ZeroDivisionError)
def test_set_task_factory_invalid(self):
with self.assertRaisesRegex(
TypeError, 'task factory must be a callable or None'):
self.loop.set_task_factory(1)
self.assertIsNone(self.loop.get_task_factory())
    def test_set_task_factory(self):
        """create_task() must use the installed task factory, and revert to
        plain asyncio.Task once the factory is reset to None."""
        self.loop._process_events = mock.Mock()
        class MyTask(asyncio.Task):
            pass
        @asyncio.coroutine
        def coro():
            pass
        factory = lambda loop, coro: MyTask(coro, loop=loop)
        self.assertIsNone(self.loop.get_task_factory())
        self.loop.set_task_factory(factory)
        self.assertIs(self.loop.get_task_factory(), factory)
        task = self.loop.create_task(coro())
        self.assertTrue(isinstance(task, MyTask))
        self.loop.run_until_complete(task)
        self.loop.set_task_factory(None)
        self.assertIsNone(self.loop.get_task_factory())
        task = self.loop.create_task(coro())
        self.assertTrue(isinstance(task, asyncio.Task))
        self.assertFalse(isinstance(task, MyTask))
        self.loop.run_until_complete(task)
def test_env_var_debug(self):
code = '\n'.join((
'import asyncio',
'loop = asyncio.get_event_loop()',
'print(loop.get_debug())'))
# Test with -E to not fail if the unit test was run with
# PYTHONASYNCIODEBUG set to a non-empty string
sts, stdout, stderr = assert_python_ok('-E', '-c', code)
self.assertEqual(stdout.rstrip(), b'False')
sts, stdout, stderr = assert_python_ok('-c', code,
PYTHONASYNCIODEBUG='',
PYTHONDEVMODE='')
self.assertEqual(stdout.rstrip(), b'False')
sts, stdout, stderr = assert_python_ok('-c', code,
PYTHONASYNCIODEBUG='1',
PYTHONDEVMODE='')
self.assertEqual(stdout.rstrip(), b'True')
sts, stdout, stderr = assert_python_ok('-E', '-c', code,
PYTHONASYNCIODEBUG='1')
self.assertEqual(stdout.rstrip(), b'False')
# -X dev
sts, stdout, stderr = assert_python_ok('-E', '-X', 'dev',
'-c', code)
self.assertEqual(stdout.rstrip(), b'True')
    def test_create_task(self):
        """ensure_future() must delegate to the loop's create_task()
        override."""
        class MyTask(asyncio.Task):
            pass
        @asyncio.coroutine
        def test():
            pass
        class EventLoop(base_events.BaseEventLoop):
            def create_task(self, coro):
                return MyTask(coro, loop=loop)
        loop = EventLoop()
        self.set_event_loop(loop)
        coro = test()
        task = asyncio.ensure_future(coro, loop=loop)
        self.assertIsInstance(task, MyTask)
        # make warnings quiet
        task._log_destroy_pending = False
        coro.close()
def test_run_forever_keyboard_interrupt(self):
# Python issue #22601: ensure that the temporary task created by
# run_forever() consumes the KeyboardInterrupt and so don't log
# a warning
@asyncio.coroutine
def raise_keyboard_interrupt():
raise KeyboardInterrupt
self.loop._process_events = mock.Mock()
self.loop.call_exception_handler = mock.Mock()
try:
self.loop.run_until_complete(raise_keyboard_interrupt())
except KeyboardInterrupt:
pass
self.loop.close()
support.gc_collect()
self.assertFalse(self.loop.call_exception_handler.called)
def test_run_until_complete_baseexception(self):
# Python issue #22429: run_until_complete() must not schedule a pending
# call to stop() if the future raised a BaseException
@asyncio.coroutine
def raise_keyboard_interrupt():
raise KeyboardInterrupt
self.loop._process_events = mock.Mock()
try:
self.loop.run_until_complete(raise_keyboard_interrupt())
except KeyboardInterrupt:
pass
def func():
self.loop.stop()
func.called = True
func.called = False
try:
self.loop.call_soon(func)
self.loop.run_forever()
except KeyboardInterrupt:
pass
self.assertTrue(func.called)
def test_single_selecter_event_callback_after_stopping(self):
# Python issue #25593: A stopped event loop may cause event callbacks
# to run more than once.
event_sentinel = object()
callcount = 0
doer = None
def proc_events(event_list):
nonlocal doer
if event_sentinel in event_list:
doer = self.loop.call_soon(do_event)
def do_event():
nonlocal callcount
callcount += 1
self.loop.call_soon(clear_selector)
def clear_selector():
doer.cancel()
self.loop._selector.select.return_value = ()
self.loop._process_events = proc_events
self.loop._selector.select.return_value = (event_sentinel,)
for i in range(1, 3):
with self.subTest('Loop %d/2' % i):
self.loop.call_soon(self.loop.stop)
self.loop.run_forever()
self.assertEqual(callcount, 1)
def test_run_once(self):
# Simple test for test_utils.run_once(). It may seem strange
# to have a test for this (the function isn't even used!) but
# it's a de-factor standard API for library tests. This tests
# the idiom: loop.call_soon(loop.stop); loop.run_forever().
count = 0
def callback():
nonlocal count
count += 1
self.loop._process_events = mock.Mock()
self.loop.call_soon(callback)
test_utils.run_once(self.loop)
self.assertEqual(count, 1)
def test_run_forever_pre_stopped(self):
# Test that the old idiom for pre-stopping the loop works.
self.loop._process_events = mock.Mock()
self.loop.stop()
self.loop.run_forever()
self.loop._selector.select.assert_called_once_with(0)
    async def leave_unfinalized_asyncgen(self):
        # Create an async generator, iterate it partially, and leave it
        # to be garbage collected.
        # Used in async generator finalization tests.
        # Depends on implementation details of garbage collector. Changes
        # in gc may break this function.
        # Returns the shared status dict so callers can observe progress.
        status = {'started': False,
                  'stopped': False,
                  'finalized': False}
        async def agen():
            status['started'] = True
            try:
                for item in ['ZERO', 'ONE', 'TWO', 'THREE', 'FOUR']:
                    yield item
            finally:
                # Runs only when the generator is closed/finalized.
                status['finalized'] = True
        ag = agen()
        ai = ag.__aiter__()
        async def iter_one():
            try:
                item = await ai.__anext__()
            except StopAsyncIteration:
                return
            if item == 'THREE':
                status['stopped'] = True
                return
            # Chain another step until 'THREE' is reached.
            asyncio.create_task(iter_one())
        asyncio.create_task(iter_one())
        return status
def test_asyncgen_finalization_by_gc(self):
# Async generators should be finalized when garbage collected.
self.loop._process_events = mock.Mock()
self.loop._write_to_self = mock.Mock()
with support.disable_gc():
status = self.loop.run_until_complete(self.leave_unfinalized_asyncgen())
while not status['stopped']:
test_utils.run_briefly(self.loop)
self.assertTrue(status['started'])
self.assertTrue(status['stopped'])
self.assertFalse(status['finalized'])
support.gc_collect()
test_utils.run_briefly(self.loop)
self.assertTrue(status['finalized'])
def test_asyncgen_finalization_by_gc_in_other_thread(self):
# Python issue 34769: If garbage collector runs in another
# thread, async generators will not finalize in debug
# mode.
self.loop._process_events = mock.Mock()
self.loop._write_to_self = mock.Mock()
self.loop.set_debug(True)
with support.disable_gc():
status = self.loop.run_until_complete(self.leave_unfinalized_asyncgen())
while not status['stopped']:
test_utils.run_briefly(self.loop)
self.assertTrue(status['started'])
self.assertTrue(status['stopped'])
self.assertFalse(status['finalized'])
self.loop.run_until_complete(
self.loop.run_in_executor(None, support.gc_collect))
test_utils.run_briefly(self.loop)
self.assertTrue(status['finalized'])
class MyProto(asyncio.Protocol):
    """Stream-protocol test fixture: tracks connection state transitions
    (INITIAL -> CONNECTED -> EOF -> CLOSED) and counts received bytes."""
    # Class-level default; replaced by a Future when create_future=True.
    done = None
    def __init__(self, create_future=False):
        self.state = 'INITIAL'
        self.nbytes = 0
        if create_future:
            self.done = asyncio.Future()
    def connection_made(self, transport):
        self.transport = transport
        assert self.state == 'INITIAL', self.state
        self.state = 'CONNECTED'
        # Send a fixed HTTP request so the peer has data to read.
        transport.write(b'GET / HTTP/1.0\r\nHost: example.com\r\n\r\n')
    def data_received(self, data):
        assert self.state == 'CONNECTED', self.state
        self.nbytes += len(data)
    def eof_received(self):
        assert self.state == 'CONNECTED', self.state
        self.state = 'EOF'
    def connection_lost(self, exc):
        assert self.state in ('CONNECTED', 'EOF'), self.state
        self.state = 'CLOSED'
        if self.done:
            # Signal tests that the connection is fully closed.
            self.done.set_result(None)
class MyDatagramProto(asyncio.DatagramProtocol):
    """Datagram-protocol test fixture: tracks state transitions
    (INITIAL -> INITIALIZED -> CLOSED) and counts received bytes."""
    # Class-level default; replaced by a Future when create_future=True.
    done = None
    def __init__(self, create_future=False, loop=None):
        self.state = 'INITIAL'
        self.nbytes = 0
        if create_future:
            self.done = asyncio.Future(loop=loop)
    def connection_made(self, transport):
        self.transport = transport
        assert self.state == 'INITIAL', self.state
        self.state = 'INITIALIZED'
    def datagram_received(self, data, addr):
        assert self.state == 'INITIALIZED', self.state
        self.nbytes += len(data)
    def error_received(self, exc):
        assert self.state == 'INITIALIZED', self.state
    def connection_lost(self, exc):
        assert self.state == 'INITIALIZED', self.state
        self.state = 'CLOSED'
        if self.done:
            # Signal tests that the transport is fully closed.
            self.done.set_result(None)
class BaseEventLoopWithSelectorTests(test_utils.TestCase):
    def setUp(self):
        """Create a fresh selector event loop for every test."""
        super().setUp()
        self.loop = asyncio.new_event_loop()
        self.set_event_loop(self.loop)
    @mock.patch('socket.getnameinfo')
    def test_getnameinfo(self, m_gai):
        """loop.getnameinfo() must return socket.getnameinfo()'s result."""
        m_gai.side_effect = lambda *args: 42
        r = self.loop.run_until_complete(self.loop.getnameinfo(('abc', 123)))
        self.assertEqual(r, 42)
    @patch_socket
    def test_create_connection_multiple_errors(self, m_socket):
        """When every resolved address fails, the errors are aggregated
        into a single 'Multiple exceptions' OSError."""
        class MyProto(asyncio.Protocol):
            pass
        @asyncio.coroutine
        def getaddrinfo(*args, **kw):
            yield from []
            return [(2, 1, 6, '', ('107.6.106.82', 80)),
                    (2, 1, 6, '', ('107.6.106.82', 80))]
        def getaddrinfo_task(*args, **kwds):
            return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
        idx = -1
        errors = ['err1', 'err2']
        def _socket(*args, **kw):
            # Fail differently on each socket() call.
            nonlocal idx, errors
            idx += 1
            raise OSError(errors[idx])
        m_socket.socket = _socket
        self.loop.getaddrinfo = getaddrinfo_task
        coro = self.loop.create_connection(MyProto, 'example.com', 80)
        with self.assertRaises(OSError) as cm:
            self.loop.run_until_complete(coro)
        self.assertEqual(str(cm.exception), 'Multiple exceptions: err1, err2')
    @patch_socket
    def test_create_connection_timeout(self, m_socket):
        # Ensure that the socket is closed on timeout
        sock = mock.Mock()
        m_socket.socket.return_value = sock
        def getaddrinfo(*args, **kw):
            # Resolve to a single local address without touching the network.
            fut = asyncio.Future(loop=self.loop)
            addr = (socket.AF_INET, socket.SOCK_STREAM, 0, '',
                    ('127.0.0.1', 80))
            fut.set_result([addr])
            return fut
        self.loop.getaddrinfo = getaddrinfo
        with mock.patch.object(self.loop, 'sock_connect',
                               side_effect=asyncio.TimeoutError):
            coro = self.loop.create_connection(MyProto, '127.0.0.1', 80)
            with self.assertRaises(asyncio.TimeoutError):
                self.loop.run_until_complete(coro)
            self.assertTrue(sock.close.called)
def test_create_connection_host_port_sock(self):
coro = self.loop.create_connection(
MyProto, 'example.com', 80, sock=object())
self.assertRaises(ValueError, self.loop.run_until_complete, coro)
    def test_create_connection_wrong_sock(self):
        """A datagram socket passed to create_connection() is rejected."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        with sock:
            coro = self.loop.create_connection(MyProto, sock=sock)
            with self.assertRaisesRegex(ValueError,
                                        'A Stream Socket was expected'):
                self.loop.run_until_complete(coro)
    def test_create_server_wrong_sock(self):
        """A datagram socket passed to create_server() is rejected."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        with sock:
            coro = self.loop.create_server(MyProto, sock=sock)
            with self.assertRaisesRegex(ValueError,
                                        'A Stream Socket was expected'):
                self.loop.run_until_complete(coro)
    def test_create_server_ssl_timeout_for_plain_socket(self):
        """ssl_handshake_timeout without ssl must raise ValueError."""
        coro = self.loop.create_server(
            MyProto, 'example.com', 80, ssl_handshake_timeout=1)
        with self.assertRaisesRegex(
                ValueError,
                'ssl_handshake_timeout is only meaningful with ssl'):
            self.loop.run_until_complete(coro)
    @unittest.skipUnless(hasattr(socket, 'SOCK_NONBLOCK'),
                         'no socket.SOCK_NONBLOCK (linux only)')
    def test_create_server_stream_bittype(self):
        """A SOCK_STREAM socket with extra type bits (SOCK_NONBLOCK) must
        still be accepted as a stream socket."""
        sock = socket.socket(
            socket.AF_INET, socket.SOCK_STREAM | socket.SOCK_NONBLOCK)
        with sock:
            coro = self.loop.create_server(lambda: None, sock=sock)
            srv = self.loop.run_until_complete(coro)
            srv.close()
            self.loop.run_until_complete(srv.wait_closed())
    @unittest.skipUnless(hasattr(socket, 'AF_INET6'), 'no IPv6 support')
    def test_create_server_ipv6(self):
        """start_server() on '::1' must bind at least one socket."""
        async def main():
            srv = await asyncio.start_server(
                lambda: None, '::1', 0, loop=self.loop)
            try:
                self.assertGreater(len(srv.sockets), 0)
            finally:
                srv.close()
                await srv.wait_closed()
        try:
            self.loop.run_until_complete(main())
        except OSError as ex:
            # ::1 may be unavailable even when AF_INET6 exists.
            if (hasattr(errno, 'EADDRNOTAVAIL') and
                    ex.errno == errno.EADDRNOTAVAIL):
                self.skipTest('failed to bind to ::1')
            else:
                raise
    def test_create_datagram_endpoint_wrong_sock(self):
        """A stream socket passed to create_datagram_endpoint() is
        rejected."""
        sock = socket.socket(socket.AF_INET)
        with sock:
            coro = self.loop.create_datagram_endpoint(MyProto, sock=sock)
            with self.assertRaisesRegex(ValueError,
                                        'A UDP Socket was expected'):
                self.loop.run_until_complete(coro)
def test_create_connection_no_host_port_sock(self):
coro = self.loop.create_connection(MyProto)
self.assertRaises(ValueError, self.loop.run_until_complete, coro)
    def test_create_connection_no_getaddrinfo(self):
        """An empty getaddrinfo() result must surface as OSError."""
        @asyncio.coroutine
        def getaddrinfo(*args, **kw):
            yield from []
        def getaddrinfo_task(*args, **kwds):
            return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
        self.loop.getaddrinfo = getaddrinfo_task
        coro = self.loop.create_connection(MyProto, 'example.com', 80)
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
    def test_create_connection_connect_err(self):
        """An OSError from sock_connect() must propagate to the caller."""
        async def getaddrinfo(*args, **kw):
            return [(2, 1, 6, '', ('107.6.106.82', 80))]
        def getaddrinfo_task(*args, **kwds):
            return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
        self.loop.getaddrinfo = getaddrinfo_task
        self.loop.sock_connect = mock.Mock()
        self.loop.sock_connect.side_effect = OSError
        coro = self.loop.create_connection(MyProto, 'example.com', 80)
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
    def test_create_connection_multiple(self):
        """Connection failure on every resolved address raises OSError."""
        @asyncio.coroutine
        def getaddrinfo(*args, **kw):
            return [(2, 1, 6, '', ('0.0.0.1', 80)),
                    (2, 1, 6, '', ('0.0.0.2', 80))]
        def getaddrinfo_task(*args, **kwds):
            return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
        self.loop.getaddrinfo = getaddrinfo_task
        self.loop.sock_connect = mock.Mock()
        self.loop.sock_connect.side_effect = OSError
        coro = self.loop.create_connection(
            MyProto, 'example.com', 80, family=socket.AF_INET)
        with self.assertRaises(OSError):
            self.loop.run_until_complete(coro)
    @patch_socket
    def test_create_connection_multiple_errors_local_addr(self, m_socket):
        """bind() and connect() failures across addresses are aggregated
        into a 'Multiple exceptions' OSError and the socket is closed."""
        def bind(addr):
            # Only the first local address fails to bind.
            if addr[0] == '0.0.0.1':
                err = OSError('Err')
                err.strerror = 'Err'
                raise err
        m_socket.socket.return_value.bind = bind
        @asyncio.coroutine
        def getaddrinfo(*args, **kw):
            return [(2, 1, 6, '', ('0.0.0.1', 80)),
                    (2, 1, 6, '', ('0.0.0.2', 80))]
        def getaddrinfo_task(*args, **kwds):
            return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
        self.loop.getaddrinfo = getaddrinfo_task
        self.loop.sock_connect = mock.Mock()
        self.loop.sock_connect.side_effect = OSError('Err2')
        coro = self.loop.create_connection(
            MyProto, 'example.com', 80, family=socket.AF_INET,
            local_addr=(None, 8080))
        with self.assertRaises(OSError) as cm:
            self.loop.run_until_complete(coro)
        self.assertTrue(str(cm.exception).startswith('Multiple exceptions: '))
        self.assertTrue(m_socket.socket.return_value.close.called)
    def _test_create_connection_ip_addr(self, m_socket, allow_inet_pton):
        # Test the fallback code, even if this system has inet_pton.
        # Shared driver: verifies that literal IPv4/IPv6 addresses produce a
        # socket of the right family and a connect() to the right endpoint,
        # both with and without socket.inet_pton available.
        if not allow_inet_pton:
            del m_socket.inet_pton
        m_socket.getaddrinfo = socket.getaddrinfo
        sock = m_socket.socket.return_value
        self.loop._add_reader = mock.Mock()
        self.loop._add_reader._is_coroutine = False
        self.loop._add_writer = mock.Mock()
        self.loop._add_writer._is_coroutine = False
        coro = self.loop.create_connection(asyncio.Protocol, '1.2.3.4', 80)
        t, p = self.loop.run_until_complete(coro)
        try:
            sock.connect.assert_called_with(('1.2.3.4', 80))
            _, kwargs = m_socket.socket.call_args
            self.assertEqual(kwargs['family'], m_socket.AF_INET)
            self.assertEqual(kwargs['type'], m_socket.SOCK_STREAM)
        finally:
            t.close()
            test_utils.run_briefly(self.loop)  # allow transport to close
        sock.family = socket.AF_INET6
        coro = self.loop.create_connection(asyncio.Protocol, '::1', 80)
        t, p = self.loop.run_until_complete(coro)
        try:
            # Without inet_pton we use getaddrinfo, which transforms ('::1', 80)
            # to ('::1', 80, 0, 0). The last 0s are flow info, scope id.
            [address] = sock.connect.call_args[0]
            host, port = address[:2]
            self.assertRegex(host, r'::(0\.)*1')
            self.assertEqual(port, 80)
            _, kwargs = m_socket.socket.call_args
            self.assertEqual(kwargs['family'], m_socket.AF_INET6)
            self.assertEqual(kwargs['type'], m_socket.SOCK_STREAM)
        finally:
            t.close()
            test_utils.run_briefly(self.loop)  # allow transport to close
    @patch_socket
    def test_create_connection_ip_addr(self, m_socket):
        """Literal-IP connection with inet_pton available."""
        self._test_create_connection_ip_addr(m_socket, True)
    @patch_socket
    def test_create_connection_no_inet_pton(self, m_socket):
        """Literal-IP connection via the getaddrinfo fallback path."""
        self._test_create_connection_ip_addr(m_socket, False)
    @patch_socket
    def test_create_connection_service_name(self, m_socket):
        """Service names (str and bytes, e.g. 'http') must resolve to the
        matching port; unknown names must raise OSError."""
        m_socket.getaddrinfo = socket.getaddrinfo
        sock = m_socket.socket.return_value
        self.loop._add_reader = mock.Mock()
        self.loop._add_reader._is_coroutine = False
        self.loop._add_writer = mock.Mock()
        self.loop._add_writer._is_coroutine = False
        for service, port in ('http', 80), (b'http', 80):
            coro = self.loop.create_connection(asyncio.Protocol,
                                               '127.0.0.1', service)
            t, p = self.loop.run_until_complete(coro)
            try:
                sock.connect.assert_called_with(('127.0.0.1', port))
                _, kwargs = m_socket.socket.call_args
                self.assertEqual(kwargs['family'], m_socket.AF_INET)
                self.assertEqual(kwargs['type'], m_socket.SOCK_STREAM)
            finally:
                t.close()
                test_utils.run_briefly(self.loop)  # allow transport to close
        for service in 'nonsense', b'nonsense':
            coro = self.loop.create_connection(asyncio.Protocol,
                                               '127.0.0.1', service)
            with self.assertRaises(OSError):
                self.loop.run_until_complete(coro)
    def test_create_connection_no_local_addr(self):
        """An unresolvable local_addr must surface as OSError."""
        @asyncio.coroutine
        def getaddrinfo(host, *args, **kw):
            # Only the remote host resolves; the local address does not.
            if host == 'example.com':
                return [(2, 1, 6, '', ('107.6.106.82', 80)),
                        (2, 1, 6, '', ('107.6.106.82', 80))]
            else:
                return []
        def getaddrinfo_task(*args, **kwds):
            return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
        self.loop.getaddrinfo = getaddrinfo_task
        coro = self.loop.create_connection(
            MyProto, 'example.com', 80, family=socket.AF_INET,
            local_addr=(None, 8080))
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
    @patch_socket
    def test_create_connection_bluetooth(self, m_socket):
        # See http://bugs.python.org/issue27136, fallback to getaddrinfo when
        # we can't recognize an address is resolved, e.g. a Bluetooth address.
        addr = ('00:01:02:03:04:05', 1)
        def getaddrinfo(host, port, *args, **kw):
            assert (host, port) == addr
            return [(999, 1, 999, '', (addr, 1))]
        m_socket.getaddrinfo = getaddrinfo
        sock = m_socket.socket()
        coro = self.loop.sock_connect(sock, addr)
        self.loop.run_until_complete(coro)
    def test_create_connection_ssl_server_hostname_default(self):
        """server_hostname passed to _make_ssl_transport must default to the
        connection host, and honor explicit (including empty) overrides."""
        self.loop.getaddrinfo = mock.Mock()
        def mock_getaddrinfo(*args, **kwds):
            f = asyncio.Future(loop=self.loop)
            f.set_result([(socket.AF_INET, socket.SOCK_STREAM,
                           socket.SOL_TCP, '', ('1.2.3.4', 80))])
            return f
        self.loop.getaddrinfo.side_effect = mock_getaddrinfo
        self.loop.sock_connect = mock.Mock()
        self.loop.sock_connect.return_value = self.loop.create_future()
        self.loop.sock_connect.return_value.set_result(None)
        self.loop._make_ssl_transport = mock.Mock()
        class _SelectorTransportMock:
            _sock = None
            def get_extra_info(self, key):
                return mock.Mock()
            def close(self):
                self._sock.close()
        def mock_make_ssl_transport(sock, protocol, sslcontext, waiter,
                                    **kwds):
            # Resolve the waiter so create_connection() completes.
            waiter.set_result(None)
            transport = _SelectorTransportMock()
            transport._sock = sock
            return transport
        self.loop._make_ssl_transport.side_effect = mock_make_ssl_transport
        ANY = mock.ANY
        handshake_timeout = object()
        # First try the default server_hostname.
        self.loop._make_ssl_transport.reset_mock()
        coro = self.loop.create_connection(
                MyProto, 'python.org', 80, ssl=True,
                ssl_handshake_timeout=handshake_timeout)
        transport, _ = self.loop.run_until_complete(coro)
        transport.close()
        self.loop._make_ssl_transport.assert_called_with(
            ANY, ANY, ANY, ANY,
            server_side=False,
            server_hostname='python.org',
            ssl_handshake_timeout=handshake_timeout)
        # Next try an explicit server_hostname.
        self.loop._make_ssl_transport.reset_mock()
        coro = self.loop.create_connection(
                MyProto, 'python.org', 80, ssl=True,
                server_hostname='perl.com',
                ssl_handshake_timeout=handshake_timeout)
        transport, _ = self.loop.run_until_complete(coro)
        transport.close()
        self.loop._make_ssl_transport.assert_called_with(
            ANY, ANY, ANY, ANY,
            server_side=False,
            server_hostname='perl.com',
            ssl_handshake_timeout=handshake_timeout)
        # Finally try an explicit empty server_hostname.
        self.loop._make_ssl_transport.reset_mock()
        coro = self.loop.create_connection(
                MyProto, 'python.org', 80, ssl=True,
                server_hostname='',
                ssl_handshake_timeout=handshake_timeout)
        transport, _ = self.loop.run_until_complete(coro)
        transport.close()
        self.loop._make_ssl_transport.assert_called_with(
            ANY, ANY, ANY, ANY,
            server_side=False,
            server_hostname='',
            ssl_handshake_timeout=handshake_timeout)
def test_create_connection_no_ssl_server_hostname_errors(self):
# When not using ssl, server_hostname must be None.
coro = self.loop.create_connection(MyProto, 'python.org', 80,
server_hostname='')
self.assertRaises(ValueError, self.loop.run_until_complete, coro)
coro = self.loop.create_connection(MyProto, 'python.org', 80,
server_hostname='python.org')
self.assertRaises(ValueError, self.loop.run_until_complete, coro)
def test_create_connection_ssl_server_hostname_errors(self):
# When using ssl, server_hostname may be None if host is non-empty.
coro = self.loop.create_connection(MyProto, '', 80, ssl=True)
self.assertRaises(ValueError, self.loop.run_until_complete, coro)
coro = self.loop.create_connection(MyProto, None, 80, ssl=True)
self.assertRaises(ValueError, self.loop.run_until_complete, coro)
sock = socket.socket()
coro = self.loop.create_connection(MyProto, None, None,
ssl=True, sock=sock)
self.addCleanup(sock.close)
self.assertRaises(ValueError, self.loop.run_until_complete, coro)
    def test_create_connection_ssl_timeout_for_plain_socket(self):
        """ssl_handshake_timeout without ssl must raise ValueError."""
        coro = self.loop.create_connection(
            MyProto, 'example.com', 80, ssl_handshake_timeout=1)
        with self.assertRaisesRegex(
                ValueError,
                'ssl_handshake_timeout is only meaningful with ssl'):
            self.loop.run_until_complete(coro)
def test_create_server_empty_host(self):
# if host is empty string use None instead
host = object()
@asyncio.coroutine
def getaddrinfo(*args, **kw):
nonlocal host
host = args[0]
yield from []
def getaddrinfo_task(*args, **kwds):
return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
self.loop.getaddrinfo = getaddrinfo_task
fut = self.loop.create_server(MyProto, '', 0)
self.assertRaises(OSError, self.loop.run_until_complete, fut)
self.assertIsNone(host)
def test_create_server_host_port_sock(self):
fut = self.loop.create_server(
MyProto, '0.0.0.0', 0, sock=object())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
def test_create_server_no_host_port_sock(self):
fut = self.loop.create_server(MyProto)
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
def test_create_server_no_getaddrinfo(self):
getaddrinfo = self.loop.getaddrinfo = mock.Mock()
getaddrinfo.return_value = self.loop.create_future()
getaddrinfo.return_value.set_result(None)
f = self.loop.create_server(MyProto, 'python.org', 0)
self.assertRaises(OSError, self.loop.run_until_complete, f)
    @patch_socket
    def test_create_server_nosoreuseport(self, m_socket):
        """reuse_port=True must fail when SO_REUSEPORT does not exist."""
        m_socket.getaddrinfo = socket.getaddrinfo
        del m_socket.SO_REUSEPORT
        m_socket.socket.return_value = mock.Mock()
        f = self.loop.create_server(
            MyProto, '0.0.0.0', 0, reuse_port=True)
        self.assertRaises(ValueError, self.loop.run_until_complete, f)
    @patch_socket
    def test_create_server_soreuseport_only_defined(self, m_socket):
        """reuse_port=True must fail when SO_REUSEPORT is defined but not
        actually supported (setsockopt would fail)."""
        m_socket.getaddrinfo = socket.getaddrinfo
        m_socket.socket.return_value = mock.Mock()
        m_socket.SO_REUSEPORT = -1
        f = self.loop.create_server(
            MyProto, '0.0.0.0', 0, reuse_port=True)
        self.assertRaises(ValueError, self.loop.run_until_complete, f)
    @patch_socket
    def test_create_server_cant_bind(self, m_socket):
        """A bind() failure must raise OSError and close the socket."""
        class Err(OSError):
            strerror = 'error'
        m_socket.getaddrinfo.return_value = [
            (2, 1, 6, '', ('127.0.0.1', 10100))]
        m_socket.getaddrinfo._is_coroutine = False
        m_sock = m_socket.socket.return_value = mock.Mock()
        m_sock.bind.side_effect = Err
        fut = self.loop.create_server(MyProto, '0.0.0.0', 0)
        self.assertRaises(OSError, self.loop.run_until_complete, fut)
        self.assertTrue(m_sock.close.called)
    @patch_socket
    def test_create_datagram_endpoint_no_addrinfo(self, m_socket):
        """An empty getaddrinfo() result must surface as OSError."""
        m_socket.getaddrinfo.return_value = []
        m_socket.getaddrinfo._is_coroutine = False
        coro = self.loop.create_datagram_endpoint(
            MyDatagramProto, local_addr=('localhost', 0))
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
    def test_create_datagram_endpoint_addr_error(self):
        """local_addr must be a (host, port) 2-tuple; anything else is
        rejected with AssertionError."""
        coro = self.loop.create_datagram_endpoint(
            MyDatagramProto, local_addr='localhost')
        self.assertRaises(
            AssertionError, self.loop.run_until_complete, coro)
        coro = self.loop.create_datagram_endpoint(
            MyDatagramProto, local_addr=('localhost', 1, 2, 3))
        self.assertRaises(
            AssertionError, self.loop.run_until_complete, coro)
    def test_create_datagram_endpoint_connect_err(self):
        """An OSError from sock_connect() must propagate to the caller."""
        self.loop.sock_connect = mock.Mock()
        self.loop.sock_connect.side_effect = OSError
        coro = self.loop.create_datagram_endpoint(
            asyncio.DatagramProtocol, remote_addr=('127.0.0.1', 0))
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
    @patch_socket
    def test_create_datagram_endpoint_socket_err(self, m_socket):
        """A socket() creation failure must propagate as OSError for both
        family-only and local_addr variants."""
        m_socket.getaddrinfo = socket.getaddrinfo
        m_socket.socket.side_effect = OSError
        coro = self.loop.create_datagram_endpoint(
            asyncio.DatagramProtocol, family=socket.AF_INET)
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
        coro = self.loop.create_datagram_endpoint(
            asyncio.DatagramProtocol, local_addr=('127.0.0.1', 0))
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
    @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 not supported or enabled')
    def test_create_datagram_endpoint_no_matching_family(self):
        """An IPv4 remote with an IPv6 local address must raise
        ValueError (no common address family)."""
        coro = self.loop.create_datagram_endpoint(
            asyncio.DatagramProtocol,
            remote_addr=('127.0.0.1', 0), local_addr=('::1', 0))
        self.assertRaises(
            ValueError, self.loop.run_until_complete, coro)
    @patch_socket
    def test_create_datagram_endpoint_setblk_err(self, m_socket):
        """A setblocking() failure must raise OSError and close the
        socket."""
        m_socket.socket.return_value.setblocking.side_effect = OSError
        coro = self.loop.create_datagram_endpoint(
            asyncio.DatagramProtocol, family=socket.AF_INET)
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
        self.assertTrue(
            m_socket.socket.return_value.close.called)
    def test_create_datagram_endpoint_noaddr_nofamily(self):
        """No address and no family given must raise ValueError."""
        coro = self.loop.create_datagram_endpoint(
            asyncio.DatagramProtocol)
        self.assertRaises(ValueError, self.loop.run_until_complete, coro)
    @patch_socket
    def test_create_datagram_endpoint_cant_bind(self, m_socket):
        """A bind() failure must propagate unchanged and close the
        socket."""
        class Err(OSError):
            pass
        m_socket.getaddrinfo = socket.getaddrinfo
        m_sock = m_socket.socket.return_value = mock.Mock()
        m_sock.bind.side_effect = Err
        fut = self.loop.create_datagram_endpoint(
            MyDatagramProto,
            local_addr=('127.0.0.1', 0), family=socket.AF_INET)
        self.assertRaises(Err, self.loop.run_until_complete, fut)
        self.assertTrue(m_sock.close.called)
def test_create_datagram_endpoint_sock(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(('127.0.0.1', 0))
fut = self.loop.create_datagram_endpoint(
lambda: MyDatagramProto(create_future=True, loop=self.loop),
sock=sock)
transport, protocol = self.loop.run_until_complete(fut)
transport.close()
self.loop.run_until_complete(protocol.done)
self.assertEqual('CLOSED', protocol.state)
    @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
    def test_create_datagram_endpoint_sock_unix(self):
        """family=AF_UNIX must yield an AF_UNIX datagram transport."""
        fut = self.loop.create_datagram_endpoint(
            lambda: MyDatagramProto(create_future=True, loop=self.loop),
            family=socket.AF_UNIX)
        transport, protocol = self.loop.run_until_complete(fut)
        assert transport._sock.family == socket.AF_UNIX
        transport.close()
        self.loop.run_until_complete(protocol.done)
        self.assertEqual('CLOSED', protocol.state)
def test_create_datagram_endpoint_sock_sockopts(self):
class FakeSock:
type = socket.SOCK_DGRAM
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, local_addr=('127.0.0.1', 0), sock=FakeSock())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, remote_addr=('127.0.0.1', 0), sock=FakeSock())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, family=1, sock=FakeSock())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, proto=1, sock=FakeSock())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, flags=1, sock=FakeSock())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, reuse_address=True, sock=FakeSock())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, reuse_port=True, sock=FakeSock())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, allow_broadcast=True, sock=FakeSock())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
def test_create_datagram_endpoint_sockopts(self):
# Socket options should not be applied unless asked for.
# SO_REUSEADDR defaults to on for UNIX.
# SO_REUSEPORT is not available on all platforms.
coro = self.loop.create_datagram_endpoint(
lambda: MyDatagramProto(create_future=True, loop=self.loop),
local_addr=('127.0.0.1', 0))
transport, protocol = self.loop.run_until_complete(coro)
sock = transport.get_extra_info('socket')
reuse_address_default_on = (
os.name == 'posix' and sys.platform != 'cygwin')
reuseport_supported = hasattr(socket, 'SO_REUSEPORT')
if reuse_address_default_on:
self.assertTrue(
sock.getsockopt(
socket.SOL_SOCKET, socket.SO_REUSEADDR))
else:
self.assertFalse(
sock.getsockopt(
socket.SOL_SOCKET, socket.SO_REUSEADDR))
if reuseport_supported:
self.assertFalse(
sock.getsockopt(
socket.SOL_SOCKET, socket.SO_REUSEPORT))
self.assertFalse(
sock.getsockopt(
socket.SOL_SOCKET, socket.SO_BROADCAST))
transport.close()
self.loop.run_until_complete(protocol.done)
self.assertEqual('CLOSED', protocol.state)
coro = self.loop.create_datagram_endpoint(
lambda: MyDatagramProto(create_future=True, loop=self.loop),
local_addr=('127.0.0.1', 0),
reuse_address=True,
reuse_port=reuseport_supported,
allow_broadcast=True)
transport, protocol = self.loop.run_until_complete(coro)
sock = transport.get_extra_info('socket')
self.assertTrue(
sock.getsockopt(
socket.SOL_SOCKET, socket.SO_REUSEADDR))
if reuseport_supported:
self.assertTrue(
sock.getsockopt(
socket.SOL_SOCKET, socket.SO_REUSEPORT))
self.assertTrue(
sock.getsockopt(
socket.SOL_SOCKET, socket.SO_BROADCAST))
transport.close()
self.loop.run_until_complete(protocol.done)
self.assertEqual('CLOSED', protocol.state)
@patch_socket
def test_create_datagram_endpoint_nosoreuseport(self, m_socket):
    """reuse_port=True must raise ValueError when SO_REUSEPORT is unavailable."""
    # Simulate a platform without SO_REUSEPORT by deleting it from the mock.
    del m_socket.SO_REUSEPORT
    m_socket.socket.return_value = mock.Mock()

    coro = self.loop.create_datagram_endpoint(
        lambda: MyDatagramProto(loop=self.loop),
        local_addr=('127.0.0.1', 0),
        reuse_address=False,
        reuse_port=True)

    self.assertRaises(ValueError, self.loop.run_until_complete, coro)
@patch_socket
def test_create_datagram_endpoint_ip_addr(self, m_socket):
    """A literal IP in local_addr must be used directly, skipping getaddrinfo."""
    def getaddrinfo(*args, **kw):
        # Any resolver call is a test failure - the address is already numeric.
        self.fail('should not have called getaddrinfo')

    m_socket.getaddrinfo = getaddrinfo
    m_socket.socket.return_value.bind = bind = mock.Mock()
    self.loop._add_reader = mock.Mock()
    # Mark the mock as a plain callable so the loop does not try to await it.
    self.loop._add_reader._is_coroutine = False

    reuseport_supported = hasattr(socket, 'SO_REUSEPORT')
    coro = self.loop.create_datagram_endpoint(
        lambda: MyDatagramProto(loop=self.loop),
        local_addr=('1.2.3.4', 0),
        reuse_address=False,
        reuse_port=reuseport_supported)

    t, p = self.loop.run_until_complete(coro)
    try:
        bind.assert_called_with(('1.2.3.4', 0))
        m_socket.socket.assert_called_with(family=m_socket.AF_INET,
                                           proto=m_socket.IPPROTO_UDP,
                                           type=m_socket.SOCK_DGRAM)
    finally:
        t.close()
        test_utils.run_briefly(self.loop)  # allow transport to close
def test_accept_connection_retry(self):
    """A transient BlockingIOError from accept() must not close the listener."""
    sock = mock.Mock()
    sock.accept.side_effect = BlockingIOError()

    self.loop._accept_connection(MyProto, sock)
    self.assertFalse(sock.close.called)
@mock.patch('asyncio.base_events.logger')
def test_accept_connection_exception(self, m_log):
    """EMFILE from accept() pauses the listener and schedules a delayed retry."""
    sock = mock.Mock()
    sock.fileno.return_value = 10
    sock.accept.side_effect = OSError(errno.EMFILE, 'Too many open files')
    self.loop._remove_reader = mock.Mock()
    self.loop.call_later = mock.Mock()

    self.loop._accept_connection(MyProto, sock)
    # The error is logged, the listening socket stays open, reading is paused...
    self.assertTrue(m_log.error.called)
    self.assertFalse(sock.close.called)
    self.loop._remove_reader.assert_called_with(10)
    # ...and serving is rescheduled after ACCEPT_RETRY_DELAY seconds.
    self.loop.call_later.assert_called_with(
        constants.ACCEPT_RETRY_DELAY,
        # self.loop._start_serving
        mock.ANY,
        MyProto, sock, None, None, mock.ANY, mock.ANY)
def test_call_coroutine(self):
    """In debug mode, the call_* scheduling APIs reject coroutine funcs/objects."""
    @asyncio.coroutine
    def simple_coroutine():
        pass

    self.loop.set_debug(True)
    coro_func = simple_coroutine
    coro_obj = coro_func()
    # Close the never-awaited coroutine object to avoid a ResourceWarning.
    self.addCleanup(coro_obj.close)

    # Both the coroutine function and an instantiated coroutine are rejected.
    for func in (coro_func, coro_obj):
        with self.assertRaises(TypeError):
            self.loop.call_soon(func)
        with self.assertRaises(TypeError):
            self.loop.call_soon_threadsafe(func)
        with self.assertRaises(TypeError):
            self.loop.call_later(60, func)
        with self.assertRaises(TypeError):
            self.loop.call_at(self.loop.time() + 60, func)
        with self.assertRaises(TypeError):
            self.loop.run_until_complete(
                self.loop.run_in_executor(None, func))
@mock.patch('asyncio.base_events.logger')
def test_log_slow_callbacks(self, m_logger):
    """Debug mode warns about callbacks/tasks exceeding slow_callback_duration."""
    def stop_loop_cb(loop):
        loop.stop()

    @asyncio.coroutine
    def stop_loop_coro(loop):
        yield from ()
        loop.stop()

    asyncio.set_event_loop(self.loop)
    self.loop.set_debug(True)
    # Zero threshold: every executed callback counts as "slow".
    self.loop.slow_callback_duration = 0.0

    # slow callback
    self.loop.call_soon(stop_loop_cb, self.loop)
    self.loop.run_forever()
    fmt, *args = m_logger.warning.call_args[0]
    self.assertRegex(fmt % tuple(args),
                     "^Executing <Handle.*stop_loop_cb.*> "
                     "took .* seconds$")

    # slow task
    asyncio.ensure_future(stop_loop_coro(self.loop), loop=self.loop)
    self.loop.run_forever()
    fmt, *args = m_logger.warning.call_args[0]
    self.assertRegex(fmt % tuple(args),
                     "^Executing <Task.*stop_loop_coro.*> "
                     "took .* seconds$")
class RunningLoopTests(unittest.TestCase):
    """Behaviour when an event loop is already running."""

    def test_running_loop_within_a_loop(self):
        """run_until_complete must refuse to start while another loop is running."""
        @asyncio.coroutine
        def runner(loop):
            loop.run_forever()

        loop = asyncio.new_event_loop()
        outer_loop = asyncio.new_event_loop()
        try:
            with self.assertRaisesRegex(RuntimeError,
                                        'while another loop is running'):
                outer_loop.run_until_complete(runner(loop))
        finally:
            loop.close()
            outer_loop.close()
class BaseLoopSockSendfileTests(test_utils.TestCase):
    """sock_sendfile() behaviour on a loop with no native sendfile support.

    Uses a plain BaseSelectorEventLoop so the fallback (read/send chunks)
    code path is exercised; the native path must raise
    SendfileNotAvailableError.
    """

    DATA = b"12345abcde" * 16 * 1024  # 160 KiB

    class MyProto(asyncio.Protocol):
        # Minimal server-side protocol that accumulates everything received
        # and signals connection close via a future.

        def __init__(self, loop):
            self.started = False
            self.closed = False
            self.data = bytearray()
            self.fut = loop.create_future()
            self.transport = None

        def connection_made(self, transport):
            self.started = True
            self.transport = transport

        def data_received(self, data):
            self.data.extend(data)

        def connection_lost(self, exc):
            self.closed = True
            self.fut.set_result(None)
            self.transport = None

        async def wait_closed(self):
            await self.fut

    @classmethod
    def setUpClass(cls):
        # Shrink the fallback read buffer so chunking is actually exercised,
        # and create the fixture file once for the whole class.
        cls.__old_bufsize = constants.SENDFILE_FALLBACK_READBUFFER_SIZE
        constants.SENDFILE_FALLBACK_READBUFFER_SIZE = 1024 * 16
        with open(support.TESTFN, 'wb') as fp:
            fp.write(cls.DATA)
        super().setUpClass()

    @classmethod
    def tearDownClass(cls):
        # Restore the buffer size and remove the fixture file.
        constants.SENDFILE_FALLBACK_READBUFFER_SIZE = cls.__old_bufsize
        support.unlink(support.TESTFN)
        super().tearDownClass()

    def setUp(self):
        from asyncio.selector_events import BaseSelectorEventLoop
        # BaseSelectorEventLoop() has no native implementation
        self.loop = BaseSelectorEventLoop()
        self.set_event_loop(self.loop)
        self.file = open(support.TESTFN, 'rb')
        self.addCleanup(self.file.close)
        super().setUp()

    def make_socket(self, blocking=False):
        # Helper: a TCP socket closed automatically at teardown.
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setblocking(blocking)
        self.addCleanup(sock.close)
        return sock

    def run_loop(self, coro):
        # Convenience wrapper to run a coroutine to completion.
        return self.loop.run_until_complete(coro)

    def prepare(self):
        # Start a server, connect a client socket (with retries), register
        # cleanup, and return the connected (sock, protocol) pair.
        sock = self.make_socket()
        proto = self.MyProto(self.loop)
        server = self.run_loop(self.loop.create_server(
            lambda: proto, support.HOST, 0, family=socket.AF_INET))
        addr = server.sockets[0].getsockname()

        for _ in range(10):
            try:
                self.run_loop(self.loop.sock_connect(sock, addr))
            except OSError:
                self.run_loop(asyncio.sleep(0.5))
                continue
            else:
                break
        else:
            # One last try, so we get the exception
            self.run_loop(self.loop.sock_connect(sock, addr))

        def cleanup():
            server.close()
            self.run_loop(server.wait_closed())
            sock.close()
            if proto.transport is not None:
                proto.transport.close()
                self.run_loop(proto.wait_closed())

        self.addCleanup(cleanup)
        return sock, proto

    def test__sock_sendfile_native_failure(self):
        """The native path must fail cleanly: nothing sent, file untouched."""
        sock, proto = self.prepare()

        with self.assertRaisesRegex(events.SendfileNotAvailableError,
                                    "sendfile is not available"):
            self.run_loop(self.loop._sock_sendfile_native(sock, self.file,
                                                          0, None))

        self.assertEqual(proto.data, b'')
        self.assertEqual(self.file.tell(), 0)

    def test_sock_sendfile_no_fallback(self):
        """fallback=False re-raises the native failure without sending data."""
        sock, proto = self.prepare()

        with self.assertRaisesRegex(events.SendfileNotAvailableError,
                                    "sendfile is not available"):
            self.run_loop(self.loop.sock_sendfile(sock, self.file,
                                                  fallback=False))

        self.assertEqual(self.file.tell(), 0)
        self.assertEqual(proto.data, b'')

    def test_sock_sendfile_fallback(self):
        """The fallback path transfers the whole file."""
        sock, proto = self.prepare()

        ret = self.run_loop(self.loop.sock_sendfile(sock, self.file))
        sock.close()
        self.run_loop(proto.wait_closed())

        self.assertEqual(ret, len(self.DATA))
        self.assertEqual(self.file.tell(), len(self.DATA))
        self.assertEqual(proto.data, self.DATA)

    def test_sock_sendfile_fallback_offset_and_count(self):
        """offset/count select a sub-range and leave the file position after it."""
        sock, proto = self.prepare()

        ret = self.run_loop(self.loop.sock_sendfile(sock, self.file,
                                                    1000, 2000))
        sock.close()
        self.run_loop(proto.wait_closed())

        self.assertEqual(ret, 2000)
        self.assertEqual(self.file.tell(), 3000)
        self.assertEqual(proto.data, self.DATA[1000:3000])

    def test_blocking_socket(self):
        """Blocking sockets are rejected (checked in debug mode)."""
        self.loop.set_debug(True)
        sock = self.make_socket(blocking=True)
        with self.assertRaisesRegex(ValueError, "must be non-blocking"):
            self.run_loop(self.loop.sock_sendfile(sock, self.file))

    def test_nonbinary_file(self):
        """Text-mode files are rejected."""
        sock = self.make_socket()
        with open(support.TESTFN, 'r') as f:
            with self.assertRaisesRegex(ValueError, "binary mode"):
                self.run_loop(self.loop.sock_sendfile(sock, f))

    def test_nonstream_socket(self):
        """Datagram sockets are rejected."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.setblocking(False)
        self.addCleanup(sock.close)
        with self.assertRaisesRegex(ValueError, "only SOCK_STREAM type"):
            self.run_loop(self.loop.sock_sendfile(sock, self.file))

    def test_notint_count(self):
        """Non-integer count raises TypeError."""
        sock = self.make_socket()
        with self.assertRaisesRegex(TypeError,
                                    "count must be a positive integer"):
            self.run_loop(self.loop.sock_sendfile(sock, self.file, 0, 'count'))

    def test_negative_count(self):
        """Negative count raises ValueError."""
        sock = self.make_socket()
        with self.assertRaisesRegex(ValueError,
                                    "count must be a positive integer"):
            self.run_loop(self.loop.sock_sendfile(sock, self.file, 0, -1))

    def test_notint_offset(self):
        """Non-integer offset raises TypeError."""
        sock = self.make_socket()
        with self.assertRaisesRegex(TypeError,
                                    "offset must be a non-negative integer"):
            self.run_loop(self.loop.sock_sendfile(sock, self.file, 'offset'))

    def test_negative_offset(self):
        """Negative offset raises ValueError."""
        sock = self.make_socket()
        with self.assertRaisesRegex(ValueError,
                                    "offset must be a non-negative integer"):
            self.run_loop(self.loop.sock_sendfile(sock, self.file, -1))
class TestSelectorUtils(test_utils.TestCase):
    """Tests for selector event-loop socket helper functions."""

    def check_set_nodelay(self, sock):
        # TCP_NODELAY must be off by default and on after _set_nodelay().
        opt = sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
        self.assertFalse(opt)

        base_events._set_nodelay(sock)

        opt = sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
        self.assertTrue(opt)

    @unittest.skipUnless(hasattr(socket, 'TCP_NODELAY'),
                         'need socket.TCP_NODELAY')
    def test_set_nodelay(self):
        """_set_nodelay works on both blocking and non-blocking sockets."""
        sock = socket.socket(family=socket.AF_INET, type=socket.SOCK_STREAM,
                             proto=socket.IPPROTO_TCP)
        with sock:
            self.check_set_nodelay(sock)

        sock = socket.socket(family=socket.AF_INET, type=socket.SOCK_STREAM,
                             proto=socket.IPPROTO_TCP)
        with sock:
            sock.setblocking(False)
            self.check_set_nodelay(sock)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
FFMG/myoddweb.piger
|
monitor/api/python/Python-3.7.2/Lib/test/test_asyncio/test_base_events.py
|
Python
|
gpl-2.0
| 76,911
|
# -*- coding: utf-8 -*-
import datetime
import random
#from google.appengine.ext import db
from google.appengine.api import memcache
import gdata.calendar.service
"""
from django.utils import simplejson as json
from libs.BeautifulSoup import BeautifulSoup
from google.appengine.api import urlfetch
import xml.dom.minidom
"""
"""
import gdata.youtube.service
import gdata.gauth
import gdata.client
import gdata.data
import atom.http_core
import atom.core
"""
import conf
def cal_subscribed(email):
    """Return the cached 1/0 subscription flag for *email*.

    On a cache miss, falls back to cal_check_acl(), which queries the
    Google Calendar ACL and re-primes the memcache entry.
    """
    token = memcache.get(email, namespace="session")
    # Idiom fix: compare to None with "is", and drop the redundant else.
    if token is None:
        return cal_check_acl(email)
    return token
def cal_check_acl(email):
    """Check the calendar ACL for *email*; cache and return 1 (found) or 0.

    The result is cached for 300 seconds in the "session" memcache
    namespace so repeated page views avoid the ACL round-trip.
    """
    ## Not in memcache, so check the acl
    calendar_service = gdata.calendar.service.CalendarService()
    calendar_service.email = conf.EMAIL
    calendar_service.password = conf.SECRET
    calendar_service.source = 'Google-Calendar_Python_Sample-1.0'
    calendar_service.ProgrammaticLogin()
    ## Get current ACL and iterate thru list till match, if one.
    feed = calendar_service.GetCalendarAclFeed()
    # The enumerate() index was unused - iterate the entries directly.
    for a_rule in feed.entry:
        if a_rule.scope.value == email:
            memcache.set(email, 1, 300, namespace="session")
            return 1
    memcache.set(email, 0, 300, namespace="session")
    return 0
def cal_add_acl(email):
    """Grant *email* editor access to the club calendar and cache the flag."""
    # Authenticate against Google Calendar with the service account.
    service = gdata.calendar.service.CalendarService()
    service.email = conf.EMAIL
    service.password = conf.SECRET
    service.source = 'Google-Calendar_Python_Sample-1.0'
    service.ProgrammaticLogin()

    # Build an ACL entry granting the "editor" role to this user.
    acl_entry = gdata.calendar.CalendarAclEntry()
    acl_entry.scope = gdata.calendar.Scope(value=email)
    acl_entry.scope.type = 'user'
    roleValue = 'http://schemas.google.com/gCal/2005#%s' % ('editor')
    acl_entry.role = gdata.calendar.Role(value=roleValue)

    aclUrl = '/calendar/feeds/fg@freeflightsim.org/acl/full'
    service.InsertAclEntry(acl_entry, aclUrl)

    # Mark the user as subscribed in the session cache (300 s TTL).
    memcache.set(email, 1, 300, namespace="session")
    return 1
|
freeflightsim/fg-flying-club
|
flying-club.appspot.com/app/fetch.py
|
Python
|
gpl-2.0
| 1,924
|
from django import template
from django.template import Node, NodeList
from django.utils.datastructures import SortedDict
register = template.Library()
#==========================
# -*- coding: utf-8 -*-
'''
A smarter {% if %} tag for django templates.
While retaining current Django functionality, it also handles equality,
greater than and less than operators. Some common case examples::
{% if articles|length >= 5 %}...{% endif %}
{% if "ifnotequal tag" != "beautiful" %}...{% endif %}
'''
import unittest
from django import template
register = template.Library()
#===============================================================================
# Calculation objects
#===============================================================================
class BaseCalc(object):
    """Base class for the boolean calculations used by the smart {% if %} tag.

    Wraps one or two resolvable operands (objects with a ``resolve(context)``
    method) plus an optional negation flag; subclasses supply ``calculate``.
    """

    def __init__(self, var1, var2=None, negate=False):
        self.var1 = var1
        self.var2 = var2
        self.negate = negate

    def resolve(self, context):
        """Resolve the operands against *context* and apply calculate().

        Mirrors Django's {% if %} semantics: any error while resolving or
        comparing yields a falsy outcome rather than propagating.
        """
        try:
            var1, var2 = self.resolve_vars(context)
            outcome = self.calculate(var1, var2)
        except Exception:
            # Bug fix: was a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt; only ordinary errors are falsy.
            outcome = False
        if self.negate:
            return not outcome
        return outcome

    def resolve_vars(self, context):
        # var2 is optional - unary operations (plain truth tests) omit it.
        var2 = self.var2 and self.var2.resolve(context)
        return self.var1.resolve(context), var2

    def calculate(self, var1, var2):
        raise NotImplementedError()
class Or(BaseCalc):
    """Logical OR with Python's short-circuit value semantics."""

    def calculate(self, var1, var2):
        # Same value semantics as ``var1 or var2``.
        return var1 if var1 else var2
class And(BaseCalc):
    """Logical AND with Python's short-circuit value semantics."""

    def calculate(self, var1, var2):
        # Same value semantics as ``var1 and var2``.
        return var2 if var1 else var1
class Equals(BaseCalc):
    """Equality comparison (==); "!=" is expressed via the negate flag."""

    def calculate(self, var1, var2):
        outcome = var1 == var2
        return outcome
class Greater(BaseCalc):
    """Strict greater-than (>); "<=" is expressed via the negate flag."""

    def calculate(self, var1, var2):
        outcome = var1 > var2
        return outcome
class GreaterOrEqual(BaseCalc):
    """Greater-or-equal (>=); "<" is expressed via the negate flag."""

    def calculate(self, var1, var2):
        outcome = var1 >= var2
        return outcome
class In(BaseCalc):
    """Membership test (in); "not in" is expressed via the negate flag."""

    def calculate(self, var1, var2):
        outcome = var1 in var2
        return outcome
#===============================================================================
# Tests
#===============================================================================
class TestVar(object):
    """Minimal stand-in for a Django template variable.

    Wraps a fixed value and returns it from ``resolve`` regardless of the
    supplied context; used to exercise the calculation classes in tests.
    """

    def __init__(self, value):
        # The constant this fake "variable" resolves to.
        self.value = value

    def resolve(self, context):
        # Context is accepted for interface compatibility but ignored.
        return self.value
class SmartIfTests(unittest.TestCase):
    """Unit tests for the calculation classes and the IfParser."""

    def setUp(self):
        # Common self-resolving operands used across the tests.
        self.true = TestVar(True)
        self.false = TestVar(False)
        self.high = TestVar(9000)
        self.low = TestVar(1)

    def assertCalc(self, calc, context=None):
        """
        Test a calculation is True, also checking the inverse "negate" case.
        """
        context = context or {}
        self.assert_(calc.resolve(context))
        calc.negate = not calc.negate
        self.assertFalse(calc.resolve(context))

    def assertCalcFalse(self, calc, context=None):
        """
        Test a calculation is False, also checking the inverse "negate" case.
        """
        context = context or {}
        self.assertFalse(calc.resolve(context))
        calc.negate = not calc.negate
        self.assert_(calc.resolve(context))

    def test_or(self):
        self.assertCalc(Or(self.true))
        self.assertCalcFalse(Or(self.false))
        self.assertCalc(Or(self.true, self.true))
        self.assertCalc(Or(self.true, self.false))
        self.assertCalc(Or(self.false, self.true))
        self.assertCalcFalse(Or(self.false, self.false))

    def test_and(self):
        self.assertCalc(And(self.true, self.true))
        self.assertCalcFalse(And(self.true, self.false))
        self.assertCalcFalse(And(self.false, self.true))
        self.assertCalcFalse(And(self.false, self.false))

    def test_equals(self):
        self.assertCalc(Equals(self.low, self.low))
        self.assertCalcFalse(Equals(self.low, self.high))

    def test_greater(self):
        self.assertCalc(Greater(self.high, self.low))
        self.assertCalcFalse(Greater(self.low, self.low))
        self.assertCalcFalse(Greater(self.low, self.high))

    def test_greater_or_equal(self):
        self.assertCalc(GreaterOrEqual(self.high, self.low))
        self.assertCalc(GreaterOrEqual(self.low, self.low))
        self.assertCalcFalse(GreaterOrEqual(self.low, self.high))

    def test_in(self):
        list_ = TestVar([1,2,3])
        invalid_list = TestVar(None)
        self.assertCalc(In(self.low, list_))
        # Membership against None raises inside calculate(); resolve() -> False.
        self.assertCalcFalse(In(self.low, invalid_list))

    def test_parse_bits(self):
        # IfParser wraps raw values in TestVar via create_var().
        var = IfParser([True]).parse()
        self.assert_(var.resolve({}))
        var = IfParser([False]).parse()
        self.assertFalse(var.resolve({}))

        var = IfParser([False, 'or', True]).parse()
        self.assert_(var.resolve({}))

        var = IfParser([False, 'and', True]).parse()
        self.assertFalse(var.resolve({}))

        var = IfParser(['not', False, 'and', 'not', False]).parse()
        self.assert_(var.resolve({}))

        var = IfParser([1, '=', 1]).parse()
        self.assert_(var.resolve({}))

        var = IfParser([1, '!=', 1]).parse()
        self.assertFalse(var.resolve({}))

        var = IfParser([3, '>', 2]).parse()
        self.assert_(var.resolve({}))

        var = IfParser([1, '<', 2]).parse()
        self.assert_(var.resolve({}))

        var = IfParser([2, 'not', 'in', [2, 3]]).parse()
        self.assertFalse(var.resolve({}))

    def test_boolean(self):
        # Operators fold left-to-right with no precedence between and/or.
        var = IfParser([True, 'and', True, 'and', True]).parse()
        self.assert_(var.resolve({}))
        var = IfParser([False, 'or', False, 'or', True]).parse()
        self.assert_(var.resolve({}))
        var = IfParser([True, 'and', False, 'or', True]).parse()
        self.assert_(var.resolve({}))
        var = IfParser([False, 'or', True, 'and', True]).parse()
        self.assert_(var.resolve({}))

        var = IfParser([True, 'and', True, 'and', False]).parse()
        self.assertFalse(var.resolve({}))
        var = IfParser([False, 'or', False, 'or', False]).parse()
        self.assertFalse(var.resolve({}))
        var = IfParser([False, 'or', True, 'and', False]).parse()
        self.assertFalse(var.resolve({}))
        var = IfParser([False, 'and', True, 'or', False]).parse()
        self.assertFalse(var.resolve({}))
# Token -> (calculation class, truth sense).  The boolean says whether the
# class's outcome is used as-is (True) or negated (False): e.g. "<=" is
# implemented as "not Greater" and "!=" as "not Equals".
OPERATORS = {
    '=': (Equals, True),
    '==': (Equals, True),
    '!=': (Equals, False),
    '>': (Greater, True),
    '>=': (GreaterOrEqual, True),
    '<=': (Greater, False),
    '<': (GreaterOrEqual, False),
    'or': (Or, True),
    'and': (And, True),
    'in': (In, True),
}
class IfParser(object):
    """Parse a flat token list (e.g. ``[x, 'and', 'not', y]``) into a
    left-associative tree of BaseCalc objects."""

    error_class = ValueError

    def __init__(self, tokens):
        self.tokens = tokens

    def _get_tokens(self):
        return self._tokens

    def _set_tokens(self, tokens):
        # Keep len/pos in sync whenever the token list is (re)assigned.
        self._tokens = tokens
        self.len = len(tokens)
        self.pos = 0

    tokens = property(_get_tokens, _set_tokens)

    def parse(self):
        """Consume all tokens and return the root calculation object."""
        if self.at_end():
            raise self.error_class('No variables provided.')
        var1 = self.get_var()
        while not self.at_end():
            token = self.get_token()
            if token == 'not':
                # "not" before an operator negates that operator's outcome.
                if self.at_end():
                    raise self.error_class('No variable provided after "not".')
                token = self.get_token()
                negate = True
            else:
                negate = False
            if token not in OPERATORS:
                raise self.error_class('%s is not a valid operator.' % token)
            if self.at_end():
                raise self.error_class('No variable provided after "%s"' % token)
            op, true = OPERATORS[token]
            if not true:
                # Operators like "!=" are stored as positive class + negation.
                negate = not negate
            var2 = self.get_var()
            var1 = op(var1, var2, negate=negate)
        return var1

    def get_token(self):
        token = self.tokens[self.pos]
        self.pos += 1
        return token

    def at_end(self):
        return self.pos >= self.len

    def create_var(self, value):
        # Hook point: TemplateIfParser overrides this to build real variables.
        return TestVar(value)

    def get_var(self):
        token = self.get_token()
        if token == 'not':
            # "not" before an operand: wrap it in a negated unary Or.
            if self.at_end():
                raise self.error_class('No variable provided after "not".')
            token = self.get_token()
            return Or(self.create_var(token), negate=True)
        return self.create_var(token)
#===============================================================================
# Actual templatetag code.
#===============================================================================
class TemplateIfParser(IfParser):
    """IfParser wired into Django's template system.

    Raises TemplateSyntaxError on parse problems and compiles tokens into
    real filter expressions via the template parser.
    """

    error_class = template.TemplateSyntaxError

    def __init__(self, parser, *args, **kwargs):
        # Keep a handle on the template parser so create_var() can compile filters.
        self.template_parser = parser
        # Idiom fix: __init__ should not ``return`` the (always-None) result
        # of the superclass initialiser.
        super(TemplateIfParser, self).__init__(*args, **kwargs)

    def create_var(self, value):
        return self.template_parser.compile_filter(value)
class SmartIfNode(template.Node):
    """Template node rendering nodelist_true or nodelist_false based on var."""

    def __init__(self, var, nodelist_true, nodelist_false=None):
        self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
        self.var = var

    def render(self, context):
        # var is a BaseCalc tree; resolution errors already yield False.
        if self.var.resolve(context):
            return self.nodelist_true.render(context)
        if self.nodelist_false:
            return self.nodelist_false.render(context)
        return ''

    def __repr__(self):
        return "<Smart If node>"

    def __iter__(self):
        # Yield child nodes from both branches (Django node-tree protocol).
        for node in self.nodelist_true:
            yield node
        if self.nodelist_false:
            for node in self.nodelist_false:
                yield node

    def get_nodes_by_type(self, nodetype):
        # Standard Django Node API: collect self plus matching descendants.
        nodes = []
        if isinstance(self, nodetype):
            nodes.append(self)
        nodes.extend(self.nodelist_true.get_nodes_by_type(nodetype))
        if self.nodelist_false:
            nodes.extend(self.nodelist_false.get_nodes_by_type(nodetype))
        return nodes
#@register.tag('if')
def smart_if(parser, token):
    '''
    A smarter {% if %} tag for django templates.

    While retaining current Django functionality, it also handles equality,
    greater than and less than operators. Some common case examples::

        {% if articles|length >= 5 %}...{% endif %}
        {% if "ifnotequal tag" != "beautiful" %}...{% endif %}

    Arguments and operators _must_ have a space between them, so
    ``{% if 1>2 %}`` is not a valid smart if tag.

    All supported operators are: ``or``, ``and``, ``in``, ``=`` (or ``==``),
    ``!=``, ``>``, ``>=``, ``<`` and ``<=``.
    '''
    # Everything after the tag name is the expression to parse.
    bits = token.split_contents()[1:]
    var = TemplateIfParser(parser, bits).parse()
    # Collect the true branch up to {% else %} or {% endsmart_if %}.
    nodelist_true = parser.parse(('else', 'endsmart_if'))
    token = parser.next_token()
    if token.contents == 'else':
        nodelist_false = parser.parse(('endsmart_if',))
        parser.delete_first_token()
    else:
        nodelist_false = None
    return SmartIfNode(var, nodelist_true, nodelist_false)
#==========================
# register.tag(f) registers the tag under the function's own name
# ("smart_if"); the "ifinlist" binding just keeps a module-level reference.
ifinlist = register.tag(smart_if)
#==========================
# Based on code found here:
# http://stackoverflow.com/questions/2024660/django-sort-dict-in-template
#
# Required since dict.items|dictsort doesn't seem to work
# when iterating over the keys with a for loop
@register.filter(name='sort')
def listsort(value):
    """Template filter: return a key-sorted copy of a dict or a sorted list.

    Dicts come back as a SortedDict with keys in sorted order; lists come
    back as sorted copies; any other value is returned unchanged.
    """
    if isinstance(value, dict):
        new_dict = SortedDict()
        # Bug fix / idiom: the old ``key_list = value.keys(); key_list.sort()``
        # breaks on Python 3 (dict views have no .sort()); sorted() works on
        # both Python 2 and 3 and does not mutate anything.
        for key in sorted(value):
            new_dict[key] = value[key]
        return new_dict
    elif isinstance(value, list):
        # sorted() returns a new list, leaving the template variable intact.
        return sorted(value)
    else:
        return value
listsort.is_safe = True
|
jantman/cobbler
|
web/cobbler_web/templatetags/site.py
|
Python
|
gpl-2.0
| 11,773
|
# Copyright 2002 by Andrew Dalke. All rights reserved.
# Revisions 2007-2009 copyright by Peter Cock. All rights reserved.
# Revisions 2008-2009 copyright by Cymon J. Cox. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
#
# Note that BioSQL (including the database schema and scripts) is
# available and licensed separately. Please consult www.biosql.org
"""Implementations of Biopython-like Seq objects on top of BioSQL.
This allows retrival of items stored in a BioSQL database using
a biopython-like SeqRecord and Seq interface.
Note: Currently we do not support recording per-letter-annotations
(like quality scores) in BioSQL.
"""
from Bio._py3k import unicode
from Bio import Alphabet
from Bio.Seq import Seq, UnknownSeq
from Bio.SeqRecord import SeqRecord, _RestrictedDict
from Bio import SeqFeature
class DBSeq(Seq):
    """BioSQL equivalent of the Biopython Seq object."""

    def __init__(self, primary_id, adaptor, alphabet, start, length):
        """Create a new DBSeq object referring to a BioSQL entry.

        You wouldn't normally create a DBSeq object yourself, this is done
        for you when retreiving a DBSeqRecord object from the database.
        """
        # No sequence data is held in memory: primary_id/start/_length
        # describe a window onto a biosequence row; reads go via the adaptor.
        self.primary_id = primary_id
        self.adaptor = adaptor
        self.alphabet = alphabet
        self._length = length
        self.start = start

    def __len__(self):
        return self._length

    def __getitem__(self, index):  # Seq API requirement
        # Note since Python 2.0, __getslice__ is deprecated
        # and __getitem__ is used instead.
        # See http://docs.python.org/ref/sequence-methods.html
        if isinstance(index, int):
            # Return a single letter as a string
            i = index
            if i < 0:
                if -i > self._length:
                    raise IndexError(i)
                i = i + self._length
            elif i >= self._length:
                raise IndexError(i)
            # Fetch exactly one character from the database.
            return self.adaptor.get_subseq_as_string(self.primary_id,
                                                     self.start + i,
                                                     self.start + i + 1)
        if not isinstance(index, slice):
            raise ValueError("Unexpected index type")

        # Return the (sub)sequence as another DBSeq or Seq object
        # (see the Seq obect's __getitem__ method)
        if index.start is None:
            i = 0
        else:
            i = index.start
            if i < 0:
                # Map to equavilent positive index
                if -i > self._length:
                    raise IndexError(i)
                i = i + self._length
            elif i >= self._length:
                # Trivial case, should return empty string!
                i = self._length
        if index.stop is None:
            j = self._length
        else:
            j = index.stop
            if j < 0:
                # Map to equavilent positive index
                if -j > self._length:
                    raise IndexError(j)
                j = j + self._length
            elif j >= self._length:
                j = self._length
        if i >= j:
            # Trivial case, empty string.
            return Seq("", self.alphabet)
        elif index.step is None or index.step == 1:
            # Easy case - can return a DBSeq with the start and end adjusted
            return self.__class__(self.primary_id, self.adaptor, self.alphabet,
                                  self.start + i, j - i)
        else:
            # Tricky. Will have to create a Seq object because of the stride
            full = self.adaptor.get_subseq_as_string(self.primary_id,
                                                     self.start + i,
                                                     self.start + j)
            return Seq(full[::index.step], self.alphabet)

    def tostring(self):
        """Returns the full sequence as a python string (DEPRECATED).

        You are now encouraged to use str(my_seq) instead of
        my_seq.tostring()."""
        import warnings
        warnings.warn("This method is obsolete; please use str(my_seq) "
                      "instead of my_seq.tostring().",
                      PendingDeprecationWarning)
        return self.adaptor.get_subseq_as_string(self.primary_id,
                                                 self.start,
                                                 self.start + self._length)

    def __str__(self):
        """Returns the full sequence as a python string."""
        return self.adaptor.get_subseq_as_string(self.primary_id,
                                                 self.start,
                                                 self.start + self._length)

    # Legacy Seq-style attribute access (triggers the deprecation warning).
    data = property(tostring, doc="Sequence as string (DEPRECATED)")

    def toseq(self):
        """Returns the full sequence as a Seq object."""
        # Note - the method name copies that of the MutableSeq object
        return Seq(str(self), self.alphabet)

    def __add__(self, other):
        # Let the Seq object deal with the alphabet issues etc
        return self.toseq() + other

    def __radd__(self, other):
        # Let the Seq object deal with the alphabet issues etc
        return other + self.toseq()
def _retrieve_seq(adaptor, primary_id):
    """Return a DBSeq/UnknownSeq for a bioentry, or None if no row exists.

    The first query fetches only the lengths (cheap); a second query for
    the seq column is only issued when length(seq) is NULL, i.e. an
    UnknownSeq was stored.
    """
    # The database schema ensures there will be only one matching
    # row in the table.
    # If an UnknownSeq was recorded, seq will be NULL,
    # but length will be populated. This means length(seq)
    # will return None.
    seqs = adaptor.execute_and_fetchall(
        "SELECT alphabet, length, length(seq) FROM biosequence"
        " WHERE bioentry_id = %s", (primary_id,))
    if not seqs:
        return
    assert len(seqs) == 1
    moltype, given_length, length = seqs[0]

    try:
        length = int(length)
        # Bug fix: this used to read ``given_length = int(length)``, which
        # made the consistency assertion below vacuous.  Compare the stored
        # length column against the actual sequence length instead.
        given_length = int(given_length)
        assert length == given_length
        have_seq = True
    except TypeError:
        # length(seq) was NULL -> an UnknownSeq was stored; re-query just to
        # confirm the seq column really is NULL/empty.
        assert length is None
        seqs = adaptor.execute_and_fetchall(
            "SELECT alphabet, length, seq FROM biosequence"
            " WHERE bioentry_id = %s", (primary_id,))
        assert len(seqs) == 1
        moltype, given_length, seq = seqs[0]
        assert seq is None or seq == ""
        length = int(given_length)
        have_seq = False
        del seq
    del given_length

    moltype = moltype.lower()  # might be upper case in database
    # We have no way of knowing if these sequences will use IUPAC
    # alphabets, and we certainly can't assume they are unambiguous!
    if moltype == "dna":
        alphabet = Alphabet.generic_dna
    elif moltype == "rna":
        alphabet = Alphabet.generic_rna
    elif moltype == "protein":
        alphabet = Alphabet.generic_protein
    elif moltype == "unknown":
        # This is used in BioSQL/Loader.py and would happen
        # for any generic or nucleotide alphabets.
        alphabet = Alphabet.single_letter_alphabet
    else:
        raise AssertionError("Unknown moltype: %s" % moltype)

    if have_seq:
        return DBSeq(primary_id, adaptor, alphabet, 0, int(length))
    else:
        return UnknownSeq(length, alphabet)
def _retrieve_dbxrefs(adaptor, primary_id):
"""Retrieve the database cross references for the sequence."""
_dbxrefs = []
dbxrefs = adaptor.execute_and_fetchall(
"SELECT dbname, accession, version"
" FROM bioentry_dbxref join dbxref using (dbxref_id)"
" WHERE bioentry_id = %s"
" ORDER BY rank", (primary_id,))
for dbname, accession, version in dbxrefs:
if version and version != "0":
v = "%s.%s" % (accession, version)
else:
v = accession
_dbxrefs.append("%s:%s" % (dbname, v))
return _dbxrefs
def _retrieve_features(adaptor, primary_id):
    """Build the list of SeqFeature objects for one bioentry (rank order)."""
    sql = "SELECT seqfeature_id, type.name, rank" \
          " FROM seqfeature join term type on (type_term_id = type.term_id)" \
          " WHERE bioentry_id = %s" \
          " ORDER BY rank"
    results = adaptor.execute_and_fetchall(sql, (primary_id,))
    seq_feature_list = []
    for seqfeature_id, seqfeature_type, seqfeature_rank in results:
        # Get qualifiers [except for db_xref which is stored separately]
        qvs = adaptor.execute_and_fetchall(
            "SELECT name, value"
            " FROM seqfeature_qualifier_value join term using (term_id)"
            " WHERE seqfeature_id = %s"
            " ORDER BY rank", (seqfeature_id,))
        qualifiers = {}
        for qv_name, qv_value in qvs:
            qualifiers.setdefault(qv_name, []).append(qv_value)
        # Get db_xrefs [special case of qualifiers]
        qvs = adaptor.execute_and_fetchall(
            "SELECT dbxref.dbname, dbxref.accession"
            " FROM dbxref join seqfeature_dbxref using (dbxref_id)"
            " WHERE seqfeature_dbxref.seqfeature_id = %s"
            " ORDER BY rank", (seqfeature_id,))
        for qv_name, qv_value in qvs:
            value = "%s:%s" % (qv_name, qv_value)
            qualifiers.setdefault("db_xref", []).append(value)
        # Get locations
        results = adaptor.execute_and_fetchall(
            "SELECT location_id, start_pos, end_pos, strand"
            " FROM location"
            " WHERE seqfeature_id = %s"
            " ORDER BY rank", (seqfeature_id,))
        locations = []
        # convert to Python standard form
        # Convert strand = 0 to strand = None
        # re: comment in Loader.py:
        # Biopython uses None when we don't know strand information but
        # BioSQL requires something (non null) and sets this as zero
        # So we'll use the strand or 0 if Biopython spits out None
        for location_id, start, end, strand in results:
            if start:
                # BioSQL stores 1-based starts; Biopython is 0-based half-open.
                start -= 1
            if strand == 0:
                strand = None
            if strand not in (+1, -1, None):
                raise ValueError("Invalid strand %s found in database for "
                                 "seqfeature_id %s" % (strand, seqfeature_id))
            if end < start:
                import warnings
                from Bio import BiopythonWarning
                warnings.warn("Inverted location start/end (%i and %i) for "
                              "seqfeature_id %s" % (start, end, seqfeature_id),
                              BiopythonWarning)
            locations.append((location_id, start, end, strand))
        # Get possible remote reference information
        remote_results = adaptor.execute_and_fetchall(
            "SELECT location_id, dbname, accession, version"
            " FROM location join dbxref using (dbxref_id)"
            " WHERE seqfeature_id = %s", (seqfeature_id,))
        lookup = {}
        for location_id, dbname, accession, version in remote_results:
            if version and version != "0":
                v = "%s.%s" % (accession, version)
            else:
                v = accession
            # subfeature remote location db_ref are stored as a empty string when
            # not present
            if dbname == "":
                dbname = None
            lookup[location_id] = (dbname, v)
        feature = SeqFeature.SeqFeature(type=seqfeature_type)
        # Store the key as a private property
        feature._seqfeature_id = seqfeature_id
        feature.qualifiers = qualifiers
        if len(locations) == 0:
            # Feature with no location rows - leave location unset.
            pass
        elif len(locations) == 1:
            location_id, start, end, strand = locations[0]
            # See Bug 2677, we currently don't record the location_operator
            # For consistency with older versions Biopython, default to "".
            feature.location_operator = \
                _retrieve_location_qualifier_value(adaptor, location_id)
            dbname, version = lookup.get(location_id, (None, None))
            feature.location = SeqFeature.FeatureLocation(start, end)
            feature.strand = strand
            feature.ref_db = dbname
            feature.ref = version
        else:
            # Multiple location rows -> compound location built from
            # sub-features (legacy representation).
            sub_features = feature.sub_features
            assert sub_features == []
            for location in locations:
                location_id, start, end, strand = location
                dbname, version = lookup.get(location_id, (None, None))
                subfeature = SeqFeature.SeqFeature()
                subfeature.type = seqfeature_type
                subfeature.location = SeqFeature.FeatureLocation(start, end)
                # subfeature.location_operator = \
                #    _retrieve_location_qualifier_value(adaptor, location_id)
                subfeature.strand = strand
                subfeature.ref_db = dbname
                subfeature.ref = version
                sub_features.append(subfeature)
            # Locations are in order, but because of remote locations for
            # sub-features they are not necessarily in numerical order:
            strands = set(sf.strand for sf in sub_features)
            if len(strands) == 1 and -1 in strands:
                # Evil hack time for backwards compatibility
                # TODO - Check if BioPerl and (old) Biopython did the same,
                # we may have an existing incompatibility lurking here...
                locs = [f.location for f in sub_features[::-1]]
            else:
                # All forward, or mixed strands
                locs = [f.location for f in sub_features]
            feature.location = SeqFeature.CompoundLocation(
                locs, seqfeature_type)
            # TODO - See Bug 2677 - we don't yet record location_operator,
            # so for consistency with older versions of Biopython default
            # to assuming its a join.
            feature.location_operator = "join"
        seq_feature_list.append(feature)
    return seq_feature_list
def _retrieve_location_qualifier_value(adaptor, location_id):
value = adaptor.execute_and_fetch_col0(
"SELECT value FROM location_qualifier_value"
" WHERE location_id = %s", (location_id,))
try:
return value[0]
except IndexError:
return ""
def _retrieve_annotations(adaptor, primary_id, taxon_id):
    """Assemble the annotations dictionary for a bioentry (PRIVATE).

    Merges the qualifier values, references, taxonomy information and
    comments fetched from the database into one dictionary.
    """
    raw = {}
    raw.update(_retrieve_qualifier_value(adaptor, primary_id))
    raw.update(_retrieve_reference(adaptor, primary_id))
    raw.update(_retrieve_taxon(adaptor, primary_id, taxon_id))
    raw.update(_retrieve_comment(adaptor, primary_id))
    # Convert values into strings in cases of unicode from the database.
    # BioSQL could eventually be expanded to be unicode aware.
    cleaned = {}
    for key in raw:
        value = raw[key]
        if isinstance(value, list):
            cleaned[key] = [_make_unicode_into_string(item) for item in value]
        elif isinstance(value, unicode):
            cleaned[key] = str(value)
        else:
            cleaned[key] = value
    return cleaned
def _make_unicode_into_string(text):
    """Return a plain string for unicode input, other values unchanged (PRIVATE)."""
    return str(text) if isinstance(text, unicode) else text
def _retrieve_qualifier_value(adaptor, primary_id):
qvs = adaptor.execute_and_fetchall(
"SELECT name, value"
" FROM bioentry_qualifier_value JOIN term USING (term_id)"
" WHERE bioentry_id = %s"
" ORDER BY rank", (primary_id,))
qualifiers = {}
for name, value in qvs:
if name == "keyword":
name = "keywords"
# See handling of "date" in Loader.py
elif name == "date_changed":
name = "date"
elif name == "secondary_accession":
name = "accessions"
qualifiers.setdefault(name, []).append(value)
return qualifiers
def _retrieve_reference(adaptor, primary_id):
    """Fetch the literature references of a bioentry (PRIVATE).

    Returns {"references": [SeqFeature.Reference, ...]} in rank order,
    or an empty dict when the entry has no references at all.
    """
    # XXX dbxref_qualifier_value
    rows = adaptor.execute_and_fetchall(
        "SELECT start_pos, end_pos, "
        " location, title, authors,"
        " dbname, accession"
        " FROM bioentry_reference"
        " JOIN reference USING (reference_id)"
        " LEFT JOIN dbxref USING (dbxref_id)"
        " WHERE bioentry_id = %s"
        " ORDER BY rank", (primary_id,))
    references = []
    for start, end, location, title, authors, dbname, accession in rows:
        ref = SeqFeature.Reference()
        # If the start/end are missing, reference.location stays an
        # empty list (the Reference default).
        if start is not None or end is not None:
            if start is not None:
                start -= 1  # python counting
            ref.location = [SeqFeature.FeatureLocation(start, end)]
        # Don't replace the default "" with None.
        if authors:
            ref.authors = authors
        if title:
            ref.title = title
        ref.journal = location
        if dbname == 'PUBMED':
            ref.pubmed_id = accession
        elif dbname == 'MEDLINE':
            ref.medline_id = accession
        references.append(ref)
    if not references:
        return {}
    return {'references': references}
def _retrieve_taxon(adaptor, primary_id, taxon_id):
a = {}
common_names = adaptor.execute_and_fetch_col0(
"SELECT name FROM taxon_name WHERE taxon_id = %s"
" AND name_class = 'genbank common name'", (taxon_id,))
if common_names:
a['source'] = common_names[0]
scientific_names = adaptor.execute_and_fetch_col0(
"SELECT name FROM taxon_name WHERE taxon_id = %s"
" AND name_class = 'scientific name'", (taxon_id,))
if scientific_names:
a['organism'] = scientific_names[0]
ncbi_taxids = adaptor.execute_and_fetch_col0(
"SELECT ncbi_taxon_id FROM taxon WHERE taxon_id = %s", (taxon_id,))
if ncbi_taxids and ncbi_taxids[0] and ncbi_taxids[0] != "0":
a['ncbi_taxid'] = ncbi_taxids[0]
# Old code used the left/right values in the taxon table to get the
# taxonomy lineage in one SQL command. This was actually very slow,
# and would fail if the (optional) left/right values were missing.
#
# The following code is based on a contribution from Eric Gibert, and
# relies on the taxon table's parent_taxon_id field only (ignoring the
# optional left/right values). This means that it has to make a
# separate SQL query for each entry in the lineage, but it does still
# appear to be *much* faster. See Bug 2494.
taxonomy = []
while taxon_id:
name, rank, parent_taxon_id = adaptor.execute_one(
"SELECT taxon_name.name, taxon.node_rank, taxon.parent_taxon_id"
" FROM taxon, taxon_name"
" WHERE taxon.taxon_id=taxon_name.taxon_id"
" AND taxon_name.name_class='scientific name'"
" AND taxon.taxon_id = %s", (taxon_id,))
if taxon_id == parent_taxon_id:
# If the taxon table has been populated by the BioSQL script
# load_ncbi_taxonomy.pl this is how top parent nodes are stored.
# Personally, I would have used a NULL parent_taxon_id here.
break
if rank != "no rank":
# For consistency with older versions of Biopython, we are only
# interested in taxonomy entries with a stated rank.
# Add this to the start of the lineage list.
taxonomy.insert(0, name)
taxon_id = parent_taxon_id
if taxonomy:
a['taxonomy'] = taxonomy
return a
def _retrieve_comment(adaptor, primary_id):
qvs = adaptor.execute_and_fetchall(
"SELECT comment_text FROM comment"
" WHERE bioentry_id=%s"
" ORDER BY rank", (primary_id,))
comments = [comm[0] for comm in qvs]
# Don't want to add an empty list...
if comments:
return {"comment": comments}
else:
return {}
class DBSeqRecord(SeqRecord):
    """BioSQL equivalent of the Biopython SeqRecord object."""

    def __init__(self, adaptor, primary_id):
        """Create a DBSeqRecord bound to one bioentry row.

        Arguments:
         - adaptor - BioSQL adaptor used for all database access.
         - primary_id - the bioentry_id of the record to expose.

        Only the core bioentry columns are fetched here; the sequence,
        dbxrefs, features and annotations are loaded lazily on first
        access through the properties below.
        """
        self._adaptor = adaptor
        self._primary_id = primary_id
        (self._biodatabase_id, self._taxon_id, self.name,
         accession, version, self._identifier,
         self._division, self.description) = self._adaptor.execute_one(
            "SELECT biodatabase_id, taxon_id, name, accession, version,"
            " identifier, division, description"
            " FROM bioentry"
            " WHERE bioentry_id = %s", (self._primary_id,))
        if version and version != "0":
            self.id = "%s.%s" % (accession, version)
        else:
            self.id = accession
        # We don't yet record any per-letter-annotations in the
        # BioSQL database, but we should set this property up
        # for completeness (and the __str__ method).
        try:
            length = len(self.seq)
        except Exception:
            # Could be no sequence in the database, in which case the
            # len() call fails.  This was a bare "except:"; narrowed to
            # Exception so KeyboardInterrupt/SystemExit are not swallowed.
            length = 0
        self._per_letter_annotations = _RestrictedDict(length=length)

    def __get_seq(self):
        # Lazily fetch and cache the sequence on first access.
        if not hasattr(self, "_seq"):
            self._seq = _retrieve_seq(self._adaptor, self._primary_id)
        return self._seq

    def __set_seq(self, seq):
        self._seq = seq

    def __del_seq(self):
        del self._seq
    seq = property(__get_seq, __set_seq, __del_seq, "Seq object")

    def __get_dbxrefs(self):
        # Lazily fetch and cache the database cross references.
        if not hasattr(self, "_dbxrefs"):
            self._dbxrefs = _retrieve_dbxrefs(self._adaptor, self._primary_id)
        return self._dbxrefs

    def __set_dbxrefs(self, dbxrefs):
        self._dbxrefs = dbxrefs

    def __del_dbxrefs(self):
        del self._dbxrefs
    dbxrefs = property(__get_dbxrefs, __set_dbxrefs, __del_dbxrefs,
                       "Database cross references")

    def __get_features(self):
        # Lazily fetch and cache the SeqFeature objects.
        if not hasattr(self, "_features"):
            self._features = _retrieve_features(self._adaptor,
                                                self._primary_id)
        return self._features

    def __set_features(self, features):
        self._features = features

    def __del_features(self):
        del self._features
    features = property(__get_features, __set_features, __del_features,
                        "Features")

    def __get_annotations(self):
        # Lazily fetch and cache the annotations dict; the identifier
        # (GI) and division come from the bioentry row itself and are
        # (re)applied on every access, as before.
        if not hasattr(self, "_annotations"):
            self._annotations = _retrieve_annotations(self._adaptor,
                                                      self._primary_id,
                                                      self._taxon_id)
        if self._identifier:
            self._annotations["gi"] = self._identifier
        if self._division:
            self._annotations["data_file_division"] = self._division
        return self._annotations

    def __set_annotations(self, annotations):
        self._annotations = annotations

    def __del_annotations(self):
        del self._annotations
    annotations = property(__get_annotations, __set_annotations,
                           __del_annotations, "Annotations")
|
updownlife/multipleK
|
dependencies/biopython-1.65/build/lib.linux-x86_64-2.7/BioSQL/BioSeq.py
|
Python
|
gpl-2.0
| 22,906
|
from sedot import SEDOT_BASE
import os
import time
import rfc822
class NoStatusError(Exception):
    """Raised when no sync status information exists for a package.

    Arguments:
     - package - name of the package whose status was requested.
     - timestamp - optional timestamp of the specific sync run.
    """

    def __init__(self, package, timestamp=None):
        self.value = package  # kept for backward compatibility
        self.package = package
        self.timestamp = timestamp

    def __str__(self):
        # Bug fix: this used to *print* the repr (Python 2 print
        # statement) and implicitly return None, so str(exc) raised a
        # TypeError.  Return the repr instead.
        return repr(self.value)
class SyncStatus:
    """Status of a single synchronisation run of one package.

    Parses log/sync/<package>/<timestamp>/status.txt (under SEDOT_BASE)
    into attributes.  Raises NoStatusError when the run directory or
    its status file is missing.
    """

    def __init__(self, package, timestamp):
        self.package = package
        self.timestamp = timestamp
        self.data = {}        # raw key -> value pairs from status.txt
        self.success = False  # True when status code is 200
        self.done = True      # False while the run is in progress (100)
        self.code = -1        # process exit code from the "code" key
        self.time = None      # completion time (parsed date tuple)
        self.start = None     # run start time
        self.finish = None    # run finish time
        self.status = -1      # numeric status from the "status" key
        self._load()

    def _load(self):
        """Read and parse status.txt for this run (PRIVATE)."""
        global SEDOT_BASE
        self.dir = os.path.join(SEDOT_BASE, "log", "sync", self.package, self.timestamp)
        if not os.path.isdir(self.dir):
            raise NoStatusError(self.package, self.timestamp)
        try:
            f = open(os.path.join(self.dir, "status.txt"))
        except IOError:
            raise NoStatusError(self.package, self.timestamp)
        # Each line is "<key> <value>"; split on the first space only.
        for line in f.readlines():
            line = line.strip()
            pos = line.find(" ")
            key = line[:pos]
            val = line[pos+1:]
            self.data[key] = val
            if key == "status":
                if val == "200":
                    self.success = True
                elif val == "100":
                    self.done = False
                self.status = int(val)
            # Bug fix: this used "if key in 'time'", which is a substring
            # test and so also matched keys like "", "t" or "ti".
            if key == 'time':
                self.time = self._parse_time(val)
            if key == 'start':
                self.start = self._parse_time(val)
            if key == 'finish':
                self.finish = self._parse_time(val)
            if key == 'code':
                self.code = int(val)
        # Backward compatibility
        if not self.time and self.finish:
            self.time = self.finish

    def _parse_time(self, txt):
        """Parse an RFC 2822 date string into a date tuple (PRIVATE)."""
        return rfc822.parsedate(txt)
class PackageStatus:
    """Aggregate sync status of one package across its recorded runs.

    After construction, `last` holds the newest usable SyncStatus (or
    None) and `success` the newest successful one (or None).
    """

    def __init__(self, package):
        self.package = package
        self.success = None  # newest successful run, if any
        self.last = None     # newest run of any outcome, if any
        self._load()

    def _load(self):
        """Scan the run directories, newest first (PRIVATE)."""
        global SEDOT_BASE
        self.dir = os.path.join(SEDOT_BASE, "log", "sync", self.package)
        if not os.path.isdir(self.dir):
            raise NoStatusError(self.package)
        # TODO: check for "current" symlink first.
        # Timestamp-named directories sort newest-first when reversed.
        dirs = os.listdir(self.dir)
        dirs.sort(reverse=True)
        for timestamp in dirs:
            # Skip the "current" symbolic link
            if os.path.islink(os.path.join(self.dir, timestamp)):
                continue
            try:
                status = SyncStatus(self.package, timestamp)
            except NoStatusError:
                continue
            if status.status == 301:  # Unable to gain lock
                continue
            # Idiom fix: was "self.last == None".
            if self.last is None:
                self.last = status
            if status.success:
                self.success = status
                break
|
fajran/sedot
|
lib/python/sedot/status.py
|
Python
|
gpl-2.0
| 2,386
|
import sys
import traceback
import logging
from virttest import openvswitch
from virttest import versionable_class
from autotest.client.shared import error
from autotest.client.shared import utils
@error.context_aware
def run_load_module(test, params, env):
    """
    Run basic test of OpenVSwitch driver.

    Removes every bridge from the host's OpenVSwitch configuration and
    then repeatedly loads and unloads the openvswitch kernel module
    ("mod_loaditer" iterations, default 100) to stress the driver.

    :param test: Test object (its tmpdir is used for the OVS system).
    :param params: Dictionary with the test parameters.
    :param env: Dictionary with the test environment.
    """
    # _e keeps the original exception info so that a failure during
    # cleanup in the finally block does not mask it.
    _e = None
    ovs = None
    try:
        try:
            error.context("Remove all bridge from OpenVSwitch.")
            ovs = versionable_class.factory(openvswitch.OpenVSwitchSystem)(test.tmpdir)
            ovs.init_system()
            ovs.check()
            for br in ovs.list_br():
                ovs.del_br(br)
            ovs.clean()
            # Stress load/unload of the kernel module.
            for _ in range(int(params.get("mod_loaditer", 100))):
                utils.run("modprobe openvswitch")
                utils.run("rmmod openvswitch")
        except Exception:
            _e = sys.exc_info()
            raise
    finally:
        try:
            if ovs:
                if ovs.cleanup:
                    ovs.clean()
        except Exception:
            e = sys.exc_info()
            if _e is None:
                # No earlier failure: the cleanup error is the real one.
                raise
            else:
                # Log the cleanup failure but re-raise the original error
                # (Python 2 three-argument raise keeps its traceback).
                logging.error("Cleaning function raised exception too: \n" +
                              "".join(traceback.format_exception(e[0],
                                                                 e[1],
                                                                 e[2])))
                raise _e[0], _e[1], _e[2]
|
spiceqa/virt-test
|
openvswitch/tests/load_module.py
|
Python
|
gpl-2.0
| 1,478
|
# distbuild/proxy_event_source.py -- proxy for temporary event sources
#
# Copyright (C) 2012, 2014-2015 Codethink Limited
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
import errno
import logging
import socket
import distbuild
class ProxyEventSource(object):
    '''Proxy event sources that may come and go.

    Wraps an optional underlying event source; while no source is set
    the proxy behaves as an empty, never-finished event source.
    '''

    def __init__(self):
        self.event_source = None

    def get_select_params(self):
        '''Delegate select() parameters, or return empty ones if unset.'''
        source = self.event_source
        if source:
            return source.get_select_params()
        return [], [], [], None

    def get_events(self, r, w, x):
        '''Delegate event retrieval, or return no events if unset.'''
        source = self.event_source
        if source:
            return source.get_events(r, w, x)
        return []

    def is_finished(self):
        '''The proxy itself never finishes.'''
        return False
|
perryl/morph
|
distbuild/proxy_event_source.py
|
Python
|
gpl-2.0
| 1,324
|
## This file is part of Invenio.
## Copyright (C) 2011, 2012, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import time
import invenio.authorlist_config as cfg
from invenio.dbquery import run_sql
def now():
    """
    Return the current unix epoch time stamp as an integer (seconds).
    """
    seconds = time.time()
    return int(seconds)
def clone(paper_id, user_id):
    """
    Clone the whole paper with the given id and return the clone's
    paper information as a dictionary.  When paper_id is falsy (usually
    None) or no such paper exists in the database, a new empty record
    is created, saved and returned instead.

    :param paper_id: id of the paper to clone (may be falsy).
    :param user_id: id of the user who will own the clone.
    """
    clone_id = clone_paper(paper_id, user_id)
    if clone_id == 0:
        # Nothing was cloned (unknown or falsy id): create and persist
        # a fresh empty paper instead.
        data = load(None)
        clone_id = save(None, user_id, data)
    else:
        clone_references(paper_id, clone_id)
        clone_affiliations(paper_id, clone_id)
        clone_authors(paper_id, clone_id)
        data = {}
        load_paper(clone_id, data)
    data[cfg.JSON.PAPER_ID] = clone_id
    return data
def clone_paper(paper_id, user_id):
    """
    Clones the general paper information - i.e. title, collaboration and
    experiment number. Furthermore, the last modified timestamp will be set
    to the current time. All of this is only done, if the requested paper id
    was found in the database, otherwise 0 is returned. This function
    should NOT be called alone as long as you are really sure that you want
    to do this. Refer to clone() instead.

    :param paper_id: id of the paper to copy from.
    :param user_id: unused here; ownership (id_user) is copied from the
        source row by the SELECT.
    :return: auto-generated id of the new row, or 0 when nothing matched
        (the INSERT ... SELECT inserted no rows).
    """
    # Passing None as id lets the database assign a fresh auto-increment id.
    return run_sql("""INSERT INTO aulPAPERS (id, id_user, title, collaboration,
                      experiment_number, last_modified) SELECT %s, id_user, title,
                      collaboration, experiment_number, %s FROM aulPAPERS
                      WHERE id = %s;""", (None, now(), paper_id,))
def clone_references(paper_id, clone_id):
    """
    Clones the references of the paper with the given id and assigns the new
    clone id instead. Returns the clone id again for convenience reasons. The
    function should NOT be used alone as long as you are really sure that you
    want to do this. Have a look on clone() instead.

    :param paper_id: id of the source paper.
    :param clone_id: id of the cloned paper the copies are attached to.
    :return: clone_id, unchanged.
    """
    run_sql("""INSERT INTO aulREFERENCES (item, reference, paper_id)
               SELECT item, reference, %s FROM aulREFERENCES
               WHERE paper_id = %s;""", (clone_id, paper_id,))
    return clone_id
def clone_affiliations(paper_id, clone_id):
    """
    Clones the affiliations of the given paper id and assigns the clone id
    instead. Returns the clone id for convenience reasons. The function should
    NOT be used alone as long as you are really sure that you want to do this.
    Have a look on clone() instead.

    :param paper_id: id of the source paper.
    :param clone_id: id of the cloned paper the copies are attached to.
    :return: clone_id, unchanged.
    """
    run_sql("""INSERT INTO aulAFFILIATIONS (item, acronym, umbrella,
               name_and_address, domain, member, spires_id, paper_id)
               SELECT item, acronym, umbrella, name_and_address,
               domain, member, spires_id, %s FROM aulAFFILIATIONS
               WHERE paper_id = %s;""", (clone_id, paper_id,))
    return clone_id
def clone_authors(paper_id, clone_id):
    """
    Clones the authors of the paper with the passed id and assigns the new clone
    id instead. It also invokes the cloning of the affiliations of the authors.
    The clone id will be returned again for convenience reasons. The function
    should NOT be used alone as long as you are really sure that you want to do
    this. Have a look on clone() instead.

    :param paper_id: id of the source paper.
    :param clone_id: id of the cloned paper the copies are attached to.
    :return: clone_id, unchanged.
    """
    run_sql("""INSERT INTO aulAUTHORS (item, family_name, given_name,
               name_on_paper, status, paper_id)
               SELECT item, family_name, given_name, name_on_paper,
               status, %s FROM aulAUTHORS
               WHERE paper_id = %s;""", (clone_id, paper_id,))
    # The per-author sub-tables must follow their parent rows.
    clone_author_affiliations(paper_id, clone_id)
    clone_author_identifiers(paper_id, clone_id)
    return clone_id
def clone_author_affiliations(paper_id, clone_id):
    """
    Clones the affiliations of the authors of the paper with the given id and
    assigns the new clone id to them. Returns the clone_id again for convenience
    reasons. Should NOT be used alone but only as part of clone() as long as you
    are not really sure what you are doing.

    :param paper_id: id of the source paper.
    :param clone_id: id of the cloned paper the copies are attached to.
    :return: clone_id, unchanged.
    """
    run_sql("""INSERT INTO aulAUTHOR_AFFILIATIONS (item, affiliation_acronym,
               affiliation_status, author_item, paper_id)
               SELECT item, affiliation_acronym, affiliation_status,
               author_item, %s FROM aulAUTHOR_AFFILIATIONS
               WHERE paper_id = %s;""", (clone_id, paper_id,))
    return clone_id
def clone_author_identifiers(paper_id, clone_id):
    """
    Clones the identifiers of the authors of the paper with the given id and
    assigns the new clone id to them. Returns the clone_id again for convenience
    reasons. Should NOT be used alone but only as part of clone() as long as you
    are not really sure what you are doing.

    :param paper_id: id of the source paper.
    :param clone_id: id of the cloned paper the copies are attached to.
    :return: clone_id, unchanged.
    """
    run_sql("""INSERT INTO aulAUTHOR_IDENTIFIERS (item, identifier_number,
               identifier_name, author_item, paper_id)
               SELECT item, identifier_number, identifier_name,
               author_item, %s FROM aulAUTHOR_IDENTIFIERS
               WHERE paper_id = %s;""", (clone_id, paper_id,))
    return clone_id
def delete(paper_id):
    """
    Completely remove the paper with the given id and all dependent rows
    (references, affiliations, authors and their affiliations and
    identifiers) from the database.  There is no backup copy, so be sure
    you want this.  Returns a dict holding the id of the deleted paper.
    """
    for remove in (delete_paper, delete_references, delete_affiliations,
                   delete_authors, delete_author_affiliations,
                   delete_author_identifiers):
        remove(paper_id)
    return {cfg.JSON.PAPER_ID: paper_id}
def delete_paper(paper_id):
    """
    Deletes the general informations of a paper without making any backup copy
    and safety net for the paper with the given id. Should NOT be used alone
    unless you are sure that you want to do this. Refer to delete() instead.
    Returns the paper_id for convenience reasons.

    :param paper_id: id of the paper row to remove.
    :return: paper_id, unchanged.
    """
    run_sql("""DELETE FROM aulPAPERS WHERE id = %s;""", (paper_id,))
    return paper_id
def delete_references(paper_id):
    """
    Deletes the paper references from the database with the given id. It does
    not create any backup copy. Should NOT be used unless you are really sure
    that you want to do this. Refer to delete() instead. Returns the paper_id
    for convenience reasons.

    :param paper_id: id of the paper whose references are removed.
    :return: paper_id, unchanged.
    """
    run_sql("""DELETE FROM aulREFERENCES WHERE paper_id = %s;""", (paper_id,))
    return paper_id
def delete_affiliations(paper_id):
    """
    Deletes the affiliations of the paper with the given paper id completely
    from the database. There is no safety net or backup copy. Should NOT be used
    alone unless you are sure that you want to do this. Refer to delete()
    instead. Returns the paper id for convenience reasons.

    :param paper_id: id of the paper whose affiliations are removed.
    :return: paper_id, unchanged.
    """
    run_sql("""DELETE FROM aulAFFILIATIONS WHERE paper_id = %s;""", (paper_id,))
    return paper_id
def delete_authors(paper_id):
    """
    Deletes the authors of the paper with the given id completely from the
    database. There is no backup copy or safety net, make sure you want to do
    this. This function should NOT be used alone unless you know what you are
    doing. Refer to delete() instead. Returns the paper id for convenience
    reasons.

    :param paper_id: id of the paper whose authors are removed.
    :return: paper_id, unchanged.
    """
    run_sql("""DELETE FROM aulAUTHORS WHERE paper_id = %s;""", (paper_id,))
    return paper_id
def delete_author_affiliations(paper_id):
    """
    Deletes the affiliations of each author that is part of the paper of the
    passed id. There is no backup copy or safety net, so make sure you want to
    call this function. This function should NOT be called alone unless you know
    what you are doing. Refer to delete() instead. Returns the paper id for
    convenience reasons.

    :param paper_id: id of the paper whose author affiliations are removed.
    :return: paper_id, unchanged.
    """
    run_sql("""DELETE FROM aulAUTHOR_AFFILIATIONS
               WHERE paper_id = %s;""", (paper_id,))
    return paper_id
def delete_author_identifiers(paper_id):
    """
    Deletes the identifiers of each author that is part of the paper of the
    passed id. There is no backup copy or safety net, so make sure you want to
    call this function. This function should NOT be called alone unless you know
    what you are doing. Refer to delete() instead. Returns the paper id for
    convenience reasons.

    :param paper_id: id of the paper whose author identifiers are removed.
    :return: paper_id, unchanged.
    """
    run_sql("""DELETE FROM aulAUTHOR_IDENTIFIERS
               WHERE paper_id = %s;""", (paper_id,))
    return paper_id
def itemize(id_user):
    """
    Returns the general information of all papers of the given user, ordered
    descending by the last modification date. The result is a dict with a
    single key 'data' mapping to a list of dictionaries keyed as defined in
    authorlist_config.

    :param id_user: id of the owning user.
    """
    data = {}
    # Bug fix: the query previously used Python %-interpolation instead of
    # bound parameters, bypassing the driver's quoting (SQL-injection risk
    # and broken for non-numeric values).  Pass the parameter tuple instead.
    papers = run_sql("""SELECT id, title, collaboration, experiment_number,
                        last_modified FROM aulPAPERS WHERE id_user = %s
                        ORDER BY last_modified DESC;""", (id_user,))
    out_papers = data.setdefault('data', [])
    for paper in papers:
        paper_id, title, collaboration, experiment_number, last_modified = paper
        out_papers.append({cfg.JSON.PAPER_ID : paper_id,
                           cfg.JSON.PAPER_TITLE : title,
                           cfg.JSON.COLLABORATION : collaboration,
                           cfg.JSON.EXPERIMENT_NUMBER : experiment_number,
                           cfg.JSON.LAST_MODIFIED : last_modified})
    return data
def load(paper_id):
    """
    Load all data of the paper with the given id.  If the id is falsy or
    unknown in the database, a basic empty paper object is returned
    (reusing the requested id unchanged).  The returned object is a
    dictionary using the standard keys defined in authorlist_config.

    :param paper_id: id of the paper to load (may be falsy).
    """
    data = {}
    data[cfg.JSON.PAPER_ID] = load_paper(paper_id, data)
    load_references(paper_id, data)
    load_affiliations(paper_id, data)
    load_authors(paper_id, data)
    return data
def load_paper(paper_id, data):
    """
    Load only the general paper information of the given id into the
    passed data dictionary.  Unknown (or falsy) ids yield empty default
    values with the current time as last-modified stamp.  Should NOT be
    used alone unless you know what you are doing; refer to load()
    instead.  Returns the paper id for convenience reasons.
    """
    rows = run_sql("""SELECT title, collaboration, experiment_number,
                       last_modified FROM aulPAPERS
                       WHERE id = %s;""", (paper_id,))
    if rows:
        (data[cfg.JSON.PAPER_TITLE],
         data[cfg.JSON.COLLABORATION],
         data[cfg.JSON.EXPERIMENT_NUMBER],
         data[cfg.JSON.LAST_MODIFIED]) = rows[0]
    else:
        # TODO add message here
        data[cfg.JSON.PAPER_TITLE] = ''
        data[cfg.JSON.COLLABORATION] = ''
        data[cfg.JSON.EXPERIMENT_NUMBER] = ''
        data[cfg.JSON.LAST_MODIFIED] = now()
    return paper_id
def load_references(paper_id, data):
    """
    Loads only the reference information of the paper with the given id and
    adds them to the passed data dictionary. Should NOT be used alone as long
    as you are not sure what you are doing. Refer to load() instead. Returns
    the passed id for convenience reasons.

    :param paper_id: id of the paper whose references are loaded.
    :param data: dictionary the REFERENCE_IDS key is written into.
    :return: paper_id, unchanged.
    """
    references = run_sql("""SELECT reference FROM aulREFERENCES
                            WHERE paper_id = %s;""", (paper_id,))
    # Flatten the one-column result set into a plain list.
    reference_ids = [reference[0] for reference in references]
    data[cfg.JSON.REFERENCE_IDS] = reference_ids
    return paper_id
def load_affiliations(paper_id, data):
    """
    Loads only the affiliations information of the paper with the given id and
    adds them to the passed data dictionary. Should NOT be used alone as long as
    you do not know what you are doing. Refer to load() instead.

    :param paper_id: id of the paper whose affiliations are loaded.
    :param data: dictionary the AFFILIATIONS_KEY list is written into.
    :return: the data dictionary (NOTE: unlike most siblings this returns
        data, not the paper id).
    """
    result = run_sql("""SELECT item, acronym, umbrella, name_and_address, domain,
                        member, spires_id FROM aulAFFILIATIONS
                        WHERE paper_id = %s ORDER BY item;""", (paper_id,))
    affiliations = data.setdefault(cfg.JSON.AFFILIATIONS_KEY, [])
    for affiliation in result:
        item, acronym, umbrella, name, domain, member, spires_id = affiliation
        # Row layout: 1-based index, placeholder edit column, then fields.
        affiliations.append([item + 1, '', acronym, umbrella, name,
                             domain, bool(member), spires_id])
    return data
def load_authors(paper_id, data):
    """
    Loads the authors information of the paper with the passed id and adds them
    to the passed data dictionary. This function will automatically also load
    all affiliations and identifiers of the respective author. Should NOT be
    used alone as long as you do not know what you are doing. Refer to load()
    instead.

    :param paper_id: id of the paper whose authors are loaded.
    :param data: dictionary the AUTHORS_KEY list is written into.
    :return: the data dictionary (NOTE: unlike most siblings this returns
        data, not the paper id).
    """
    result = run_sql("""SELECT item, family_name, given_name, name_on_paper,
                        status FROM aulAUTHORS
                        WHERE paper_id = %s ORDER BY item;""", (paper_id,))
    authors = data.setdefault(cfg.JSON.AUTHORS_KEY, [])
    for author in result:
        item, family_name, given_name, paper_name, status = author
        author_affiliations = load_author_affiliations(paper_id, item)
        author_identifiers = load_author_identifiers(paper_id, item)
        # Row layout: 1-based index, placeholder edit column, then fields
        # plus the nested affiliation/identifier lists.
        authors.append([item + 1, '', family_name, given_name, paper_name,
                        status, author_affiliations, author_identifiers])
    return data
def load_author_affiliations(paper_id, author_id):
    """
    Loads the affiliations of the author with the passed id that is part of the
    author lists of the paper with the given id. Should NOT be used alone as
    long as you are not sure what you are doing. Refer to load() instead. In
    this case the paper id is NOT returned but the author affiliations.

    :param paper_id: id of the paper the author belongs to.
    :param author_id: the author's item index within the paper.
    :return: list of [acronym, status] pairs, in item order.
    """
    result = run_sql("""SELECT affiliation_acronym, affiliation_status
                        FROM aulAUTHOR_AFFILIATIONS WHERE author_item = %s
                        AND paper_id = %s ORDER BY item;""", (author_id, paper_id,))
    author_affiliations = []
    for author_affiliation in result:
        acronym, status = author_affiliation
        author_affiliations.append([acronym, status])
    return author_affiliations
def load_author_identifiers(paper_id, author_id):
    """
    Loads the identifiers of the author with the passed id that is part of the
    author lists of the paper with the given id. Should NOT be used alone as
    long as you are not sure what you are doing. Refer to load() instead. In
    this case the paper id is NOT returned but the author identifiers.

    :param paper_id: id of the paper the author belongs to.
    :param author_id: the author's item index within the paper.
    :return: list of [number, name] pairs, in item order.
    """
    result = run_sql("""SELECT identifier_number, identifier_name
                        FROM aulAUTHOR_IDENTIFIERS WHERE author_item = %s
                        AND paper_id = %s ORDER BY item;""", (author_id, paper_id,))
    author_identifiers = []
    for author_identifier in result:
        number, name = author_identifier
        author_identifiers.append([number, name])
    return author_identifiers
def save(paper_id, user_id, in_data):
    """
    Persist the passed data dictionary (standard authorlist_config keys)
    under the given paper id.  A falsy or unknown id creates a new data
    set, otherwise the existing one is overwritten.  Returns a dict
    holding the id of the saved data set.

    :param paper_id: id of the paper to overwrite, or None for a new one.
    :param user_id: id of the owning user.
    :param in_data: the paper data to store.
    """
    new_paper_id = save_paper(paper_id, user_id, in_data)
    if paper_id is None:
        # A brand new paper was inserted; adopt the generated id.
        paper_id = new_paper_id
    save_references(paper_id, in_data)
    save_affliations(paper_id, in_data)
    save_authors(paper_id, in_data)
    return {cfg.JSON.PAPER_ID: paper_id}
def save_paper(paper_id, user_id, data):
    """
    Saves the general paper information from the passed data dictionary using
    the standard authorlist_config keys of the paper with the given id. Updates
    the last modified timestamp. Should NOT be used alone as long as you are not
    sure what you are doing. Refer to save() instead.

    :param paper_id: id of the paper, or falsy for a new one.
    :param user_id: id of the owning user (insert only).
    :param data: dictionary with the paper fields.
    :return: the auto-generated id for an insert (what run_sql returns),
        which save() adopts when paper_id was None.
    """
    if (not paper_id):
        # Normalise falsy ids to NULL so auto-increment assigns a new id.
        paper_id = None
    timestamp = now()
    paper_title = data.get(cfg.JSON.PAPER_TITLE)
    if not paper_title:
        paper_title = 'Untitled paper'
    # The tuple holds the insert values followed by the update values;
    # the order must match the %s placeholders of the upsert below.
    data_tuple = ( # insert values
                  paper_id,
                  user_id,
                  paper_title,
                  data[cfg.JSON.COLLABORATION],
                  data[cfg.JSON.EXPERIMENT_NUMBER],
                  timestamp,
                  # update values
                  paper_title,
                  data[cfg.JSON.COLLABORATION],
                  data[cfg.JSON.EXPERIMENT_NUMBER],
                  timestamp)
    return run_sql("""INSERT INTO aulPAPERS (id, id_user, title, collaboration,
                      experiment_number, last_modified)
                      VALUES (%s, %s, %s, %s, %s, %s)
                      ON DUPLICATE KEY UPDATE
                      title = %s,
                      collaboration = %s,
                      experiment_number = %s,
                      last_modified = %s;""", data_tuple)
def save_references(paper_id, data):
    """
    Saves the references of the passed data dictionary using the standard
    authorlist_config keys of the paper data set with the given id. Should NOT
    be used alone as long as you are not sure what you are doing. Refer to
    save() instead. Returns the paper id.

    :param paper_id: id of the paper the references belong to.
    :param data: dictionary containing REFERENCE_IDS.
    :return: paper_id, unchanged.
    """
    reference_ids = data[cfg.JSON.REFERENCE_IDS]
    # Insert or update old references
    for index, reference in enumerate(reference_ids):
        data_tuple = (# insert values
                      index,
                      reference,
                      paper_id,
                      # update values
                      reference)
        run_sql("""INSERT INTO
                   aulREFERENCES (item, reference, paper_id)
                   VALUES (%s, %s, %s)
                   ON DUPLICATE KEY UPDATE
                   reference = %s;""", data_tuple)
    # Delete old references that are out of bounds - i.e. have a higher index
    # than the length of the reference list
    run_sql("""DELETE FROM aulREFERENCES WHERE item >= %s AND paper_id = %s;""",
            (len(reference_ids), paper_id))
    return paper_id
def save_affliations(paper_id, data):
    """
    Saves the affiliations of the passed data dictionary using the standard
    authorlist_config keys to the data set of the paper with the given id.
    Should NOT be used alone as long as you are not sure what you are doing.
    Refer to save() instead. Returns the paper_id for convenience reasons.

    (NOTE: the name is misspelled - "affliations" - but is kept because
    callers depend on it.)

    :param paper_id: id of the paper the affiliations belong to.
    :param data: dictionary containing AFFILIATIONS_KEY.
    :return: paper_id, unchanged.
    """
    affiliations = data[cfg.JSON.AFFILIATIONS_KEY]
    for index, affiliation in enumerate(affiliations):
        # Insert values followed by update values; order must match the
        # %s placeholders of the upsert below.
        data_tuple = (# insert values
                      index,
                      affiliation[cfg.JSON.ACRONYM],
                      affiliation[cfg.JSON.UMBRELLA],
                      affiliation[cfg.JSON.NAME],
                      affiliation[cfg.JSON.DOMAIN],
                      affiliation[cfg.JSON.MEMBER],
                      affiliation[cfg.JSON.INSPIRE_ID],
                      paper_id,
                      # update values
                      affiliation[cfg.JSON.ACRONYM],
                      affiliation[cfg.JSON.UMBRELLA],
                      affiliation[cfg.JSON.NAME],
                      affiliation[cfg.JSON.DOMAIN],
                      affiliation[cfg.JSON.MEMBER],
                      affiliation[cfg.JSON.INSPIRE_ID])
        run_sql("""INSERT INTO
                   aulAFFILIATIONS (item, acronym, umbrella, name_and_address,
                   domain, member, spires_id, paper_id)
                   VALUES(%s, %s, %s, %s, %s, %s, %s, %s)
                   ON DUPLICATE KEY UPDATE
                   acronym = %s,
                   umbrella = %s,
                   name_and_address = %s,
                   domain = %s,
                   member = %s,
                   spires_id = %s;""", data_tuple)
    # Delete old affiliations that are out of bounds - i.e. have a higher index
    # than the length of the affiliations list
    run_sql("""DELETE FROM aulAFFILIATIONS WHERE item >= %s AND paper_id = %s;""",
            (len(affiliations), paper_id))
    return paper_id
def save_authors(paper_id, data):
    """
    Saves the authors of the passed data dictionary using the standard
    authorlist_config keys in the database of the paper with the given id.
    Also persists each author's affiliations and identifiers. Should NOT be
    used alone as long as you do not know what you are doing. Refer to save()
    instead. Returns the paper_id.

    :param paper_id: id of the paper the authors belong to.
    :param data: dictionary containing AUTHORS_KEY.
    :return: paper_id, unchanged.
    """
    authors = data[cfg.JSON.AUTHORS_KEY]
    for index, author in enumerate(authors):
        # Insert values followed by update values; order must match the
        # %s placeholders of the upsert below.
        data_tuple = (# insert values
                      index,
                      author[cfg.JSON.FAMILY_NAME],
                      author[cfg.JSON.GIVEN_NAME],
                      author[cfg.JSON.PAPER_NAME],
                      author[cfg.JSON.STATUS],
                      paper_id,
                      # update values
                      author[cfg.JSON.FAMILY_NAME],
                      author[cfg.JSON.GIVEN_NAME],
                      author[cfg.JSON.PAPER_NAME],
                      author[cfg.JSON.STATUS])
        run_sql("""INSERT INTO
                   aulAUTHORS (item, family_name, given_name, name_on_paper,
                   status, paper_id)
                   VALUES(%s, %s, %s, %s, %s, %s)
                   ON DUPLICATE KEY UPDATE
                   family_name = %s,
                   given_name = %s,
                   name_on_paper = %s,
                   status = %s;""", data_tuple)
        # Per-author sub-tables; len(authors) lets them trim rows of
        # authors that no longer exist.
        save_author_affiliations(paper_id, index, len(authors),
                                 author[cfg.JSON.AFFILIATIONS])
        save_author_identifiers(paper_id, index, len(authors),
                                author[cfg.JSON.IDENTIFIERS])
    # Delete old authors that are out of bounds - i.e. have a higher index
    # than the length of the affiliations list
    run_sql("""DELETE FROM aulAUTHORS WHERE item >= %s AND paper_id = %s;""",
            (len(authors), paper_id))
    return paper_id
def save_author_affiliations(paper_id, author_id, number_of_authors, data):
    """
    Saves the affiliations of the passed author using the data dictionary and
    the standard authorlist_config keys and the paper id. Deletes also all old
    entries that are 'out of bounds' facilitating the number_of_authors
    paramter. Should NOT be used alone as long as you do not exactly know what
    you are doing. Refer to save() instead. Returns the paper id.

    :param paper_id: id of the paper the author belongs to.
    :param author_id: the author's item index within the paper.
    :param number_of_authors: current author count, used to purge rows of
        authors that no longer exist.
    :param data: list of the author's affiliation dictionaries.
    :return: paper_id, unchanged.
    """
    for index, affiliation in enumerate(data):
        # Insert values followed by update values; order must match the
        # %s placeholders of the upsert below.
        data_tuple = (# insert values
                      index,
                      affiliation[cfg.JSON.AFFILIATION_ACRONYM],
                      affiliation[cfg.JSON.AFFILIATION_STATUS],
                      author_id,
                      paper_id,
                      # update values
                      affiliation[cfg.JSON.AFFILIATION_ACRONYM],
                      affiliation[cfg.JSON.AFFILIATION_STATUS])
        run_sql("""INSERT INTO
                   aulAUTHOR_AFFILIATIONS (item, affiliation_acronym,
                                           affiliation_status, author_item,
                                           paper_id)
                   VALUES(%s, %s, %s, %s, %s)
                   ON DUPLICATE KEY UPDATE
                   affiliation_acronym = %s,
                   affiliation_status = %s;""", data_tuple)
    # Delete entries that the author does not have anymore
    run_sql("""DELETE FROM aulAUTHOR_AFFILIATIONS WHERE item >= %s
               AND author_item = %s AND paper_id = %s;""",
            (len(data), author_id, paper_id))
    # Delete entries of non existing author
    run_sql("""DELETE FROM aulAUTHOR_AFFILIATIONS WHERE author_item >= %s
               AND paper_id = %s;""",
            (number_of_authors, paper_id))
    return paper_id
def save_author_identifiers(paper_id, author_id, number_of_authors, data):
    """
    Upsert every identifier of one author for the given paper.

    ``data`` is an iterable of identifier dictionaries keyed by the
    standard authorlist_config JSON keys.  Stale rows (item index beyond
    len(data), or author index beyond number_of_authors) are deleted
    afterwards.  Should NOT be used alone as long as you do not exactly
    know what you are doing -- refer to save() instead.  Returns the
    paper id.
    """
    for position, entry in enumerate(data):
        number = entry[cfg.JSON.IDENTIFIER_NUMBER]
        name = entry[cfg.JSON.IDENTIFIER_NAME]
        # INSERT values first, then the same values repeated for the
        # ON DUPLICATE KEY UPDATE clause.
        parameters = (position, number, name, author_id, paper_id,
                      number, name)
        run_sql("""INSERT INTO
                   aulAUTHOR_IDENTIFIERS (item, identifier_number,
                                          identifier_name, author_item,
                                          paper_id)
                   VALUES(%s, %s, %s, %s, %s)
                   ON DUPLICATE KEY UPDATE
                   identifier_number = %s,
                   identifier_name = %s;""", parameters)
    # Delete identifiers this author does not have anymore.
    run_sql("""DELETE FROM aulAUTHOR_IDENTIFIERS WHERE item >= %s
               AND author_item = %s AND paper_id = %s;""",
            (len(data), author_id, paper_id))
    # Delete identifiers belonging to authors that no longer exist.
    run_sql("""DELETE FROM aulAUTHOR_IDENTIFIERS WHERE author_item >= %s
               AND paper_id = %s;""",
            (number_of_authors, paper_id))
    return paper_id
def get_owner(paper_id):
    """
    Return the id_user owning the paper with the given id, or None when
    the paper does not exist (or its id_user column is falsy).
    """
    # Pass paper_id as a bound parameter instead of %-interpolating it
    # into the SQL text (the original was open to SQL injection), and
    # guard against a missing paper (the original indexed [0][0] first,
    # raising IndexError on an empty result set).
    rows = run_sql("SELECT id_user FROM aulPAPERS WHERE id = %s;",
                   (paper_id,))
    if rows and rows[0][0]:
        return rows[0][0]
    return None
|
Panos512/invenio
|
modules/webauthorlist/lib/authorlist_dblayer.py
|
Python
|
gpl-2.0
| 27,103
|
import os.path
from PyQt5.QtWidgets import QMessageBox
from PyQt5.QtGui import QIcon
from enki.core.core import core
class Plugin:
    """Enki plugin adding a Help-menu action that opens the vim tutorial."""

    def __init__(self):
        # Register the menu action and wire it to the handler below.
        icon = QIcon(":/enkiicons/vim.png")
        action = core.actionManager().addAction('mHelp/aVimTutor',
                                                'Vim mode tutorial',
                                                icon)
        action.triggered.connect(self._onTriggered)
        self._action = action

    def terminate(self):
        """Unregister the menu action on plugin shutdown."""
        core.actionManager().removeAction(self._action)

    def _onTriggered(self):
        """Load the bundled vimtutor.md and show it as a Markdown document."""
        tutor_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'vimtutor.md'))
        try:
            with open(tutor_path) as tutor_file:
                contents = tutor_file.read()
        except Exception as ex:
            # Surface read failures to the user instead of crashing.
            QMessageBox.warning(core.mainWindow(),
                                "Failed to read vimtutor text",
                                str(ex))
            return
        document = core.workspace().createEmptyNotSavedDocument()
        document.qutepart.text = contents
        document.qutepart.detectSyntax(language='Markdown')
|
hlamer/enki
|
enki/plugins/vimtutor/__init__.py
|
Python
|
gpl-2.0
| 1,123
|
#!/usr/bin/python
import sys, os
from tools import benchmark, ipc
def main() :
    """Benchmark every IPC instance found in the folder given as argv[1].

    Python 2 script: runs ./compute.py on each (domain, instance) pair
    under a time/memory limit and writes one CSV row per run.
    """
    # Validate the single command-line argument: an existing directory.
    if len(sys.argv) < 2 :
        print >> sys.stderr, "No folder was specified!"
        sys.exit(1)
    if not os.path.exists( sys.argv[1] ) :
        print >> sys.stderr, sys.argv[1], "not a valid path!"
        sys.exit(1)
    if not os.path.isdir( sys.argv[1] ) :
        print >> sys.stderr, sys.argv[1], "not a directory!"
        sys.exit(1)
    inputs = ipc.extract_inputs( sys.argv[1] )
    # Command and log-name templates; filled from locals() in the loop,
    # which binds domain_name/instance_name/domain_pddl/instance_pddl.
    command = './compute.py %(domain_pddl)s %(instance_pddl)s'
    logname = '%(domain_name)s-%(instance_name)s-heuristics.log'
    results = []
    for domain_name, instance_name, domain_pddl, instance_pddl in inputs :
        log = benchmark.Log( logname%locals() )
        # 1800 s time limit, 2048 MB memory limit per run.
        rv, time = benchmark.run( command%locals(), 1800, 2048, log )
        results.append( [ str(rv), str(time) ] )
        if rv != 0 :
            # Non-zero exit status: append a '?' placeholder column.
            results[-1] += ipc.parse_report() + ['?']
        else :
            results[-1] += ipc.parse_report( )
    # Output CSV is named after the first domain found in the folder.
    with open( '%s.heuristics.csv'%inputs[0][0], 'w' ) as outstream :
        for res in results :
            res = [ str(field) for field in res ]
            print >> outstream, ",".join(res)

if __name__ == '__main__' :
    main()
|
miquelramirez/lwaptk-v2
|
examples/fodet/heuristics/compute-batch.py
|
Python
|
gpl-3.0
| 1,113
|
# -*- test-case-name: mamba.test.test_decorators -*-
# Copyright (c) 2012 Oscar Campos <oscar.campos@member.fsf.org>
# See LICENSE for more details
"""
.. module:: decorators
:platform: Unix, Windows
:synopsys: Decorators
.. moduleauthor:: Oscar Campos <oscar.campos@member.fsf.org>
"""
import cPickle
import functools
from collections import OrderedDict
def cache(size=16):
    """
    Cache the results of the decorated function, keyed on the call
    arguments (both positional and keyword).

    We only store the size provided (if any) in MB, after that we
    perform FIFO evictions until the size of the cache is lower than
    the provided one.

    If the size is 0 then an unlimited cache is provided.

    .. admonition:: Notice

        The memory size of the int_cache is just an approximation
    """
    int_cache = OrderedDict()

    def decorator(func):

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Key on kwargs as well: the original keyed on *args only,
            # so calls differing just in keyword arguments wrongly
            # shared a single cache entry.
            key = (args, tuple(sorted(kwargs.items())))
            if key in int_cache:
                return int_cache[key]

            result = func(*args, **kwargs)
            int_cache[key] = result
            if size != 0:
                # FIFO eviction: drop the oldest entries until the
                # pickled cache fits in `size` MB (approximation only).
                while len(cPickle.dumps(int_cache)) >= size * 1024 * 1024:
                    int_cache.popitem(False)

            return result
        return wrapper
    return decorator
def unlimited_cache(func):
    """
    Just a wrapper over cache decorator to alias :meth:`@cache(size=0)`
    """
    def passthrough(*args, **kwargs):
        return func(*args, **kwargs)

    # Same decorator stacking order as the original: cache(size=0) is
    # applied first, functools.wraps(func) outermost.
    return functools.wraps(func)(cache(size=0)(passthrough))
|
PyMamba/mamba-framework
|
mamba/core/decorators.py
|
Python
|
gpl-3.0
| 1,575
|
__author__ = 'zak'
import json
import uuid
import ast
import pprint
import datetime
from di_utils import *
from clatoolkit.models import LearningRecord,SocialRelationship
from xapi.statement.builder import socialmedia_builder, pretty_print_json
from xapi.statement.xapi_settings import xapi_settings
from xapi.oauth_consumer.operative import LRS_Auth
#from dataintegration.core.utils import check_ifnotinlocallrs
def insert_share(user, post_id, share_id, comment_message, comment_created_time, unit, platform, platform_url, tags=(),
                 parent_user=None, parent_external_user=None):
    """Record a 'shared' event: save a LearningRecord row, send the xAPI
    statement to the unit's LRS and save a SocialRelationship edge.

    Skipped entirely when share_id is already in the local LRS cache.
    """
    if check_ifnotinlocallrs(unit, platform, share_id):
        # Setup statement builder parameters and lrs using default lrs. TODO: Institutions required in xapi maybe?
        lrs_client = LRS_Auth(provider_id = unit.get_lrs_id())
        account_name = user.userprofile.get_username_for_platform(platform)
        # NOTE(review): _parent_user is computed but never used below.
        _parent_user = parent_user if parent_user else parent_external_user
        statement_id = get_uuid4()
        # #lrs.xapi = the "transaction" uuid
        # lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=xapi_settings.VERB_SHARED, platform=platform, user=user,
        #                      platformid=share_id, platformparentid=post_id, parent_user=parent_user,
        #                      parent_user_external=parent_external_user, message=comment_message,
        #                      datetimestamp=comment_created_time)
        lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=xapi_settings.VERB_SHARED,
                             platform=platform, user=user, platformid=share_id, platformparentid=post_id,
                             parent_user=parent_user, datetimestamp=comment_created_time)
        lrs.save()
        # Send xapi to lrs or cache for later
        stm = socialmedia_builder(statement_id=statement_id, verb=xapi_settings.VERB_SHARED, platform=platform,
            account_name=account_name, account_homepage=platform_url, object_type=xapi_settings.OBJECT_NOTE,
            object_id=share_id, message=comment_message, parent_id=post_id, parent_object_type=xapi_settings.OBJECT_NOTE,
            timestamp=comment_created_time, unit=unit, tags=tags )
        jsn = stm.to_json()
        # Transfer xapi to lrs TODO: Handle caching for failed sends
        # print 'Sending xapi..'
        status,content = lrs_client.transfer_statement(user.id, statement=jsn)
        # print 'Tried to send xapi to lrs: status %s, response: %s' % (status,content)
        sr = SocialRelationship(verb=xapi_settings.VERB_SHARED, from_user=user, to_user=parent_user,
                                to_external_user=parent_external_user, platform=platform, message=comment_message,
                                datetimestamp=comment_created_time, unit=unit, platformid=share_id)
        sr.save()
def insert_post(user, post_id, message, created_time, unit, platform, platform_url, tags=()):
    """Record a 'created' event for a post and register @-mentions.

    Saves a LearningRecord row, sends the xAPI statement to the unit's
    LRS, then saves one 'mentioned' SocialRelationship per tag that
    starts with '@' (resolved to a local user when one exists).
    Skipped entirely when post_id is already in the local LRS cache.
    """
    # verb = 'created'
    verb = xapi_settings.VERB_CREATED
    #TODO: update for lrs connection as it happens
    if check_ifnotinlocallrs(unit, platform, post_id, user, verb):
        # Set up the statement builder parameters using the default LRS.
        lrs_client = LRS_Auth(provider_id = unit.get_lrs_id())
        account_name = user.userprofile.get_username_for_platform(platform)
        statement_id = get_uuid4()
        # #lrs.xapi = the "transaction" uuid
        # lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=verb, platform=platform, user=user,
        #                      platformid=post_id, message=message, datetimestamp=created_time)
        lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=verb, platform=platform,
                             user=user, platformid=post_id, datetimestamp=created_time)
        lrs.save()
        # Transfer xapi to lrs or cache for later
        stm = socialmedia_builder(statement_id=statement_id, verb=verb, platform=platform, account_name=account_name,
                                  account_homepage=platform_url, object_type=xapi_settings.OBJECT_NOTE,
                                  object_id=post_id, message=message, timestamp=created_time, unit=unit, tags=tags)
        jsn = stm.to_json()
        # print 'sending xapi... '
        # print jsn
        status,content = lrs_client.transfer_statement(user.id, statement=jsn)
        # print 'in insert_post(): Response status/code from LRS: %s/%s' % (status, content)
        for tag in tags:
            if tag[0] == "@":
                # If the user exists, use their user object else reference them as an external user
                if username_exists(tag[1:], unit, platform):
                    to_user = get_user_from_screen_name(tag[1:], platform)
                    external_user = None
                else:
                    to_user = None
                    external_user = tag[1:]
                sr = SocialRelationship(verb=xapi_settings.VERB_MENTIONED, from_user=user, to_user=to_user,
                                        to_external_user=external_user, platform=platform, message=message,
                                        datetimestamp=created_time, unit=unit, platformid=post_id)
                sr.save()
def insert_like(user, object_id, message, unit, platform, platform_url, parent_id, parent_object_type, created_time=None,
                parent_user=None, parent_user_external=None):
    """Record a 'liked' event: LearningRecord row, SocialRelationship
    edge to the liked object's author, and xAPI statement sent to the
    unit's LRS.  Skipped when object_id is already in the local cache.
    """
    verb = xapi_settings.VERB_LIKED
    if check_ifnotinlocallrs(unit, platform, object_id, user, verb):
        lrs_client = LRS_Auth(provider_id = unit.get_lrs_id())
        account_name = user.userprofile.get_username_for_platform(platform)
        statement_id = get_uuid4()
        # lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=verb, platform=platform, user=user,
        #                      platformid=object_id, message=message, platformparentid=object_id, parent_user=parent_user,
        #                      parent_user_external=parent_user_external, datetimestamp=created_time)
        # NOTE(review): platformparentid is set to object_id here, not
        # parent_id -- confirm this is intentional against callers.
        lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=verb, platform=platform, user=user,
                             platformid=object_id, platformparentid=object_id, parent_user=parent_user,
                             datetimestamp=created_time)
        lrs.save()
        sr = SocialRelationship(verb=verb, from_user=user, to_user=parent_user,
                                to_external_user=parent_user_external, platform=platform, message=message,
                                datetimestamp=created_time, unit=unit, platformid=object_id)
        sr.save()
        # Send xAPI to LRS
        stm = socialmedia_builder(statement_id=statement_id, verb=verb, platform=platform,
                                  account_name=account_name, account_homepage=platform_url,
                                  object_type=xapi_settings.OBJECT_NOTE, object_id=object_id, message=message,
                                  parent_id=parent_id, parent_object_type=parent_object_type,
                                  timestamp=created_time, unit=unit)
        jsn = stm.to_json()
        status,content = lrs_client.transfer_statement(user.id, statement=jsn)
def insert_comment(user, post_id, comment_id, comment_message, comment_created_time, unit, platform, platform_url,
                   parent_user=None, parent_user_external=None, other_contexts = []):
    """Record a 'commented' event: LearningRecord row, SocialRelationship
    edge to the parent post's author, and xAPI statement sent to the
    unit's LRS.  Skipped when comment_id is already in the local cache.

    NOTE(review): other_contexts uses a mutable default; it is only
    passed through here, but callers should not rely on sharing it.
    """
    if check_ifnotinlocallrs(unit, platform, comment_id):
        lrs_client = LRS_Auth(provider_id = unit.get_lrs_id())
        account_name = user.userprofile.get_username_for_platform(platform)
        statement_id = get_uuid4()
        # lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=xapi_settings.VERB_COMMENTED, platform=platform,
        #                      user=user, platformid=comment_id, platformparentid=post_id, parent_user=parent_user,
        #                      parent_user_external=parent_user_external, message=comment_message,
        #                      datetimestamp=comment_created_time)
        lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=xapi_settings.VERB_COMMENTED, platform=platform,
                             user=user, platformid=comment_id, platformparentid=post_id, parent_user=parent_user,
                             datetimestamp=comment_created_time)
        lrs.save()
        sr = SocialRelationship(verb=xapi_settings.VERB_COMMENTED, from_user=user, to_user=parent_user,
                                to_external_user=parent_user_external, platform=platform, message=comment_message,
                                datetimestamp=comment_created_time, unit=unit, platformid=comment_id)
        sr.save()
        stm = socialmedia_builder(statement_id=statement_id, verb=xapi_settings.VERB_COMMENTED, platform=platform,
            account_name=account_name, account_homepage=platform_url, object_type=xapi_settings.OBJECT_NOTE,
            object_id=comment_id, message=comment_message, parent_id=post_id, parent_object_type=xapi_settings.OBJECT_NOTE,
            timestamp=comment_created_time, unit=unit, other_contexts = other_contexts)
        jsn = stm.to_json()
        status,content = lrs_client.transfer_statement(user.id, statement=jsn)
# Diigo is the only one that's supposed to call this method, but caller is commented out...
# Is this method not needed anymore??
def insert_bookmark(usr_dict, post_id,message,from_name,from_uid, created_time, course_code, platform, platform_url, tags=[]):
    """Record a 'created' Bookmark event in the local LearningRecord table.

    Uses the older course_code-based API (no statement_id / LRS client);
    NOTE(review): likely dead code -- see the comments above.
    """
    if check_ifnotinlocallrs(course_code, platform, post_id):
        stm = socialmedia_builder(verb='created', platform=platform, account_name=from_uid, account_homepage=platform_url, object_type='Bookmark', object_id=post_id, message=message, timestamp=created_time, account_email=usr_dict['email'], user_name=from_name, course_code=course_code, tags=tags)
        jsn = ast.literal_eval(stm.to_json())
        stm_json = pretty_print_json(jsn)
        lrs = LearningRecord(xapi=stm_json, course_code=course_code, verb='created', platform=platform, username=get_username_fromsmid(from_uid, platform), platformid=post_id, message=message, datetimestamp=created_time)
        lrs.save()
def insert_commit(user, parent_id, commit_id, message, committed_time, unit, platform, platform_url,
                  tags=[], other_contexts = []):
    """Record a 'created' Collection event for a commit: LearningRecord
    row plus xAPI statement sent to the unit's LRS.  Skipped when
    commit_id is already in the local cache.

    NOTE(review): tags/other_contexts use mutable defaults; they are
    only passed through here.
    """
    if check_ifnotinlocallrs(unit, platform, commit_id):
        verb = xapi_settings.VERB_CREATED
        object_type = xapi_settings.OBJECT_COLLECTION
        parent_obj_type = xapi_settings.OBJECT_COLLECTION
        lrs_client = LRS_Auth(provider_id = unit.get_lrs_id())
        account_name = user.userprofile.get_username_for_platform(platform)
        statement_id = get_uuid4()
        # lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=verb, platform=platform, user=user,
        #                      platformid=commit_id, platformparentid=parent_id, message=message, datetimestamp=committed_time)
        lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=verb, platform=platform, user=user,
                             platformid=commit_id, platformparentid=parent_id, datetimestamp=committed_time)
        lrs.save()
        stm = socialmedia_builder(statement_id=statement_id, verb=verb, platform=platform,
            account_name=account_name, account_homepage=platform_url, object_type=object_type,
            object_id=commit_id, message=message, parent_id=parent_id, parent_object_type=parent_obj_type,
            timestamp=committed_time, unit=unit, tags=tags, other_contexts = other_contexts)
        jsn = stm.to_json()
        status,content = lrs_client.transfer_statement(user.id, statement=jsn)
def insert_file(user, parent_id, file_id, message, committed_time, unit, platform, platform_url, verb,
                tags=[], other_contexts = []):
    """Record a File event (caller supplies the verb): LearningRecord row
    plus xAPI statement sent to the unit's LRS.  Skipped when file_id is
    already in the local cache.

    NOTE(review): tags/other_contexts use mutable defaults; they are
    only passed through here.
    """
    if check_ifnotinlocallrs(unit, platform, file_id):
        obj = xapi_settings.OBJECT_FILE
        parent_obj_type = xapi_settings.OBJECT_COLLECTION
        lrs_client = LRS_Auth(provider_id = unit.get_lrs_id())
        account_name = user.userprofile.get_username_for_platform(platform)
        statement_id = get_uuid4()
        # lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=verb, platform=platform,
        #                      user=user, platformid=file_id, platformparentid=parent_id,
        #                      message=message, datetimestamp=committed_time)
        lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=verb, platform=platform,
                             user=user, platformid=file_id, platformparentid=parent_id, datetimestamp=committed_time)
        lrs.save()
        stm = socialmedia_builder(statement_id=statement_id, verb=verb, platform=platform,
            account_name=account_name, account_homepage=platform_url, object_type=obj,
            object_id=file_id, message=message, parent_id=parent_id, parent_object_type=parent_obj_type,
            timestamp=committed_time, unit=unit, tags=tags, other_contexts = other_contexts)
        jsn = stm.to_json()
        status,content = lrs_client.transfer_statement(user.id, statement=jsn)
# def insert_file(user, commit_id, file_id, message, committed_time, unit, platform, verb):
# if check_ifnotinlocallrs(unit, platform, file_id):
# lrs = LearningRecord(xapi=None, unit=unit, verb=verb, platform=platform, user=user, platformid=file_id,
# platformparentid=commit_id, message=message, datetimestamp=committed_time)
# lrs.save()
def insert_issue(user, issue_id, verb, object_type, parent_object_type, message, from_name, from_uid, created_time,
                 unit, parent_id, platform, platform_id, account_homepage, shared_displayname=None, tags=[], other_contexts = []):
    """Record an issue event (caller supplies verb and object types):
    LearningRecord row plus xAPI statement sent to the unit's LRS.
    Skipped when platform_id is already in the local cache.

    NOTE(review): account_name is computed but the statement is built
    with account_name=from_uid -- confirm which one is intended.
    """
    if check_ifnotinlocallrs(unit, platform, platform_id):
        lrs_client = LRS_Auth(provider_id = unit.get_lrs_id())
        account_name = user.userprofile.get_username_for_platform(platform)
        statement_id = get_uuid4()
        # lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=verb, platform=platform, user=user,
        #                      platformid=platform_id, platformparentid=parent_id,
        #                      message=message, datetimestamp=created_time)
        lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=verb, platform=platform, user=user,
                             platformid=platform_id, platformparentid=parent_id, datetimestamp=created_time)
        lrs.save()
        stm = socialmedia_builder(statement_id=statement_id, verb=verb, platform=platform,
            account_name=from_uid, account_homepage=account_homepage, object_type=object_type,
            object_id=issue_id, message=message, parent_id=parent_id, parent_object_type=parent_object_type,
            timestamp=created_time, unit=unit, tags=tags, other_contexts = other_contexts)
        jsn = stm.to_json()
        status,content = lrs_client.transfer_statement(user.id, statement=jsn)
"""
for tag in tags:
if tag[0]=="@":
socialrelationship = SocialRelationship(
verb = "mentioned", fromusername=get_username_fromsmid(from_uid,platform),
tousername=get_username_fromsmid(tag[1:],platform),
platform=platform, message=message, datetimestamp=created_time,
course_code=course_code, platformid=platform_id)
socialrelationship.save()
"""
def insert_task(user, task_id, task_name, task_created_time, unit, platform, platform_url, parent_id=None, other_contexts = []):
    """Record a 'created' Task event: LearningRecord row plus xAPI
    statement sent to the unit's LRS.  Skipped when task_id is already
    in the local cache.
    """
    if check_ifnotinlocallrs(unit, platform, task_id):
        lrs_client = LRS_Auth(provider_id = unit.get_lrs_id())
        account_name = user.userprofile.get_username_for_platform(platform)
        statement_id = get_uuid4()
        # lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=xapi_settings.VERB_CREATED, platform=platform,
        #                      user=user, platformid=task_id, message=task_name, datetimestamp=task_created_time)
        lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=xapi_settings.VERB_CREATED, platform=platform,
                             user=user, platformid=task_id, datetimestamp=task_created_time)
        lrs.save()
        stm = socialmedia_builder(statement_id=statement_id, verb=xapi_settings.VERB_CREATED, platform=platform,
            account_name=account_name, account_homepage=platform_url, object_type=xapi_settings.OBJECT_TASK,
            object_id=task_id, message=task_name, parent_id=parent_id, timestamp=task_created_time, unit=unit,
            other_contexts = other_contexts)
        jsn = stm.to_json()
        status,content = lrs_client.transfer_statement(user.id, statement=jsn)
#maybe we can capture commenting behaviours between user/card somehow?
def insert_added_object(user, target_id, object_id, object_text, obj_created_time, unit, platform, platform_url,
                        obj_type, parent_user_external = None, other_contexts = []):
    """Record an 'added' event for an object attached to a Task parent:
    LearningRecord row plus xAPI statement sent to the unit's LRS.
    Skipped when object_id is already in the local cache.

    NOTE(review): parent_user_external is accepted but never used below.
    """
    if check_ifnotinlocallrs(unit, platform, object_id):
        lrs_client = LRS_Auth(provider_id = unit.get_lrs_id())
        account_name = user.userprofile.get_username_for_platform(platform)
        statement_id = get_uuid4()
        # lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=xapi_settings.VERB_ADDED,
        #                      platform=platform, user=user, platformid=object_id, platformparentid=target_id,
        #                      message=object_text, datetimestamp=obj_created_time)
        lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=xapi_settings.VERB_ADDED,
                             platform=platform, user=user, platformid=object_id, platformparentid=target_id,
                             datetimestamp=obj_created_time)
        lrs.save()
        stm = socialmedia_builder(statement_id=statement_id, verb=xapi_settings.VERB_ADDED, platform=platform,
                                  account_name=account_name, account_homepage=platform_url,
                                  object_type=obj_type, object_id=object_id, message=object_text, parent_id=target_id,
                                  parent_object_type=xapi_settings.OBJECT_TASK, timestamp=obj_created_time,
                                  unit=unit, other_contexts = other_contexts)
        jsn = stm.to_json()
        status,content = lrs_client.transfer_statement(user.id, statement=jsn)
def insert_updated_object(user, object_id, object_message, obj_update_time, unit, platform, platform_url,
                          obj_type, parent_id=None, obj_parent_type=None, other_contexts = []):
    """Record an 'updated' event for an object: LearningRecord row plus
    xAPI statement sent to the unit's LRS.  Skipped when object_id is
    already in the local cache.
    """
    if check_ifnotinlocallrs(unit, platform, object_id):
        lrs_client = LRS_Auth(provider_id = unit.get_lrs_id())
        account_name = user.userprofile.get_username_for_platform(platform)
        statement_id = get_uuid4()
        # lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=xapi_settings.VERB_UPDATED,
        #                      platform=platform, user=user, platformid=object_id, platformparentid=parent_id,
        #                      message=object_message, datetimestamp=obj_update_time)
        lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=xapi_settings.VERB_UPDATED,
                             platform=platform, user=user, platformid=object_id, platformparentid=parent_id,
                             datetimestamp=obj_update_time)
        lrs.save()
        stm = socialmedia_builder(statement_id=statement_id, verb=xapi_settings.VERB_UPDATED, platform=platform,
                                  account_name=account_name, account_homepage=platform_url,
                                  object_type=obj_type, object_id=object_id, message=object_message, parent_id=parent_id,
                                  parent_object_type=obj_parent_type, timestamp=obj_update_time,
                                  unit=unit, other_contexts = other_contexts)
        jsn = stm.to_json()
        status,content = lrs_client.transfer_statement(user.id, statement=jsn)
def insert_closedopen_object(user, object_id, object_message, obj_update_time, unit, platform, platform_url,
                             obj_type, verb, parent_id=None, obj_parent_type=None, other_contexts = [], platform_id = None):
    """Record a closed/opened event (caller supplies the verb):
    LearningRecord row plus xAPI statement sent to the unit's LRS.

    The duplicate check uses platform_id when given, otherwise
    object_id; the LearningRecord itself always stores object_id.
    """
    check_id = object_id
    if platform_id:
        check_id = platform_id
    if check_ifnotinlocallrs(unit, platform, check_id):
        lrs_client = LRS_Auth(provider_id = unit.get_lrs_id())
        account_name = user.userprofile.get_username_for_platform(platform)
        statement_id = get_uuid4()
        # lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=verb,
        #                      platform=platform, user=user, platformid=object_id, platformparentid=parent_id,
        #                      message=object_message, datetimestamp=obj_update_time)
        lrs = LearningRecord(statement_id=statement_id, unit=unit, verb=verb, platform=platform,
                             user=user, platformid=object_id, platformparentid=parent_id,
                             datetimestamp=obj_update_time)
        lrs.save()
        stm = socialmedia_builder(statement_id=statement_id, verb=verb, platform=platform,
                                  account_name=account_name, account_homepage=platform_url,
                                  object_type=obj_type, object_id=object_id, message=object_message, parent_id=parent_id,
                                  parent_object_type=obj_parent_type, timestamp=obj_update_time,
                                  unit=unit, other_contexts = other_contexts)
        jsn = stm.to_json()
        status,content = lrs_client.transfer_statement(user.id, statement=jsn)
|
uts-cic/CLAtoolkit
|
clatoolkit_project/dataintegration/core/importer.py
|
Python
|
gpl-3.0
| 21,943
|
# $HeadURL$
__RCSID__ = "$Id$"
""" SystemLoggingDBCleaner erases records whose messageTime column
contains a time older than 'RemoveDate' days, where 'RemoveDate'
is an entry in the Configuration Service section of the agent.
"""
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC import S_OK
from DIRAC.ConfigurationSystem.Client.PathFinder import getDatabaseSection
from DIRAC.FrameworkSystem.DB.SystemLoggingDB import SystemLoggingDB
from DIRAC.Core.Utilities import dateTime, toString, day
class SystemLoggingDBCleaner(AgentModule):
    """Agent that erases MessageRepository records whose messageTime is
    older than 'RemoveDate' days (a Configuration Service option,
    default 30)."""

    def initialize(self):
        # DB handle plus retention period: days converted to a time
        # delta via the `day` unit.
        self.SystemLoggingDB = SystemLoggingDB()
        self.period = int( self.am_getOption( "RemoveDate", '30' ) ) * day
        return S_OK()

    def execute(self):
        """ The main agent execution method
        """
        # Cut-off timestamp, truncated at the fractional-seconds dot.
        cutoff = toString( dateTime() - self.period )
        cutoff = cutoff[:cutoff.find('.')]
        clause = 'FROM MessageRepository WHERE messageTime <'

        countResult = self.SystemLoggingDB._query(
            "SELECT count(*) %s '%s'" % ( clause, cutoff ) )
        if not countResult['OK']:
            return countResult

        recordsToErase = countResult['Value'][0][0]
        if recordsToErase == 0:
            self.log.info('No records to erase')
            return S_OK('No records to erase')

        deleteResult = self.SystemLoggingDB._update(
            "DELETE LOW_PRIORITY %s '%s'" % ( clause, cutoff ) )
        if not deleteResult['OK']:
            self.log.error( 'Could not erase the requested records',
                            'those older than %s' % cutoff )
        else:
            self.log.info('%s records have been erased' % recordsToErase )
        return deleteResult
|
marcelovilaca/DIRAC
|
FrameworkSystem/Agent/SystemLoggingDBCleaner.py
|
Python
|
gpl-3.0
| 1,703
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created: Thu May 2 15:49:03 2013
# by: The Resource Compiler for PyQt (Qt v5.0.2)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x36\xe2\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\x01\
\x95\x2b\x0e\x1b\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xd9\x03\x03\
\x0e\x1c\x24\x7c\x1a\xa6\xff\x00\x00\x20\x00\x49\x44\x41\x54\x78\
\xda\xed\x9d\x77\x98\x9d\x65\x9d\xf7\x3f\xf7\x53\x4e\x9f\x99\x33\
\x33\x99\x4c\x66\x12\x32\xa9\xa4\x03\x91\x90\x10\x6a\xe8\x48\xd9\
\x05\xa5\x88\xc8\xba\xeb\xcb\xb2\xae\x04\x51\x57\x5f\x57\x5d\x5d\
\x76\x5d\x45\x51\x56\x50\x71\x59\x75\xa5\x59\xc0\x12\x3a\x16\x88\
\x94\x00\x09\x29\xa4\x4d\xea\xb4\x4c\xa6\xcf\x9c\x39\xbd\x3d\xf5\
\x7e\xff\x38\xf3\x3c\x99\x41\x94\x34\x7d\xdf\xeb\xbd\xb8\xaf\x2b\
\x57\x32\xc9\xc9\x79\xca\xf7\xfe\xb5\xef\xaf\xdc\xf0\xee\x7a\x77\
\xbd\xbb\xde\x5d\xef\xae\x77\xd7\xbb\xeb\xdd\xf5\xee\x7a\x77\xfd\
\x7f\xbf\xc4\xb1\xfc\xe7\xba\xba\xba\xab\x0c\xc3\x08\x4e\x99\x32\
\xe5\x73\x2b\x57\xae\x3c\x51\x51\x14\xb6\x6d\xdb\x46\x6d\x6d\xed\
\xa1\x0b\xb8\x02\x09\x20\xe4\x1f\x5c\x4d\x55\xd5\x94\xa2\x28\xf9\
\x72\xb9\xac\xbf\xfa\xea\xab\xb3\xfe\x6f\xbf\x8c\xda\xda\xda\x9f\
\x5d\x77\xdd\x75\x57\x97\xcb\x65\x17\x40\x41\xe0\x48\x89\xf8\x23\
\x6f\xc9\x71\x1c\x02\x81\x00\xaf\xbf\xfe\xba\xb3\x60\xc1\x82\xc2\
\xc0\xc0\x40\x6c\xcb\x96\x2d\xd3\x2d\xcb\x4a\x1e\xed\x3d\x68\xc7\
\xf2\x00\x8b\x16\x2d\xfa\xe1\xcc\x99\x33\xeb\x37\x6f\xde\x4c\xa9\
\x54\xc2\xb6\x6d\xea\xeb\xeb\x69\x68\x68\x40\x08\x41\xbf\x95\x62\
\xda\x05\x45\xaa\xa2\x3a\x4e\x3a\x88\x93\xd1\x71\x73\x3a\x76\x4a\
\x43\x16\x35\x80\xb0\x94\xf2\x0f\xbe\x77\xda\xb4\x69\xa2\xb7\xb7\
\x57\xfe\x25\x40\xa8\xaa\xaa\xba\x60\xc5\x8a\x15\x6b\x74\x5d\x37\
\x1d\xc7\xa9\xe9\xef\xed\xd3\x6d\x0b\x84\x22\x31\x30\x09\xc5\x04\
\xb2\xa0\x4d\xd8\x4c\x42\x08\xf6\xee\xdd\xcb\xd2\xa5\x4b\xbd\x8d\
\x85\xa6\x69\xb1\x58\x2c\x86\xeb\xba\xa7\x85\x42\xa1\x6c\x6d\x6d\
\xed\xae\x81\x81\x81\xec\x9f\x1d\x90\x45\x8b\x16\xb1\x77\xef\xde\
\x90\xe3\x38\x28\x8a\xe2\x94\x4a\x25\x5c\xd7\xc5\x34\x4d\x6c\xdb\
\xc6\xb2\x2c\xca\xe5\x32\x42\x08\x8a\xdd\x09\xba\xba\xc3\x48\x69\
\x22\x30\x91\xaa\x8b\x40\x80\xa8\x3c\xf0\x38\x19\x95\x67\x9c\x71\
\x86\x04\xc8\x66\xb3\xb4\xb6\xb6\x8a\xbf\x94\x54\x94\xcb\xe5\xe6\
\xc9\x93\x27\x57\x9b\xa6\x89\x82\xe0\x60\xb8\x87\x85\x17\x2a\xd8\
\xc3\x41\xec\x44\x10\x67\x34\x88\x8c\x0a\xa4\xa9\x20\x2d\xa5\xf2\
\xd2\x34\x0d\x55\x55\x31\x4d\x13\x45\x51\x70\x5d\x17\xc3\x30\x30\
\x4d\x53\x5e\x75\xd5\x55\xbf\x11\x42\xf0\xda\x6b\xaf\x5d\x00\xfc\
\xfe\xcf\x0e\x48\x47\x47\xc7\x9c\xd3\x4e\x3b\xad\xad\xb6\xb6\x96\
\x42\xa1\x80\x69\x9a\x13\x00\xf1\x6e\x4e\x51\x14\xa6\x4c\x6d\x22\
\x1c\x0e\xa3\x08\x41\xab\x71\x80\xe5\x1f\x82\x40\x40\xc1\x1e\xd5\
\xb1\x7a\xa3\x98\x83\x01\x9c\xe1\x20\x42\xa9\x28\x05\x29\x25\xc1\
\x60\xf0\x2f\xab\xb3\x85\xf0\x37\x92\x86\x8a\x3b\x3f\x43\x20\x5e\
\x43\x20\x5e\x82\x13\x4b\x13\x3e\x3b\xf4\xe8\x24\xdc\xa2\x8a\xeb\
\xba\x48\x29\x31\x0c\x03\x21\x04\x52\x4a\x2c\xcb\xc2\x75\x5d\x61\
\x18\x06\x9a\xa6\x01\xb8\x7f\x11\x95\x55\x2e\x97\x1b\x1a\x1a\x1a\
\x08\x85\x42\xe4\xf3\x79\x0c\xc3\xf0\x6f\xce\x71\x1c\x1c\xc7\xf1\
\x6f\x34\x10\x08\x54\xc0\x11\x82\x5e\x7b\x84\xf3\x62\x4d\xb8\x46\
\x90\x25\x4b\x4e\x61\xe7\xe4\x57\x89\x08\x89\xa2\xe8\xa4\x5f\xae\
\xa2\xb0\x27\x8c\x94\x12\xd7\x75\xff\xec\x20\x44\xa3\xd1\xb3\xae\
\xb9\xe6\x9a\x75\xa5\x52\xa9\xf2\x42\x5d\x29\x43\xf5\x42\xb8\x29\
\x9d\x19\xbb\x4e\x61\x74\x97\xac\x48\xf0\x5b\xe4\xb4\x6d\xef\x2e\
\x66\xcc\x98\xe1\x03\x32\x5e\x42\xca\xe5\xb2\xbf\x31\xa5\x94\x94\
\xcb\xe5\x8f\x57\x55\x55\x5d\x57\x55\x55\xf5\x40\x7f\x7f\xff\xa6\
\x3f\xab\x0d\xb1\x6d\x1b\xc3\x30\x7c\x69\xf0\x6e\xc4\x71\x1c\x7f\
\xb7\x09\x21\x50\x55\x15\xcb\xb2\x90\xae\x4b\x7d\x3a\xc4\xc1\xa7\
\x15\x4e\x08\x2e\x26\x57\x3f\x05\x3b\x31\x17\xac\x30\x49\x3a\x29\
\x0c\x98\x38\x79\xc7\x07\xe4\xec\xb3\xcf\xfe\x1f\x29\x65\x20\x99\
\x4c\x86\x7b\x7b\x7b\xff\x36\x9b\xcd\xe6\x8f\x27\x20\x52\xca\xb0\
\xaa\xaa\x15\x89\x96\x92\x5d\x93\x77\x8a\xf3\xaf\xac\xc3\x95\x36\
\x76\x56\x43\x64\xc3\xd8\x59\x0d\x27\xab\x51\xde\x1f\x41\x3a\xc2\
\x97\xa6\x72\xb9\x8c\xa2\x54\x54\x97\x07\x88\x27\x21\x8e\xe3\x54\
\x9e\x57\x4a\xb9\x6a\xd5\xaa\xab\x9b\x9b\x9b\x79\xf0\xc1\x07\xd7\
\x03\x7f\x1e\x40\xa6\x4d\x9b\xb6\xb0\x54\x2a\xcd\xb7\x6d\x1b\x29\
\x25\x8e\xe3\x4c\x50\x59\x8e\xe3\x00\xf8\x2a\x4b\xd3\x34\xff\x33\
\x0b\x1b\x66\x13\x4d\x55\x91\x24\x41\x77\x7f\x37\xf1\xf7\x14\xd0\
\xcc\x00\xd1\x44\x9c\x48\xbd\x86\xac\x11\x48\x43\x01\x57\x80\xe0\
\x23\x00\xe1\x70\x98\xdd\xbb\x77\x7f\x14\x38\x26\x40\x9a\x9a\x9a\
\x18\x18\x18\x18\x0f\x88\x34\x4d\x13\xd3\x34\x31\x5d\x9b\x58\xb5\
\x0e\xb6\xc2\xf2\xc9\xef\xa7\xaa\x25\xc4\x4b\x9d\xbf\x62\xfa\xbc\
\x26\x86\x8c\x2e\xf2\x1d\x2a\x4e\xa9\x02\x80\xf7\x9c\xe3\xed\x86\
\xf7\x67\x4f\x32\xc6\xde\x87\xc8\x66\xb3\x44\xa3\x51\x14\x45\x91\
\x7f\x36\x95\x55\x5b\x5b\xbb\x66\xd9\xb2\x65\xf3\xd2\xe9\xb4\x6f\
\x2f\x1c\xa7\xb2\xb3\x3d\x2f\x2b\x18\x0c\x32\x66\xf0\x09\x06\x83\
\xb8\xae\x8b\x65\x59\xbe\xb1\x57\x51\xd8\x17\xee\xe0\xc2\x05\xd5\
\x40\xf1\x2d\x0a\x5d\x92\x5e\x57\x43\xb9\x2d\x02\x80\x65\x59\xc7\
\x45\x22\x06\x06\x06\x68\x6a\x6a\x9a\x04\x2c\x01\x1c\xc3\x30\x4e\
\x4e\x24\x12\x64\xb3\x59\x6c\x5c\xec\x11\x9b\x9d\xcf\x17\xb8\xf2\
\x7d\xf3\x79\xfa\xd5\x35\xcc\x6b\xbe\x82\xf7\xd4\xaf\xe4\xbf\xb6\
\x7f\x8a\x6c\x2a\x87\x2e\xc3\xbe\x9b\xeb\x01\x02\x50\x28\x14\x70\
\x1c\x07\xd7\x75\x29\x95\x4a\x9e\xed\xc0\xf3\x1c\x3d\x4d\xf1\x67\
\x03\x44\x4a\xa9\x14\x0a\x05\x1f\x08\xdb\xb6\xc9\xe5\x72\xd8\xb6\
\x4d\x3e\x9f\xf7\x75\x6b\xa9\x54\x42\x51\x14\xc2\xe1\x30\xb6\x6d\
\x63\xdb\xf6\xa1\x87\x91\x02\xc2\x15\xf1\xff\xc8\xc2\xbb\x91\x8a\
\x4b\xda\xed\x64\xe3\xd6\x2e\x92\x55\xeb\x18\x1d\xfb\x9c\xb7\x23\
\x8f\xd7\x2a\x95\x4a\x37\x9f\x7f\xfe\xf9\x77\xaa\xaa\x8a\x40\x90\
\x30\x33\x4c\x3f\xbd\x1a\x77\x38\x8c\x33\xda\x82\x48\xc3\x8f\xbe\
\xf3\x53\x14\x55\xf0\x1a\x5b\x78\x55\x6e\x26\xa0\xcd\x27\x9f\x6c\
\x27\x1a\x55\x91\x52\x7a\xb6\xc1\x07\xa4\x58\x2c\xfa\xef\x22\x9b\
\xcd\x12\x8b\xc5\x7c\x30\xc6\x49\x4b\x63\x2c\x16\x9b\x95\xcf\xe7\
\x3b\xff\x1c\x80\xf8\x2f\xd8\xb2\x2c\x6c\xdb\xf6\x5f\x9e\xa6\x69\
\x3e\x10\xaa\xaa\xfa\xde\x4b\xb1\x58\xf4\x8d\xbd\x27\x21\x58\x92\
\xd1\x5e\x83\xa9\xef\x99\xcb\x75\x3f\x5a\xca\xcf\x3e\xb2\x81\xa6\
\x93\xd2\xac\xed\xc9\xb2\x27\xfb\x2a\xf9\x9c\xe9\xab\x3e\xa0\xce\
\xbb\xfe\xf4\xe9\xd3\x93\x07\x0f\x1e\x74\x8f\xd2\x6e\x44\xbd\x0d\
\x53\x74\x4d\xb4\x73\xfa\xa9\x9d\x1f\xc1\x92\x83\x48\x1b\x44\x36\
\x86\x9d\x51\x71\x52\x3a\xf9\x2d\xd5\xa0\x48\x1c\x69\xfb\xb6\xc2\
\x93\x90\x54\x2a\x85\xaa\xaa\x38\x8e\xe3\xc5\x1f\x08\x21\xd0\x34\
\x0d\x45\x51\xc8\xe5\x72\x04\x83\x41\x22\x91\x08\xa6\x69\xca\x93\
\x4f\x3e\xf9\x9b\xaa\xaa\xde\xf1\xca\x2b\xaf\x54\xfd\x39\x00\x91\
\x1e\x10\xde\x4e\x51\x14\x05\xdb\xb6\x99\x34\x69\x12\x37\xdd\x74\
\x13\xdf\xf9\xce\x77\xa8\xad\xad\x65\xfa\xf4\xe9\x74\x75\x75\x91\
\xcd\x66\x7d\x63\xed\x38\x0e\x96\x10\xa8\xdb\x5c\x8a\xf9\xa9\xdc\
\xf4\xab\x0f\x50\x1b\x38\x95\x9b\xd6\xdd\x88\xae\xa9\xa8\x76\x08\
\x4d\x6b\x24\x5e\xe3\xc7\x28\xf2\xca\x2b\xaf\xdc\xe7\xfd\xff\xf5\
\xeb\xd7\xcf\x07\xf6\x1d\xad\x94\x78\x2a\xc4\x74\x4d\xe2\xe1\x00\
\x85\x8c\xcb\x4d\x27\xdf\x81\xab\x16\x59\xb3\xf7\x5e\x26\x37\x34\
\x32\x9c\xef\x65\xf4\xd5\x30\x4a\x40\x4e\x50\xc9\xde\x86\x54\x55\
\xd5\x07\x24\x16\x8b\x21\x84\xf0\x37\xa1\xa2\x28\xfe\x35\x02\x81\
\x00\x96\x65\x09\x21\x04\xe1\x70\xb8\x70\xb8\xf7\xa8\x1c\xce\x87\
\xa6\x4c\x99\x32\x69\xca\x94\x29\x2f\xb9\xae\x3b\xcd\xb3\x05\xd9\
\x6c\x25\x08\xbd\xea\xaa\xab\x68\x6c\x6c\xe4\x57\xbf\xfa\x15\xcd\
\xcd\xcd\x00\xac\x5d\xbb\x96\xdb\x6f\xbf\x9d\x50\x28\x84\xa6\x69\
\xbe\x71\x37\x0c\x83\x72\xa9\x84\xd0\x04\xa1\x58\x90\xda\xda\x5a\
\xba\xd5\x21\x16\x7c\xbc\xcc\xac\x8f\x16\x69\xb9\x75\x94\xc9\xd7\
\x24\x89\x9f\x6e\x10\x69\x16\x04\x83\x01\x11\x0c\x06\x09\x06\x83\
\xd4\xd7\xd7\xe3\x38\xce\x31\xe9\x30\xc3\x30\xbc\x00\x0e\x45\x28\
\x9c\x14\x7e\x3f\xa1\x6a\x9b\xe7\xdf\x78\x9d\x45\xda\xf5\xac\x3e\
\xe3\x6e\x6a\x42\xf5\x94\xcb\xe5\xca\xbd\x96\xcb\x98\xa6\x49\x36\
\x9b\xf5\x37\x56\x20\x10\xf0\xa5\xe1\xb2\xcb\x2e\x23\x12\x89\x50\
\x5f\x5f\xcf\xdc\xb9\x73\x7d\x60\x34\x4d\xf3\xb5\x87\xe7\x7d\x1d\
\x57\x40\x92\xc9\x64\xed\x05\x17\x5c\x70\x6e\x28\x14\x8a\x58\x96\
\xe5\x5f\xb8\x5c\x2e\xf3\xe1\x0f\x7f\x18\xc3\x30\xb8\xff\xfe\xfb\
\x59\xbe\x7c\xb9\xaf\x3b\xbd\x1b\x5f\xb0\x60\x01\xb7\xdd\x76\xdb\
\x04\xe3\xee\xc5\x2d\xa6\x61\xe2\x04\x4c\x34\x4d\x25\x9b\x74\xb8\
\x6a\xe1\xc7\xa8\x9b\x1a\x23\xb2\x38\x4b\xed\x15\xc3\xe8\x0b\x52\
\x18\xa5\x31\x6f\xc8\x34\x8f\xd8\x40\x36\x34\x34\x5c\x1e\x8d\x46\
\x6f\x8a\xc5\x62\x37\x4a\x29\x97\x0c\x0d\x0d\x31\x38\x38\x48\x62\
\x68\x84\x81\x37\xb3\xf4\xed\x18\xe0\xb9\x35\x6b\xd9\xf1\xfa\x0e\
\xf6\x6d\xda\xc3\xfd\x0f\x7c\x97\x9d\x2f\x76\x91\x2f\x64\x7d\xe0\
\x3c\x0f\xca\xf3\xa2\xce\x3e\xfb\x6c\xaa\xab\xab\x91\x52\x92\x48\
\x24\x7c\xa3\x9e\x48\x24\x18\x93\x06\x5f\x75\x65\xb3\x59\x5f\x5d\
\x1f\x57\xa3\xee\xf9\xdf\xde\xc5\x15\x45\xe1\xfc\xf3\xcf\xa7\xb5\
\xb5\x95\xc7\x1f\x7f\x9c\xba\xba\x3a\x9e\x7a\xea\x29\x9e\x79\xe6\
\x19\x42\xa1\x10\x57\x5d\x75\x15\xa7\x9f\x7e\x3a\xa9\x54\x8a\xfb\
\xef\xbf\x9f\xbe\xbe\x3e\x0c\xc3\x20\x99\x4c\xfa\xd1\xb8\x69\x9a\
\x20\x41\x6a\x92\x72\xc9\xe1\xae\xf7\x3e\xc5\xa3\xad\xdf\xe0\xa6\
\xf9\x5f\x65\xed\x8e\x17\x50\x1b\xfa\x78\xcd\xde\x56\xb1\x23\x4a\
\x65\x67\x1e\xe9\x9a\x3c\x79\xf2\xb7\x97\x2d\x5b\x36\xcb\xb2\x2c\
\x54\xa1\x50\xac\x4e\xcb\x58\xb5\x26\xdc\xac\x8e\x9d\xd4\xe9\xdb\
\x36\x0c\x02\xea\xb5\x69\x8c\xa6\x73\x24\x64\x96\xa6\xc0\x0a\xf6\
\x8f\xbe\x44\x53\xa0\xc9\x7f\x91\x9e\x4d\xb4\x2c\x8b\x48\x24\xe2\
\x7b\x91\x1b\x37\x6e\x24\x16\x8b\x91\x48\x24\xa8\xae\xae\x46\x08\
\xc1\xb4\x69\xd3\x18\x1e\x1e\x26\x9f\xcf\xfb\x31\xdb\x91\x48\xb6\
\x72\xb8\x80\x78\x60\x14\x8b\x45\xa6\x4d\x9b\xc6\xbf\xfc\xcb\xbf\
\xa0\x69\x1a\xcf\x3d\xf7\x1c\x7d\x7d\x7d\xc4\x62\x31\x82\xc1\xa0\
\xef\xee\x6e\xdb\xb6\x0d\x4d\xd3\x26\xf8\xff\x9e\xcd\x39\x24\x21\
\x95\x28\x3f\xa0\x46\x98\xd6\x38\x85\x84\xd8\x85\xb4\x42\x48\x09\
\x8e\xb4\x70\x1d\x07\xc3\x34\x7c\xd1\xb7\xde\xe2\x07\x37\x35\x35\
\xbd\xd3\x7d\x3b\xe5\x72\x99\x72\xb9\xcc\xee\x6c\x17\x91\x73\x07\
\x45\xe8\xac\x7e\x42\x97\x75\x50\xf5\xe1\x7d\xd4\x7c\xf0\x00\xa1\
\x73\xfb\x11\x33\x93\x95\xeb\x58\x26\x96\x7b\x28\x9e\xf0\x24\xbd\
\xbe\xbe\x1e\x4d\xd3\xd0\x75\x9d\x97\x5e\x7a\xc9\xb7\x29\xba\xae\
\xfb\x52\x14\x0e\x87\x31\x0c\xc3\x07\xcf\xb3\x35\x63\x80\x34\xad\
\x5c\xb9\x52\xd6\xd7\xd7\xcf\x3f\x6e\x46\xdd\x8b\x3b\x4c\xd3\xa4\
\x54\x2a\x31\x30\x30\x40\x24\x12\x61\x68\x68\x08\xc7\x71\xe8\xef\
\xef\xf7\xe9\xe8\xd1\xd1\xd1\x0a\x97\x35\x65\x0a\x9f\xfd\xec\x67\
\x71\x1c\x87\x60\x30\x88\xaa\x1e\x72\x1f\xbd\x77\xeb\xb4\x97\xd8\
\xf0\xef\x2a\xa7\xfe\xdb\xa9\x00\x3c\xcf\x87\xde\xf6\xfa\xd9\x6c\
\x96\xf9\xf3\xe7\x77\x79\xee\xf0\xd0\xd0\xd0\xbf\x0d\x0c\x0c\xdc\
\xf1\x4e\x5e\xa1\x07\xa6\x29\x2d\x14\x14\xc8\x4c\xe6\x8e\xcb\xbf\
\x47\x7b\x7a\x33\xdf\x7b\xf9\xdf\x58\xbc\x64\x0e\xdd\x75\x9d\x64\
\x77\x69\xbe\x34\x18\x86\x41\x2a\x95\xf2\x8d\xf3\xf5\xd7\x5f\xcf\
\x9a\x35\x6b\xc8\x64\x32\xd4\xd5\x55\x9c\xbe\xd1\xd1\x51\xff\x1a\
\xf5\xf5\xf5\x98\xa6\x49\x38\x1c\x66\x68\x68\x08\x55\x55\x89\xc7\
\xe3\xe8\xba\x4e\x2e\x97\x03\x60\xee\xdc\xb9\xec\xda\xb5\xcb\x39\
\x2e\x80\x58\x96\x55\xf0\x5e\xa0\xa2\x28\x14\x8b\x45\x3e\xf3\x99\
\xcf\x50\x5d\x5d\xed\xe7\x3e\xc6\x7b\x22\xde\x9f\x93\xc9\x24\x81\
\x40\xc0\x07\xe4\x87\x3f\xfc\x21\x86\x61\xf0\xf1\x8f\x7f\xdc\xe7\
\xbb\x54\x54\x96\x2c\x59\x52\x51\x89\xd2\x21\x39\xf7\x00\x73\x16\
\x46\x70\x72\x2a\xd6\x70\x08\x7b\x28\x88\x35\x18\x44\x48\x05\xa1\
\xbb\xfe\x35\x86\x86\x86\x0e\xdb\x90\xdb\x96\x8d\xab\x3a\x08\x45\
\x70\xdd\x82\x4f\xb3\xbe\xf7\x49\x9c\x91\x69\x5c\x37\xf7\x4b\x9c\
\x7f\xea\x99\xdc\xfe\xe8\xe5\xbe\x6b\xab\xaa\xaa\x1f\x03\x79\x6c\
\xc3\x96\x2d\x5b\x2a\xe4\xe3\x98\x73\x32\x69\xd2\x24\xa4\x94\x08\
\x21\x28\x95\x4a\xfe\xc6\x54\x55\x95\x50\x28\x44\xb1\x58\x24\x12\
\x89\xf8\xa1\x80\x65\x59\xfe\xf7\x1f\x33\x20\xb5\xb5\xb5\xb7\x2c\
\x5d\xba\xf4\x2e\x8f\x2e\x39\xe1\x84\x13\x48\xa5\x52\x04\x02\x01\
\xd2\xe9\xb4\xaf\x82\xa4\x94\x28\x8a\xe2\xb3\x9f\xde\xee\x1a\xa3\
\x16\x5c\x65\x2c\x9a\x9a\x36\x6d\x1a\x8e\xe3\x50\x28\x14\x10\x08\
\x6c\x69\x51\x2c\x16\x29\x97\xcb\xf4\x59\x49\x5a\xa6\x3b\xc8\x48\
\x89\xc6\x49\xd3\x49\x4d\xed\x43\x0a\x07\xd3\x35\x51\x4a\x61\x86\
\x1e\x99\x5c\x49\x74\x1d\x01\x45\x62\x59\x16\xb6\x65\x21\x15\x89\
\xa2\x09\x1a\xaa\x9b\x39\x58\xfa\x3d\xc9\x24\x64\xdd\x7e\xea\x4e\
\x30\x08\x07\x23\x5e\xcc\x33\x41\xe5\x78\x2a\xb6\xa7\xa7\x87\x50\
\x28\x84\xeb\xba\xac\x5e\xbd\x9a\xad\x5b\xb7\xd2\xd9\xd9\xe9\x7b\
\x52\xaa\x5a\x61\x80\x6b\x6a\x6a\xc8\x66\xb3\xa8\xaa\x4a\x2c\x16\
\xf3\xd5\x9e\x17\xbb\x1d\x8e\x53\xf2\x8e\x80\x98\xa6\x39\xb5\xa9\
\xa9\xa9\xa6\xa7\xa7\x07\xd3\x34\xb9\xe7\x9e\x7b\xd8\xb3\x67\x0f\
\x37\xde\x78\x23\x3f\xf8\xc1\x0f\x48\x24\x12\xd4\xd6\xd6\x12\x0c\
\x06\x79\xe9\xa5\x97\x58\xb8\x70\x21\xbb\x76\xed\xa2\xa7\xa7\x87\
\xb9\x73\xe7\xb2\x6d\xdb\x36\xf6\xef\xdf\x7f\xfb\x82\x05\x0b\xbe\
\xf3\xec\xb3\xcf\x72\xd1\x45\x17\x51\x2e\x97\x7d\x95\x25\x74\xc5\
\xf7\xbe\x5c\xc7\x45\x55\x04\xa1\xec\x7c\x3e\x71\xd1\x9d\x68\x32\
\xca\x17\x9e\xf8\x08\x1f\x3e\xfb\x46\x7e\xb6\xfd\x6e\x2c\xcb\xac\
\xe4\x52\xfe\xc8\x83\x0d\x0c\x0c\x30\x6b\xd6\xac\x3b\xa3\xd1\xe8\
\x54\x00\xe9\xca\x29\xc9\xd1\x14\xd2\x75\x41\xb7\xe9\xfc\xa5\xc2\
\x77\x5f\xf8\x16\x69\xf7\x20\x46\x36\x88\x4d\x89\xee\x75\x9d\x74\
\x0d\xe6\x30\xcd\xb0\x0f\x48\x38\x1c\x66\xc6\x8c\x19\x0c\x0e\x0e\
\x62\x9a\x66\xaa\x54\x2a\xf5\x38\x8e\xb3\x44\x51\x14\x71\xd7\x5d\
\x77\x49\xd7\x75\x85\xb7\x01\x3d\x1b\x0b\xf8\x86\x1c\x20\x95\x4a\
\x11\x8b\xc5\xfc\x4d\x7a\xb8\x34\x8a\x76\x38\x06\xdd\x34\x4d\x0a\
\x85\x02\xaa\xaa\xf2\xa3\x1f\xfd\x88\x6f\x7f\xfb\xdb\xfc\xf4\xa7\
\x3f\x65\xfd\xfa\xf5\xa8\xaa\x8a\xae\xeb\x68\x9a\x46\x2a\x95\x62\
\xef\xde\xbd\xe4\xf3\x79\xca\xe5\x32\x89\x44\xc2\x53\x5f\x21\x55\
\x55\xd9\xb0\x61\x03\xaf\xbf\xfe\xba\xef\xab\x8f\xbd\x34\xdf\xdf\
\xb7\xb1\x71\x71\x59\xd6\x70\x19\x8f\x6c\xfb\x2a\xb3\xb4\xf3\xb9\
\xf6\xa4\xd5\xb4\xd4\x4c\x41\xa0\x54\xd4\x9c\x22\xde\xc9\xb3\xfa\
\xfb\xe9\xd3\xa7\xd7\x0b\x21\x28\x38\x65\x58\x38\x4c\x7d\x3c\x8c\
\xd3\x1f\xc1\xea\x0b\x93\x19\x4d\x22\x88\x11\xaa\x98\x65\x8a\x43\
\x92\x46\x71\x22\x1d\x66\x87\xcf\x38\x54\x57\x57\x13\x8f\xc7\x49\
\xa5\x52\x98\xa6\xf9\xec\x9e\x3d\x7b\x6e\x8a\xc7\xe3\xd7\x5a\x96\
\x35\xcb\xb2\xac\xaf\x5d\x70\xc1\x05\xac\x58\xb1\x82\x47\x1f\x7d\
\xd4\xb7\xab\x5e\x10\xe9\x51\x4a\x8a\xa2\x30\x3c\x3c\x4c\x34\x1a\
\x3d\x22\x5e\xeb\xb0\x00\xf1\xbe\xcc\x73\xf5\x4e\x3b\xed\x34\x5f\
\x2d\x8d\xb7\x1f\x42\x08\x7a\x7b\x7b\xfd\x3f\x1f\x38\x70\xc0\xf3\
\xbc\xd2\xfb\xf7\xef\xff\xab\xb1\xe8\x95\x50\x28\xf4\xf8\x89\x27\
\x9e\xa8\x7e\xf1\x8b\x5f\xe4\x73\x9f\xfb\x1c\xa6\x69\x56\xf2\x29\
\xaa\x8d\x10\x10\xd0\x22\x38\xae\x8d\x61\x9a\x64\x64\x8a\xae\x4c\
\xe2\x10\x85\x21\xf0\x77\xe6\xdb\x2d\x8f\x85\x05\x78\x23\xbf\x9f\
\xeb\x4f\xaf\xc1\xb2\x73\x94\x5a\x06\xa9\xd6\xc2\x08\x47\x45\x1a\
\x2a\xc5\x3d\x51\xf2\xdb\x62\x20\x2a\x3b\x3d\x95\x4a\xf9\xd7\x28\
\x95\x4a\xa4\x52\xa9\x09\xcf\x98\x4e\xa7\x7f\x01\x9c\xb8\x6a\xd5\
\xaa\xaf\xdd\x79\xe7\x9d\xe8\xba\x8e\x61\x18\x74\x75\x75\xb1\x74\
\xe9\x52\x7e\xf9\xcb\x5f\xf2\xbe\xf7\xbd\x8f\x2d\x5b\xb6\xb0\x67\
\xcf\x1e\x2e\xbd\xf4\x52\x86\x86\x86\x58\xbb\x76\xad\xbf\xa9\x17\
\x2c\x58\xf0\x66\x6b\x6b\xeb\xaa\x6c\x36\xbb\xe5\x98\x25\xc4\x33\
\x70\x93\x27\x4f\x66\x68\x68\xc8\xb7\x15\xde\x4d\xbb\xae\xeb\xff\
\xf2\x78\xac\xb1\x07\x7c\x42\xd7\xf5\xbd\xc3\xc3\xc3\xaf\x7a\xdf\
\xd9\xd2\xd2\x22\x9f\x7b\xee\x39\x5e\x7c\xf1\x45\x96\x2f\x5f\xce\
\x86\x0d\x1b\x2a\xbb\x2c\xe4\x32\xb4\xce\xe2\xf9\xf8\xd3\xc8\xc8\
\x28\xfd\xca\x1b\x18\x56\x89\xaa\x1a\x9d\x83\x43\x06\x85\xa2\xed\
\xdf\x53\x2c\x16\x3b\x77\xf1\xe2\xc5\x5f\x02\x84\x61\x18\x76\x5b\
\x5b\xdb\x57\x3c\xbe\xc9\xf7\x8e\x85\x44\x22\x59\x20\xae\x67\xc5\
\x7b\x4e\x21\x6d\xf6\xb3\x61\xdf\x6b\x64\x42\xfb\x29\x06\x6c\x2c\
\xcb\x44\x22\xd1\x34\xcd\x27\x32\x5d\xd7\x95\xaa\xaa\x0a\xc7\x71\
\x88\xc7\xe3\x24\x12\x89\xf1\xe8\x2b\x43\x43\x43\xec\xde\xbd\x9b\
\xe6\xe6\x66\x3c\x97\x7a\xe3\xc6\x8d\x00\xbc\xf6\xda\x6b\x94\xcb\
\x65\x74\x5d\xe7\xb5\xd7\x5e\xa3\xa1\xa1\xc1\xbf\x5f\xc3\x30\xa8\
\xa9\xa9\x89\x59\x96\xa5\x1f\xab\x97\x25\x6d\xdb\x46\x55\x55\x1e\
\x7e\xf8\x61\xa6\x4e\x9d\xca\x23\x8f\x3c\xc2\xce\x9d\x3b\x09\x85\
\x42\x24\x93\x49\xa2\xd1\x28\x83\x83\x83\x04\x02\x01\x62\xb1\x18\
\x23\x23\x23\x64\x32\x19\xd2\xe9\x34\x6d\x6d\x6d\x57\xbf\xdd\x2e\
\x7e\xf6\xd9\x67\xa9\xad\xad\x25\x97\xcb\xf9\x34\x45\xa9\x54\xa4\
\x2e\xb9\x08\x07\x0b\x91\xa9\xa1\xcf\x19\x22\x34\xbd\x8c\x99\x0d\
\xd0\xc8\x3c\x26\xaf\xa8\xe4\xb5\xa5\x25\x40\x61\x95\x10\xac\x1a\
\x8b\x69\x4a\x1e\x20\xe3\x25\x44\x00\x03\xfd\x79\xfe\xf3\x6f\x3e\
\xce\xc7\x7f\x7d\x36\x5f\x3e\xe7\x69\x6a\xec\x79\xa8\x35\x29\xbe\
\xb5\xe9\xab\x18\x86\x09\xa2\xc2\x59\x79\x2a\x54\xd3\x34\x31\x38\
\x38\xd8\x39\x38\x38\xb8\x4d\x08\xe1\x06\x02\x81\xb5\x6f\xdd\xa0\
\xab\x57\xaf\xc6\xb6\x6d\x42\xa1\xd0\x04\x35\x74\xf0\xe0\xc1\x09\
\x3f\x1f\x38\x70\xc0\xb7\x21\x85\x42\xe1\x0f\x3e\x7f\x54\x80\x28\
\x8a\x32\xc7\xb2\x2c\x54\x55\xa5\x54\x2a\x51\x2c\x16\x99\x3b\x77\
\x2e\x6f\xbe\xf9\x26\x4b\x96\x2c\x61\x78\x78\x98\xda\xda\x5a\xb2\
\xd9\x2c\xe5\x72\x99\x48\x24\xc2\x39\xe7\x9c\xc3\x33\xcf\x3c\xf3\
\x47\x2f\xae\x69\x1a\x5f\xfa\xd2\x97\x7c\x43\xe8\xc5\x0b\x05\xa5\
\x4c\xa9\x54\x42\xa4\x05\x42\x08\x76\xe8\xfb\x78\xef\x19\x35\xd8\
\xd6\x78\xcf\x4a\xe2\x18\x0a\x99\x17\xea\x30\x87\x03\x80\xfc\x03\
\xb0\x7d\x9a\x45\x07\xc7\x86\x80\xae\xa3\x29\x01\x10\x90\xc8\x0f\
\xf2\x46\xff\x03\x48\x57\x8e\xa9\xc0\x8a\x94\x37\x34\x34\xf8\x36\
\x40\x51\x94\x5f\x67\x32\x99\xd5\x6f\x17\x8e\x79\xae\xbc\xb7\xeb\
\x0f\x33\xed\xed\x7f\xff\xb1\x02\x32\xbd\xb6\xb6\xf6\x46\xc3\x30\
\xa4\xa6\x69\xe2\xee\xbb\xef\xa6\xb1\xb1\x91\xbd\x7b\xf7\xe2\xba\
\x2e\x83\x83\x83\x18\x86\x41\x38\x1c\xa6\x50\x28\x20\xa5\x64\x74\
\x74\x94\x5d\xbb\x76\xfd\x49\xee\xa9\x58\x2c\xe6\xa5\x94\xde\xb5\
\x83\xe1\x70\x58\x77\x1c\x07\xdd\x52\x71\xdc\x43\x2a\x47\x09\x09\
\x2c\xc7\x66\x45\xf4\x16\x16\xcf\x5c\xc0\x96\xe4\xe3\xe4\x92\x1a\
\xbb\xdd\xe7\x70\xb4\x18\x96\x09\x12\x39\xc1\xa6\xb8\xae\x2b\x7d\
\x03\x1a\x12\xc4\x27\x85\x28\x18\x65\x02\x21\x95\x9a\xaa\x5a\x4c\
\xa5\x88\x16\xd2\x40\xa9\x00\x22\x91\xbe\xfa\x6d\x6e\x6e\x66\x68\
\x68\x88\xb7\x2b\x4d\x02\x08\x04\x02\xed\x1d\x1d\x1d\x8d\xf3\xe7\
\xcf\x1f\x9a\x32\x65\x0a\x37\xdc\x70\x03\x8f\x3d\xf6\x98\x6f\x7b\
\xbc\x80\xd7\xdb\x60\x1e\xc3\xa1\xeb\xba\xcf\x8a\x1f\x2b\x20\x11\
\x21\x04\xb9\x5c\x4e\xd4\xd7\xd7\x33\x3c\x3c\xcc\xe0\xc0\x10\x20\
\x10\x02\x1f\xf1\x72\xb9\xec\x07\x54\xde\x36\x8e\xc5\x62\xa2\x54\
\x2a\xbd\x6d\xf4\x2c\x84\xf0\x2b\xe9\x66\xce\x9c\xf9\xa0\xaa\xaa\
\x1f\xbe\xfc\xf2\xcb\xfd\x00\xec\x90\x2b\x29\x49\x0d\x0a\xfe\xee\
\xe6\x9b\xb9\xe9\x67\x4b\xf9\x97\xb3\x1f\xa3\x18\x2f\x30\xd3\x6c\
\xe6\x27\xbf\x79\x02\xc3\x90\x9e\x0d\x08\x5d\x74\xd1\x45\x23\x80\
\x14\x8e\xa8\xb3\x2c\x89\xa2\xc0\x22\x3b\xc2\xf0\xc3\x0a\xd7\x3f\
\x74\x2d\x52\x09\x71\xfd\x7f\x5d\x83\x65\x5b\x04\xb5\x08\xdb\x5b\
\x7b\xa9\x8a\xc4\x7d\x4a\xdd\x23\x3f\xff\x18\x18\xe3\xf2\x22\x32\
\x16\x8b\xf1\xe2\x8b\x2f\xb2\x71\xe3\x46\x5a\x5a\x5a\xd8\xba\x75\
\x2b\xe1\x70\x98\xf5\xeb\xd7\xfb\x36\x31\x12\x89\xd0\xd2\xd2\xc2\
\xae\x5d\xbb\x18\x1d\x1d\xf5\x93\x74\xc7\xac\xb2\xa4\x94\xe8\xba\
\xce\xbe\x7d\xfb\x90\xae\xa4\x66\xb1\x4e\x24\xac\x92\xeb\xb7\x29\
\x0e\x39\x48\xf7\x6d\xd5\x9c\x28\x97\xcb\x9d\x8a\xa2\x7c\xf3\xed\
\x9c\x84\xb7\xb0\x00\xfa\x07\x3e\xf0\x01\xe6\xcc\x99\xc3\x07\x3f\
\xf8\x41\x6e\xbd\xf5\x56\x3f\xcf\x20\x10\x54\x05\xe3\xd8\xb2\x44\
\xbc\xaa\x8a\xe6\xda\xa9\x3c\xbf\xe3\xd7\x3c\xd1\x77\x1f\xb6\xd5\
\x88\x69\xb9\x50\xd9\xe1\xa2\xa6\xa6\x66\x92\x40\xd0\x35\x6d\x0f\
\xa7\x5d\x14\xc6\x18\xac\xd4\x55\xd9\x83\x01\x9c\xac\x06\x96\x8e\
\x5b\x52\x50\x75\x0d\x4d\x53\x29\xe5\x4d\x6c\x23\xe5\x25\xdd\x64\
\x34\x1a\x15\x23\x23\x23\x84\x42\xa1\x77\xe4\xf8\x1c\xc7\xe1\x95\
\x57\x5e\x21\x1e\x8f\xe3\xba\x2e\xfd\xfd\xfd\x7e\xa2\xca\x4b\xc8\
\x79\xec\x6f\x5d\x5d\x1d\x03\x03\x03\x68\x9a\xe6\x05\x91\xb1\xe3\
\x02\xc8\xec\xd9\xb3\x2b\x14\x40\xb2\x22\x9a\x51\x29\x89\xd4\xcb\
\x09\xa5\x3b\x9e\x97\x35\xb6\xe3\x7a\x06\x06\x06\xfe\xeb\x70\xe9\
\x8d\xb9\x73\xe7\xfa\x51\xed\x21\x75\x27\xb0\x5d\x13\x45\x68\x80\
\xa0\x64\x15\xd1\x54\x8d\xa0\x16\xc5\x71\x5c\x4c\xd3\xf2\x83\x39\
\xcb\xb2\x28\x38\x65\x6a\xa7\x29\x48\x5b\x25\x5a\x1f\x46\x99\x62\
\x61\xcc\x4f\xf8\x9b\x60\xe0\xa1\x49\x48\x5b\xf1\x3d\x41\x8f\x5b\
\x53\x55\x55\x74\x75\x75\x3d\xe0\xed\x11\x5d\xd7\x7f\xfd\xa7\xee\
\xd7\x75\x5d\x3e\xf9\xc9\x4f\xfa\xdf\x33\x7e\xed\xdb\x77\x28\x7f\
\xd6\xdf\xdf\xef\x7b\xa1\xf1\x78\x1c\xc7\x71\xa8\xaa\xaa\xfa\x72\
\x3a\x9d\x7e\xe1\x98\x00\x59\xb1\x62\x05\x97\x5d\x76\x19\x67\x9f\
\x7d\x36\xdf\xfb\xde\xf7\x68\x6d\x6d\xe5\xe4\x93\x4f\xe6\xf5\xd7\
\x5f\x67\xee\xdc\xb9\x64\x32\x19\x5a\x5b\x5b\x09\x87\xc3\xcc\x9c\
\x39\x93\x37\xde\x78\xe3\xb0\x29\xf2\x40\x20\xc0\x0b\x2f\xbc\xc0\
\xde\xbd\x7b\xe9\xed\xed\xf5\x19\x54\x21\x04\x85\xd6\x1c\x43\x46\
\x23\xff\xeb\xa5\x5b\x18\xcc\xea\x7c\xea\xb1\x4f\x60\x18\x36\x79\
\x27\x8e\x5d\x14\x44\x22\x87\xa4\xae\xa2\x1e\x25\xca\xeb\x93\x39\
\xf8\x86\xe0\xc1\x07\x7e\xc4\xfe\x9e\x2e\x7e\xbb\xf7\xa7\xfc\xed\
\xaa\x8f\x72\xf7\xba\x5b\x31\x4d\x0b\x69\x1f\x02\x42\xd7\x75\x9f\
\x4a\x1f\x19\x19\xf9\xc8\xe1\xde\x73\x3e\x9f\x27\x12\x89\x1c\x02\
\xc3\x01\xcb\x74\xc7\xee\x05\x84\x2a\x50\xd4\x31\x6d\x30\xf6\x91\
\xd1\xd1\x51\x5f\x3d\x1e\x8b\x84\xb8\xde\x2e\x88\xc7\xe3\x84\xc3\
\x61\x9f\x74\x0b\x04\x02\x2c\x5d\xba\x94\xee\xee\x6e\xaa\xab\xab\
\x7d\xa3\xa5\xeb\xfa\x3b\x5e\xf4\x2d\x2a\x2b\xa0\x28\x0a\x07\x0f\
\x1e\xf4\x8a\x06\x64\xb9\x5c\x16\x00\xe5\x62\x91\x69\x2b\x66\x92\
\xcb\xe5\x88\x69\x75\xa4\x67\x1c\xa0\x69\x46\x80\xaa\xb4\x8e\x93\
\x1d\xfb\x95\xd6\x70\x73\x1a\x62\xec\x49\xc2\xe1\x30\xe9\x54\x9a\
\xf5\x3d\xcf\xb2\xe9\xcd\x2e\x2e\x3e\xf9\x06\x1a\x63\xd3\x11\x28\
\x95\x58\xc7\xaa\x48\xd4\xbc\x79\xf3\xfc\x6c\xe6\xe1\x12\x95\x00\
\x35\x35\x35\x23\x1d\x1d\x1d\x4d\xa7\x9c\x72\xca\x40\x2a\x95\x42\
\x48\x70\x67\x0b\x96\x5f\x59\x43\x7e\xc0\xa1\x38\x6c\x93\x1b\x70\
\x28\x0e\xd9\x18\x19\x17\xcf\x24\x79\xef\xf0\x4f\xd9\xa8\x77\x04\
\x24\x1a\x8d\x2e\x07\xd8\xb6\x6d\x1b\x89\x44\x82\xe1\xe1\x61\x8a\
\xc5\x4a\xe9\xce\xee\xdd\xbb\xfd\xc0\xd0\x03\xcd\x75\x5d\x5e\x7e\
\xf9\xe5\x23\x4a\x26\x25\x93\xc9\x2f\x0e\x0d\x0d\x7d\x77\xcc\x8d\
\x9c\xbe\x60\xc1\x82\x87\x0f\x79\x23\x95\x0a\x96\x72\xb9\x4c\xca\
\xcd\x31\x75\xae\x89\xa8\xb2\x69\x9c\xd6\x44\xd9\xcd\x51\x72\x12\
\x28\x0a\xd8\x79\x95\xe1\x9f\x4e\xf6\x1f\x5c\x4a\x89\x57\xb2\x9e\
\x29\x8d\x92\x35\x12\x08\x45\x4c\x00\x24\x93\xc9\x70\xf5\xd5\x57\
\xf3\xf4\xd3\x4f\xfb\x31\xc8\xe1\xac\x4c\x26\x83\xae\xeb\x52\xd3\
\x34\x9a\x9a\x9a\x2a\x36\xa2\x7f\x80\x8e\x07\x15\x2c\xc7\x45\x28\
\x3a\xae\xab\xa2\x11\x40\x09\xbb\x7e\x4e\xc4\x53\xe5\xc7\x04\x88\
\xae\xeb\x7f\xe3\x65\xf8\xfa\xfa\xfa\xfc\xa4\xcc\xf8\xdf\x8f\x75\
\x15\x0a\x85\xbd\xc0\xde\xb1\x1f\xe7\x7b\x8c\x80\x10\x02\xa5\x50\
\x79\x89\xa6\x61\x62\x2b\x0e\xaa\xaa\xd0\xa2\xae\xe2\xef\x56\xde\
\x4a\x4d\xa0\x9e\x0f\x7c\xef\x7c\x6e\xbb\xea\x56\x7e\xb1\xfe\xfb\
\x13\xe8\x73\xdb\xb1\x59\x30\xe9\x34\x92\x53\x54\x06\x72\xdd\x04\
\xd5\xd3\x51\x50\x30\xcd\x32\xae\x55\x89\x83\x32\x99\x0c\x4f\x3f\
\xfd\xb4\xef\x96\x1e\xe9\x52\x14\x85\x07\x1e\x78\x80\xe1\xe1\x61\
\x84\x10\x3c\xfe\xf8\xe3\xc4\xe3\x71\x3a\x3b\x3b\x69\x6e\x6e\x66\
\xcb\x96\x2d\xcc\x9a\x35\x0b\xc7\x71\xd8\xb7\x6f\x1f\xcd\xcd\xcd\
\x74\x74\x74\x1c\x1b\x97\x25\xa5\x54\x63\xb1\x18\x7b\xf6\xec\x41\
\x08\x41\xf4\x7c\x9d\x15\x17\xd5\x20\x01\x23\x27\x49\xee\x35\x18\
\x6d\xb7\xc8\x74\x98\x98\x85\x43\xf9\x90\x9a\x9a\x9a\x11\xe0\x7d\
\x47\x5b\xa6\xfa\xd3\x9f\xfe\x94\x42\xa1\xc0\xe7\x3f\xff\x79\xbf\
\x30\xc1\x52\x4d\x84\x90\x9c\x3c\xe5\x6c\x7e\xd7\xfe\x00\xa9\xce\
\x06\xae\x58\xf8\xb7\xcc\xa8\x59\x84\xed\x54\xaa\x60\x3c\x09\xe9\
\xee\xee\xe6\x6b\xff\xfb\xbb\x95\xc4\x56\x29\xc5\xc6\x9f\xec\xe2\
\xc0\x20\x64\x53\x79\x8f\x02\x92\x8a\xa2\x08\xaf\x8e\xea\x9d\x76\
\xed\xdb\xd9\x55\x8f\x5d\x9e\x3f\x7f\x3e\x5d\x5d\x5d\x8c\x8c\x8c\
\x78\xcf\xce\xc9\x27\x9f\xcc\xe6\xcd\x9b\x19\x19\x19\x61\xd1\xa2\
\x45\xa4\x52\x29\x46\x47\x47\xd1\x34\xed\xd8\x24\xc4\x8b\xaa\x3d\
\xef\xc9\xd9\xa9\xf1\x66\x47\x96\x7c\xbf\x83\x55\x94\x28\x9e\xf1\
\x52\x55\x54\x55\xfa\xb9\x91\x40\x20\xe0\x0e\x0c\x0c\x1c\x55\xd3\
\xca\xbc\x79\xf3\x68\x6e\x6e\x66\x74\x74\x94\xe6\xe6\xe6\x4a\x85\
\xbd\x65\xe2\xe0\x00\x02\x4d\x09\x60\x4b\x0b\xdb\xb1\x18\x48\x0f\
\xd3\x95\x0e\xa3\x09\xdd\x8f\x5f\x34\x4d\x03\xcd\xa1\xa4\xa6\x70\
\xb2\x1a\x8a\xd4\xc9\xe6\xb2\xc4\xf4\x38\xb9\x5c\x07\x8a\xa2\xa0\
\xeb\xba\x28\x14\x0a\x3b\x37\x6d\xda\xf4\x4f\x42\x08\x55\x51\x94\
\x23\x2e\x91\x54\x55\x95\xaf\x7c\xe5\x2b\xcc\x9f\x3f\x9f\xcd\x9b\
\x37\xa3\xeb\x3a\x1d\x1d\x1d\xb8\xae\xcb\xab\xaf\xbe\x8a\xa6\x69\
\x24\x93\x49\xda\xdb\xdb\xc7\x9b\x00\xa4\x94\x35\xc7\x22\x21\x48\
\x29\xf9\xd1\x8f\x7e\xc4\xac\x59\xb3\xb8\xf7\xde\x7b\x2b\x65\x93\
\x73\x4a\x7e\x92\xaa\x58\x2c\x22\xa5\x64\xd2\xa4\x49\x74\x75\x75\
\x91\xc9\x64\x8e\xb8\x3a\x64\x42\x85\x79\x5b\x1b\x2f\xbd\xf4\x12\
\x2d\x2d\x2d\x1c\x38\x70\x80\x78\x3c\x5e\x49\xc3\x62\xa3\x6b\x21\
\x52\xc6\x20\xd3\xa7\xcc\xa3\x7e\x5a\x1d\x9b\x7a\xba\xa9\x8d\x4c\
\x1e\x8b\xba\x0f\xb9\xc0\x7a\x5c\x52\xf7\xfe\x01\x46\x9e\xae\xc3\
\xe8\xad\xd0\x2b\x8e\xe3\xf8\xe5\x3b\x63\xbf\x27\x8b\xc5\xe2\xf3\
\x47\x73\x8f\x63\x84\xab\xab\xeb\xba\xb2\x73\xe7\xce\x4a\x0b\x85\
\x00\x55\x55\x90\x12\x62\x91\x18\x96\x6b\x55\x1c\x2c\x81\x1f\xab\
\x8d\xd5\x78\x2d\x38\x26\x09\x99\x37\x6f\x1e\xb3\x67\xcf\xe6\x84\
\x13\x4e\x60\xd9\xb2\x65\x6c\xdd\xba\x95\x65\xcb\x96\xf1\xe4\x93\
\x4f\x72\xf1\xc5\x17\xb3\x7b\xf7\x6e\x36\x6c\xd8\xc0\x99\x67\x9e\
\x49\x73\x73\x33\xcf\x3e\xfb\xec\x9f\xa4\xc7\xdf\x69\x45\x22\x11\
\xbe\xf5\xad\x6f\x21\x84\x60\x68\x68\xa8\x52\x01\x68\x98\xd8\xaa\
\xcb\x86\xff\x1e\x64\x93\xf9\x55\xe6\x35\x2e\xc1\x95\x2e\x3b\xfa\
\x36\xd1\xfa\x68\x1b\x1d\x23\xbd\x14\x86\x0e\xbd\x2c\x4d\x53\x59\
\xf7\xb5\x61\x8c\xd1\x51\x9c\x92\x64\xc6\x8c\x19\xe8\xba\xce\xd1\
\xaa\xa8\xb7\xf1\x0c\x87\x76\xee\xdc\x79\xf1\xac\x59\xb3\x5e\x28\
\x95\x4a\xb8\x2a\x4c\xbd\x21\xca\xc9\xcb\x63\xd8\x8e\x43\x53\x78\
\x1e\xc3\x46\x3b\x8e\xe1\x72\xf0\xd5\x12\xfb\x9e\xc8\xa3\xa8\x15\
\xa7\x27\xe2\xf9\xea\x47\x0b\x48\x67\x67\x27\xbf\xfc\xe5\x2f\x39\
\xe5\x94\x53\x78\xe5\x95\x57\xc8\xe5\x72\xf4\xf4\xf4\x60\x18\x06\
\x4f\x3e\xf9\xa4\x5f\x99\xf1\x9b\xdf\xfc\x86\x40\x20\xe0\xa7\x3e\
\x8f\x72\x8d\x0e\x0d\x0d\x7d\x43\xd7\x75\x6b\xec\xc1\x3f\x98\x4a\
\xa5\x66\xe4\xf3\x79\x2c\x53\x32\x2b\x3c\x83\x19\x33\xa6\x57\xa4\
\x54\x1a\x2c\xbb\x7e\x16\x55\x91\x32\x0d\xc3\x27\x63\x0d\x06\x71\
\x46\x2b\x69\x27\xa1\x48\x50\x24\x4c\x83\x5c\x2e\xc7\xc8\xc8\x08\
\xd1\x68\x94\x78\x3c\xce\x69\xa7\x9d\xc6\xcb\x2f\xbf\x7c\x5c\x1c\
\x12\x3f\xcd\x2b\xa1\xd4\x6a\xb3\x63\x7f\x8e\x7c\xaf\x83\x6d\x6c\
\xc4\x2e\x4a\xec\x72\xa5\x3f\x51\x0f\x68\xbe\x23\x70\xac\xd4\x49\
\x34\x1a\x8d\xf2\xf4\xd3\x4f\xf3\xc4\xe3\x4f\x1c\xf2\x46\x84\xc7\
\x49\x29\x7e\x4f\x8b\x50\x2a\x89\xac\xea\xea\x6a\x69\xdb\xf6\xd1\
\xea\xac\x91\xde\xde\xde\xff\xed\xfd\x50\x5f\x5f\xbf\x5c\x51\x94\
\x19\xaa\xa2\x80\x03\xae\xe3\x92\xcf\xe7\x11\xc0\x66\xab\x8d\x4b\
\x5a\xa2\xb8\x4a\x8e\xe0\xd4\x22\x8a\x2c\xa1\x6a\x0a\x76\x5a\xa7\
\xbc\x2f\x46\x7e\x5b\xd4\x67\x01\xbc\xea\xc1\x7c\x3e\x5f\xc9\xe5\
\x8b\xe3\xd3\x31\x67\x59\x16\xb7\xdc\x72\x0b\x00\x2f\xae\x7d\x11\
\x51\x56\xd0\xb4\x02\x42\x17\x18\xba\x81\xac\x96\x7e\x6c\x94\x48\
\x24\x0e\x2b\x6b\xa8\xbd\x83\x41\xaf\xf7\x92\xf9\xae\x0a\x93\xe6\
\x05\xd0\x75\x81\xb4\x75\xe6\x4e\x5e\x48\x47\x6a\x07\x28\x50\x4e\
\x39\xa4\xbb\x7c\x1d\x2e\x6c\xdb\xee\xe0\xf8\x2c\x25\x10\x08\x30\
\x7d\xe1\xc2\x4a\x46\xcf\x1a\xa3\xd6\xbd\x88\x58\x11\xac\xac\xbe\
\x85\xf7\x9d\x7e\x2d\x3b\x86\x5f\xe6\x85\x2d\x2f\xa1\x35\x0f\xd3\
\xd6\x37\xe0\x13\x85\x5e\xc2\xca\x8b\x05\xd6\xaf\x5f\xef\xf1\x55\
\xc7\xbc\xce\x3e\xfb\x6c\x2e\xb8\xe0\x02\xe6\xcc\x99\x43\xa1\x50\
\xa0\xb7\xb7\x97\x8b\x2f\xbe\x98\xc7\x1f\x7f\x9c\xf3\xce\x3b\x8f\
\x62\xb1\xc8\xab\xaf\xbe\xca\xfc\xf9\xf3\x89\xc5\x62\x3c\xf5\xd4\
\x53\x7e\x09\xee\x51\x01\xa2\x28\x8a\xa8\xaf\xaf\xa7\x90\x2f\x60\
\x45\x24\x4b\x3f\x52\x47\x75\x8d\x86\x74\x25\xc9\xb6\x7e\xea\xf6\
\x55\x31\xbc\xdd\xc0\x1a\x75\x88\xc5\x82\xbe\x27\x31\x34\x34\xf4\
\xe6\xf1\x78\x60\xd7\x75\xb9\xfd\xf6\xdb\xa9\xae\xae\xe6\x8a\x2b\
\xae\xe0\xa2\x8b\x2e\xaa\xe4\x20\x24\x10\x82\x4c\xba\xc4\x65\x67\
\x5c\xcf\x2d\xbf\x3c\x93\x9f\xff\xcd\x2e\x4e\x08\x2d\x25\xe1\xec\
\x67\xf7\xfa\x3b\x30\x0c\xcd\x0f\x5a\xcb\xe5\x32\xd1\x68\x94\x60\
\x30\x78\x58\xae\xe7\xe1\xae\x6d\xdb\xb6\x31\x75\xea\x54\x06\x06\
\x06\x50\x55\x95\xda\xda\x5a\x76\xec\xd8\x81\x10\x82\x8e\x8e\x0e\
\xbf\x16\x78\xe7\xce\x9d\xac\x5c\xb9\xf2\xd8\xe9\x77\x8f\x7b\x29\
\x95\x4b\xe8\x8e\xce\x9b\x77\xa7\x91\x16\x58\xc5\x43\x39\x74\xa1\
\x4c\xd4\x8d\x47\x42\x9b\x1c\xce\x32\x4d\x93\xfa\xfa\x7a\xbf\xe2\
\xdc\xb2\xac\x4a\x27\x6f\x08\x0c\xc3\x25\x14\x0c\xe0\x52\xa9\x3f\
\xeb\x1e\x3a\xc8\xc6\xfc\x63\x28\xa8\x13\x92\x47\x1e\xf7\xd4\xd6\
\xd6\x36\xb7\x58\x2c\x8e\x00\xb2\xa1\xa1\xc1\x38\x1e\x36\xe4\xea\
\xab\xaf\x46\xd3\xb4\x3f\x48\x56\xb5\xb7\xb7\xfb\xc4\xa2\x94\x92\
\x5f\xfc\xe2\x17\x95\xf7\x75\xac\xf4\xfb\x25\x97\x5c\xc2\x87\x3e\
\xf4\x21\x5c\xd7\xe5\xc9\x27\x9f\xa4\xab\xab\x8b\x68\x34\xca\xc1\
\x83\x07\x89\xc5\x62\x54\x55\x55\x71\xe0\xc0\x01\x72\xb9\x1c\x8d\
\x8d\x8d\xf4\xf4\xf4\x1c\xb7\x1d\x28\x84\xe0\x87\x3f\xfc\x21\xcb\
\x96\x2d\xe3\x3f\xfe\xe3\x3f\x0e\x05\x7f\x08\xa4\x0b\xf1\x78\x88\
\x64\x26\x4d\x4d\xa0\xbe\xe2\x56\x4a\x9b\x80\x88\x20\x5d\x89\x65\
\xd9\x7e\xad\x98\x57\xd6\xa9\xeb\x7a\xce\x75\xdd\x0c\x70\x44\xfc\
\xd5\x1f\x91\x5e\x73\x7c\xbe\xc7\x09\x40\x75\x5c\xc5\x71\x5d\x26\
\x87\x67\x50\x72\xd3\x14\xac\x2c\x42\x48\x8c\xac\x8b\x5b\x92\x7e\
\xf6\xf0\xa8\x01\x19\x6b\xc0\xa4\xb5\xb5\x95\x1b\x6e\xb8\x81\x6d\
\xdb\xb6\x51\x2a\x95\xa8\xab\xab\x63\xd9\xb2\x65\x6c\xdc\xb8\x91\
\x60\x30\x48\x38\x1c\x26\x93\xc9\x1c\xf7\xb6\x66\x29\xa5\x5b\x55\
\x55\x25\x77\xec\xd8\x51\x29\x59\xf0\x4a\x50\x25\x48\x21\x89\x45\
\xc2\x3c\xbe\xfb\x7e\xbe\x75\xe5\x6f\xf8\xcd\xfe\x87\x78\xa9\xe3\
\x65\xce\x3a\xe9\x74\x5e\x73\x37\x62\x18\xa6\xcf\xe8\x7a\x54\x8c\
\x6d\xdb\xb3\xea\xeb\xeb\x03\xa3\xa3\xa3\x3d\xc7\x7a\x6f\xb6\x6d\
\xb7\x09\x21\x2a\xc5\x82\x08\x46\x96\x48\xfe\xee\xf6\x66\x24\x2e\
\x51\x33\x42\x50\x0f\x72\x70\xb8\x80\x55\x90\xbc\x7e\x57\x0a\xe9\
\x54\x34\xce\x31\xb9\xbd\x42\x08\xbe\xf9\xcd\x6f\x72\xfb\xed\xb7\
\xf3\xc9\x4f\x7e\x92\xb6\xb6\x36\x5c\xd7\x9d\xd0\x5f\x38\xd6\x9f\
\x8d\x94\x92\x9e\x9e\x9e\xe3\x0a\x48\x2a\x95\xba\xe8\xf9\xe7\x9f\
\xa7\xa5\xa5\xa5\x7d\xce\x9c\x39\xb3\xb3\xd9\x6c\xa5\x6e\x58\x28\
\x14\x6a\x24\x25\x33\xca\x7e\xf9\x1c\xdf\x79\x56\x67\xd5\xe9\xa7\
\x90\x50\x76\xf0\xcc\xbe\x5d\x0c\xb5\xe5\x48\xa5\x2a\x01\x6b\x28\
\x14\xa2\xa5\xa5\x85\x52\xa9\xc4\xb5\xd7\x5e\xfb\x7a\x5b\x5b\xdb\
\x86\x75\xeb\xd6\xad\x3c\x1e\x0e\x87\xef\xfa\x0a\x85\xba\x83\x92\
\x7d\x4f\x15\x18\xda\x6e\x90\xef\xdb\x38\x41\x35\xa9\x42\x03\x8d\
\xb7\xcd\x9f\x1c\xb1\x0d\x89\xc7\xe3\xdc\x77\xdf\x7d\x7e\xc2\xc8\
\x2b\x34\x00\xd0\x35\x1d\x5d\x3b\xe4\x0a\x7b\xe5\xa3\xfb\xf7\xef\
\x9f\x71\x3c\xed\x88\x57\x2d\xef\xbd\x00\x81\x40\x35\x5c\xb4\x30\
\xe4\x7b\xe1\xf5\xce\x9f\xf3\xe4\x4f\x7e\x4c\x79\x44\x62\x95\x5c\
\xca\x69\xd7\x8f\x85\x54\x55\x25\x9d\x4e\x13\x0a\x85\x18\x19\x19\
\x39\xa2\x5e\x8d\xc3\x21\x18\x2f\xbc\xf0\x42\x6e\xbc\xf1\x46\xbe\
\xf5\xad\x6f\xa1\xf6\xa9\x10\xcc\xd1\x38\x1b\xbf\x7f\xbf\x5c\x2e\
\x53\x55\x55\x45\x2a\x95\xc2\xeb\x8b\x3f\x6a\x40\x62\xb1\x98\xdc\
\xb4\x69\x53\xa5\x92\x23\x06\x0b\xcf\x8f\xa1\x07\x04\x4a\xa9\x9e\
\x0b\x16\x5f\xca\xef\x3a\x1f\x41\x0b\xa8\x8c\xee\x33\x19\xd8\x5c\
\xf6\x12\x4e\x52\xd7\xf5\x29\xc7\x13\x10\xd7\x75\x1d\x2f\x69\xa5\
\xaa\x2a\x8b\x17\x2f\xa6\xb3\xbd\x83\xd7\xfe\xa9\x5c\xd9\x1b\x0a\
\xa0\x54\x28\x12\xe1\xba\x84\x42\xd2\xa7\x2a\xbc\x11\x18\x63\x3d\
\x7f\xc7\xb5\x91\xb4\x58\x2c\xf2\xe9\x4f\x7f\x9a\x54\x2a\xc5\x27\
\x3f\xf9\x49\x1e\x7a\xe8\x21\x4e\x5f\x79\x3a\xdd\xdd\xdd\xcc\x9b\
\x37\x8f\xdf\xfe\xf6\xb7\xac\x5c\xb9\x12\x5d\xd7\xc9\x66\xb3\xec\
\xd8\xb1\xc3\xaf\x9a\x3f\x2a\x40\x42\x63\x4f\x66\xdb\x36\xb2\xda\
\x65\xc6\x39\x35\x68\x08\xac\xb2\xc1\xeb\x23\x3f\x43\xb5\xc3\x24\
\x5a\x0d\x92\xfb\x2b\xc1\xcf\x98\xdb\x2b\x86\x87\x87\x8f\xc9\xaa\
\xc7\xe3\xf1\x96\xb9\x73\xe7\x1e\x08\x04\x02\x15\x4f\x4f\x51\x71\
\x84\x4d\xa9\x54\xe2\xdb\xdf\xfe\x36\x4d\x4d\x4d\x24\x12\x09\x7e\
\xf7\xbb\xdf\xf9\x05\x04\x00\x23\x23\x23\x4c\x9d\x3a\x95\x64\x32\
\xc9\xc8\xc8\x08\xaa\xaa\x92\x4c\x26\x09\x85\x42\x94\xcb\x65\x02\
\x81\xc0\x71\x05\x24\x14\x0a\xb1\x63\xc7\x0e\x66\xcf\x9e\xcd\x0b\
\x2f\xbc\xe0\xf7\xd0\x8c\x8e\x8e\xfa\x95\x90\x83\x83\x83\xc4\xe3\
\x71\xbf\x78\xf0\x9d\x58\x8c\x77\xb4\x21\x9e\x1b\x5b\x9d\x83\x0d\
\x77\x64\xdf\x32\xaa\xa8\xa2\x46\x82\x11\x7d\x82\x18\x1f\x87\x9d\
\x17\x98\x3d\x7b\x36\xb6\x6d\xa3\x0a\x95\x56\x75\x3f\x67\xdd\x10\
\x64\xfd\x7f\x38\xbe\x2b\x3c\x6f\xde\x3c\x1e\x7b\xec\x31\x9a\x9b\
\x9b\xa9\xad\xad\x65\xde\xbc\x79\xdc\x73\xcf\x3d\x68\x9a\xc6\xc2\
\x85\x0b\xd1\x75\x9d\x5f\xff\xfa\xd7\x7e\x56\xd0\xab\x5e\x3f\x9e\
\x80\x84\xc3\x61\xbe\xfa\xd5\xaf\x4e\x50\x43\xad\xad\xad\x00\xec\
\xdf\xbf\xdf\xa7\xe8\xc7\xab\xde\x63\x02\x44\x4a\xc9\x3f\xfe\xe3\
\x3f\x32\x77\xee\x5c\x6a\x6b\x6b\x79\xe2\x89\x27\x38\x78\xf0\x20\
\x9a\xa6\xf9\x85\xd4\x73\xe7\xce\xa5\xbd\xbd\x9d\x44\x22\x41\x73\
\x73\x33\x07\x0e\x1c\x38\x2e\xee\xae\xd7\x68\xa3\x09\x95\x4c\x3c\
\x03\x72\x32\x91\x48\x84\xaf\x7f\xfd\xeb\x9c\x7a\xea\xa9\x7e\xed\
\x97\xc7\x24\xe8\xba\x4e\xb1\x58\x64\xd7\xae\x5d\x7e\x4f\xbc\xe7\
\xf7\x8f\xa7\x2c\x8e\x17\x20\xf1\x78\xfc\x1b\x5e\x40\x08\x20\x15\
\x89\xa6\x09\x5c\x17\xa2\xa1\x28\x05\x23\x57\x69\x10\x12\xe0\xda\
\x87\x18\xdf\x77\xba\xbe\xf6\x4e\x5c\xcd\xfc\xf9\xf3\xc9\xe5\x72\
\x5c\x7a\xe9\xa5\xfc\xfe\xf7\xbf\x67\xce\x9c\x39\xd4\xd7\xd7\x33\
\x73\xe6\x4c\x1e\x7a\xe8\x21\x74\x5d\x67\xd2\xa4\x49\x78\xcd\xa0\
\xc7\xc3\xcb\x1a\x3f\xa1\x47\x52\x29\x42\x28\x17\x25\xf9\xb4\x41\
\x44\x53\x59\xbb\xb6\x52\xdd\xa9\xab\x41\x14\xad\x52\x23\xe6\x35\
\x00\x79\x76\xc2\x8b\xc8\xbd\xde\x72\x2f\x4d\x6b\x59\xd6\xa4\x50\
\x28\x74\x9d\x10\x22\x00\x88\xba\xba\xba\x57\xfb\xfa\xfa\xba\x8e\
\xc2\xae\x4d\xaf\xaf\xaf\xaf\xd8\x04\x45\xa0\x9d\xaf\x73\xe9\xb5\
\x93\x28\x96\x0d\x2e\x3b\xf1\x66\xb6\x8c\x3e\x45\x62\x28\x49\xae\
\xdf\x61\xc3\x37\x93\x3e\xcb\x1c\x8d\x46\x87\x8f\x1a\x90\x60\x30\
\xc8\xbd\xf7\xde\xcb\xe7\x3e\xf7\x39\x3e\xf6\xb1\x8f\xd1\xdf\xdf\
\xef\x8b\xbd\x37\xdb\x24\x95\x4a\x61\x59\x16\xa5\x52\xc9\xcf\x85\
\xc8\xb1\xc8\xf0\xad\x0d\x34\x47\x0a\x88\x61\x18\x38\x28\xa8\xaa\
\x42\x6f\xa7\xc1\xdd\xf7\x7e\x95\x6f\x3c\xfb\x2f\x7c\xe1\xfc\x2f\
\x13\x8f\xc4\xf9\x9f\xad\x5f\xa4\xfd\x37\x39\x8c\x11\xc5\xaf\xec\
\x38\xe1\x84\x13\x68\x6c\x6c\x64\xfb\xf6\xed\xd4\x35\x55\x71\xc2\
\xcc\x46\x96\x4f\x5a\x86\xea\xea\x15\x07\x40\x30\x47\x22\x1f\xf3\
\xd2\xd0\x3f\xfc\xe1\x0f\xff\x06\xe8\x3a\x8a\x7b\x94\xe3\x55\xb4\
\x18\x81\xe1\x5d\x65\x12\x7b\x2d\xbe\xfa\xdd\x7b\x49\xee\xb3\x70\
\x4d\x89\xa2\x0b\x14\x4d\xf1\x35\x8e\x10\x42\x39\x26\x1b\x92\x48\
\x24\xb8\xfd\xf6\xdb\x0f\xeb\x26\xbd\x1e\x43\x21\xc4\xa2\xa6\x43\
\x9e\x9f\xf6\x00\x00\x16\x56\x49\x44\x41\x54\xa6\xa6\x53\x06\x06\
\x06\xb6\x1d\x43\xe0\x55\x01\x5d\x2a\xa8\x42\x05\x14\x96\x2e\x3b\
\x85\x86\xb6\x2a\x96\xaf\x38\x8d\x29\xb5\x4d\xac\x29\xa9\x54\x6d\
\x8c\xa0\xe4\x85\xaf\xd3\x67\xcc\x98\xe1\x79\x88\x4c\x3a\xdd\xa5\
\xf9\xec\x34\xd2\x4d\xe3\x00\xc6\x40\x00\xa3\x2d\x42\xb9\x33\x82\
\x10\x15\xde\x4d\x55\xd5\xa3\x76\x40\x54\x55\x65\xe1\xc2\x85\x9c\
\x74\xd2\x49\x6c\xd8\xb0\x81\x81\x35\x15\xa3\x1e\x70\x1c\x26\x37\
\xba\x7e\xac\xe6\x6d\x62\x6f\xb0\xdb\x51\x03\x32\x3c\x3c\xdc\xe5\
\xba\xee\x5c\xaf\x80\xa0\xe9\xac\x30\x91\x88\x82\xb4\x75\xde\x77\
\xd2\x2d\x3c\xb3\xef\x87\xd8\x18\xd8\x86\xe4\xc0\xef\x8b\x7e\x0a\
\xb5\xaa\xaa\x2a\x52\x2c\x16\xe3\x47\xf2\x70\x53\xa6\x4c\x79\x5f\
\xb1\x58\xbc\x10\x70\xa5\x94\xf1\xce\xce\xce\x8a\xee\x97\x82\xb2\
\x61\x32\xd0\x53\xe2\xab\x89\x6f\xb2\xa7\x6d\x98\x3b\xb6\xfd\x2b\
\xb1\x50\x35\x1b\x0e\x1e\xc4\xee\x8b\xa2\x58\x41\x7f\x07\xb6\xb6\
\xb6\xfa\x6d\x69\x81\x62\x33\x77\x5e\xf4\x23\x36\xf6\xfe\x96\x1f\
\xbc\xf8\x9f\xcc\x9c\x51\xc7\x50\x39\x4f\x76\x77\xa5\xb6\x57\xd7\
\xf5\x63\xa2\xe2\x83\xc1\x20\xdf\xfc\xe6\x37\x89\xc5\x62\x34\x35\
\x35\xb1\x63\xc7\x0e\xce\x3d\xf7\x5c\xd6\xac\x59\xc3\xca\x95\x2b\
\x69\x6b\x6b\x43\x08\xc1\xac\x59\xb3\xc8\x64\x32\xbc\xf2\xca\x2b\
\xc7\xc6\xf6\x5a\x96\xd5\x16\x0c\x06\x2f\xf6\xf2\xd5\xd3\x2f\x09\
\x13\x8f\x6b\xe4\xfa\x6d\x7e\x33\x70\x1f\xb6\x22\x28\x0c\x38\xa4\
\xbb\x0f\x19\x4d\x5d\xd7\x7d\x37\xf4\x08\x49\xc4\x5b\xaf\xbb\xee\
\xba\xf3\xa5\x94\xb8\x48\x32\x55\xa3\x34\x4d\x0f\xe0\x66\xbc\xfa\
\x2b\x9d\x74\x22\xc7\xa9\x0d\xe7\x60\x16\x6d\x92\xc5\x24\xa7\x37\
\x5d\xc9\xb3\x5b\x9f\x21\x18\x74\x7c\x09\x55\x14\x05\xe9\x4a\xa4\
\x25\xb8\x76\xf9\x87\xf8\xef\x0d\xff\xca\xf2\xd8\x87\x58\xd9\x70\
\x23\x2b\x17\x2c\xe0\x3b\xad\x5f\x99\xd0\xfb\x7e\xb4\x80\x48\x29\
\x9d\xf1\x05\xe5\x9e\x93\xe3\xb9\xd9\x5e\xc7\x59\x77\x77\x37\x81\
\x40\x80\xda\xda\x5a\x7f\x9c\xd3\xb1\x90\x8b\x0a\x40\x75\x75\x35\
\xb6\x6d\xb3\xe3\x6b\x45\xac\x9c\x44\x68\x20\x14\x10\xaa\x57\x81\
\xa1\xa2\x69\x87\x22\xe3\xa3\x1c\xab\xa4\x27\x93\x49\x5c\xd7\x25\
\x63\x17\x99\x73\x45\x06\x5b\xad\xa8\x7d\x29\x6c\x74\x4d\x85\xa4\
\x82\x71\x20\x4a\xfe\x8d\xb1\xe1\x30\x63\xde\xd4\xf8\x12\xa0\x48\
\x38\x4c\x32\x98\xe3\xcc\x8f\x45\x38\xff\xfc\x0b\xf8\xd1\xce\xe7\
\x18\x4a\x8d\xb0\xb7\x7f\x27\xc5\x9a\xed\xa8\x54\x6a\x6c\x85\x7a\
\x74\xe3\x93\x00\x1a\x1b\x1b\x4f\x94\x52\xae\x08\x85\x42\x5c\x73\
\xcd\x35\xc4\x62\x31\x7f\x13\x6e\xdd\xba\x15\x45\x51\xd8\xb3\x67\
\x8f\x5f\xf4\x31\x30\x30\xe0\x17\x18\x1e\x2b\x20\x9c\x7c\xf2\xc9\
\x7c\xe9\x4b\x5f\xc2\xb6\x6d\x1e\x7e\xf8\x61\x0e\x1c\x38\x30\x61\
\x8e\x47\x3c\x1e\x67\x70\x70\xd0\x8f\x46\xbd\xc1\x34\x47\x9b\x81\
\x73\x5d\x17\xcb\xb1\x51\x84\x42\xba\x27\xcc\x23\x7f\xff\x3b\xfa\
\x0b\xfb\xb9\xe3\xb7\x7f\xc7\xec\xc6\x79\x74\x96\xdb\x30\xca\x21\
\x14\xfd\xd0\x44\x37\xdf\xf5\xf4\x8a\xf6\x14\x85\x40\x40\xc7\xb2\
\x0c\x34\x25\x88\xaa\x68\x04\xb4\x20\x21\xad\x42\x85\x1b\xa6\x81\
\xa2\x8a\xa3\x9e\xc7\x55\x2a\x95\x5a\x9a\x9b\x9b\xc3\x85\x42\x81\
\xaa\xaa\x2a\x9f\x32\x1a\xbf\xfe\x58\xb1\xa0\x17\x30\x1e\x35\x20\
\x2b\x56\xac\x60\xf7\xee\xdd\xfc\xf5\x5f\xff\x35\x67\x9c\x71\x06\
\x42\x08\x22\x91\x4a\x1b\xb1\x97\x0e\x2d\x16\x8b\x0c\x0f\x0f\xd3\
\xdc\xdc\xcc\xe0\xe0\xa0\x6f\x97\x8f\xf4\x41\xbd\xb9\x8d\x16\x15\
\x55\xb0\x7a\xd5\xbf\xf2\xf3\x3d\x5f\x27\x79\xa0\x9a\x05\xda\x55\
\xdc\x72\xc6\xdf\xf3\x99\x9e\xf7\x57\x5c\x5c\xa7\xa2\x72\x82\xc1\
\xa0\x3f\xdb\xd1\x75\x5d\x0c\xd3\xc4\x2a\x18\x64\xd7\x2c\xe6\x8b\
\xbf\xfa\x22\x8e\xb4\xd9\xcb\xe3\x94\x0d\x8b\xcc\xcb\x02\xe4\x0c\
\x66\xcf\x96\x20\x2a\xb9\x8a\xcb\x2f\xbf\xfc\x7f\x84\x10\xdf\x07\
\xd8\xbc\x79\x73\xc7\xde\xbd\x7b\x97\x1c\xae\x27\x98\x4e\xa7\x2b\
\x1d\xc4\x01\xb8\xe4\xdf\x1a\x50\x54\x01\xa8\x34\xd7\x9f\x40\x7f\
\xea\x00\xa3\xfb\x4c\x0e\xbc\x58\x24\xd5\x5e\x91\xe0\xaa\xaa\x2a\
\xf2\xf9\xfc\xfd\xc7\x14\x18\xfe\xfc\xe7\x3f\xe7\x03\x1f\xf8\x00\
\x77\xdf\x7d\x37\xeb\xd7\xaf\xf7\xfd\xfd\x7c\x3e\xef\xd3\x25\xa9\
\x54\x6a\x42\xaa\x54\x4a\x49\x63\x63\xe3\x97\x14\x45\xb9\xa5\xaf\
\xaf\xef\xc0\x91\x4a\x88\x83\x83\x22\x55\xa6\xd4\x4d\x61\xff\x40\
\x91\xa2\x05\x1d\xc3\xfb\x79\x72\xef\x7d\xd8\xb6\x85\x65\x55\x74\
\xa9\x69\x9a\x04\x83\x41\x4e\x3d\xf5\x54\xf6\xee\xdd\x4b\x22\x91\
\x20\x9d\x4e\x63\xba\x26\x56\xd9\xc6\x30\x2a\x53\x91\x76\xd7\x6e\
\xe6\x9c\x6b\x63\x38\xb9\xca\x1c\x45\x37\x13\xc4\xc9\x6a\x38\x19\
\x0d\x6b\x20\x18\x40\xfa\x33\x15\x83\x47\x60\x43\xd0\xf5\x4a\x3d\
\x98\xee\x2a\x0c\x6c\x36\x29\x0e\x3b\x14\x86\x1c\x5e\x1d\x48\x50\
\x4e\xb9\x95\xba\x35\x4d\xf8\xb5\x08\x63\x36\xcb\x3e\x26\x1b\xe2\
\xba\x2e\x0f\x3c\xf0\xc0\xa1\x1b\xf1\x02\x4d\x01\x99\x74\x06\xcf\
\xad\x16\xa2\xc2\xdb\x78\x63\xfd\xe6\xcc\x99\x73\x51\x26\x93\xa9\
\x07\xfe\x28\x20\xf3\xe6\xcd\x7b\x8f\xae\xeb\x42\x51\x14\x67\x78\
\x78\x38\x56\x2e\x55\x26\x7b\x5a\x8a\x4d\xbe\x3f\xc0\xae\x5d\xbb\
\x48\xe4\x0a\xa4\x07\x6c\xb2\x89\x2c\xa3\x07\x8b\xe4\x07\x2d\x4c\
\x4b\xa2\xc8\x0a\x80\x86\x61\xd0\xdc\xdc\xcc\x8e\x1d\x3b\x50\x94\
\x4a\xeb\xb4\x23\x0f\x4d\x1e\x32\x5c\x8b\x70\x2d\x28\x8e\xc6\xb4\
\xfa\x05\xc4\x67\x54\xb1\x73\x70\x03\x75\x91\xc9\x64\xe5\x10\x03\
\x0f\x36\xe0\x14\x0e\xa9\xbc\x23\x49\x2f\x57\x55\x55\xb1\x7a\xf5\
\x6a\xf6\xec\xd9\xc3\x81\x7d\x07\x08\x18\x65\x54\xc7\x21\x56\xef\
\x62\xd7\xd8\xfe\x98\x0e\x2f\x6e\xf3\xbc\xbf\x63\x89\x43\xfe\xfb\
\xe0\xc1\x83\x17\xdb\xb6\x3d\x53\x08\x41\x31\x26\x39\xf5\xf2\x6a\
\xf4\x80\xa0\x30\x52\xc5\xfb\xcf\xb8\x9e\x67\xf6\xfd\x00\x4d\xd5\
\x19\xda\x6e\x30\xb8\xb5\x84\x50\x84\x3f\xed\xe7\x1d\x1e\xb0\xe1\
\xb4\xd3\x4e\xdb\xac\xaa\xaa\x10\x42\x20\x91\x50\x5b\x82\x6c\x10\
\xe1\xa8\xb0\x11\x9e\xd8\xf8\xd4\x21\xde\x88\x1a\x76\x1c\x18\x64\
\xf4\x60\x0c\x47\xe6\xb0\x4d\xe9\x1b\x73\xaf\x16\xcc\x9b\xec\xe6\
\x9a\xae\xdf\x6a\x6d\x49\x07\x45\x55\xe8\xde\x6f\xf3\x95\x5b\xbf\
\xc2\x0f\xd6\x7e\x97\x4b\x5a\x6e\xe1\x8a\x25\x1f\xe4\x53\xcf\x5f\
\x80\x65\x5a\xd8\xa6\x73\x44\x80\x78\x69\xd9\x6f\x7f\xfb\xdb\x24\
\x12\x09\x6e\xbe\xf9\x66\x9e\x7b\xee\x39\xbf\x72\xb1\xbd\xbd\x9d\
\x65\xcb\x96\x11\x0e\x87\xd9\xb6\x6d\x1b\xdb\xb6\x6d\x63\xe9\xd2\
\xa5\x6c\xd8\xb0\xe1\x1d\xbf\xfb\x4f\x02\x92\xcb\xe5\xb6\x36\x37\
\x37\x0f\x1b\x86\x31\x53\x08\x81\xa1\x3b\xb4\x9c\x57\x8d\x9d\x92\
\xa4\x3b\x0c\x1e\xfd\xed\x03\x14\x07\x04\x56\xd1\xa2\x9c\x92\x84\
\xc2\x21\xdf\xf5\x7d\xa7\xf6\x30\x40\x2f\x97\xcb\xc2\xb6\x6d\x10\
\xb0\x35\xb2\x8b\xf7\x9e\x5f\x87\x2b\x5d\x1c\xc3\x45\xb1\x83\x48\
\x4b\xc1\xc9\xab\xe4\x5e\xab\xc1\x29\xaa\x3e\xb1\x98\x4c\x26\xfd\
\xe0\xd1\xa3\x47\x5c\xd7\xa5\xb3\xb3\x53\xbe\xf1\xc6\x1b\xe2\xd1\
\x47\x1f\x65\xdb\xb6\x6d\x15\xc7\x43\x3a\xa8\x9a\xc2\x09\xd5\x4b\
\x38\x98\xdf\xcd\x48\xf9\x20\xff\xeb\xc4\xdb\x09\x6a\x21\x42\xa1\
\x20\x96\x63\x63\x99\xbe\x1a\x7a\x47\x44\x6a\x6a\x6a\x3e\x71\xde\
\x79\xe7\x7d\xa3\xb5\xb5\x95\x47\x1e\x79\x84\x2f\x7f\xf9\xcb\xf4\
\xf5\xf5\xb1\x77\xef\x5e\x72\xb9\x9c\x4f\xd3\xac\x5d\xbb\x96\x99\
\x33\x67\xd2\xd3\xd3\x83\xe3\x38\x8c\x8e\x8e\x1e\x9f\xc1\x01\x95\
\x0d\x21\x38\xe9\xa4\x93\xe8\xee\xea\x66\xe3\xe7\xca\x48\x7b\xcc\
\x21\x16\x1a\x08\x15\xe9\x38\x84\x42\xba\xff\x72\xbc\x12\xfc\x77\
\xba\x01\xbf\x3c\x07\x49\xa0\xae\xd2\xd9\x74\xd6\xa4\x0f\xd3\x32\
\xb9\x85\xa7\x3b\xbe\xc7\xcc\xaa\x53\xd8\x9a\xfa\x0d\xf6\xc6\x30\
\xb6\x75\xa8\x8a\x64\x7c\x1b\x44\xa1\x50\xb8\x63\xff\xfe\xfd\xff\
\x3e\x69\xd2\xa4\x81\x91\x91\x91\xc6\xb5\x6b\xd7\xb2\x68\xd1\x22\
\x36\x6e\xdc\x58\xe9\xc8\x52\x5d\x92\xbf\x2f\xd3\x3b\x75\x90\x7b\
\x36\x3f\x40\x32\x97\xe0\x0b\xbf\xfb\x12\xaa\xa2\xd2\x9d\x05\x2b\
\x7f\x48\x32\x26\x4f\x9e\x7c\xc2\xb4\x69\xd3\x5e\x05\x54\xc7\x71\
\xd8\xb9\x73\xe7\x75\xc9\x64\xb2\xe7\x2d\x6c\x44\xad\xa2\x28\x9a\
\x94\x92\x4d\x9b\x36\x71\xd1\x45\x17\xf9\xe4\xe6\x5b\xd7\xfe\xfd\
\xfb\x7d\x82\x73\xeb\xd6\xad\xc4\xe3\xf1\x63\x07\xc4\xb2\x2c\x6e\
\xbd\xf5\x56\xce\x3b\xef\x3c\x0c\xc3\xe0\xb1\xc7\x1e\xf3\x07\x41\
\x7a\x9e\xd6\x9c\x39\x73\xd8\xbb\x77\xaf\x4f\x0d\x0c\x0c\x0c\x78\
\x37\xb9\x70\xda\xb4\x69\xc9\xde\xde\xde\xb7\xe3\x8a\x1c\x8f\xaf\
\x92\x02\x14\x45\xd0\xbd\xcf\xe6\xdb\x7f\x75\x13\xff\xf0\xe3\x2b\
\xf8\xe2\xc5\xdf\x27\x1e\x6e\x60\x74\x7b\x27\x83\xd6\x08\x86\xe1\
\xf8\x80\x7b\xc4\xe1\xb8\x2a\x49\xa9\xaa\x2a\xb7\xde\x7a\x2b\x9f\
\xff\xfc\xe7\xb9\xff\xfe\xfb\xfd\x09\x3c\xb6\x70\xd1\x8a\x1a\xb1\
\x13\xaa\x28\x65\x2c\x0a\x96\x60\x64\xd9\x56\xe2\xb1\x10\x93\x46\
\xc2\xd8\xf5\x41\xdc\xb2\x8a\x34\x14\xa4\xa9\x84\x91\xe2\x4c\x2f\
\xf7\xfd\xe6\x9b\x6f\x86\xde\x4e\x5d\x8d\x4f\x5b\x2b\xaa\x4a\xb8\
\x4a\x23\x10\x14\x87\x3a\xa6\x04\xd8\x25\x17\xbb\xa4\xf8\x36\x57\
\x51\x14\x4a\xa5\x92\x65\x9a\x66\xe2\x98\x00\x71\x1c\x87\xd9\xb3\
\x67\x93\x4c\x26\x39\xe5\x94\x53\x08\x85\x42\x9c\x72\xca\x29\x44\
\xa3\x51\xa2\xd1\x28\x3f\xf9\xc9\x4f\xa8\xa9\xa9\xe1\xcc\x33\xcf\
\x24\x9d\x4e\xb3\x6e\xdd\x3a\x02\x81\x00\xb6\x6d\xf3\x9e\xf7\xbc\
\xe7\xe1\xf6\xf6\xf6\x1f\xf7\xf6\xf6\xde\x34\x46\x36\x36\x01\xab\
\x01\xd3\x75\xdd\xea\xde\xde\xde\x4a\x01\x32\x60\x5a\x26\xf9\x6c\
\x8c\x3b\xef\xfc\x1a\x3d\xed\x59\x1e\x1f\x7a\x86\x4c\x21\xc3\x2b\
\x7d\x6f\x52\xe8\x0a\x13\xd3\x6b\x3d\xb6\x94\xaa\xaa\x2a\x72\xb9\
\xdc\x84\xd1\x17\x42\x08\xda\xda\xda\xb8\xe9\xa6\x9b\xc8\xe5\x72\
\x54\x55\x55\x55\x66\x09\x0b\x17\xdc\xb1\x39\xed\xc0\x90\x36\xc2\
\xc2\x85\x01\x6c\xcb\x84\xa6\x89\x23\x93\xb2\x9b\xc7\x2a\x1e\x85\
\xf4\x0b\x23\xfe\x18\xf1\xd9\xd0\xd0\x30\xd6\x82\x00\xb5\xe7\x04\
\x59\x7c\x5a\x8c\xa1\xed\x65\x46\xf7\x59\x8c\xee\x36\x71\xed\xb7\
\x75\x04\x0e\x0c\x0e\x0e\xde\x77\x4c\x80\x44\x22\x11\xee\xbc\xf3\
\x4e\xce\x3c\xf3\x4c\xee\xb9\xe7\x1e\x8a\xc5\xe2\x84\xec\x97\x69\
\x9a\xac\x5d\xbb\x16\xdb\xae\x64\xf4\x82\xc1\xa0\xff\xef\xde\xf0\
\x2e\x6f\x8d\x8c\x8c\xac\xba\xf2\xca\x2b\x3f\xef\x51\xf4\x19\xb3\
\x28\x6b\x67\x3b\x42\x26\xc3\xb8\xc9\x00\xd8\x92\x3d\x6f\x76\x32\
\x59\x59\xc0\xa6\x8d\x95\xe9\xdc\xd3\xb4\xe5\x74\xba\x1d\x7e\x8e\
\x3a\x12\x89\x10\x0c\x06\xfd\x91\x47\xe3\xa8\x97\x7b\x86\x87\x87\
\xab\x84\x10\x6e\xb9\x5c\x5e\x2e\x84\xb8\xb8\x5c\x2e\xe3\x28\x2e\
\xa1\x50\x78\xac\xe2\x51\x80\x2a\x31\x4d\x87\x0f\xcc\xbc\x93\xee\
\xec\x2e\x4e\x9a\xb5\x98\xff\x7a\xee\x3e\xec\xba\x76\x1c\x27\x82\
\x65\x5a\xa0\xc8\xb7\x8d\xe2\x03\x81\xc0\x6c\x29\x65\x9d\x37\x48\
\x61\xd1\xa2\x45\x04\x02\x01\xfa\x5e\xeb\x65\xe7\xcb\x12\x49\x00\
\x29\x75\x62\x91\x90\xaf\x52\xbd\x5f\x5e\x61\xc8\x31\xab\x2c\xdb\
\xb6\x83\x8a\xa2\xf0\xda\x6b\xaf\x55\xf4\xa1\x64\xc2\x64\x05\x21\
\xc0\xb6\x1c\x84\x52\x31\xe6\x5e\x2e\xa2\x50\x28\x4c\x18\x2c\xec\
\x3d\x93\x33\x6e\x50\x72\x76\x61\xa7\x98\x79\x46\x14\xcb\x35\x71\
\x1c\x17\x25\x17\xab\xc4\x07\xde\xec\xdc\x31\xf7\xd2\x9b\x17\x2c\
\xa5\xf4\xe9\xfe\xfa\xfa\x7a\x12\x89\x43\xd2\x9f\x4c\x26\xbf\xe6\
\x19\xfb\xc9\x93\x27\x7f\x4c\xd3\xb4\x8b\xa5\x94\x08\x1b\xb0\x5d\
\xff\x50\x00\xcb\x34\x48\xec\x93\x54\x35\x57\xf3\xfc\x9b\x3f\xc6\
\x3a\xf0\x31\x16\x28\x67\x12\x53\x56\xf0\x58\xef\x4f\xc9\x8d\x06\
\x40\x48\x6f\xf0\xd8\x29\x81\x40\xa0\x5e\x55\x55\x51\x2a\x95\x76\
\x5f\x72\xc9\x25\xed\x3d\x3d\x3d\x58\x96\x25\x6b\x6b\x6b\xc5\x3f\
\xff\xf3\x3f\xb3\x78\xf1\x62\xee\xbd\xf7\x5e\xf6\xed\xdb\xc7\xe2\
\xc5\x8b\x79\xf3\xcd\x37\x69\x69\x69\xa1\xbf\xbf\x9f\x74\x3a\xcd\
\x94\x29\x53\x90\x52\xb2\x7d\xfb\x76\xd2\xe9\xf4\xb1\x03\x72\xe0\
\xc0\x81\xbf\xcb\xe5\x72\x9b\xd4\x31\x7e\x22\x1f\x72\x39\xff\x96\
\x3a\xca\x59\x07\xbb\x24\x29\xa5\x5c\xd2\x5d\x16\x99\x03\x36\xee\
\x38\xa0\x26\x4d\x9a\xe4\xd1\x2b\x55\xb1\x58\x6c\x46\x3e\x9f\x3f\
\x00\xb8\xe3\x77\x4a\x38\xa2\x61\x14\x25\x97\xcf\xfc\x27\x26\xd7\
\xd5\xf2\xf3\x3d\xdf\x20\x56\xd7\x48\xa2\xf9\x00\xc9\xf5\x21\x5f\
\xf7\x3a\x8e\x43\x3a\x9d\x46\x4a\x49\x38\x1c\xc6\xb2\x2c\x06\x06\
\x06\xbc\x06\x98\xb7\xf3\x1c\x54\x4d\xd3\x58\xbd\x7a\x35\xf9\x7c\
\x9e\x1f\xff\xf8\xc7\x7e\x02\xad\xae\x5c\x45\x68\xeb\x1c\xee\xda\
\x7c\x17\xaa\x32\x87\xdf\x06\x9e\x20\xa4\xeb\x48\x43\x65\x8a\xb3\
\x98\xa6\x13\xbd\x9d\x26\xb9\xfa\xea\xab\x7f\xee\x55\x8e\xac\x59\
\xb3\x66\xaa\x57\x13\x6c\xdb\xb6\x37\x8f\xd7\x1f\x36\xe3\xba\x2e\
\x93\x27\x4f\x66\xfa\xf4\xe9\x94\xcb\x65\x66\xcc\x98\xc1\xba\x75\
\xeb\xfc\x64\xde\xce\x9d\x3b\x8f\x8f\x97\x65\x59\xd6\xd6\x40\x20\
\x20\x3d\x35\x13\x01\x76\xfe\xc0\x7c\x4b\x6e\x5d\x27\xa4\xeb\xa0\
\x1f\x8a\x62\x35\x4d\xc3\xb2\x2c\x1a\x1b\x1b\xff\x7a\xc9\x92\x25\
\x97\xbd\xf8\xe2\x8b\x5f\xb5\x2c\xeb\xe4\xbe\xbe\xbe\x43\x63\x9d\
\xde\x28\xa0\xb5\x57\xb3\xeb\xc4\x36\xbe\xb1\xf9\x19\x66\x4e\x99\
\x89\x1e\x9f\x42\xdb\xe0\x4e\xf2\x43\xae\x5f\x1c\xf0\x16\xd1\x2f\
\x77\x74\x74\x3c\x34\x96\xf1\x33\x34\x4d\x7b\xe5\xed\x72\x29\xef\
\x7b\xdf\xfb\xa8\xaf\xaf\xe7\xb6\xdb\x6e\xe3\xd1\x47\x1f\xf5\x53\
\xba\xae\x74\x29\x9b\x15\xb5\xbb\x21\xdd\xca\x7b\x6f\x0d\x4f\x18\
\x33\x65\x0d\x07\x30\x7b\xc2\x98\x07\xc3\x38\x39\x8d\xf1\xb3\x54\
\xbc\x2a\x7a\x2f\xc6\xb9\xed\xb6\xdb\x08\x06\x83\x94\x4a\x25\xa4\
\x94\x7c\xff\xfb\xdf\x9f\xd0\x7f\x22\xa5\x64\xf7\xee\xdd\xb4\xb6\
\xb6\x1e\x36\x89\x79\x58\xed\xa7\xae\xeb\xf2\xd1\x8f\x7e\x94\x73\
\xce\x39\x87\x8e\x8e\x0e\x76\xee\xdc\x49\x67\x67\xa7\x3f\xdb\xc3\
\x34\x4d\x4e\x3f\xfd\x74\xf6\xef\xdf\xcf\x9e\x3d\x7b\x98\x3d\x7b\
\x36\x1d\x1d\x1d\x7e\x26\xb1\x5c\x2e\xeb\xe7\x9e\x7b\xee\xbf\x2a\
\x42\x61\x58\x1b\x96\xd3\x4f\x12\xc2\x1c\x0c\x63\x0f\x07\x71\x53\
\x3a\x3b\x36\xee\xa5\x45\x3d\x11\x67\xc4\xa5\x77\x78\x90\x26\x6d\
\x29\xeb\x13\xaf\xfb\xbd\xe0\x5e\x71\xc0\x18\x91\x98\x4f\x24\x12\
\x1f\xfd\x93\x0f\xa5\x69\xbc\xf2\xca\x2b\xac\x59\xb3\x86\x91\x91\
\x11\x8a\xc5\xa2\x3f\x4f\xd7\x2b\x0b\x02\x28\x0a\x93\x68\x6d\x0c\
\x92\xd3\xb8\xfd\x92\x2f\xf1\x8b\xd6\xff\x64\x24\x32\x82\x3d\x37\
\x4b\x62\xb3\x45\xea\xe5\x2a\x54\xad\xd2\xb2\x7d\xd6\x59\x67\xad\
\xf3\x00\x29\x95\x4a\x84\xc3\x95\xf3\x4e\x8a\xb9\x4a\x39\xa9\x50\
\x15\x94\x3f\x5e\xd6\x2c\x01\x91\x4a\xa5\xa2\xc7\x05\x10\x29\x25\
\xd7\x5c\x73\x0d\x5b\xb6\x6c\x61\xd5\xaa\x55\x24\x12\x09\x16\x2d\
\x5a\xc4\xf6\xed\xdb\x69\x68\x68\xa0\xb3\xb3\x93\x9a\x9a\x1a\x16\
\x2e\x5c\x48\x20\x10\xa0\xbd\xbd\xdd\x1f\x00\xe9\xbd\x80\xfe\xfe\
\x7e\x1c\x29\xa9\xbe\x3e\x2f\xc2\x8b\x63\x84\x97\x66\x41\x91\x48\
\x53\xc1\x4e\x6b\xb8\x59\x9d\xd4\xf3\xb5\x38\x96\x8b\xe6\x1e\xaa\
\x2e\xf1\x06\xa7\xd5\xd5\xd5\x4d\x98\x35\xff\x4e\x91\x74\xb1\x58\
\xe4\x9c\x73\xce\xf1\xc1\xf4\xf4\x77\x5d\x5d\x9d\xff\xbd\x4d\x3d\
\x2a\x7b\xbf\x5c\x87\xed\x66\xf8\x9b\xff\xfc\x18\x02\x9d\x40\x40\
\x45\x1a\x51\x14\x5d\x22\x74\x13\xd5\x55\xbc\xd1\xb8\xb3\x0c\xc3\
\xc0\xb6\x6d\xaa\xab\xab\x2b\xc9\x27\x04\xb5\x4b\x82\xe8\x08\xf2\
\x03\x0e\x46\xc6\x9d\x30\x9c\xc8\x93\x0a\x4d\xd3\x44\x36\x9b\x7d\
\xc3\xb6\xed\xf7\x1e\x17\x40\x74\x5d\xe7\xfa\xeb\xaf\xe7\xae\xbb\
\xee\xe2\xeb\x5f\xff\x3a\xdd\xdd\xdd\x13\x4e\xd2\x29\x16\x8b\x74\
\x75\x75\x91\xcb\xe5\xfc\x29\x9d\xaa\xaa\x92\xcd\x66\xa9\xad\xad\
\xc5\x75\x5d\x7f\xc8\xc0\xc0\x03\x36\x6f\x28\xde\xb0\x49\x65\x1c\
\x65\x61\x52\xb4\xda\x68\x69\x69\x99\x90\xb7\xf7\x26\xea\x7c\xf4\
\xa3\x1f\xe5\xfe\xfb\xef\x3f\xac\x32\x23\xc3\x30\x72\xb6\x6d\x77\
\x7b\x75\x5d\x42\x88\xe9\xaa\xaa\x0a\x4f\xda\xfd\x89\xa8\x01\x8d\
\x9a\x49\x31\x2c\xcb\x62\xb4\x6c\x53\x7d\xe9\x10\xb5\x93\x74\x64\
\x49\xc3\x1c\x0a\x52\xec\x08\x40\x22\xea\x57\x4e\x7a\xf1\x47\x28\
\x14\xe2\x13\x9f\xf8\x04\xc9\x64\x92\xed\xdb\xb7\x57\x1a\x95\x6a\
\x2d\x64\xfc\x90\xf3\xe1\xfd\xee\x15\x5d\x94\x4a\xa5\x72\x22\x91\
\x48\x1d\x17\x40\xbc\xb9\x8a\x5f\xf8\xc2\x17\xfe\xe8\x67\xf2\xf9\
\xbc\x4f\xcd\x4b\x29\x19\x3b\xb1\xcc\x67\x81\x87\x86\x86\x2a\x85\
\xda\x93\xe3\xa8\xaa\x4a\x5b\x5b\x9b\x3f\xcb\x77\xf1\xe2\xc5\x63\
\xd4\x47\xd9\xcf\x71\xd8\xb6\xed\xe7\x0e\xaa\xaa\xaa\xfc\xf9\x86\
\x87\x03\x48\x3a\x9d\x7e\x70\xd3\xa6\x4d\x0f\x7a\x3f\x4f\x9f\x3e\
\xdd\x52\x55\x55\x3b\xf1\xc4\x13\xe9\xee\xee\xf6\x81\xf6\x5e\x98\
\x61\x18\x24\x7b\x46\x68\x5e\x7f\x12\xd9\x52\x89\x75\xeb\xd6\xa1\
\x69\x1a\xcb\x97\x2f\xa7\xa1\xa1\x81\x36\xa5\x7d\xc2\x09\x3a\x17\
\x5e\x78\x21\x93\x27\x4f\xe6\x33\x9f\xf9\x0c\x8f\x3c\xf2\x08\x2f\
\xbf\xfc\x32\x97\x5d\x76\x19\xbf\xff\xfd\xef\x99\x35\x6b\x16\xc3\
\xc3\xc3\x6c\xd8\xb0\x81\x58\x2c\xc6\xa9\xa7\x9e\xca\x73\xcf\x3d\
\x77\xd8\xa4\xe5\x61\x01\x92\x4a\xa5\xfe\x2a\x9b\xcd\x7e\xa3\xa6\
\xa6\x66\x51\x3e\x9f\xf7\xcb\xfb\xc7\x1b\xaa\xb7\x1a\x2d\xaf\xc8\
\xce\x03\xc5\xcb\x75\x54\x55\x55\x11\x08\x04\x68\x6a\x6a\x62\xe6\
\xcc\x99\x0c\x0c\x0c\x4c\x38\xbf\x6a\xfc\x31\x42\xe3\x8b\x93\xbd\
\xe1\x68\x47\x93\xfc\x92\x52\xb2\x6e\xdd\x3a\x86\x86\x86\x58\xbd\
\x7a\xb5\x1f\x4b\x79\x52\xe8\x79\x4a\xb6\x6d\x93\x4e\xa7\x39\xeb\
\xac\xb3\x88\xc5\x62\x24\x93\x49\x32\x99\xcc\xf8\x53\xd8\x30\x4d\
\x93\xbd\x7b\xf7\x72\xdb\x6d\xb7\x31\x32\x32\xc2\x8e\x1d\x3b\x70\
\x5d\x97\xed\xdb\xb7\xe3\xba\x2e\xfb\xf6\xed\xf3\x9f\xbd\x50\x28\
\x30\x38\x38\x78\x44\x2c\xf2\x61\x01\x92\x4c\x26\x7f\x5d\x57\x57\
\xf7\xcf\xcb\x97\x2f\x67\xf5\xea\xd5\x74\x77\x77\xb3\x79\xf3\x66\
\xba\xba\xba\x08\x04\x02\x7e\xe2\x7e\xea\xd4\xa9\xf4\xf5\xf5\x91\
\x4e\xa7\x99\x34\x69\x92\xcf\x7e\xfa\x25\x3d\xe3\x62\x10\xcb\xb2\
\xc8\xe7\xf3\x3e\x4d\x3e\xfe\x40\xb1\xf1\xb3\x73\xc7\x5e\x5c\x6a\
\x60\x60\xe0\xe1\xb1\xff\x37\x72\x34\x65\x45\x86\x61\xf8\x52\x9b\
\xcd\x66\xfd\xef\x1f\x3f\x74\xdf\x53\x4b\xa6\x69\xfa\x07\xd5\x78\
\xff\xe6\xfd\xbd\xe3\x38\x8c\x8c\x8c\xb0\x6a\xd5\x2a\x5f\x9d\x43\
\xa5\x9b\xca\x1f\xeb\x31\x6e\x83\xbe\xf1\xc6\x1b\x47\xd4\x42\x77\
\x24\xe7\x87\x88\x8f\x7f\xfc\xe3\x0c\x0e\x0e\x72\xed\xb5\xd7\x62\
\xdb\x36\xb3\x67\xcf\xa6\xaf\xaf\x0f\xdb\xb6\xa9\xab\xab\x43\x88\
\x4a\x32\xa6\xbd\xbd\x9d\x62\xb1\x48\x30\x18\xf4\x87\x24\x7b\xb6\
\xc0\x03\xc4\x2f\xf3\x19\x03\x61\xbc\x24\xa9\xaa\x4a\x20\x10\xa0\
\xa5\xa5\xc5\x9b\x3b\x35\xd2\xd9\xd9\xf9\x89\xf1\x31\xce\xf8\xa0\
\xf0\x70\x54\xee\xc5\x17\x5f\xcc\x45\x17\x5d\xc4\xf0\xf0\xb0\xaf\
\xb2\x42\xa1\x90\x9f\x8a\xf6\xd4\xd7\x78\x06\x60\xfc\xe1\x67\xde\
\xdc\xc9\xb1\x0d\x3a\x81\x4c\x1c\x5f\x72\xfb\x76\xd7\x96\x52\x76\
\x49\x29\xfb\x8e\x37\x20\x7c\xfa\xd3\x9f\xe6\xa1\x87\x1e\xe2\x67\
\x3f\xfb\x19\x2f\xbe\xf8\xe2\x84\xf3\x38\x0c\xc3\x20\x1a\x8d\xfa\
\x65\xf7\xde\x91\x0e\x9e\x6b\xec\xa9\xaf\xf1\xa7\xd5\x8c\x3f\xe9\
\xcd\x7b\x21\xc5\x62\xd1\xaf\x32\x9c\x3a\x75\x2a\xe9\x74\xfa\x0f\
\x28\x87\x23\x01\xc3\x53\x59\x4d\x4d\x4d\xec\xde\xbd\xdb\x57\xb7\
\x9e\x5a\x1c\x7f\x14\x85\xf7\x67\x4f\x5a\xc7\xd7\x54\x99\xa6\x89\
\xae\xeb\xdc\x71\xc7\x1d\x5c\x70\xc1\x05\xfc\xfb\xbf\xff\xbb\x97\
\x5f\xf7\x59\x6b\x4f\xda\xab\xaa\xaa\xfc\x41\x3d\x52\x4a\x76\xed\
\xda\x35\xeb\xb8\x4b\x88\xe7\x3e\x7e\xf0\x83\x1f\x9c\xd0\x53\x38\
\xde\x76\x78\x93\x9d\x3d\xfd\x59\x55\x55\xe5\x9f\xc4\xe3\xed\x4a\
\x8f\x23\xf2\xa4\xc5\xf3\x46\x3c\x8a\xc4\xa3\xab\xa5\x94\x13\x06\
\x80\x1d\xcb\x4a\xa7\xd3\xb3\x36\x6f\xde\x8c\x65\x59\xf9\xe9\xd3\
\xa7\x77\xac\x5a\xb5\xaa\xf6\x53\x9f\xfa\x14\x37\xdf\x7c\x33\xf9\
\x7c\xde\xef\xb5\xf7\xdc\x5a\x6f\x1c\xdf\x78\x9a\x3f\x93\xc9\x30\
\x7b\xf6\x6c\xce\x3d\xf7\x5c\xa2\xd1\x28\xe7\x9c\x73\x0e\x1b\x36\
\x6c\xe0\x92\x4b\x2e\x61\xcd\x9a\x35\x5c\x79\xe5\x95\x3c\xf9\xe4\
\x93\x2c\x5c\xb8\x90\xda\xda\x5a\x54\x55\xe5\xb9\xe7\x9e\x9b\x30\
\xed\xfa\x78\x03\xf2\x8b\xee\xee\xee\xed\x8a\xa2\xac\x7e\xef\x7b\
\xdf\x8b\xa2\x28\xb4\xb7\xb7\x63\x18\x86\x3f\xef\xdc\xd3\xb7\x35\
\x35\x35\x38\x8e\xe3\x0f\x94\x14\x42\x90\x4a\xa5\xfc\x01\xf5\xe3\
\x55\x96\x57\xd1\xe7\x7d\x6e\xac\x5a\x5d\x3a\x8e\x23\x6c\xdb\xa6\
\xa6\xa6\x86\x4c\x26\xa3\x1e\x23\x20\x7e\x4e\xa3\x58\x2c\xba\x77\
\xdd\x75\x17\xeb\xd7\xaf\xa7\xba\xba\x9a\x91\x91\x11\x7f\xf7\x8f\
\xb7\x67\x1e\xd5\x32\x3e\xd1\x96\xcb\xe5\xd8\xbd\x7b\x37\x0d\x0d\
\x0d\x78\xe7\xff\x7a\x6e\xef\xd6\xad\x5b\xb1\x6d\x9b\x9e\x9e\x1e\
\x7f\x10\x8d\x37\x92\xe9\x88\xec\xdd\xe1\x7c\xc8\x3b\xab\x0f\xd0\
\x96\x2c\x59\x62\x6d\xdd\xba\x95\x9d\x3b\x77\xd2\xd3\xd3\xc3\x96\
\x2d\x5b\x58\xb1\x62\x05\x3f\xf8\xc1\x0f\xb8\xf0\xc2\x0b\xe9\xec\
\xec\x24\x18\x0c\x32\x7f\xfe\x7c\x3a\x3b\x3b\x79\xf5\xd5\x57\xc9\
\x66\xb3\x0c\x0f\x0f\x13\x8b\xc5\xfc\x72\x53\xef\x04\x01\x29\xa5\
\xdf\x9f\x18\x8d\x46\xfd\x03\x51\x06\x07\x07\x13\xe5\x72\xf9\xe5\
\x31\xbb\xb4\x67\x64\x64\xe4\x8b\x1c\x87\x75\xe2\x89\x27\x26\x6e\
\xbc\xf1\xc6\xfa\x4b\x2f\xbd\x94\xcf\x7e\xf6\xb3\x0c\x0c\x0c\x50\
\x2a\x95\xfc\x39\xed\x85\x42\x81\x29\x53\xa6\xf8\x07\x9c\x79\xe3\
\x6f\x55\x55\x65\xe6\xcc\x99\x64\x32\x19\xff\xb4\x87\xb7\x73\x1e\
\xde\x9a\xe6\xb5\x6d\x9b\x37\xdf\x7c\x53\x1c\x57\x09\x19\xa7\xc3\
\x15\xcb\xb2\x78\xf8\xe1\x87\xb9\xec\xb2\xcb\x78\xf4\xd1\x47\x49\
\x26\x93\xbc\xf0\xc2\x0b\x48\x29\x19\x1c\x1c\x64\xdf\xbe\x7d\x95\
\xb3\xa5\xba\xbb\x27\x64\x10\xbd\xc1\x2f\xe9\x74\x9a\x60\x30\xe8\
\x0f\x4f\x8e\x46\xa3\xbe\xd4\x18\x86\xc1\xec\xd9\xb3\xc9\x64\x32\
\x28\x8a\xb2\x2b\x93\xc9\x5c\xc3\x71\x5e\xba\xae\xf3\xe4\x93\x4f\
\xf2\xd4\x53\x4f\x91\xcf\xe7\x99\x3c\x79\x32\x9f\xfd\xec\x67\xf9\
\xf4\xa7\x3f\xed\x0f\x4b\xf3\x46\x8d\xc7\xe3\x71\x9f\x36\xff\xfc\
\xe7\x3f\xcf\x7d\xf7\xdd\xe7\x8f\x05\xf4\xc8\x4e\x4f\x9d\xd7\xd5\
\xd5\x91\xcf\xe7\x49\xa5\x52\x34\x36\x36\x32\x3c\x3c\x8c\x65\x59\
\x7d\x8a\xa2\xe4\xff\x2c\x2a\xcb\x5b\xa1\x50\x88\x7b\xee\xb9\x87\
\xfb\xee\xbb\xcf\x8f\x13\x3c\x75\xd3\xdd\xdd\x8d\x10\xc2\x1f\x76\
\xe6\xdd\x6c\x55\x55\x95\x4f\xc5\x7b\x27\x2d\x7b\xb6\xc6\x3b\xef\
\xd0\x8b\x3f\xf6\xec\xd9\x73\x5c\xfb\x00\xdf\xba\x46\x47\x47\x5f\
\x09\x87\xc3\xd5\x63\xf7\xb6\xf2\x1b\xdf\xf8\x46\xe4\xb5\xd7\x5e\
\xf3\xef\xcd\xb3\x61\xe3\x8f\xde\x70\x5d\x97\x55\xab\x56\x31\x75\
\xea\x54\x96\x2d\x5b\xc6\xd3\x4f\x3f\x4d\x75\x75\x35\x5b\xb6\x6c\
\xa1\xb9\xb9\x99\xad\x5b\xb7\xb2\x72\xe5\x4a\x0a\x85\x02\xed\xed\
\xed\x1c\x3c\x78\x90\x68\x34\x4a\x6f\x6f\xef\xe5\xdd\xdd\xdd\xdb\
\x8f\xa4\x0b\xe0\x48\xad\xa5\x36\x73\xe6\xcc\x27\xa3\xd1\x68\x40\
\xd7\xf5\x0b\x4f\x3c\xf1\x44\x42\xa1\x10\xaa\xaa\xb2\x6f\xdf\x3e\
\xbf\x05\x39\x1c\x0e\xfb\xd3\x0d\x42\xa1\x90\x7f\x1c\x50\x26\x93\
\x21\x97\xcb\x4d\xe0\xb9\x82\xc1\x20\x17\x5e\x78\x21\x2f\xbf\xfc\
\x32\xde\xbc\x75\x21\x04\xed\xed\xed\xaf\x0f\x0c\x0c\x9c\x79\x3c\
\xc1\x18\xb3\x47\xfe\xcf\x73\xe7\xce\xdd\x7b\xed\xb5\xd7\xce\xbb\
\xf9\xe6\x9b\xb9\xfc\xf2\xcb\x59\xb6\x6c\x19\x2d\x2d\x2d\x3c\xf9\
\xe4\x93\xbe\x6a\xfd\x87\x7f\xf8\x07\x1e\x79\xe4\x11\x0c\xc3\xe0\
\x86\x1b\x6e\x60\xd3\xa6\x4d\x8c\x8e\x8e\xfa\xae\xb0\xd7\x93\xa2\
\xeb\xba\x7f\x0c\x47\x30\x18\x44\xd7\x75\xba\xbb\xbb\x97\x1d\x3c\
\x78\x70\xcb\x91\xdc\xe3\x91\xf6\x9f\xd9\x5d\x5d\x5d\x97\xb7\xb6\
\xb6\x5e\x17\x0a\x85\x38\xf3\xcc\x33\xf9\xc2\x17\xbe\x40\x34\x1a\
\xa5\xb9\xb9\x99\xa9\x53\xa7\x52\x55\x55\xc5\x8c\x19\x33\xa8\xa9\
\xa9\xa1\xae\xae\x8e\x48\x24\x82\xeb\xba\x7e\xa0\xb5\x72\xe5\x4a\
\x02\x81\x00\x0d\x0d\x0d\xc4\xe3\x71\x82\xc1\x20\xd1\x68\x94\xb1\
\x81\xcd\x6f\x6e\xdf\xbe\xfd\x9f\xb6\x6f\xdf\xfe\x19\xe0\x2b\xc7\
\x5b\x3a\xc6\x83\x31\x26\xa1\xca\x0b\x2f\xbc\xc0\x25\x97\x5c\x82\
\x10\x82\x7b\xee\xb9\xc7\x3f\x9f\x70\xc3\x86\x0d\x18\x86\xc1\x15\
\x57\x5c\x41\x22\x91\x40\xd7\x75\x7e\xf9\xcb\x5f\xd2\xdd\xdd\x4d\
\xa1\x50\xa0\x58\x2c\x4e\x38\x39\xc7\x73\xed\x43\xa1\x90\xf4\xcb\
\x91\x5c\xf7\x88\xdd\xc3\xa3\x3a\x2d\x5a\xd3\xb4\x80\x10\x82\x07\
\x1f\x7c\x90\x87\x1e\x7a\xe8\x0f\xfe\xfd\xe0\xc1\x83\x7f\xd4\x39\
\x78\xab\x1b\xeb\x38\x0e\xbf\xfd\xed\x6f\xbd\x49\x6f\xbb\x72\xb9\
\xdc\x7f\x02\xef\x58\xb6\x7f\x3c\x56\x36\x9b\x95\x9e\x8a\x12\x42\
\xf0\xfc\xf3\xcf\x73\xfa\xe9\xa7\xb3\x63\xc7\x0e\x36\x6c\xd8\x40\
\x7d\x7d\x3d\x0f\x3e\xf8\x20\x0f\x3e\xf8\x20\x77\xdf\x7d\xb7\xef\
\xba\x7b\x41\xaf\x77\xc2\x90\x97\xbe\x1e\x53\x6f\xa2\xad\xad\xed\
\x5c\xc7\x71\xf6\xd4\xd5\xd5\x25\x8e\xf4\x9e\x8e\xca\xc1\x0f\x85\
\x42\xb1\xba\xba\xba\xcf\x85\x42\x21\x17\xb8\x2a\x1c\x0e\x2f\x3e\
\x1c\xbd\xef\x4d\x7a\xf0\xa6\x3c\x7b\x99\x36\xcf\xdf\x4f\xa7\xd3\
\x6b\x7a\x7a\x7a\xde\xcf\x5f\x7e\x45\x16\x2f\x5e\x5c\xf0\xfa\x45\
\x74\x5d\x67\xe9\xd2\xa5\xec\xdb\xb7\xcf\x77\xc9\x0f\xc7\x7d\xf5\
\xf8\xb1\xed\xdb\xb7\xcf\x2d\x14\x0a\xed\x47\x73\x23\xc7\x3c\x38\
\xaa\xb9\xb9\xf9\xe7\x4d\x4d\x4d\xd7\x1e\xce\x5c\x73\x0f\x14\x8f\
\xce\xae\xa9\xa9\xa1\x50\x28\x14\x5b\x5b\x5b\xa3\xe3\xbe\x8f\xfe\
\xfe\xfe\xbf\x34\x20\xc1\x96\x96\x96\x6d\xf1\x78\x5c\xe4\xf3\xf9\
\x48\x63\x63\xe3\x09\x52\x4a\x0a\x85\xc2\x61\x8f\x90\x2d\x14\x0a\
\x98\xa6\x79\x9f\xeb\xba\x56\xb9\x5c\xfe\x8f\xa1\xa1\xa1\xd1\xff\
\x2b\x80\x00\x81\xb1\xef\x09\x86\x42\xa1\xa5\x87\x1b\xd7\x78\x51\
\x7b\x38\x1c\x4e\x64\x32\x99\x5d\xfc\x3f\xb2\x82\xc1\x60\x6d\x38\
\x1c\xfe\x47\xd7\x75\x9d\x23\x60\x08\x64\x20\x10\x48\x9e\x7c\xf2\
\xc9\xff\xf3\xc2\x0b\x2f\x48\xde\x5d\xef\xae\x77\xd7\xbb\xeb\xdd\
\xf5\xee\x7a\x77\xbd\xbb\xde\x5d\xff\x8f\xad\xff\x03\x21\xf6\x70\
\xa0\xc1\x03\x97\x85\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\
\x82\
\x00\x00\x3a\x79\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\x01\
\x95\x2b\x0e\x1b\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xd9\x03\x03\
\x0e\x1c\x1e\xba\x16\x7f\x4d\x00\x00\x20\x00\x49\x44\x41\x54\x78\
\xda\xed\xbd\x79\x9c\x5c\x55\x99\xff\xff\x3e\x77\xa9\x5b\xd5\xd5\
\xdd\xd5\xdd\xe9\x2d\x49\x77\xd2\x21\x1b\x09\x59\x00\x59\x02\x81\
\x08\x88\x40\x40\x10\x46\x5c\x66\x1c\xc1\x19\x01\xf9\x02\x83\xdf\
\x41\xe7\x0b\xe3\x38\x83\xc8\x57\x04\x74\x5c\x58\x64\x40\x51\x91\
\x4d\x04\xd9\x05\x21\x04\x08\x09\xd9\x43\x3a\x0b\x4d\xd6\x4e\xa7\
\xf7\xa5\xba\xf6\xed\xae\xe7\xf7\xc7\xed\xba\x49\x00\x35\x60\xc0\
\xef\xcc\x8f\xf3\x7a\xd5\xab\xb7\xea\x53\xf7\x9e\xe7\x9c\x67\xf9\
\x3c\x9f\xe7\xb9\xf0\xd1\xf8\x68\x7c\x34\x3e\x1a\x1f\x8d\x8f\xc6\
\x47\xe3\xa3\xf1\xd1\xf8\x68\xfc\x8f\x1f\xe2\x7f\xf2\xcd\x9d\x74\
\xd2\x49\x3b\xa7\x4c\x99\x32\x51\x4a\x89\x00\xa4\xad\x20\x25\xa0\
\x48\x50\x24\x42\xbc\x73\x05\x4a\xa5\x52\x78\xc9\x92\x25\x5f\x4c\
\xa7\xd3\x0f\xff\x35\xae\x59\xfb\x20\x26\xad\xaf\xaf\x5f\x94\xcf\
\xe7\x27\x1d\xf4\xae\x10\x02\xcf\xf3\xa8\xa9\xa9\x79\x6e\x70\x70\
\x30\x71\xa8\xae\x43\x55\xd5\xea\x42\xa1\x10\x01\xd8\x63\x0f\x32\
\xf3\x73\x05\x2a\xa3\x3a\x4e\x4a\xc3\x4b\x87\xf0\x72\xfe\xf7\x6e\
\x4a\xc3\x4d\x86\x00\xf0\x3c\x0f\x29\xa5\xf6\xd7\xda\x44\x1f\xc8\
\x07\xd7\xd5\xd5\xdd\xf5\xd9\xcf\x7e\x76\x36\x80\x27\x3d\xdc\x0a\
\x8b\xb0\xa1\x20\xad\xb1\x97\xad\x80\x1c\xdb\x9d\x63\x3b\xd4\x30\
\x0c\x1e\x7f\xfc\xf1\x63\x81\x43\x26\x10\x29\xa5\xb4\x2c\x0b\x01\
\x8c\xb8\x29\x3e\x16\xa9\x24\x24\xab\x38\xf6\x88\x05\x6c\x1f\x5d\
\x4f\x22\x93\xa4\xae\x2a\x42\x6e\xd8\x66\xf8\x77\x75\x07\x6c\x90\
\xff\x51\x02\xb1\x6d\xdb\x18\x1a\x1a\x02\x60\x67\xbe\x9f\x85\x57\
\x81\xed\xed\xbf\x50\x60\x0f\x86\x28\xee\x8c\x62\xee\x8e\x80\x90\
\x54\x55\x55\x81\x2f\xa6\x43\x36\x5c\xd7\xc5\xb2\x2c\x90\x20\x74\
\xf0\x84\xc3\x45\x33\xbe\xcf\x73\x3d\xb7\x71\xeb\x99\xbf\xe7\x47\
\x4f\xfe\x90\x89\xad\x1e\x4b\x06\x9e\xf1\xdf\x37\x26\x8c\xbf\xa6\
\x40\x94\x0f\x68\x5e\xcf\x34\x4d\x4c\xd3\xc4\x76\x5d\x74\x5d\x21\
\x37\x50\xc5\xb7\x3f\xfe\x5b\xce\x9e\x71\x31\x8a\x13\x61\xdc\xe4\
\x0a\x42\x93\xf3\x98\x25\x0b\xcb\xb2\x30\x4d\xf3\xa0\x17\xa2\xa5\
\xa5\xe5\xe0\x2e\xc2\xf3\xa4\x65\x59\xd8\xb6\x8d\x87\x24\x95\x31\
\x39\xfa\xf0\x23\xe9\x48\xaf\x40\x11\x0a\x9f\x38\xfa\x93\x54\x55\
\xc5\x50\x42\x12\xc7\x71\x30\x4d\x13\xc7\x71\xfe\xaa\x76\x4f\x7b\
\x1f\xea\xe8\xc2\x63\x8e\x39\xe6\x5f\x1c\xc7\xf1\x24\x12\x55\xaa\
\xb8\xb6\xf4\x45\xab\x48\x34\x5d\x75\x3b\x3b\x3b\x5b\x4d\xd3\xf4\
\x77\xa9\xe7\x62\x96\x14\xfe\xed\xcc\x3b\xf8\xe9\xfa\x6b\x38\xab\
\xf9\x1b\x4c\x51\xe2\x9c\x34\xeb\x28\xee\xde\x7e\xab\x2f\x08\x55\
\x12\x0a\x85\x90\xf2\xe0\x0e\x48\x6f\x6f\x2f\x6d\x6d\x6d\xab\x1a\
\x1b\x1b\x29\x1b\x6c\x3c\x05\x89\x24\x95\x4e\xd6\x4d\x39\x6c\xca\
\x0c\x45\x51\x40\x08\xa4\x2b\x11\x86\xcb\xdc\xfc\x14\x72\xbf\x84\
\xd3\x7e\xf6\x71\x2a\x42\xd3\x39\xf9\x8e\x93\xd1\x55\x0d\x81\x46\
\x28\x54\xc3\xce\x9d\x5b\x68\x6d\x6d\x45\x4a\x89\xeb\xba\x53\x43\
\xa1\xd0\x82\xf7\xb2\x2e\x15\x15\x15\x54\x57\x57\xaf\xe9\xee\xee\
\x96\x1f\xaa\x40\x2c\xcb\x9a\x7b\xd8\x61\x87\x1d\x97\x48\xf8\xaa\
\xbe\xaf\xae\x8b\xd9\x0b\x75\xac\x81\x08\xce\x48\x18\x91\x8b\xd0\
\xdd\xa3\x62\xdb\xb6\xbf\x4b\x85\x87\x6b\x4b\x66\xb5\xcd\x40\xd9\
\x03\x45\x3b\x4b\xb6\x98\xe3\xa5\xdd\x0f\xe0\x79\xbe\x4a\x11\xaa\
\x0c\xde\x7f\x90\x23\x5c\x5f\x5f\xbf\x60\xda\xb4\x69\x78\x9e\x47\
\x51\x9a\x68\x87\xa7\xa8\x0c\x87\xe8\x5c\xe1\x31\xae\xba\x1e\xcb\
\x32\xd1\x84\xca\xf6\xf1\x5b\x39\xe1\xec\x4a\x5c\xd7\xc3\x1e\xd1\
\x71\x07\xa3\x58\xc3\x50\x93\x19\x87\x9d\x52\x28\x16\x4a\xa8\xaa\
\x40\xd3\x34\x72\xb9\x1c\xae\xeb\xca\x93\x4e\x3a\xe9\xfa\xda\xda\
\xda\xeb\x01\x1c\xe9\xa2\x57\x79\xe0\x28\x60\x8f\xd9\xbf\xc0\xf6\
\xc9\x40\xcd\xed\xde\xbd\x9b\xb5\x6b\xd7\x46\x80\xd2\x87\x6e\x43\
\x4a\xa5\x12\xa6\x69\x92\x73\x4b\x44\x8e\x2d\xa2\x56\x2b\x44\xaa\
\x73\x30\x33\x87\x66\x08\xc4\x5a\x27\xd0\xc9\x9e\xe6\xa1\xaa\x2a\
\xa9\x5c\x1a\x45\x51\x51\x15\x1d\xdb\x33\xa9\xd2\x2b\xf1\x24\x58\
\xa6\x89\xd0\x08\xde\x7f\xb0\x8e\x99\xe7\x79\x94\x4a\x25\xf0\x24\
\x9d\x7a\x37\x0b\x8f\xd6\x81\x22\x46\x3c\x83\xbd\xc7\xc2\xb2\x2d\
\x3c\x54\x8c\x98\xc4\x35\x05\xf5\xea\x6c\x1a\x67\xd6\xd0\x51\xb3\
\x8a\x8a\xd9\x2a\x20\xd8\xf4\x23\x9b\xa9\x2d\x87\x53\x55\x55\x45\
\x6b\x6b\x2b\x00\x83\x83\x83\xa2\xac\x42\x55\x14\x76\x36\x6e\x63\
\xc1\xd9\x51\xbc\x31\x1b\xe8\x59\x02\x7b\xd0\xc0\xea\x89\x60\x76\
\x85\xc1\xf5\xb5\xbe\xae\xeb\x78\x9e\xf7\xe1\xdb\x10\x21\x04\x96\
\xe5\xeb\x7d\xc7\x71\x50\x55\x05\x37\x5d\xcf\x4d\xa7\x3f\xc3\x95\
\x47\xfd\x17\x7a\xb1\x85\xd6\xd8\xd4\xe0\x3d\x9e\xe7\x11\x8d\x86\
\xb8\xe5\xf1\xef\xf2\x37\xd3\xaf\x61\x7c\x53\x2d\x49\xaf\x93\x73\
\x66\x5c\x82\x22\xf7\xcd\xf5\x1e\x4f\x08\x52\xca\x31\xdb\x63\x21\
\x3d\xdf\x65\x3b\xa7\xf5\x1b\x2c\x9e\xf6\x0f\x98\xb6\x3f\xa7\x65\
\x9a\xa8\xaa\x82\x9a\x6f\xe2\xfa\xb3\xfe\x0b\x37\x3e\x81\x93\x9b\
\xbe\xcc\x37\x4f\x7a\x88\x90\x6a\x50\x32\x4b\x64\x32\x19\x86\x86\
\x86\x18\x1a\x1a\x62\x53\xec\x75\xe4\xc2\x1d\x38\xa6\x87\x69\x9a\
\x58\xa6\x85\xa2\x4b\x1c\xc7\x65\x6e\xf4\x5c\xbe\x76\xe2\x8f\x89\
\x44\x42\xd4\xb4\x54\x52\x7d\x52\x12\xcf\xf0\x37\x66\xf9\x3e\xff\
\x6a\x46\xbd\xbc\x83\x2c\xcb\x42\x08\x98\xa0\x1e\xc5\x8a\x9e\x27\
\xf9\xed\xf2\xdf\xf0\xa5\x23\xbf\x89\xeb\x3a\xa4\xd3\x69\x52\xa9\
\x14\xf9\x42\x1e\xc7\xb3\x49\xd6\x2e\x63\xeb\xee\xdd\xf4\xe4\x36\
\x43\x6d\x3f\x3f\x5a\x7b\x39\xe9\x64\x8e\x54\x3a\x4d\x3a\x9d\x26\
\x93\xc9\x20\x84\xf8\xb3\xc7\x64\xfc\xf8\xf1\x41\xbc\xe0\x0b\xd2\
\x02\x21\xd1\xc3\x1a\x9f\x9c\x73\x3e\xab\x07\x9f\x41\x7a\xbe\x0a\
\xb4\x6c\x1b\x3d\xac\x23\xac\x18\xbd\xc5\x1d\xb4\xf7\xaf\xe0\xb0\
\xfa\x79\x34\xd5\x34\xa1\x0a\x1d\xd7\x75\x71\x1c\x27\xd8\x14\x91\
\x2a\x81\xaa\x8a\xe0\xf7\xb6\x6d\x23\x74\x41\x85\xde\xc0\x95\x9f\
\xf8\x26\x3f\x7c\xf6\x07\x5c\x7a\xec\xad\x5c\xb3\xe8\x1e\xda\x1a\
\x67\xe0\xb8\x0e\x8e\xe3\xbf\x3c\xcf\x43\x51\x94\x0f\xdf\xa8\x97\
\x4f\x88\x6d\xdb\x38\xd8\x08\xa1\x10\xd6\x23\xb8\x9e\x8d\x2b\x5d\
\x92\xc5\x61\x14\x45\x0b\xbc\x15\xd7\x71\x91\x8a\x87\x6b\x49\x9e\
\xeb\xba\x85\xd2\x56\x97\xe2\x80\x20\xdf\xa5\x31\xb4\xb1\x88\x11\
\xd1\x41\x82\x65\xda\x54\x46\xab\x6e\x9e\x3b\x67\xee\xe8\x1f\xc3\
\x0f\x52\xa9\xd4\xeb\x3d\x3d\x3d\x77\x97\x05\x32\x38\x38\xe8\x7b\
\x66\xa3\x2e\xab\x6f\xca\x70\xfe\x0f\xcf\x63\x30\x91\x43\x9a\x05\
\x24\x1e\x02\x81\x7c\xda\x65\xad\xda\xc1\xe5\x8f\x5c\xc3\x68\x36\
\xc1\xe5\x77\x5f\xc1\x9a\x37\x5e\xc3\x74\x8b\xbe\x5d\xd9\xef\x64\
\x6a\x86\xc2\xf6\x9f\xbb\x54\xc7\x72\x60\xfa\x66\x42\x5f\x51\xc5\
\xea\x57\x93\xcc\xbd\x76\x1e\xd5\x91\x18\x5f\xfa\xd1\x57\x71\xa5\
\x8b\xa1\x46\xd0\x23\x1a\x42\xf8\x1b\x54\x55\xd5\x43\xe2\x2e\x6b\
\xef\xf7\x84\x58\x96\x85\x2d\x1c\x34\x45\x63\x30\xbf\x97\x33\x62\
\x0b\xe8\x89\x99\x24\x0a\x7e\xfc\xa1\x69\xfe\xd4\x75\x05\x95\xa5\
\xd7\x8d\x62\x67\x64\x10\x04\xda\xb6\xcd\xa2\x45\x8b\x68\x98\x6d\
\xa2\x0a\x85\x4e\x6d\x2f\x47\x2c\xd2\xb1\xfb\x2a\xce\xb6\x07\x22\
\xc8\x74\xe8\x5d\x41\x9d\x2d\x5b\xb6\x00\xdc\x5d\x56\x59\x93\x27\
\x4f\x26\x12\x89\x20\x84\xa0\xcf\x19\xa1\xee\xfc\x21\x26\xca\x2a\
\x9c\xfe\x0a\xac\xde\x0a\xdc\x78\x08\xe4\xbe\x89\x6a\xaa\xc6\xb1\
\x77\xef\x5e\x22\x54\x51\x1b\x6e\xc4\x73\x07\x71\x1c\x27\xf0\xee\
\xd4\x9c\x47\x7d\x63\x3d\xe3\xea\xc6\xa1\x28\x0a\x3b\x0b\xfd\xcc\
\xfc\x5c\x91\x58\x4c\xc7\x4d\xc7\xb0\x7a\x23\x38\x83\x35\x58\xfd\
\x61\x14\x45\x04\xaa\x13\xa0\xbb\xbb\xfb\xaf\x2b\x90\x52\xa9\x84\
\xad\x49\xa4\x0c\x63\xd7\x6c\x27\x31\xec\x70\xd2\xc7\x8e\xe4\x57\
\x9b\x6e\xa0\x2f\xd9\xc7\x59\x67\x9d\xc5\xa5\x97\x5e\xca\xf2\xe5\
\xcb\x69\x6f\x6f\x67\x68\x68\x08\xd7\x75\x71\x5d\x97\x81\x81\x01\
\xca\x71\x4a\xca\x29\x10\x3d\x39\x4b\xa8\x39\x8a\xd7\x90\x44\x3b\
\x4a\x10\x12\x15\x38\x25\x0f\x55\x68\x0c\x3d\xd0\xe8\x63\x4f\xfb\
\xdd\x7c\xf9\xfb\x72\xec\xa0\x08\x85\x2d\xd6\x1e\xe6\xd5\x36\x53\
\xb2\x0a\xd4\xcc\x0b\x51\x98\xdd\x0b\x48\x84\x13\x22\xbd\xaa\x9a\
\xe2\x8e\x30\x00\x21\x11\xe6\xba\x57\xcf\x23\x5b\xca\x60\xd9\x56\
\xa0\x72\x00\x2a\x84\x82\x3b\x36\xa7\x10\x82\xbe\xd2\x28\x27\x37\
\xd6\x51\x2c\xba\x1c\x39\xe3\x38\xb6\xd6\xac\x40\xce\x75\x11\xa8\
\xe4\x36\x45\xc9\xac\xa9\x02\x21\x51\x14\x05\xcf\xf3\x3e\x78\x81\
\x8c\x1f\x3f\x9e\x81\x81\x81\x03\x8d\x8e\xa2\x38\xc9\x64\x92\x54\
\x2a\x85\xad\x79\x94\xac\x18\x8e\x90\xdc\xf5\xc6\xd7\xf9\xc6\x19\
\x37\x23\xc3\x26\x96\x5b\xe0\xfa\xeb\xaf\x67\xd9\xb2\x65\x7c\xfe\
\xf3\x9f\x47\xd7\x75\x22\x91\x08\x3b\x76\xec\xa0\xb2\xb2\x92\xf6\
\xf6\x76\x5f\xa0\xb6\x8d\xed\xda\x18\x9a\x4a\x6a\x48\xe5\x07\x17\
\x3e\xcd\x60\x7a\x80\xfb\x36\x5d\xcf\xe2\x79\x7f\xc3\xf3\x5d\xf7\
\x8e\xb9\xc5\xbc\x43\x20\x65\xe1\x5a\x96\x85\x22\x04\x8a\xa2\x60\
\xd9\x36\x5f\x3d\xfc\x67\x64\x44\x17\x9a\xe1\x91\x1c\x75\x58\x91\
\xf8\x39\x49\x7c\x7b\x20\xa5\xc4\x93\x1e\xdd\xdb\x06\x48\xec\xb4\
\xc9\xc6\x4b\x38\xd5\xfb\x3c\xc2\xa8\x50\x02\x35\x26\x84\x00\x01\
\x25\xd3\xe4\x82\xe6\xef\x31\xaa\xb7\x73\xcd\x82\x7b\x78\x65\xf3\
\xcb\x64\x22\x1d\xb4\xb3\x0b\xdb\xb6\x90\x48\x34\x4d\xc3\xb6\x6d\
\x54\x55\x7d\xcf\xce\xc9\x7b\x12\xc8\xc0\xc0\x00\x6d\x6d\x6d\xff\
\xbb\xba\xba\xfa\x68\x3f\x00\x13\x8c\x26\x46\xe7\xab\xaa\x8a\xa2\
\x28\xe8\x52\xc1\xc9\x48\x72\x03\x2e\x76\x9f\xe0\xeb\xff\xf2\xcf\
\xa4\x77\x49\x4a\xa3\x82\xaf\x7c\xe5\x2b\x7c\xfb\xdb\xdf\xe6\x57\
\xbf\xfa\x15\xdb\xb7\x6f\x47\x4a\x49\x22\x91\x20\x16\x8b\xd1\xd9\
\xd9\x49\x7d\x7d\xbd\x1f\x19\x4b\x1b\x01\x1c\x1e\x5e\xcc\x9a\xc1\
\xa7\xd9\xd3\x35\xca\x39\xe3\xff\x0f\x0b\x0f\x3b\x96\x35\x43\xcf\
\xb2\xd3\x2c\xbd\xab\x40\xca\x27\xc4\x71\x1c\x14\x04\x4a\xa5\xa0\
\x73\x67\x96\x23\xcf\x9b\xcd\x65\xbf\xbd\x9c\xfb\xfe\x76\x1d\xc3\
\x75\x23\x78\x03\x23\x3c\xe4\xfc\x0e\xd3\x1c\xfb\x7f\xcd\x63\xe5\
\x77\x53\xa8\xba\x40\x11\x0a\xc5\x62\x91\x62\xb1\x08\x40\x95\x1b\
\xc2\x75\x55\xff\xd4\x29\x0a\x42\x48\x46\x86\x4a\x9c\xfd\xe9\x4f\
\x72\xe5\xd3\x37\x70\xe6\xd4\x8b\x99\xde\x30\x8f\x94\x6e\xb0\xc1\
\xdd\x8e\x69\xfa\x0e\x85\x94\xfe\xeb\x43\x31\xea\xe3\xc6\x8d\xfb\
\xc2\xb4\x69\xd3\x8e\x07\x70\xa5\xc7\xfa\x3d\x6f\xa0\x0a\x95\x89\
\x13\x27\x62\xdb\x36\xe9\x67\x2d\x52\xd2\x43\x4a\x81\x94\x61\x34\
\x40\xd3\x2d\xfa\xfa\xfa\xb8\xf4\xd2\x4b\xdf\x75\xce\x5c\x2e\x47\
\x2c\x16\xf3\x5d\x67\xe1\x20\x84\x42\xa5\x51\x83\xe5\x16\x71\x3d\
\x97\xba\x86\x10\x5d\xa9\xad\x64\xac\x04\x96\xa5\xa1\x68\xe2\x1d\
\x02\x51\x14\x25\x38\x25\x02\x81\xa2\xa8\xb8\x2e\x68\xaa\x8a\x10\
\xfe\xc2\x38\x8e\xc3\xea\x9e\xdf\xe3\xb9\x2e\x96\xe5\x05\xae\xf2\
\x9c\xf9\xb3\xe9\xe8\xe8\x20\x16\x8b\x05\xaa\x13\xc0\xf1\x94\x7d\
\xa7\x4e\x51\xc0\xd8\x87\x7e\x0a\x45\x41\x08\x41\xb6\x94\x65\xc3\
\xc8\x92\x60\x2e\xc4\xbe\xe0\xf0\x60\x91\x86\xbf\x48\x20\x9e\xe7\
\x09\xd3\x34\x91\x52\xf2\x96\xd9\x4b\xed\x14\x97\xe3\x9a\x8e\xe7\
\x9c\x73\xce\xe6\xe4\x93\x4f\xe6\xa7\x3f\xfd\x29\x5b\xb7\x6e\x65\
\xfe\xfc\xf9\xac\x5c\xb9\x92\x99\x33\x67\xb2\x66\xcd\x1a\xe2\xf1\
\x38\xe1\x70\x98\x7c\x3e\x1f\xec\x9c\x8c\x52\x62\xfc\x1c\x85\xfa\
\xc4\x54\x8a\xd9\x9c\xef\x18\x28\x0e\x8a\xa2\x32\x9c\xeb\x61\x7e\
\x74\x2e\x03\xc2\x64\xb4\xd0\xcf\x84\xc8\x44\x84\x14\x58\xa6\x85\
\x70\xdf\x79\x42\xf6\x07\x0f\x05\x02\x01\x34\x35\x45\xc8\x65\x1c\
\xa4\xeb\xab\x26\x17\x9b\xb0\x52\x85\xeb\x66\x30\x4d\xb7\x0c\x8b\
\x80\x84\x49\x93\x26\xa1\x69\x1a\xae\xeb\x06\xf3\x2a\x1b\x24\x39\
\x27\x03\x58\xbe\xca\x0a\x43\x7d\x7d\x84\xcd\x3b\x77\x20\x2d\x1d\
\x45\x51\x48\x17\xe3\x1c\x56\x7d\x34\x3b\xdc\x17\xb0\x2c\x5f\x65\
\x95\x6d\xc7\x87\x22\x10\x29\xe5\x3e\xfd\xeb\xf9\xae\xa4\x10\x82\
\x9a\x9a\x1a\x22\x91\x48\xe0\x7f\x87\x42\x21\x8e\x3a\xea\x28\xf6\
\xee\xdd\xcb\xd1\x47\x1f\xcd\xba\x75\xeb\x30\x0c\x83\x9a\x9a\x1a\
\xd2\xe9\x34\x42\x08\x52\x86\xc7\xb4\x4f\x45\x08\xaf\x9e\xc6\x96\
\xd5\xeb\x7c\xf0\x51\xf7\x50\x44\x98\x41\x63\x39\x13\xf5\x8b\x68\
\x9a\x3f\x9d\x7b\xd6\xff\x1b\xe7\x86\xfe\x91\xaa\x70\x2d\x25\xab\
\x0f\xe5\x5d\x04\x22\x84\x1f\x2f\x94\x4a\x25\x94\x31\x2c\x63\xc2\
\xc4\x6a\x7e\xf2\xca\xb7\xb8\xeb\xc2\x65\xdc\xf4\xda\xc5\x48\x33\
\x42\x63\x4d\x33\xb6\xd3\x45\xa9\x64\xf9\xaa\xc5\x15\x8c\x3f\x5d\
\x62\x0e\x55\xe1\x66\x75\x9c\xa4\x06\xb6\x82\x50\xfd\x13\x35\xe2\
\x24\x82\x13\xe2\x54\xba\x54\x55\x86\xb9\x73\xdd\xff\xe6\xa7\x9f\
\x5b\xca\x7f\x6d\xb8\x86\x1d\x99\x0e\x5a\xf4\x56\x6c\xd7\x22\x9f\
\x2f\x80\xf0\x05\xa2\x28\xca\x87\x76\x42\x28\x9f\x10\x4f\x78\x28\
\x42\xa1\xbd\xbd\x9d\xd1\xd1\x38\xc3\xc3\xc3\x14\x0a\x05\x00\x3a\
\x3a\x3a\x02\x5d\xea\xba\x6e\xb0\x6b\x4a\xa5\x12\x86\x61\xf8\x49\
\x28\xdd\xa3\xc2\x30\xd8\x34\xf4\x3a\x89\x44\x8e\x62\xb1\x88\x19\
\x92\x14\xad\x18\x46\x34\xc2\xb5\xbf\xbb\x98\xbb\xbf\xf2\x5b\x32\
\xde\x20\x8f\x75\xfc\x88\x52\xca\x63\x74\x64\x14\xcd\x50\xca\x6a\
\xca\xdb\x5f\x20\xc5\x62\x91\x54\x2a\x85\x40\x50\x30\x25\x25\xcb\
\xa4\xcb\x7b\x85\x5f\x2f\xbf\x87\x86\xda\x09\xac\x8b\x2f\xc5\x4b\
\x08\xba\x37\x26\xc9\xa7\x7d\xf7\x56\xc3\xa0\x75\x41\x05\xfd\xf1\
\xbd\xfe\x42\x86\x24\x89\x57\x63\x14\x3b\xa2\x41\x90\x58\x2c\x16\
\xfd\xf9\xab\x3c\x2c\xd7\x82\xaa\x38\x5f\xfb\xed\xe7\x39\x72\xc6\
\x0c\x52\xb9\x61\x92\xf1\x38\x3b\x96\x24\xc9\xe7\xfd\x39\x85\x10\
\x18\x86\xf1\xe1\xa0\xbd\xe5\x88\x18\x40\xea\x12\x90\xd4\x8f\x1b\
\x47\x3c\x1e\x47\x51\x14\x2a\x2b\x2b\xff\x64\x10\xd9\xdf\xdf\x4f\
\x32\x99\x44\x51\x14\x72\xc2\xa1\x64\x57\x10\xad\xcf\x93\x56\x55\
\x54\x55\x45\x43\x82\x10\x08\x45\x50\x69\x48\x2e\xfa\xe6\xa7\x19\
\xed\xb0\x49\xed\x2e\x20\x14\x81\x51\xa1\x07\xf3\x19\x86\x71\xf1\
\x84\x09\x13\xf6\xf4\xf7\xf7\xdf\x2a\x84\x40\x1d\x9b\x43\x11\x0a\
\x46\xc1\x45\xba\x92\x64\x07\x3c\xbc\xfd\x1e\x86\x37\x5b\x98\x69\
\x17\x31\x16\x2f\x68\x9a\xe6\x67\x03\x3d\x97\xd3\x26\xfc\x23\x62\
\xa2\x43\x7a\x58\xf0\x4a\xf2\x36\x46\x6c\x5f\xf5\xb9\xae\x8b\xa2\
\x28\x68\x9a\x86\xa2\x28\x44\xd3\x2e\xe9\x5e\x87\xfe\x0d\x26\x89\
\x1d\x9b\x78\x7e\xef\xfa\xfd\x5c\x5b\x81\xae\xeb\x48\x29\x83\xa0\
\xf0\x50\x9c\x10\xf5\xcf\xbd\xa1\xa1\xa1\xe1\x52\x55\x55\x27\xba\
\xae\x4b\x56\xb5\x98\xff\xe9\x28\x67\x1c\xfd\x0f\x94\x66\x6c\xe2\
\xb2\xcf\x5c\x87\xd1\xa2\x30\xfe\x38\x41\x32\xba\x87\xee\x35\x19\
\xf2\x85\x3c\x85\x42\x81\x81\x81\x01\x2b\x9b\xcd\x5a\xb6\x6d\xab\
\x8e\xe3\x08\x29\x25\xc5\x88\xc7\xec\x8f\x57\x92\xed\xb5\x29\xec\
\xd1\x88\x44\x22\x18\x42\xa3\xd8\x01\x5d\x2f\x9a\x0c\xac\xb4\xc9\
\x74\x4a\x9c\xac\x8f\xbe\xaa\xaa\x5a\x4e\xc5\x22\x84\x60\xe2\xc4\
\x89\x24\x12\x89\x64\x38\x1c\x56\x33\x99\xcc\xce\x58\x2c\x76\x7c\
\xb1\x58\x54\x9a\x9b\x9b\x09\x25\x25\x7b\x5f\xb2\x19\xdd\xec\x52\
\xe8\x07\x5c\x05\x4d\x53\xc7\xbc\x25\x7f\x3e\x21\x04\x9a\x30\xc8\
\x1f\xdd\xce\xff\x3a\xf6\x7b\x44\x8d\x2a\x06\x0a\xbb\xd9\xbd\xb9\
\x97\x5c\x9f\x87\xe3\x38\x14\x0a\x05\xc6\x8f\x1f\xcf\x55\x57\x5d\
\x45\xba\x6f\x84\xe4\x46\x0d\x67\x28\x44\xc8\xad\x20\x52\x11\xa1\
\xa2\xa2\x02\x45\x51\x08\x87\xc3\x01\xa0\x68\x18\x06\xf1\x78\xfc\
\x51\x55\x55\x1f\x2b\x14\x0a\xde\x07\x2e\x10\x60\xa2\xe7\x79\xe4\
\x0d\x87\x39\xa7\x56\x93\x74\x07\xb9\xee\x9c\x3b\x18\x12\x6f\xb1\
\x31\xf5\x02\x0b\xe7\x9c\xca\x9e\xc4\x56\x06\xd6\xfa\xd8\x96\xa2\
\x28\x4c\x9f\x32\x53\x9d\x75\xf8\x6c\x3d\x91\x4a\x88\x52\xa9\x44\
\xa9\x54\xa2\xca\x32\xc8\x74\xb9\xf4\xaf\xcb\x33\x67\xd6\x3c\x6e\
\xbd\xf5\x56\xae\xb8\xe2\x0a\xf6\xee\xe9\x26\x16\x8b\xb1\x60\xc1\
\xf1\x84\x42\x21\xe6\xcc\x99\xc3\xcc\x99\x33\x31\x4d\x93\x29\x53\
\xa6\x30\x69\xd2\x24\xe2\xf1\x38\x91\x48\x84\xda\x58\xdd\x2c\x4f\
\xba\x17\xcc\x99\x33\x67\x61\x32\x99\x54\xee\xbe\xfb\x6e\x2e\xbf\
\xfc\x72\x6a\x6a\x6b\xd1\x43\x3a\xf3\xe6\xcf\xa3\xb2\xaa\x92\xb9\
\x73\xe7\x00\x30\x6f\xde\x3c\xa6\x4d\x9b\x86\x94\x92\x48\x24\x42\
\x3e\x5b\x64\xea\xe2\x6a\xce\x9e\x71\x31\xd9\x42\x8e\xe7\x3a\xef\
\x22\xb1\xdd\x09\x04\x52\x2c\x16\x79\xf9\xe5\x97\x91\x52\x72\xd2\
\xc9\x27\x91\x2f\xe4\x99\x35\x7b\x16\x55\x55\x55\xcc\x9b\x37\x0f\
\x80\x73\xce\x39\x87\xe9\xd3\xa7\x07\x73\x16\x8b\x45\xb6\x6d\xdb\
\x36\xe7\x2f\x15\xc6\x41\x81\x8b\x8e\xe3\x30\x3a\x3a\xca\xe8\xe8\
\x28\xe9\x4c\x86\xa2\x55\xa4\x10\xd9\xc5\xf7\x9f\xfa\x2e\x9a\xae\
\x90\x55\xba\x79\xea\xad\xbb\xe9\xeb\x48\x92\x4c\x26\x48\x24\x12\
\xa4\x53\x19\xd4\xd3\x76\x23\x3e\xbe\x1d\x19\x29\x71\xc2\x09\x27\
\xb0\x7e\xfd\x7a\x3e\x76\xcc\xd1\xc4\xec\xf1\x54\x68\xd5\x4c\x99\
\x32\x85\x64\x32\x49\x73\x73\x33\xe3\xc7\x8f\xa7\xb6\xb6\x96\xa9\
\x53\xa7\x32\x7d\xfa\x74\x14\x45\x61\xf2\xe4\xc9\x68\x9a\x46\x38\
\x1c\xa6\xb1\xb1\x31\xd8\x8d\xea\xcc\x38\x42\x11\x54\x54\x54\xe0\
\x79\x1e\x8d\x8d\x8d\xc4\xe3\x71\x26\x4d\x9a\x84\xeb\xba\x4c\x98\
\x30\x81\x86\x86\x06\x66\xce\x9c\x19\xc0\x34\x75\x75\x75\x9c\x7e\
\xfa\xe9\x14\x0a\x05\x54\x45\xa1\x52\xaf\xc5\x76\x4d\x2c\xcf\xa4\
\xda\x68\x08\xbc\xb5\xb2\xda\xea\xea\xea\xc2\x75\x5d\x86\x87\x87\
\xf1\x3c\x8f\xda\xda\x5a\xf2\xf9\x3c\x9a\xa6\x91\x4c\x26\xe9\xeb\
\xeb\xc3\x71\x1c\x9a\x9b\x9b\x29\x95\x4a\xc1\x49\xfe\x50\x68\x40\
\x33\x67\xce\x5c\x63\x9a\xe6\x71\x8a\xa2\x90\xaa\x74\xf8\xd4\x37\
\x1b\x31\x0c\x41\x6a\xb7\xc3\xf0\x16\x93\xf8\x5b\x16\xf9\x61\xf7\
\x00\x6c\x47\x55\x35\xce\xb9\xe2\x54\xf4\x79\x7b\x78\xf1\x3b\x7b\
\xf8\xc9\x0d\x3f\x23\x9d\x4e\x73\xef\xbd\xf7\x52\x53\x53\x43\x67\
\x67\x27\xae\xeb\x72\xec\xb1\xc7\x92\x4c\x26\xe9\xef\xef\x0f\x9c\
\x01\xcf\xf3\xde\xf1\x2a\xbb\xa6\x95\x95\x95\x0c\x65\x7a\x50\x4c\
\x83\x19\x33\x66\x32\x3c\x3c\x4c\x65\x65\x25\x0b\x16\x2c\x60\xcd\
\x9a\x35\x7f\xf6\xff\x1d\xc7\x21\x97\xcb\xb1\x6b\xd7\x2e\xae\x7f\
\xf9\x73\x14\xf3\x50\x5d\xa3\xb1\xf2\x81\xdd\x0c\xac\xdd\x67\x43\
\xde\x4b\x5e\xbd\x7c\x4a\x56\xad\x5a\x25\x3e\x14\xa3\x5e\x1e\xcd\
\xcd\xcd\x18\x89\x24\x1b\x6f\x2e\xe2\x94\xe4\x18\x6b\x44\x22\xd1\
\x30\x0c\x35\xb8\x38\xcf\xf3\x50\x04\x7c\xe6\xd8\xbf\x27\xde\xb4\
\x89\x57\x42\x77\xf2\xfd\xef\x7f\x9f\x6f\x7e\xf3\x9b\x64\x32\x19\
\x22\x91\x08\xb6\x6d\x53\x55\x55\x45\x7b\x7b\xbb\x7f\xf3\x08\x3c\
\x57\xe2\x49\x17\x21\x14\xdf\xb0\x2a\x1a\x9e\x94\x28\x42\x09\x22\
\x75\xcb\xb2\x68\xaa\x6e\x65\x64\x64\xc4\x8f\x01\xa4\xc4\xb1\x1d\
\x96\xbd\xf2\x1a\xae\x74\xd0\x14\x1d\x4f\xfa\x71\x8d\x82\x8a\x16\
\xd2\x82\x6d\x27\x3c\x81\xeb\x4a\x9c\xa2\xc7\xcf\x97\xdf\x49\x63\
\xb4\x85\xcd\x85\xe5\x14\x6d\x95\x3d\x6f\xc4\x29\x26\xfd\x20\x33\
\x1c\x36\xd0\x09\x53\x74\xf2\x44\x42\x11\x2c\xdb\xf6\x6d\x11\x3a\
\x8a\x2e\x0f\xd8\xc2\x41\x7c\x95\xc9\x7c\x78\x39\x75\xdb\xb6\xb9\
\xfa\xea\xab\x59\xb4\x68\x11\x8e\xe3\xf0\xe8\xa3\x8f\x92\xc9\x64\
\x82\xa0\xaf\x50\x28\x30\x7d\xfa\x74\xde\x7c\xf3\x4d\x12\x89\x04\
\xf5\xf5\xf5\x6c\xdd\xba\x15\x4d\xd5\x58\xd3\xf3\x1c\x21\xcd\x20\
\x97\xcb\xf1\xf5\xaf\x7f\x9d\x42\xa1\x40\x4d\x4d\x0d\xd1\x68\x94\
\xad\x5b\xb7\xd2\xd8\xd8\xe8\xab\x0b\xc5\x65\xce\xe7\x22\x1c\x3b\
\xee\x42\xcc\x70\x1f\xa9\xbe\x30\xeb\x7b\x5e\xe5\xf4\x63\x4e\x66\
\xdd\x8e\x57\xd9\xf3\x8c\x83\xa2\x41\x43\x43\x03\xc5\x62\x31\xf0\
\xfc\x5c\xc7\xa1\xdb\xeb\x67\xde\x19\x95\x5c\x71\xc2\xcd\xfc\x62\
\xe3\xb7\x38\xa5\xf1\x32\x56\xed\x7a\x95\x7c\xc5\x36\xb6\x3c\x90\
\x21\x9f\x2d\x20\x10\xe4\xc6\x79\x2c\x38\x3b\x86\x18\xf6\x78\xfc\
\xf9\x9f\x92\xee\x76\x31\x47\x20\xdb\xe7\x60\xe7\xf7\x79\x4a\x9e\
\xa5\x70\xfa\x1d\x2d\xfc\xe8\xec\xdf\xf3\x99\xff\xfc\x24\xff\xf2\
\xe9\x7f\x47\x91\x21\x36\x24\x7f\xc7\x0b\xff\xf5\x06\xa5\x3e\x3d\
\x10\xca\x4b\x2f\xbd\xd4\x0c\x0c\x1f\x4a\xb6\xcc\x41\xb9\xbd\x93\
\x27\x4f\xa6\x58\x2c\x32\x73\xe6\x4c\x14\x45\x61\xda\xb4\x69\x54\
\x56\x56\x52\x55\x55\xc5\x53\x4f\x3d\x15\x18\xe2\x4c\x26\xc3\xc6\
\x8d\x1b\x09\x85\x42\x08\x04\xba\x88\x50\x74\xf2\xec\xdd\x35\xe2\
\xbb\x91\xd1\x68\x80\x13\x95\xed\x83\x6d\xdb\x48\xcd\x63\xe2\x91\
\xd5\xbc\x35\xf8\x2a\x3f\xb8\xe0\x01\x4c\x25\xc5\x7f\x3c\xbb\x86\
\xc5\x27\x9f\x4d\x9f\xda\x4e\xef\xf3\x69\xd4\x90\x4f\xa6\x2b\xc3\
\xe5\x96\x65\x11\x0e\x47\x30\xea\x1c\x5a\xe6\xd7\x70\xc6\x19\x67\
\xb0\x54\xf9\x21\xc7\xb7\x9e\xc8\x50\xa4\x8b\xe1\xe8\x5e\xba\xaa\
\x3c\x0c\x3d\x4c\x43\x43\x03\xc3\x83\x43\x74\x3d\x06\x42\x11\xc4\
\x55\x90\x63\xe6\x53\x78\x1a\x46\x58\x04\x2a\x53\x95\x1a\x8a\x5d\
\x85\xc4\x25\x4b\x2f\x4d\x35\xcd\x3c\xbf\xfa\x55\x76\x2a\x1b\xf1\
\xc6\xd4\x1e\xc2\xf7\xfc\xaa\xaa\xaa\xc8\x66\xb3\x87\x94\xba\xf4\
\x67\x05\x12\x89\x44\xb8\xe9\xa6\x9b\x38\xf7\xdc\x73\xf9\xde\xf7\
\xbe\x47\xa9\x54\x0a\x74\x72\x59\x3f\xf7\xf6\xf6\x1e\x60\x03\x14\
\xa1\xb0\xec\xad\xa5\xcc\x3d\xfe\x44\xd6\xb6\x75\xe3\x0c\x86\x89\
\x46\xa3\xa4\xd3\xe9\x03\xd2\xb5\x89\x44\xc2\x0f\xc6\x42\x12\xcb\
\x0d\x53\xd1\x58\xe2\x7b\x7f\xb8\x86\x1b\x3f\xf3\x13\x8c\x4a\x87\
\x5f\x6e\xfc\x0f\xe2\x03\x26\xa9\x74\x12\x45\x13\xd8\xb6\x1d\xc4\
\x13\x65\xbe\x95\x94\xe0\x78\x26\xb9\x62\x0e\x2b\x6b\xa0\x1a\x16\
\xaa\xaa\x53\x1f\x6a\xa3\x64\x6e\xe0\xb6\x1f\xdf\x41\x7d\x7d\x3d\
\xab\x56\xad\x62\xe3\xc6\x8d\x84\xc3\x61\x7a\x7a\x7a\x68\x6c\x6c\
\xa4\xab\xab\x8b\xd9\xb3\x67\x63\x59\x16\x1b\x36\x6c\xa0\xa2\xa2\
\x82\xbe\xae\x01\x14\x0d\x54\x55\xc7\x71\x05\xae\x74\x89\xe8\x61\
\xa4\xe7\xe1\xd8\x0e\xa6\xe5\x93\x1b\xca\xc1\xee\x87\x4e\x03\x72\
\x5d\x57\xaf\xac\xac\x64\xe9\xd2\xa5\xc1\x05\x94\x8d\x5e\x39\x30\
\x13\x12\xf6\x4f\x29\x3b\xb6\xc7\xda\xc4\x23\x74\xf7\x45\x48\x0d\
\x65\x49\xa7\x0b\x54\x55\x55\x51\x57\x57\x17\x08\xa4\x1c\x50\x79\
\x9e\x87\x82\x44\x0b\x0b\x8a\x09\x97\xc1\x9d\x5b\x39\xeb\xcb\x27\
\x91\xeb\xf5\xb0\xf3\x12\x2b\xe3\xa2\x85\xf6\xc5\x23\xe5\xcd\x50\
\x2a\x95\x90\x9e\xc4\x71\x3c\x22\x51\x95\x6b\x9f\xfd\x2c\x37\x9e\
\xf9\x30\xbf\xd8\x72\x1d\x83\xfa\x4e\x1a\xf4\x06\xa4\x84\x71\xe3\
\xc6\x91\xcd\x66\x69\x6d\x6d\x65\xdd\xba\x75\xcc\x9f\x3f\x1f\xcf\
\xf3\x98\x3d\x7b\x36\x43\x43\x43\xe4\xf3\x79\xda\xda\xda\x98\x35\
\x6b\x16\x0f\x3c\xf0\x00\x42\x51\x70\x34\x3f\xfa\xff\xdb\xa3\xff\
\x89\xbc\xec\x47\xa2\x72\xc6\x94\x7f\x60\xab\x73\x27\xd6\x98\x40\
\x0e\x55\x86\xf0\x3d\x7b\x59\x55\x55\x55\x0b\x3c\xcf\xab\x79\x9b\
\x67\x51\xdb\xda\xda\xfa\x50\x2e\x97\x43\x48\x70\xa7\x08\x8e\x3b\
\xb7\x9a\xfc\x90\x4b\x7e\xd8\xa5\x30\xe4\x11\xa9\xd2\x29\xa5\x3d\
\x92\xbb\x6c\x3c\xe9\x32\x79\xf2\x64\x74\x5d\x27\x97\xcb\x61\xdb\
\x36\xc3\xc3\xc3\x84\x42\x21\xe6\xcd\x9b\x47\x5f\x6f\x1f\x96\x6d\
\xe3\x99\x72\x8c\x08\xed\x3b\x0c\xae\xeb\x1e\xa0\x3a\xa3\xd1\x28\
\xa5\x52\x89\x7c\x3e\xef\x5f\xbc\x04\x73\x9a\xe0\x6f\xbe\xd1\x84\
\xeb\x42\x4b\xf8\x28\xc6\x37\xd7\xb2\x7a\xd7\x4b\xe4\xfa\x5d\xba\
\x1e\x0c\xd1\x32\xb1\x95\x63\x8f\x3d\x96\x17\x5e\x78\x21\x88\xa6\
\x2d\xcb\xc2\x30\x7c\xdb\x56\xce\x65\xf8\xfc\x00\x81\x5d\xf0\x38\
\xed\xce\x4a\xf2\x43\x06\x3f\xff\xfc\x52\x6e\x5a\xf1\x65\xba\xd2\
\x6f\x22\xf1\xd8\xfc\xd3\x12\xc9\x4e\x07\x90\xd4\xd6\xd6\xb2\x73\
\xe7\xce\xe6\x64\x32\x39\xf4\xa1\x0a\xe4\xdd\x86\xae\xeb\x4d\xf3\
\xe7\xcf\x1f\x4c\xa5\x52\xd4\xd4\xd4\x90\xcb\x66\x71\x2c\x3f\x5f\
\x82\x02\x8a\xcf\xb2\x39\x00\xdb\x6a\x69\x69\x61\x60\x60\x00\xcf\
\xf3\x13\x40\xa6\x69\x72\xdf\x7d\xf7\xd1\xd2\xd2\x82\xa2\x28\x3c\
\xf9\xe4\x93\xec\xde\xbd\x1b\xd7\x75\x03\x6f\x6c\xc2\x84\x09\x74\
\x76\x76\x92\x4c\x26\xd1\x34\x8d\x74\x3a\x4d\x22\x91\xc0\x34\xcd\
\x40\x75\x79\x8d\x82\x93\xff\xb1\x96\xf8\x0e\x9b\x54\xa7\xcd\xe8\
\x36\x0b\x33\xe5\xa1\x68\x82\xc6\xf1\xf5\x41\x2a\xf9\xcf\x79\xaf\
\xe5\xb5\xc8\xa5\x0a\x1c\xf5\x8d\x10\x4e\x49\x52\xa1\xd6\xa1\xaa\
\x1e\x59\x33\x49\xba\xc7\x61\xfb\x13\x39\x3c\xc7\x37\x19\xd5\xd5\
\xd5\xe4\xf3\xf9\x33\xc3\xe1\xf0\x68\x2a\x95\x32\x7b\x7a\x7a\xb6\
\xfe\x55\xb9\xbd\x42\x08\x1e\x7d\xf4\x51\xa4\x94\xac\x5c\xb9\x92\
\xdd\xbb\x77\xd3\xdb\xdb\xeb\x67\x01\xc7\x5e\xaa\xaa\x62\x18\x06\
\x9d\x9d\x9d\x44\x22\x11\x74\x5d\x3f\x80\xa1\x31\x7d\xfa\x74\x5e\
\x7f\xfd\x75\x3e\xfb\xd9\xcf\xd2\xda\xda\x4a\x24\x12\xa1\xbf\xbf\
\x9f\x19\x33\x66\xd0\xde\xde\x8e\x61\x18\x34\x34\x34\x90\xcd\x66\
\x49\xa5\x52\xe8\xba\xee\xa3\xbb\x8a\x82\x61\x18\x7c\xf1\x8b\x5f\
\xe4\xe5\x97\x5f\xa6\xe7\xa1\xb1\x39\x5d\x97\x71\x31\x0f\xb7\xd2\
\xb7\x65\xfb\x7b\x64\xb1\x58\x8c\xed\xdb\xb7\x9f\x52\x2a\x95\xf6\
\xbc\xed\x56\x8e\x3e\xfc\xf0\xc3\x9f\xc8\xe5\x72\x14\x0a\x05\x16\
\x2d\x5a\x84\xb2\xbc\x92\x10\x50\xf0\x6c\xb4\x3a\x0b\x2d\x35\x91\
\x06\x04\x0d\x27\xbe\x63\x19\x5e\x50\x55\x95\x8e\x8e\x8e\x1d\x3d\
\x3d\x3d\x33\xff\x6a\x27\x44\xd3\xb4\xa6\xa3\x8f\x3e\x7a\xf0\x96\
\x5b\x6e\xa1\xa9\xa9\x09\xc3\x30\xb8\xe1\x86\x1b\x68\x68\x68\x40\
\x55\x55\x16\x2c\x58\xc0\x5d\x77\xdd\x45\x55\x55\x15\x4d\x4d\x4d\
\x84\xc3\x61\x5e\x78\xe1\x85\x20\xc3\xd7\xd0\xd0\xc0\xde\xbd\x7b\
\x99\x3e\x7d\x3a\x47\x1e\x79\x24\x83\x83\x83\x24\x93\xc9\x20\xe1\
\x34\x56\x12\x80\xae\xeb\x14\x8b\x45\x1c\xc7\x21\x12\x89\x04\x64\
\x02\x45\x51\x78\xf3\xcd\x37\xd9\xbc\x79\x33\x00\x1b\x36\x6c\x40\
\xd3\x34\x56\xae\x5c\xc9\xe4\xc9\x93\xc9\x66\xb3\x4c\x99\x32\x85\
\x62\xb1\xc8\xfa\xf5\xeb\x91\x52\x32\x34\x34\xc4\x5b\x6f\xbd\x35\
\x7d\x74\x74\x74\x57\xf9\x3e\x1a\x1b\x1b\xaf\x2d\x14\x0a\xc7\xd4\
\xd6\xd6\x5e\x58\xbe\xf6\x6c\x36\x1b\x44\xde\x83\xc5\x61\x2a\x23\
\x3e\xac\x2e\xed\x31\xe6\xbe\x23\x98\x34\x69\x12\x55\xd5\x55\x3e\
\x89\x5b\x08\x1c\xc7\xb1\x84\x10\x5d\x63\xea\x5c\xdb\xb4\x69\xd3\
\x7d\xf1\x78\xfc\x3b\x1f\xea\x09\x51\x14\x85\x47\x1f\x7d\x94\xe3\
\x8f\x3f\x9e\x67\x9f\x7d\x96\x5c\x2e\x17\xa8\x93\xce\xce\xce\x20\
\x67\x1e\x8f\xc7\xf1\x3c\x8f\x48\x24\x42\x26\x93\xe1\x86\x1b\x6e\
\x60\xca\x94\x29\x5c\x7c\xf1\xc5\x98\xa6\xc9\xca\x95\x2b\x03\x1b\
\x51\x86\x47\xca\xc6\xb2\x9c\xdb\xd6\x75\x1d\xdb\xb6\x09\x87\xc3\
\x41\x00\xba\x71\xe3\x46\x9a\x9a\x9a\xd8\xb1\x63\x07\xa3\xa3\xa3\
\x81\x0b\x5d\x59\x59\xc9\xc6\x8d\x1b\x19\x1a\x1a\xa2\xa1\xa1\x01\
\x5d\xd7\xe9\xed\xed\x45\x55\x55\xa4\x94\xe2\x6d\xc1\xee\x25\x0b\
\x17\x2e\x9c\x66\xdb\xb6\x44\x22\x8a\xd5\x69\xaa\x62\x1a\x5e\x2a\
\x84\x9b\xd6\x11\x52\x01\x05\x84\x90\x01\xc8\x64\x95\x2c\x7a\xd8\
\x4c\xf3\x94\x5a\xdc\x8c\x8e\x9b\xd2\xf0\xb2\x5a\x48\x68\xcc\x00\
\x08\x87\xc3\x6c\xd8\xb0\xa1\xf1\xc3\x88\xd4\xeb\x92\xc9\xa4\x3e\
\x56\x5c\xd3\xe0\xba\x2e\x6b\xd6\xac\x61\xf5\xea\xd5\xef\xf0\x36\
\xca\xbc\xdf\xb2\x6a\xdb\x3f\xc5\x59\x55\x55\x45\xa9\x54\xe2\x17\
\xbf\xf8\x05\xf7\xde\x7b\x2f\xb6\x6d\x53\x53\x53\x43\x63\x63\x23\
\x2f\xbc\xf0\x02\xad\xad\xad\xe4\xf3\x79\xb2\xd9\x2c\x9e\xe7\x91\
\x4e\xa7\xc9\x66\xb3\xc4\x62\x31\x52\xc9\x14\x55\x95\xd5\x14\x4b\
\x05\x2e\xbb\xec\x32\x62\xb1\x98\x8f\x4f\x8d\xed\x68\x21\x04\x6f\
\xbd\xf5\x56\x90\xde\xdd\x1f\xde\x18\x37\x6e\xdc\xbb\xb2\xe3\x13\
\x89\x04\xb6\x6d\x8b\xbd\xf6\x10\xb3\x3f\x69\x22\xa3\x0a\x1e\x0e\
\xaa\xa6\xe0\xa5\x74\xac\x01\x03\xbb\x2f\x4c\xa9\xd3\x3f\xa1\xd2\
\x85\xda\x79\x61\xea\x16\x94\x28\xba\x71\x14\x05\x9c\x9c\xca\xf0\
\x43\x8d\xef\xb8\xe7\x0f\x54\x20\x8a\xa2\xac\xb9\xf0\xc2\x0b\xa7\
\xbd\x17\xca\x64\x28\x14\xe2\xc5\x17\x5f\x7c\xc2\x34\xcd\x27\x01\
\xea\xeb\xeb\x6f\xbd\xe9\xa6\x9b\x9a\xce\x3a\xeb\x2c\x56\xad\x5a\
\x15\xa8\xa7\x72\x1c\xa3\x28\x0a\xbd\xbd\xbd\x07\xe0\x50\xd1\x68\
\x94\xfa\xfa\x7a\xc6\x35\xd7\xc3\xbc\x21\xaa\x23\x21\x5e\xb9\x6d\
\x37\xb1\x58\x2c\x60\x9d\x1f\xcc\xd8\x3f\x95\xfc\x76\x3a\xaa\x6d\
\xdb\x58\xd2\x46\x41\x41\xcf\x4d\xe4\xe6\xf3\xee\xe1\x85\xdd\xbf\
\xe6\xf9\xd1\xc7\x99\x3c\xbf\x9e\xbe\xd8\x20\xd9\xed\x5a\x70\x6a\
\x3f\x31\xe1\x5c\xae\x38\xf5\x6a\xbe\xf0\xd3\xd3\xf8\xa7\xf3\xaf\
\xe4\xd1\x55\xf7\x04\x39\xa3\xbf\x94\xe8\x70\xd0\x02\xf1\x3c\xaf\
\xaa\x4c\xb1\x1c\x75\x33\x4c\xbc\x20\x4d\xa4\x42\x01\x4b\xc1\xb3\
\x15\xdc\x94\x86\x35\x60\xe0\x0c\x19\xb8\x19\x1d\x21\xfc\xaa\xa8\
\x50\x28\xd4\x3e\x30\x30\xf0\xeb\x31\x28\xff\x3f\xea\xeb\xeb\x1b\
\x57\xae\x5c\x29\xf6\x8f\x63\x74\x5d\xff\x93\xe0\x5d\x3e\x97\x67\
\x43\x7e\x07\x67\xcc\xaa\x40\xd5\x2d\x16\xde\x58\x81\xa6\x14\xd0\
\x55\x1d\xd7\x96\x38\x71\x83\xd4\xd2\xda\xe0\x7f\x5e\x7b\xed\xb5\
\xaf\xe6\xf3\xf9\x47\xde\xb6\xa1\xdc\xa6\xa6\xa6\xfc\xdb\xe7\x37\
\x4d\x13\xc7\x76\xf0\x54\x3f\x99\xf5\x99\xe9\xd7\xf0\x5f\xab\xff\
\x9d\x2f\xcc\xfa\x57\x66\x2d\xfc\x04\x7a\xb4\xc4\xcd\xbb\xaf\xde\
\x47\x06\xf7\x04\xaf\x74\x3f\x88\xf6\x6c\x8c\x4f\xcd\xfe\x32\x6d\
\xb1\x23\x70\x5c\xc7\x27\x7e\x8f\x09\xe4\x43\x39\x21\x65\x70\xcf\
\xb1\x1d\xb2\x4a\x9e\xca\x6a\x41\x36\x67\x72\xce\x11\x7f\xcf\xa8\
\xd9\xcd\x9b\x43\xab\xa9\x9a\xe6\x51\x18\xcd\x30\xf4\x70\x3d\x42\
\x0b\x6a\x3e\x82\xab\x8b\xc7\xe3\x09\xdb\xb6\xa7\x9a\xa6\x19\x40\
\x31\xe5\x2c\x5b\x52\x49\xd1\x38\x51\xc7\x4d\xeb\x78\x59\x1d\x69\
\xa9\x64\x0b\x29\x8c\xb0\xc1\xb8\xba\x71\x40\x06\x45\x15\xcc\x33\
\xbe\xc0\xe2\xc5\x67\x63\x91\xe3\x7b\x4f\xfd\x3b\xf3\x67\x4f\x66\
\xab\xb3\x35\x98\x67\x2c\xbf\x5d\xf4\x3c\x2f\xbd\xff\xb5\x47\xa3\
\xd1\xfa\xbe\xbe\xbe\xf0\xdb\x84\xa1\x96\xe3\x22\xcb\x30\x29\xa6\
\x15\xbc\x82\x4a\xef\xd0\x1e\x36\x29\x5b\x78\xab\xe7\x4d\x1a\x0e\
\xcf\xa1\x29\xa1\x80\x99\xa2\xa0\xa2\x22\x71\x5c\x9b\x81\xd4\x30\
\x7b\x52\x11\x34\xa1\x07\xe8\x43\x39\x9e\xf9\xf0\x04\xe2\x38\x78\
\xba\x8b\x50\x14\x3e\x5e\x7f\x19\xcd\xb1\x3a\x1a\x12\x47\xb1\x6e\
\x68\x3b\x9f\x3d\xf9\x42\x1e\x1f\xf4\xc9\x6d\x8a\xa4\x1c\x91\xef\
\x8f\xf5\x64\xc3\xe1\x30\xae\xeb\xa2\x69\x1a\x13\x27\x4e\xc4\x75\
\x5d\xd2\x4e\x81\x59\x17\xb9\x18\x61\x05\x17\x1b\xa1\x38\x78\x79\
\x85\xed\x77\xd7\x60\x88\x0a\x3f\x4a\xf0\x20\x95\x2a\x72\xfe\xf1\
\x17\x71\xdb\xda\xcb\xf8\xfc\xf4\xff\xe0\xdf\x17\xdf\x81\xad\x8f\
\xd2\xbe\xf5\xaa\x60\xc1\xfe\x48\x04\xad\xb4\xb4\xb4\x8c\x94\x13\
\x55\x00\x9e\xea\xa2\x69\x2a\x9e\x29\x10\x52\xd0\x20\x1a\xc8\x3d\
\x06\x37\x7b\xd7\xa3\x2b\x06\x3f\xf5\x6e\x43\x41\xc5\x99\xbb\x9b\
\x49\xb3\x9a\xf6\x95\xbc\xa1\xd0\x1a\x69\xe5\xb0\xe6\xc3\x58\xdf\
\xbb\x97\x1a\xa3\x11\x89\xc4\xb2\xec\x03\x28\xb4\x1f\x8a\x40\xca\
\xc7\xdb\x15\x0e\x0a\x61\x16\xcd\x5d\xc4\xca\xa1\x87\x19\xe9\xaa\
\xc0\x70\xea\xb1\xbd\x12\x48\xe1\x03\x88\x1e\xe5\x9c\xf3\xa9\xe3\
\xc7\x8f\x1f\x1c\x18\x18\xb8\xa7\x7c\xc1\xe5\x8b\x2e\xd3\xf8\x6d\
\xd7\x26\xa4\x6b\x24\x06\x5d\x7e\x70\xc1\xe3\x74\x66\xd6\xf1\xe0\
\xc6\xff\x44\x7a\xc2\x27\x19\x48\xc0\x00\xab\x24\x89\x46\x2a\xf0\
\x84\x43\xb6\x90\x67\xdd\xce\x0d\x54\xb4\xf4\xa3\x29\xa1\x7d\x05\
\x42\xef\x4e\xe9\x34\x2a\x2a\x2a\x88\x44\x22\x48\xcf\xa3\x47\x0e\
\x33\xff\x22\x6b\x1f\xe1\x6e\x34\x84\x33\x10\xa1\xd4\x6d\x10\x4e\
\x54\x8f\x91\x8a\xc2\x84\x8d\x30\xf3\xa7\x9f\xc4\x26\xf7\x09\x2c\
\x6b\xdf\xbe\x3a\xa5\xed\x52\x26\x4f\x8f\xf0\xfc\xf0\x2a\x56\x74\
\xd9\x50\xe6\x68\x8d\xdd\xdf\x5f\x72\x42\x0e\xda\x02\x95\x0d\xa0\
\x65\x59\x78\xae\x07\x63\xa5\xcc\x08\x51\xa6\x92\x51\x72\x72\x63\
\xef\x33\x83\x72\x85\xc3\x0f\x3f\xfc\x14\xc3\x30\xbe\x5a\x9e\x23\
\x16\x8b\x71\xd7\x5d\x77\x31\x7e\xfc\xf8\x7d\x75\x26\xb6\x83\x10\
\x82\xaf\x1c\xf3\x6d\xd6\x0c\x3d\xce\xce\x6d\x19\x66\xa9\x9f\xa5\
\xd2\xa8\x0e\xe6\x41\x42\xac\xc6\x60\x24\x35\x4a\xb5\x5e\xcf\xf8\
\xba\x46\x5c\xcf\xc6\x76\x6c\x84\x54\x02\xc2\xdb\x1f\xa3\x72\x4a\
\x29\xc7\x6a\x3e\x6c\xf2\x22\x0f\x12\x0a\x29\x95\x0b\x66\xff\x2f\
\xaa\x9b\x43\x18\xb3\xd3\x8c\x3b\x37\x4e\x78\x7e\x22\x98\xcb\x71\
\x5c\x96\x74\xdd\x87\x63\x7b\x24\x93\x49\x92\xc9\x24\xa5\x82\xc9\
\xa3\x9b\xee\xc2\x94\x39\x42\x95\x92\xce\xe2\x5a\x86\x77\xe7\x82\
\xbf\x8f\x51\x9e\x9c\x0f\xd5\x86\xb8\xae\x8b\x82\xc2\x8b\xed\x7f\
\xe0\xe4\x8f\x9d\x4c\x4a\xea\xbc\xd6\xf5\x34\x47\x36\x7e\x8d\x97\
\xc5\x73\x58\x56\x1e\x45\xfa\xe8\xec\x98\x57\xe6\x95\x77\xef\x75\
\xd7\x5d\x47\x3c\x1e\xa7\xa5\xa5\x85\x42\xa1\xe0\x9f\x10\x61\xa3\
\x48\x95\xc3\x9a\xa6\xf2\xda\xe0\x4a\x46\x0b\x25\x76\x0d\x75\x20\
\xf0\x17\x5a\x20\x90\x48\x2a\x2b\x22\x3c\xb2\xe5\x87\x7c\x79\xc1\
\x77\xe8\xcc\xad\xe6\xcd\xfc\xf3\x9c\x11\x39\x1b\x5b\xee\x63\x1f\
\xfe\x31\xb7\xb3\x1c\xb1\x4b\x57\xe2\x85\x7c\xa4\xe0\xc6\x33\x1f\
\xe6\xa9\xad\x3f\xe7\xba\x85\x0f\xb0\x64\xe3\x12\xcc\xea\x9d\x2c\
\x59\xff\x2a\xa6\xe9\x73\x03\x8c\x90\x4d\xff\x6a\x8b\x3d\x23\xa9\
\xa0\x20\xc7\x71\x6d\x92\x9b\x2c\x6e\xf8\xfa\xad\x28\xd2\x20\xdf\
\xef\x61\xe7\x75\x26\x8e\xaf\x06\x21\x51\x35\x85\x9a\x9a\x9a\xcf\
\xcf\x9d\x3b\xf7\xf8\xfd\x3f\x7f\x70\x70\x90\xee\xee\xee\x8f\xe7\
\x72\x39\xeb\x90\x09\xa4\x50\x28\xe0\xd8\x36\x66\xc4\xc3\xc3\x65\
\xb3\xf5\x30\x87\xf5\x4d\xa5\xae\x51\xa1\x6d\x96\xca\x7f\xae\xba\
\x04\xcb\x09\x91\xcb\xe5\x51\x74\x7f\x61\xca\xc1\x5c\xd9\x03\x59\
\xb9\x72\x25\x9f\xf9\xcc\x67\x02\x40\xcf\xf3\x3c\x6c\xc5\x46\x28\
\x1a\x99\x62\x86\x90\x6a\xa0\x8b\x10\x96\x6b\xe2\x38\x36\xa3\xa3\
\xa3\x28\x42\x21\x5b\xe5\x51\xb4\x2a\xe8\x0e\xbd\xc6\x0f\x9e\x4b\
\xf1\xa5\x4f\x7e\x11\x33\xd2\xc3\xb3\xdb\x7f\xce\x68\x57\x91\x64\
\x32\x1b\xb8\xc1\xef\xe6\x7a\x06\xfc\x32\x57\x42\x54\x22\xa4\x42\
\x4d\x64\x1c\x9b\xe3\xaf\x70\xf2\xf0\x67\xf0\x2c\x15\xd7\x75\x70\
\x5d\x89\x65\xfa\xc5\x21\x86\x61\x90\xee\x74\x31\x0c\x0d\x4d\xf3\
\xe7\xb4\x1d\x9b\xa6\xca\x49\x18\xd2\x40\x00\xb9\x89\x83\xcc\x3a\
\xcb\x81\x4c\x08\x37\x6e\xe0\x0c\x87\x91\xb6\xd2\x2c\x4d\xa5\x59\
\xda\x3e\x81\xaf\x4c\xea\xeb\xe8\xe8\x50\x0e\xe9\x09\xc9\xe7\xf3\
\x7e\x4e\xa0\x42\x62\xbb\x16\x9a\xaa\xf2\x78\xe7\x77\x39\xa2\x74\
\x0c\x96\x6d\x61\xa5\x15\xb6\xfd\x3e\x49\xc9\xb6\x10\xce\x81\xf6\
\xa2\x3c\x96\x2f\x5f\xce\xf3\xcf\x3f\xcf\xe0\xe0\x20\x2d\x2d\x2d\
\x7e\xb5\x92\xea\xa7\x6e\xef\x5b\x76\x0f\xff\x72\xc1\xb5\x94\x1a\
\x3c\x56\xfc\xee\x51\x2c\x3b\x12\xf0\xae\x34\xd7\x57\x8b\x9e\x25\
\xe8\x1b\x6e\xe7\x9a\x7f\x5d\xcb\xe8\x36\x93\xc2\xa0\x8b\x50\xc5\
\x01\x94\xa1\x77\xe3\x47\x05\x5c\x5c\x4f\xe2\x49\x89\x50\xfc\x53\
\xa7\x08\x05\x55\x55\x49\x66\x53\xa4\x43\x5d\xe0\x81\x65\xdb\x41\
\x21\x4e\x4b\x4b\x0b\xd1\x68\xb4\x5c\x10\x4a\x32\x99\x0c\xd4\xa2\
\x8a\x42\x69\x72\x82\xc8\xb8\x4a\x64\x5d\x11\xda\x8a\x07\x7c\xe6\
\xf0\xef\xc6\xe1\xa6\xb5\x80\xf6\x7a\xc8\x55\x56\x99\x0b\x1b\xc9\
\x29\xec\x7e\x31\x4f\x6a\xb7\x83\x99\xf6\x58\x55\x58\x82\x5b\x02\
\xd7\x94\x08\x55\xa0\x69\xfb\x16\x67\xff\x82\x18\xa0\xd0\xd3\xd3\
\x93\x97\x52\x4a\xdb\xb6\x23\xe9\x74\x5a\xb5\x6d\xdb\x4f\x50\x39\
\x06\xb2\xae\x9d\x47\x56\x3e\xc4\xe7\x4f\xfe\x3c\x8d\x93\xa0\xc7\
\x49\xe1\x38\x0e\x93\x26\x4d\xc2\x48\x24\x58\x7b\x63\x1e\xb7\x34\
\x06\xce\x0a\x90\x68\x84\xc2\xfb\xf2\xf9\x7f\x8a\x81\x5e\x16\x88\
\x74\x25\xd2\xf3\x91\xe9\x54\x21\xce\xec\xda\x53\x98\x32\x71\x02\
\x2f\xae\xf7\x38\xbe\xf9\x53\xbc\xe1\xfe\x38\x30\xe0\xe1\x70\x98\
\x68\x45\x94\xa6\xba\x09\x34\xd6\x4a\xde\xdc\xbe\x85\xb6\xb6\xb6\
\x7d\xf9\x1c\x1f\x57\xc1\x73\x42\x7c\xef\x93\x4f\xf2\xd2\x96\x17\
\x70\xaa\xf7\x92\xd8\x1b\x61\x93\xfd\x80\x5f\x16\x67\x79\x41\xeb\
\x90\x83\x21\xd3\xbd\x27\x81\xb4\xb5\xb5\x71\xe1\x85\x17\xb2\x7e\
\xfd\x7a\x06\x7a\x06\xf1\xac\x0c\x6e\xc8\x41\xea\x12\x2b\x62\x05\
\x09\x27\xcf\xf3\xc8\xe5\x72\xc1\xcf\x80\x33\x06\xa9\x9c\x5b\x86\
\x55\x62\xb1\xd8\x8b\xae\xeb\x7e\xd2\xcf\x3a\x7a\x28\x86\xbf\x6b\
\xdb\x47\x7e\x47\xc7\xfd\xaf\x30\xd8\x53\xc0\x2a\x48\xee\xbc\xf3\
\x4e\x26\x4f\x9e\xcc\xf0\xf0\x30\x4b\x96\x2c\xc1\x34\x4d\x5c\xd7\
\xa5\x50\x28\x90\xc9\x64\x98\x30\x61\x02\x7b\xf6\xec\x61\x64\x64\
\x84\xe6\xe6\x66\xba\xbb\xbb\xff\x68\x2a\x3a\x9f\xcf\x23\x5d\x0f\
\xcb\xf6\x09\x15\xdf\x7d\xf5\x62\xae\x3f\xf5\x11\x1e\xda\x74\x0b\
\x3b\xb4\x65\xf4\x77\x56\xe2\xd8\x2e\xb9\x5c\xa1\x9c\x66\xa0\xc7\
\x1b\xa1\xf9\xb2\x51\x22\xba\x8e\xbc\x67\x14\xe1\x34\x04\x20\xa8\
\x8a\x82\x22\x04\x21\x6b\x1c\xb1\x68\x8c\x1f\xbd\x74\x1d\xf7\x5e\
\xfc\x07\xea\x4f\xaa\x23\xd7\xbe\x85\x1e\x6b\x17\x96\xa9\x7c\x70\
\x02\xf9\xf5\xaf\x7f\xcd\xab\xaf\xbe\xca\x2d\xb7\xdc\xc2\xb2\x65\
\xcb\xd8\xb8\x71\x23\x42\x08\xba\xbb\xbb\x99\x34\x69\x12\x93\x27\
\x4f\x66\xf5\xea\xd5\x74\x74\x74\x70\xec\xb1\xc7\xb2\x71\xe3\x46\
\x8a\xc5\x22\xaa\xaa\x2e\x38\xf1\xc4\x13\x65\x40\x92\xf6\x3c\x34\
\x5d\x05\x47\x48\x84\xdf\x94\xa7\xfd\x5b\x1e\x12\x50\x14\x1d\xa1\
\xda\xa8\x7a\x2d\x8e\x19\x0f\xa2\xf8\xa9\x53\xa7\xf2\xc8\x23\x5f\
\x1c\x22\x28\x00\x00\x19\xed\x49\x44\x41\x54\x8f\x50\x57\x57\x47\
\x73\x73\x33\x8d\x8d\x8d\xdc\x7f\xff\xfd\x14\x0a\x05\xc2\xe1\x30\
\xb1\x58\xac\x5c\x38\x8a\xe7\x79\x6d\xb5\xb5\xb5\x1f\xdb\x8f\xa8\
\x11\x76\x5d\x37\xb0\x21\xb6\x09\x8e\xb4\x51\x2b\x6d\x7e\xb0\xfa\
\x2b\xb4\xd5\xcc\xa0\x94\xb6\xc9\x0f\x27\xe8\x7c\x2d\x45\xb1\xe8\
\xab\x97\x48\x38\x8c\x5d\x6d\x51\x53\x15\xc6\xce\x87\x69\xac\x99\
\x88\x3d\x60\xe3\x49\x1f\x8d\x76\xa5\x40\x11\x02\xe9\x29\x38\x9e\
\x0d\x02\x14\xdd\x65\xf7\xc0\x2e\xba\xd3\xdb\x70\x6c\xb0\x6d\x11\
\x6c\x88\x72\xa5\xd5\x21\x13\xc8\x13\x4f\x3c\xc1\x69\xa7\x9d\xc6\
\x9e\x3d\x7b\x18\x1a\x1a\xc2\xb6\x7d\xa3\xab\x69\x1a\x5b\xb7\x6e\
\xc5\xb2\x2c\x92\xc9\x64\x40\xb0\x03\x02\xe2\xf2\xc4\x89\x13\x7d\
\x6f\xa3\x94\xa2\x6e\xf1\x30\x13\x26\x86\x71\xb2\xaa\xb0\xfb\x22\
\x38\x03\x61\xac\xbe\x08\x65\x07\xba\xcc\xc1\x2a\x6c\xc9\x73\xe3\
\x8d\x37\x72\xc6\x19\x67\xf0\xfa\xeb\xaf\xe3\x79\x1e\xbb\x76\xed\
\xc2\xb6\x6d\x74\x5d\x67\xc2\x84\x09\x14\x0a\x05\x74\x5d\x0f\xc0\
\xc3\xe6\xe6\x66\x66\xce\x9c\xf9\x1d\xe0\x3b\xfb\x93\xfd\xb6\x6c\
\xd9\x82\xa6\x69\x48\x45\xa2\xc7\x61\xef\xcb\x45\x46\xb6\x9a\xa4\
\xba\x46\x11\x6c\x3b\x20\x23\x51\xb6\x7b\xaa\xea\x17\x75\x96\x8a\
\x2e\xb7\x9c\xf9\x3b\xfe\xfe\xa9\xf3\x28\x39\x2e\x12\x9f\x6c\x21\
\x3c\x40\x0a\xf4\x88\x43\x48\x35\x68\x8c\x4e\x22\x59\x1c\x42\x11\
\x51\xf0\x14\x6c\xc7\x0a\xd4\x5f\x99\x94\x7d\x48\x6d\xc8\x3d\xf7\
\xdc\xc3\x43\x0f\x3d\x44\x2e\x97\x7b\x87\x7e\x06\xd8\xb9\x73\x67\
\xf0\xbb\x35\x6b\xd6\x50\x5d\x5d\x1d\xfc\xbd\x8c\xf5\x8c\x94\x52\
\x1c\x39\xa5\x82\x62\xc9\x66\xf6\xe4\xe3\xd8\x53\xb3\x09\x67\x76\
\x16\xcf\x93\x98\x7b\x2a\x18\x7d\x31\x86\x50\x65\x90\xcc\x8a\x46\
\xa3\x41\x3e\x5f\x08\xe1\x97\xd2\xd9\x36\x93\x27\x4f\xc6\xb6\x6d\
\x14\x04\x3b\xd4\x3d\x1c\x73\xa1\x86\x90\x0a\xd2\x12\x78\x25\x15\
\x27\x1e\xc2\xea\x37\x70\x86\x0d\x9c\xb1\xce\x37\xc7\x1c\x73\x0c\
\x97\x5e\x7a\x29\xf7\xfd\xea\x3e\xe4\x28\x48\x75\x90\xba\x29\x5e\
\x60\x13\x4c\xd3\x24\x1a\x8d\x32\x86\x00\xa3\x69\x2a\x02\xbf\xe6\
\xb1\xae\xaa\x96\x82\x95\xc3\x71\xfc\x66\x39\xa5\x52\x09\x45\x0a\
\x42\x02\x6c\x2d\xc1\x8f\xff\xf0\x7f\xf9\xe5\xa5\x4f\x70\xd3\xeb\
\x17\x61\x78\x31\xaa\x23\x31\x4c\xb3\x97\x52\xc9\x0b\xec\xe9\xc1\
\x00\x8f\xef\x05\x5c\xdc\x65\x9a\xa6\x61\x59\x56\xcd\x3e\x86\xe2\
\x81\x30\xc5\xdb\x4b\xce\xb2\xd9\xec\x01\xb9\x0d\xff\xf7\x1e\x9e\
\x27\x39\x2e\x74\x39\x2d\x0d\x1a\x67\x4c\xba\x84\x5f\xbc\xfe\x9f\
\xcc\x98\xda\xcc\xaa\x3d\xab\xb0\x2d\x1b\xa1\xf9\x35\x29\x75\x8d\
\x31\x42\x5e\xa5\xaf\xd3\x14\x49\xae\x90\x65\xfc\xf8\xf1\x44\xa3\
\x51\xb2\xd9\x6c\xd0\x6d\x21\x57\x59\x40\xd3\xab\x89\x29\x93\x58\
\x38\xf7\x14\x5e\xda\xfd\x30\xce\x38\x17\x31\x2f\x41\xfc\xd5\x28\
\xf9\x2d\x7e\xb1\xcd\x1d\x77\xdc\xc1\x8a\x15\x2b\xf8\xc1\x7f\xfe\
\x80\x5f\xfd\xea\x57\xcc\x91\x47\xd0\xdd\xdd\xcd\xac\x59\xb3\x78\
\xe1\x85\x17\x98\x34\x69\x12\x96\x65\xa1\x69\x1a\xa1\x50\x88\x54\
\x32\x35\x56\x6b\x28\xf6\xab\x49\x71\xb0\x6c\xcb\x2f\x83\x70\xa1\
\xb2\x14\xc2\xf6\x74\x3a\xcc\x27\x59\xdd\x73\x18\x55\x15\x55\x0c\
\xe6\x77\x93\x1f\x74\x89\xf7\x25\xb1\x0b\xfb\x52\xbe\x07\x43\x39\
\x3d\x68\x81\x8c\x8c\x8c\x9c\x54\x2c\x16\x7f\x72\xce\x39\xe7\x5c\
\x7d\xdb\x6d\xb7\x91\xcd\x66\x79\xf8\xe1\x87\xd9\xb5\x6b\x17\x8a\
\xa2\x90\x4c\x26\x89\x44\x22\x28\x8a\xc2\xe8\xe8\x68\x90\xf5\x4b\
\xa5\x52\x94\x4a\xa5\x00\x5a\x70\xa5\x47\xa9\xe8\x71\xf1\x79\x7f\
\xcf\x4d\x2b\xfe\x1e\xad\x66\x02\x53\xab\x8e\x61\x76\x43\x2b\x2b\
\xe5\x2a\x4c\xcb\x44\xb8\x3e\x30\xd9\x74\x41\x96\xaa\x89\x05\xec\
\x11\x03\x77\x30\x02\x1d\x02\x67\xd8\x0b\xa8\x48\xb6\x6d\xe3\x4a\
\xdf\x08\x49\x04\xdf\x3c\xe5\x5e\x7e\xf4\xf2\xbf\xf2\xef\x67\x3c\
\xc4\xea\x8d\x6f\xb2\xc9\xf9\x35\x43\x76\x1f\xb6\xed\x2f\xe6\xd3\
\x4f\x3f\xcd\x82\x05\x0b\x68\x6f\x6f\x67\xe7\xce\x9d\xc1\x02\x25\
\x12\x09\x4a\xa5\x12\x89\x44\x22\x48\x3b\x17\x0a\x05\xca\x0d\x6c\
\x5c\x69\x32\x9a\x1e\xa5\xda\xa8\x23\xed\x24\x02\xc6\x0d\x02\x94\
\xbc\xa0\x30\xe2\x91\x1b\x76\xb9\xf5\xd9\x1b\x49\xec\x70\x49\x75\
\x3a\x20\xfd\xba\xf7\xb2\x0c\x0e\xb6\xa0\xe7\x3d\xa9\x2c\x21\x84\
\xbe\x70\xe1\x42\xd6\xad\x5b\xc7\x39\xe7\x9c\xc3\xe1\x87\x1f\x4e\
\x3e\x9f\xc7\xf3\x3c\x5a\x5b\x5b\x29\x14\x0a\x98\xa6\x49\x3a\x9d\
\x26\x1e\x8f\x53\x5b\x5b\x1b\x64\x01\x83\x1a\x13\xc5\x1b\xd3\xd3\
\xba\x8f\xcc\x0a\x85\xa1\xec\x00\xb9\xde\x4d\xfe\xfb\x4c\x2b\x38\
\x21\xb8\x82\x29\x91\xe3\x08\x1d\xee\xb2\xab\x61\x33\x66\x2c\xcf\
\xf0\x23\x3a\x20\x83\x40\x4f\x91\x3e\x15\x35\x1d\xf7\x88\x86\x2b\
\x58\xd6\xf9\x04\xd7\x6a\x37\xd3\x50\x57\x83\xd5\x6f\xe1\x38\x2e\
\xa6\xe9\x43\x33\x77\xde\x79\x27\x77\xdc\x71\xc7\x1f\xbd\xbf\xd1\
\xd1\xd1\x03\x7e\x8e\x55\x8f\xe5\x5c\xa2\x3a\xff\xe7\xf7\x9f\xa5\
\x36\x34\x89\xed\xc5\x5e\xa2\xd1\x28\x86\x61\xd0\xd4\xd4\x44\x6a\
\x73\x8a\x2d\x9b\xdd\x31\xb7\xdb\xef\x68\x54\x15\xf3\x0e\xa0\xd6\
\x96\x0b\x98\x0e\xb9\x40\xa4\x94\xfc\xf6\xb7\xbf\xa5\x9c\x60\xea\
\xe8\xe8\x08\x20\x8b\xb2\x8d\x10\x42\x50\x28\x14\x70\x5d\x37\x10\
\x56\x59\x3f\x03\xb8\x61\x8f\x48\x58\xe7\xe5\x0d\xcb\x09\x51\x4d\
\x6d\xb4\x9e\x81\x4c\x17\x67\x55\x7f\x9c\xad\x72\x07\xa6\x55\x42\
\xf1\x7c\x20\xb3\x3a\x7d\x2c\xe7\x1f\xfd\x45\x5e\x5a\xbd\x8e\x71\
\xd2\xe3\xf4\xd9\xa7\xf0\x7d\xeb\x76\xc2\xaa\xb1\x8f\x94\x37\x56\
\x61\xa8\x28\x82\x03\x6d\xa6\xc0\xf6\x8a\x63\x27\xc9\x25\x95\x4a\
\xf9\xe9\x80\xb1\x60\x50\x4a\xe9\x7f\xaf\xf9\x7c\xa2\xb0\x16\xa5\
\x60\x67\x51\x84\x8a\x6b\xfa\x0b\x97\x4e\xa5\x70\x5a\x1c\x4c\xa7\
\x84\x16\xf5\xd8\x38\xb4\x02\xd7\x11\x34\x36\x36\x72\xf7\xdd\x77\
\xe3\x79\x1e\xab\x57\xaf\x66\xd5\xaa\x55\xb4\xb6\xb6\xb2\x66\xcd\
\x1a\x8e\x3c\xf2\x48\x36\x6d\xf2\x37\x57\x5d\x5d\x1d\xf9\x7c\x9e\
\xde\xde\xde\x83\xe6\xff\xbe\x57\xac\x58\xd5\x34\x8d\x67\x9e\x79\
\x46\x0a\x21\x84\x22\x94\x80\x3d\xe8\x4a\x9f\xaf\xa4\x29\x21\x3f\
\x3f\xae\x86\x70\x1c\x87\x8a\x8a\x0a\xa4\x94\x41\xe9\xb1\xa5\xd8\
\x68\xba\xe0\xc1\xed\xff\xce\x2d\x67\x3f\xce\xda\xe1\x27\x08\x35\
\xf7\xd3\x93\xdf\x82\xed\x59\x14\xf2\x05\x14\xdd\x8f\x01\x8e\x9b\
\xbe\x80\xee\xd4\x76\xde\xec\x5f\xc7\xc2\x19\xa7\x11\x56\x8c\xb1\
\xb2\x33\x11\xb4\xf8\x50\xa4\x40\x22\xa8\xad\x0f\x91\xce\x67\x38\
\xf7\x88\x7f\x20\x51\x1c\x20\x97\x2b\x31\xb5\xf6\x28\xda\x9d\x3f\
\xe0\xba\xd2\xf7\xf2\x5c\x41\xa9\x26\xc1\xb4\xd8\x5c\x4a\xa5\x12\
\x03\x6e\x8a\x19\x5f\x34\x99\x56\xb1\x90\x8b\xcf\xf8\x12\x35\x6a\
\x2b\x37\x3d\x7e\x0d\xcf\x5e\xbf\x0e\x0f\x5f\xe0\x91\xa2\xce\xc0\
\x1b\x26\x99\x5e\x87\xd4\x9e\x02\xba\x5a\x5d\xce\xed\x30\x77\xee\
\x5c\xaa\xaa\xaa\xc8\xe5\x72\x74\x74\x74\xa0\xeb\x3a\xb1\x58\x0c\
\x29\x25\xc9\x64\x12\xd3\x34\x69\x6b\x6b\x63\xdb\xb6\x6d\x1f\x8c\
\xca\x52\x14\xe5\xd1\x37\xde\x78\x63\xda\xac\x59\xb3\x4e\x2b\x14\
\x0a\x58\xd2\x45\x9f\x9e\xa7\xae\xaa\x96\xb3\xe7\x7e\x8e\x92\x69\
\xb1\x74\xfb\x13\x9c\x39\xfb\x1c\x96\xee\xfe\x2d\xe9\xf5\x61\x0a\
\x85\x02\x42\x88\x60\x87\xe4\x85\x87\xed\x5a\x84\xaa\x3c\xfe\xed\
\xf7\x17\xb1\x78\xfe\xd9\x64\xf3\x29\xd6\xc7\x57\xb2\xf3\xe5\x2c\
\xd9\x82\x19\xec\xf4\x92\x69\xe2\x58\x2a\xae\xe3\x91\x2b\xa5\xf0\
\x1c\x1f\xcd\x95\x78\x68\x9a\xe6\xcf\x2d\xfd\xb8\x06\xe1\x71\xcd\
\xd3\xe7\xf3\xc3\xf3\x9e\xe2\xc6\xd7\x2f\x24\x53\x8a\xa3\xe9\x1a\
\x56\xc9\x22\x97\xdb\x97\x2b\x99\x14\x9d\x46\x3e\x9f\xf7\x53\xb7\
\xd8\xd8\xa6\xcb\xac\xc6\x45\x3c\xd9\xfe\x33\x0e\x53\x3f\xc1\x31\
\x0d\x8b\x79\x46\xae\xa6\xbe\xa1\x9e\xe3\x8f\x3f\x9e\xad\x5b\xb6\
\x12\xff\x83\x83\xe5\x98\xb8\x99\x0c\xd1\x6a\x9d\x64\x32\xc9\xd7\
\xbe\xf6\x35\x5a\x5b\x5b\xe9\xec\xec\xdc\x57\x2e\x27\x25\x3b\x76\
\xec\x08\x16\x3f\x95\x4a\x05\x2c\x99\x3f\x06\xe9\xfc\x45\x02\xc9\
\x64\x32\x2f\x55\x55\x55\x9d\xdc\xda\xda\x7a\x5a\x32\x99\xa4\x88\
\x45\xf5\xc9\x3a\xc7\x34\x9e\xc9\xdc\x63\x9a\x59\xb7\xb1\x93\xef\
\x7f\xe6\x6e\x8e\x9b\x79\x0c\x43\xcf\x2c\x63\x47\x4f\x08\xe1\xaa\
\xa4\x52\xa9\xc0\x80\x46\x4a\x2e\xd9\x21\x87\xc1\x8d\x16\xa3\xdb\
\xf6\xb0\xfa\x96\xbb\x83\x5e\x55\x4d\xa2\x99\xa6\xb9\xfb\x3e\xef\
\xf7\x0f\x3f\x05\x63\xcd\x5a\xfb\x78\x09\x80\x09\x13\xc7\x53\x2c\
\x16\xc9\x66\xb3\xe4\xf3\x79\x84\x14\x38\xa6\xf0\x85\x5c\x6d\x71\
\xe3\x6b\x7f\x47\x24\x5c\x41\x41\xd5\x48\x76\xda\x74\xae\x4c\x21\
\x9d\x80\xe0\xc7\xb8\xea\x71\xf4\xf7\xf7\xfb\xec\x79\xd5\x19\xab\
\xf8\xf2\xf1\x32\xd7\xf3\x48\x17\x13\xa8\xaa\xc6\xaa\x55\xab\x78\
\xf1\xc5\x17\xf9\xd2\x97\xbe\xc4\xb2\x65\xcb\xa8\xa8\xa8\xe0\xfe\
\xfb\xef\xa7\xad\xad\x8d\xae\xae\xae\xc0\x3d\xae\xa9\xa9\x39\xa8\
\xb5\x8b\xc7\xe3\x1f\x0c\x0d\x48\x55\xd5\x69\xe5\xf6\x4c\xb6\xf0\
\x6f\xa8\x52\x8f\x61\xba\x45\x4c\xa7\xc4\x50\xa6\x97\xbe\x6c\x2c\
\x80\xeb\x71\xf6\xf9\xde\xd7\x5e\x7b\x2d\x4b\x96\x2c\xa1\xe7\xbe\
\x04\xae\x6b\x13\x05\xf4\x4a\x2b\x28\x1c\xdd\x59\xe8\xe7\x88\xcf\
\x38\x54\x84\x35\xa4\xa9\x62\x27\x74\xdc\x11\x03\xab\x3f\x8c\x97\
\xd5\x10\x9a\x0c\x82\xc6\x6c\x36\xeb\xef\x4c\xd7\x23\xdc\x2d\xd8\
\xf6\xbb\x1c\xe9\xbd\x0e\x4e\x21\x81\x5d\x90\x38\x45\x89\x94\xa0\
\xa8\x6a\x50\xb8\x67\x84\x22\x6c\xea\x5a\x45\xb5\x6c\xf6\xf3\x1d\
\x86\x87\xae\x18\xec\x18\xdd\xc8\xf1\x47\x9f\xc0\x78\xed\x70\x36\
\xed\x5e\x13\x70\xbd\xda\xda\xda\xe8\xee\xee\xf6\x05\x3f\x86\xd8\
\x0e\x0c\x0c\x70\xc3\x0d\x37\x30\x3a\x3a\xca\x93\x4f\x3e\x19\x54\
\x28\xe7\x72\xb9\xe0\x24\x94\x49\xe5\xb5\xb5\xb5\xe5\x62\xa4\xd1\
\xe1\xe1\xe1\x85\x8d\x8d\x8d\x66\x6f\x6f\xef\xa1\x15\x88\x94\xf2\
\xb0\x80\x9d\xa8\xb8\xa8\x42\x63\xb4\x38\x40\x53\x74\x2e\x35\x15\
\x69\xb2\x56\x1a\x5d\x0d\xa3\xa0\x60\x5b\x26\xd2\x11\x38\x8e\xc3\
\x33\xcf\x3c\xc3\xe8\xe8\x28\x27\x9f\x7c\x32\xbf\xff\xfd\xef\x03\
\x2c\x6a\xd9\xb2\x65\x81\xc1\xef\x2d\xc5\x59\x34\x7e\x1c\x85\xac\
\xc7\x71\xd3\x3f\xce\xa6\xc1\x65\x38\x33\x72\x54\xa9\x92\xec\xe6\
\x0a\x32\xaf\x57\xfb\x86\x58\xf1\x8b\x7a\x16\x2f\x5e\xcc\xec\xd9\
\xb3\x79\x69\xc9\x4b\x78\x19\x09\x8c\x42\x05\x58\x9a\x85\x88\xf9\
\x0b\xa8\xeb\x3a\x99\x4c\x06\xc7\x71\xd0\x74\x85\x64\xb7\x83\x5e\
\x5f\xf4\x93\x55\x38\x78\x9e\xc1\x40\xe8\x55\xc2\xc5\xd3\xc9\xd7\
\xf6\xb0\xdb\x7a\x19\x5d\x35\xb8\xe8\xa2\x8b\x98\x3d\x7b\x76\xd0\
\xe5\xc7\xf3\x3c\x86\x86\x86\x98\x3a\x75\x2a\xdf\xf8\xc6\x37\x10\
\x42\x10\x0a\x85\xde\x35\x07\x53\x2e\x32\xcd\x64\x32\xd4\xd4\xd4\
\x50\x28\x14\x9c\xd1\xd1\xd1\xed\x1f\x14\x51\x4e\x09\x1a\x98\xa9\
\x7e\x6e\xb5\x50\xb9\x8d\xb6\xca\xeb\x38\x62\xf1\x09\x5c\xf3\xe4\
\x05\xb4\x4d\x6c\x06\x01\x96\x65\xe2\x8d\x61\x39\x6f\xbe\xf9\x26\
\x8d\x8d\x8d\x64\x32\x99\x72\xd3\x64\x86\x87\x87\x0f\xf0\xc0\x84\
\x10\x94\x6c\x93\x0b\x5a\x6e\x65\xc4\xdb\xc0\x3f\x2f\xb8\x9b\x97\
\xda\x97\x90\x8e\x6c\xa5\xdd\xde\x15\xf4\x16\x29\x0b\xe4\xf2\xcb\
\x2f\x27\x97\xcb\x71\xda\x69\xa7\x71\xfb\xed\xb7\x73\xc2\x89\x0b\
\xe8\xee\xee\xe6\xc8\x23\x8f\xe4\xd1\x47\x1f\x65\xf2\xe4\xc9\x7e\
\x7b\xc1\x54\x8a\x54\x2a\xc5\xe0\xe0\x20\x95\x95\x51\x32\x99\x0c\
\xa6\x69\x52\xf0\xfc\x32\x08\x4f\x95\xfc\xf4\x8d\x7f\xe2\xaa\x53\
\xbf\x8d\xed\x94\x30\x4d\x18\xd7\x54\x4b\x5f\x5f\x5f\xa0\x6a\x35\
\x4d\xc3\x30\x0c\x7a\x7a\x7a\x10\x9a\x40\xd5\xfc\x66\x01\x39\x33\
\x43\xa5\x51\xed\x57\x69\x45\x4d\xec\x3c\x63\x88\xf4\x81\xbd\xc5\
\x3e\x30\xe6\xa2\x10\x82\x44\x22\xe1\xe7\x05\x34\x49\xc9\x8a\x61\
\xab\x70\xe5\x6f\xcf\xe6\xd1\xaf\x6c\x44\x8b\x58\xdc\xb7\xe9\xdb\
\x78\x2e\xc4\x87\x92\x41\x50\x74\xeb\xad\xb7\xbe\x23\xdf\xbd\x3f\
\x1b\xdd\x87\x46\x60\x78\xa0\xc4\xd9\xe7\x9e\xc6\x95\xcf\x5c\xcf\
\xe2\x19\x17\x33\xa9\x76\x06\x03\xa4\x91\xde\xce\xa0\xb7\x48\xb9\
\x8e\xbc\xbb\xbb\x9b\x13\x4f\x3c\x91\xdf\xfc\xe6\x37\x01\x15\xb5\
\xdc\x4c\x40\x4a\x19\xb4\xf7\xd0\x75\xdd\x8f\xac\xc7\x3e\xaf\x6c\
\x84\x2b\x8a\x1e\xa5\xb8\x47\xcf\xae\x12\xf1\xad\x16\xff\xf0\xdd\
\xaf\xfb\xf9\x7b\x05\x46\x46\x46\x0e\x70\xf7\xcb\xc5\xa4\x0a\x82\
\xf8\x94\x0c\xc7\x9e\x59\xc5\xaf\xfe\x76\x25\x67\xde\x39\x85\x67\
\xae\x58\xc1\xe6\xce\x0e\x9e\xea\xbd\x91\xad\x8f\x8f\x32\xba\xc9\
\x37\xde\x86\x61\xb0\x7d\xfb\x76\x1a\x1b\x1b\x3f\x38\x81\x94\x17\
\x58\x51\x14\x34\x4f\xa2\xea\x02\xd7\xf1\x60\x54\x72\xca\xa5\x87\
\x93\xeb\xf2\xb0\xf2\x12\x33\xe3\x05\x1c\xa5\x62\xb1\x28\xcb\xb5\
\xea\x6f\x17\xae\x10\x62\xdf\x0e\x32\x7c\x17\x7a\xff\xbf\xdb\x8e\
\xcd\xae\xec\xc6\x31\xd6\xbc\x15\xb0\x91\x15\x45\xe1\x5b\xdf\xfa\
\x16\xd1\x68\x34\x20\xc1\x95\xe7\xdf\xbc\x79\x73\x80\x18\xec\xff\
\x99\xe5\xfa\x12\xc3\x30\xb8\xfa\xea\xab\x59\xbe\x7c\x39\x83\x2f\
\x0e\xe2\x7a\x1e\x21\xc7\xa6\xa9\x51\xee\x6b\xed\x37\xa6\xa6\xca\
\xbd\x7c\xcb\xc1\xad\x22\x05\x52\x08\x6a\xea\x74\xe2\x99\x11\xae\
\x38\xe9\x46\xfa\xb3\xbb\x18\x49\xa4\x38\x7e\xe2\xd9\xbc\x61\xff\
\x9c\x32\x85\xab\x6c\x53\xde\x0b\xe9\xe1\x7d\x09\x44\x08\xc1\xb4\
\x69\xd3\x48\xa5\x52\x0c\xdc\x6b\xd2\x63\x9a\x08\x45\xe2\xe7\x6d\
\xfd\xc0\x24\xa4\x4a\xa4\x22\xcb\xa4\x69\x31\x30\x30\xb0\x6c\x64\
\x64\xe4\x94\xb7\xcf\x35\x67\xce\x9c\xb8\x69\x9a\xe3\x84\x10\x60\
\x40\x4d\x4d\x98\x3d\x7d\x7d\x78\x8e\x82\xa6\xe8\x64\xcd\x24\x4d\
\x91\x36\x76\xba\x71\x4c\xd3\x0e\xba\xef\xd8\xb6\x4d\x34\x1a\x0d\
\xd2\xb6\xfb\x7b\x72\x65\xb8\xfb\x80\x93\xe8\x82\x65\xfa\xc4\xed\
\xab\xae\xba\x0a\x21\x04\x0f\x3e\xf8\x20\xdf\xfe\xf6\xb7\xe9\xea\
\xea\x62\xe6\xcc\x99\x8c\x8c\x8c\x60\xdb\x36\x4d\x4d\x4d\xbc\xf2\
\xca\x2b\x84\x42\x21\xc6\x8f\x1f\xcf\xba\x75\xeb\xb0\x6d\xdb\xc7\
\xe6\x3c\xb0\x2c\x70\xa5\xcd\xb5\x2f\x9e\xc7\xff\x3d\xf5\x29\xee\
\xdd\xfc\xcf\xf4\xa4\xde\x42\x4b\xa8\x94\x0a\x26\x99\x8c\x79\xc0\
\x26\x78\x2f\x6c\xc6\xf7\x63\xd4\x39\xf2\xc8\x23\xb9\xec\xb2\xcb\
\x38\xe2\x88\x23\xb8\xf9\xe6\x9b\xe9\xec\xec\xa4\xa6\xa6\x86\x91\
\x91\x11\x74\xdd\x6f\x2e\x39\x38\x38\x88\xae\xeb\x68\x9a\xc6\xe0\
\xe0\x20\xe9\x74\xda\xfb\x63\x89\xa3\x52\xa9\xe4\xef\xc8\x2a\xc9\
\xb8\x71\x95\xfc\x68\xd9\xbf\x70\xd7\xdf\x2d\xe3\x97\x6f\x5c\xcf\
\x9b\xa9\x2d\x1c\x31\x69\x0e\xae\x37\xc6\x0e\x1c\xb3\x21\x53\xa7\
\x4e\x0b\xe2\x15\x55\x55\x59\xb1\x62\x05\xcd\xcd\xcd\xef\xea\xeb\
\xab\x21\x08\xcd\x4d\xd2\xb7\xb2\x48\x21\xe9\xb0\x6b\xd7\x2e\xae\
\xbc\xf2\x4a\xf6\xee\xdd\x4b\xb1\x58\xa4\xb1\xb1\x91\xca\xca\x4a\
\x74\x5d\x67\xd9\xb2\x65\xd4\xd5\xd5\xd1\xda\xda\x4a\x4f\x4f\x4f\
\x50\x4e\x5d\x2c\x16\x83\xb8\xc7\x49\x0b\x92\xfd\x7e\x31\xe9\xbf\
\x3e\x79\x3e\x15\x46\x98\x6c\xce\x25\xdd\x63\xd2\xb9\x22\x8b\xaa\
\xa8\x07\x6c\x9a\xb1\xcd\x11\x06\x98\x34\x69\x92\xdd\xdd\xdd\xed\
\x1e\xb2\x72\x84\xda\xda\xda\x35\x4d\x4d\x4d\xc7\x1d\x2c\xa7\xb6\
\x3c\x5c\xd7\xa5\xbf\xbf\x7f\xc6\xc8\xc8\xc8\xce\xfd\x7f\x3f\x7d\
\xfa\xf4\x78\xa9\x54\x1a\xa7\x28\x0a\xc3\x13\x5c\xfe\xf1\x5b\x13\
\xb1\x2c\x97\x9a\xc2\xb1\x1c\x35\x67\x0a\x4b\x77\xff\x06\x4d\xd3\
\x58\xf5\x83\x04\xa9\x2e\x5f\x17\x54\x56\x54\xb1\xe8\x96\x2a\x86\
\xee\x6f\x44\x3a\xbe\x5a\x58\xbb\x76\x2d\xd1\x68\x94\x70\x38\x1c\
\x30\xef\xcb\x69\x5d\xc7\xb5\x31\xa2\x21\xdc\x92\x0c\x9a\x0b\xec\
\xcf\x52\xf9\x53\x19\xd2\xe6\xe6\xe6\xfd\xaa\x7a\x64\x90\xb3\xf9\
\x63\x9b\xb5\xa7\xa7\x87\x6d\xdb\xb6\x21\x84\xa0\xb2\xb2\x32\x28\
\x8e\x2d\x6f\xbe\xb5\x6b\xd7\x7e\x29\x99\x4c\x3e\x70\x48\x4f\x88\
\x61\x18\xcc\x98\x31\x03\x80\x7e\x33\xc1\xe4\xb3\xf3\x44\xc2\x1a\
\xc2\x34\xb0\x87\x42\x38\xc3\x61\x9c\xe1\x03\x1b\x59\x46\x22\x11\
\xee\xbd\xf7\xde\xf4\xbb\x4d\x59\x26\x97\x85\x73\x50\xca\x78\x0c\
\x6d\xb1\xe8\xd8\xf3\x0a\xcf\xdf\xb9\x04\xcf\x14\xd8\x79\x0f\xd7\
\xf4\x8d\xf1\xbe\xbc\xb9\xc0\x76\x1c\xa4\xed\xeb\xfa\x39\x73\xe6\
\xd0\xda\xda\x4a\x2c\x16\x63\xdb\xb6\x6d\x8c\x8e\x8e\xd2\xd4\xd4\
\xc4\xd0\xd0\x10\xfd\xfd\xfd\x78\xa6\xaf\xba\x4c\xd3\x0c\xe8\x47\
\x0a\x02\xc7\x94\xd8\xb2\x84\x42\x08\xcb\x2d\xa1\xab\x1a\x48\x05\
\x4d\xd5\x28\x95\x8a\x0c\x0d\x0d\x21\x10\xac\x2c\x6d\xe5\x9c\xcb\
\x62\x28\x52\xe0\x9a\x80\xa5\xfb\xd7\x36\x60\x50\xda\x5d\x81\x97\
\xf5\xaf\xad\x50\x28\x50\x5b\x5b\x1b\x54\x1b\x47\x8c\x8a\x20\xce\
\x8a\x46\xa3\x7f\xd6\xe3\x7a\x5f\x02\x29\xe7\xb4\x15\x04\xa3\x5a\
\x9c\xf9\x2d\x61\x32\xd9\x34\xed\x77\x58\x4c\xac\x99\x82\xeb\x65\
\x11\x8a\xc0\x72\x1d\x74\x5d\x20\x4d\x0d\x44\x82\xb3\xce\x5c\x3c\
\x54\xb6\x01\x65\xa3\xd7\xd5\xd5\x45\x28\x14\xe2\xf4\xd3\x4f\x67\
\xfd\xda\x75\xb4\xdf\xec\x20\x85\x0e\x42\x47\x93\x6e\xd9\xd1\xc6\
\x88\x2a\x81\x8b\x5c\x66\xa2\x58\xa6\xef\x56\xab\xaa\xca\xc0\xc0\
\x00\xae\xeb\x52\x59\x59\x49\x26\x93\x09\x58\x1e\x8e\xe3\x10\x0e\
\x87\x39\xef\xbc\xf3\x48\xa7\xd3\x2c\x5f\xbe\x3c\xc8\x08\x0e\x47\
\x8b\x9c\x7e\x55\x15\xc7\x1a\x57\xd1\x30\xc5\x64\x51\xeb\x17\xb8\
\x7f\xc5\x4f\xf9\xf8\x91\x0b\xb9\xf3\xb9\x6f\xd3\xff\x98\x87\xa5\
\xfa\x0d\xd2\x0a\x58\x18\xba\x42\x23\x47\x72\xfa\x51\xe7\xf0\xe4\
\xb6\xbb\x50\x9d\x4a\x72\x2d\xbd\x8c\xe4\x25\xa5\xd1\x48\xe0\x31\
\x1e\x73\xcc\x31\x6c\xdf\xbe\x9d\x7c\xa6\x80\x31\x37\x49\x69\x57\
\x04\xa1\xec\xf3\x0e\x0f\xa9\x40\xaa\xab\xab\x03\x86\xa0\x82\x00\
\x4d\x62\x3b\x2e\x7f\x33\xf1\x26\x12\x8d\x77\x05\xbb\x20\xe5\x66\
\x19\x77\x41\x9c\x71\x0d\x3a\x4e\x1e\xec\xa1\x0a\xdc\x11\x03\x7b\
\xd0\x40\x16\x55\xdc\xbc\x8a\xa6\x69\x94\x4a\x25\x7e\xf3\x9b\xdf\
\x00\x70\xc6\x19\x67\xb0\x7c\xf9\x72\x1c\xc7\x21\x1e\x8f\x53\x53\
\x53\x43\x4f\x4f\x0f\x8b\x16\x2d\x62\x64\x64\x84\x35\x6b\xd6\x10\
\x8d\x46\xd9\xbb\xa7\x1b\x81\xc0\xb2\x6c\x3c\x9b\x20\x3a\x0e\x87\
\xc3\xa4\x52\xa9\xe0\x5a\xfb\xfa\xfa\x08\x87\xc3\x58\x96\x45\x22\
\x91\x60\xca\x94\x29\x81\x6b\x2c\x84\xc0\x0d\xbb\xa8\x8a\xc6\x26\
\xef\x7e\x2e\x09\xdd\xce\x53\xbb\x7f\xc4\x4e\x6f\x29\x3b\xdf\x78\
\x0e\x4f\x2a\x7e\x1a\xda\x94\x7e\x4f\x47\x43\x30\x38\x94\xe3\x3b\
\x17\xdc\xca\xff\x7e\xf6\x0c\xee\xfe\xfc\x12\xf6\xf4\xf7\xf0\x74\
\xef\xad\x0c\xda\x5d\x81\x0a\xf4\x3c\x8f\x78\x3c\x3e\x86\xe1\x29\
\x78\x8e\x87\x6b\x9a\x08\x95\x03\x02\xc9\xbf\x98\x4a\x1a\x90\x64\
\x0d\x23\xd8\x09\x25\xd3\xf4\xdb\x7c\xdb\x06\xe7\x9d\xb8\x98\xbc\
\x99\x09\xe8\xa1\x23\x4e\x9a\x8a\x50\x08\x33\xab\xb0\x70\xca\xf9\
\xd4\x4e\xd1\x88\x1e\x95\xa1\xf6\xec\x11\x2a\x4f\x89\x07\xdd\xb1\
\xcb\x5e\xd0\xc8\xc8\x08\xe1\x70\x18\x45\x51\x98\x33\x67\x0e\x86\
\x61\x30\x69\xd2\x24\x6c\xdb\x0e\x3a\x44\x9c\x78\xe2\x89\xa4\xd3\
\xe9\x31\x57\x52\x39\xa0\x4d\xb9\xeb\xba\x41\xcb\xf0\x72\xb3\x9c\
\x78\x3c\x1e\xd4\x3d\x76\x74\x74\xf0\xec\xb3\xcf\xa2\xaa\x6a\xd0\
\x81\xa2\x50\x2c\x62\x3b\x16\x28\x36\x77\xb4\x5f\xc6\x70\xb6\x0f\
\x89\x24\x3f\xe2\xb2\xfe\xde\x38\xae\xb7\xaf\xeb\xb5\xa2\x28\x8c\
\x0c\x98\xc4\x6a\x42\x28\xaa\x4b\x44\xaf\xa4\xa6\xb2\x86\x78\xae\
\x2f\x48\x96\x95\x89\x7f\x5d\x5d\x5d\x7e\x73\x4e\x55\x41\xba\xde\
\x01\x34\xd7\x3f\xe7\x02\xbf\x2f\xb7\xb7\xac\x3a\x04\x7e\xa0\x55\
\xf6\x2a\xf6\xb5\xff\xb6\x71\x15\x17\x45\x15\x7c\x7a\xd2\x37\xc9\
\xc8\x5d\x5c\xbf\xe8\x31\x1e\x5d\xfd\x20\xcd\x93\x74\x9e\x58\x71\
\x5f\x70\x93\xd1\x68\x94\x2b\xaf\xbc\x92\xc9\x93\x27\xd3\xdf\xdf\
\x8f\xeb\xba\x2c\x5d\xba\x14\xcf\xf3\x68\x6f\x6f\xc7\x71\x9c\x40\
\x1d\x95\x03\xcb\x90\x66\x04\xf1\x8b\x6b\xf9\x49\xa0\x50\x28\x44\
\x5d\x5d\x1d\x42\x08\x6a\x6b\x6b\xdf\x76\xc1\x63\x24\x03\x45\x04\
\xa5\xd0\x9e\xe7\x51\x9d\x51\xe9\x5a\x52\x20\xd7\xed\x62\x65\x3d\
\xd6\x14\x56\xe2\x14\xc1\xb3\x25\x8a\x2a\x70\x0d\x77\x5f\x5b\x43\
\x14\x74\x5d\xc1\x19\xe3\x35\x83\xff\xf4\x20\x04\xb8\xae\x87\x69\
\xfa\xfc\x33\xd7\x71\xa8\x6d\xae\x27\x99\x8f\x13\x8a\x99\x84\x2a\
\x55\xd2\xb6\xdf\x37\xf2\x60\x4a\x15\xde\x57\xa4\x5e\x5e\x78\x81\
\xdf\x30\xc0\xf1\x6c\x4a\xa6\x89\xa6\xe8\xfb\x0a\x60\x0c\x0f\x4f\
\x0a\x5a\xab\x67\xf0\x9b\x3d\xbf\xa0\x2d\x74\x22\xf5\x91\x16\x0c\
\x25\x83\xf4\x08\x04\x52\x2a\x95\x64\x28\x14\x12\x9d\x9d\x9d\xfb\
\x47\xef\x52\x08\x21\x5c\xd7\x45\xda\x60\x7b\xde\x98\x43\xa8\x82\
\x02\x96\x63\x63\x3a\x92\xd1\x91\x04\x72\x8c\x65\x5f\xce\x4c\xe6\
\xf3\xf9\x75\x1d\x1d\x1d\x0f\xd6\xd7\xd7\xff\x58\xd3\x34\xbf\x0f\
\x7c\x9d\xc7\xe1\x0b\x2b\xc0\xae\xa2\x32\x57\x45\x75\x75\x35\x97\
\x5c\x72\x09\xcb\x97\x2f\x67\x78\x70\x18\xc5\xc9\xe1\x86\x5c\x3c\
\xcd\xc3\x8d\xba\x01\xe4\x51\xde\x78\xca\x58\x43\x9c\xa9\xd3\x6b\
\xd8\xd1\xb5\x97\xd3\x0f\xfb\x3b\xde\x1c\x5e\x4d\x32\x51\x64\x76\
\xc3\x09\xbc\xe5\xfc\x01\xcb\xf2\xb3\x86\x85\x7c\x81\x07\x7f\xf9\
\x30\x3f\x79\xea\xfb\xfc\xcd\x27\x4e\xe5\x67\x8f\xfd\xc4\xcf\x82\
\xaa\x7c\x30\x27\x44\x4a\xa9\x4a\x29\x65\xa9\x54\x12\x02\x81\xe7\
\xb9\x54\x44\x15\xfe\xf9\xe1\x8b\x91\x9e\xc0\xb6\x7d\x42\xb6\xa7\
\xb9\x08\x11\x02\x24\xaa\xe2\x1b\xb3\x91\x54\x9c\x5c\x65\x7f\xa0\
\x6e\x00\x1a\x1b\x1b\xc5\x1b\x6f\xbc\xf1\x95\x62\xb1\xf8\x10\xe0\
\x45\xa3\xd1\xeb\xa7\x4d\x9b\xf6\xcd\x64\x32\x89\xa3\x48\xa6\x7f\
\xa5\x92\xd6\x96\x30\x76\x41\x62\x17\x3d\x52\x9d\x36\x99\x6e\x87\
\x25\xff\x6c\x07\x5e\xfb\xd8\xb3\xa6\x44\x2c\x16\x43\xd3\xb4\x5d\
\xf9\x7c\xfe\xce\x96\x96\x96\x1f\x97\x79\x58\x6e\xb5\xa0\xed\xd4\
\x28\x5a\x58\xf2\xd2\xfa\x7e\x7e\xf6\xfd\x5f\xb1\x77\xef\x5e\x6e\
\xbf\xfd\x76\x6e\xb8\xe1\x06\x74\x5d\xa7\xbe\xbe\x9e\x62\xb1\xc8\
\xee\xdd\xbb\x59\xb4\x68\x11\xeb\xd6\xad\x63\xfd\xfa\xf5\xe4\x72\
\x39\xc4\x98\x47\xa7\xeb\x2a\xdf\x79\xf5\x0b\xdc\x7e\xc1\x0b\x7c\
\x7f\xcd\x97\xc8\x5a\x09\x14\x5d\x90\xcf\x9a\x64\xb3\x3e\xea\xeb\
\x58\x2e\x23\x72\x33\x4e\xdd\x56\x96\xed\x4e\x21\x5d\x49\xb1\x54\
\x44\x28\x3e\x13\xf2\x90\x0b\x64\xd7\xae\x5d\x9f\x6a\x69\x69\x19\
\x48\xa7\xd3\x7e\x13\xfd\x71\x02\xd3\x89\xe2\xd5\xee\x61\xb4\x90\
\x25\x22\x6b\x7c\x81\x44\x20\xa4\x84\xe8\x4a\x77\x30\xbf\xfe\x34\
\x8e\x9e\x3e\x8f\xd7\x77\xbc\xc2\x11\xb1\xa3\x59\xea\x3d\x8f\x65\
\x39\x41\x8e\x22\x12\x89\xd8\xc5\x62\xb1\x34\xf6\xb3\x13\x0e\x87\
\x31\x0c\x03\x03\xe8\xff\x95\x43\xbf\x92\x7b\x17\x5a\xab\xbe\x3f\
\xc5\x55\x64\xb3\xd9\x6d\xbb\x76\xed\xba\x41\xd7\xf5\x37\x01\xb5\
\x4c\x44\x90\x42\xe2\xb9\x3e\x27\xca\x35\x21\x6a\x84\x79\xfa\xe9\
\xa7\xb9\xf6\xda\x6b\x79\xe9\xa5\x97\x28\x95\x4a\x44\xa3\x51\xfa\
\xfb\xfb\x89\xc5\x62\x0c\x0f\x0f\xd3\xd9\xd9\x49\x38\xec\x27\xd7\
\x4a\xa5\x12\x8a\x50\xb0\x5d\x0f\xc7\xb3\xa9\xab\x37\xb8\xfe\xb5\
\x4f\xfb\xea\xaf\x4f\x32\xb4\xb9\xc8\x9e\x65\x79\x14\x6d\x8c\x9d\
\xe8\x29\x24\xf2\x43\xa0\xba\x0c\x15\xf6\xb2\x77\x6d\x9a\x5c\xae\
\x04\x82\x43\xef\x65\x8d\x31\xef\x06\x03\xd6\x05\x10\xb2\xa0\x98\
\xf6\x18\x79\xd3\xc4\xca\x7b\xb8\x5e\x0e\xc7\x71\x28\x19\x2e\xb6\
\x5b\xc1\x92\xa1\x9f\x70\x4e\xe3\xb7\x78\x78\xeb\x2d\xec\x16\x2f\
\x92\xd8\xbe\x02\x4f\xba\x64\xb3\xd9\x20\x3e\x79\x3b\x95\xa8\xad\
\xad\x8d\x1b\x6f\xbc\x91\x89\x13\x27\xf2\xcb\x5f\xfe\x92\xae\xae\
\x2e\xc6\x8f\x1f\xcf\xb6\x6d\xdb\xa8\xaf\xf7\xbb\x33\x74\x75\x75\
\x61\x18\x06\x91\x48\x84\xae\xae\x2e\x84\x10\x83\x89\x44\xe2\x37\
\xe5\xb0\xa7\x5c\x60\x34\x61\xc2\x04\xd2\xc3\x69\x5e\xbf\x3e\xe7\
\xc7\x22\x8a\xca\xcb\x2f\xbf\xcc\x86\x0d\x1b\x82\x16\x1d\xe5\xe8\
\xbe\xfc\xb5\x9c\xb3\x28\x63\x76\xae\xeb\x52\xdb\xa9\xb1\xe6\xc7\
\x29\xac\x8c\x87\x53\x92\x38\x79\x89\xf4\x7c\xb7\xa8\xdc\x8b\x45\
\x51\x14\x8c\x90\xc1\x75\x5f\xf8\x21\x6e\xc1\x8f\x9f\x24\xa0\xee\
\xc7\x75\x3e\xa4\xd0\xc9\xfe\xf8\x90\xae\xeb\x2c\x5e\xbc\x98\x75\
\x6b\xd6\xb1\xe9\x66\x0b\x94\x10\x6e\xd1\xc4\x74\x0b\x38\xb6\x43\
\xa9\x52\xe2\x48\x0b\x43\x53\x79\x76\xe8\x26\x4e\x98\x72\x1a\xf9\
\x64\x89\x91\xb7\x7a\xd9\xf3\x72\x81\x62\xf1\xdd\x9f\x8a\xe6\xba\
\x2e\x33\x66\xcc\x20\x1e\x8f\xf3\xc9\x4f\x7e\x92\x19\x33\x66\x90\
\x4a\xa5\x38\xee\xb8\xe3\x68\x6b\x6b\xa3\xbf\xbf\x9f\x50\x28\x14\
\xa4\x46\xcb\x4d\x64\xde\x8e\x3a\x94\x69\x3f\xe5\xf4\xf1\xaa\x55\
\xab\x48\xa5\x52\xbc\xfa\xea\xab\x18\x86\x41\xb1\x58\x0c\xc0\xbf\
\x72\x92\x69\x2c\x6e\x19\x50\x14\xa5\xcc\x48\x98\xfe\xb9\xcf\x7d\
\x4e\x39\xf5\xd4\x53\x79\xf8\xe1\x87\xc9\x66\xb3\xe4\xd4\x5c\x60\
\x43\xcb\x0d\x30\x5d\xd7\x27\x51\x94\x89\xe5\xd5\x4a\x03\x4e\xd8\
\xc1\x52\x2c\x22\x91\x08\x85\x42\x21\xe8\x6c\x71\x48\xb9\xbd\xfb\
\x0b\x66\xcd\x9a\x35\x6c\xd8\xb0\x81\x4f\x7d\xea\x53\x6c\xde\xbc\
\x99\x81\x81\x01\x96\x2e\x5d\x4a\x3e\x9f\xc7\x75\x5c\xa2\x49\x85\
\xa1\x0d\x16\x85\x3e\x87\xe4\xce\x04\xcf\x75\xdf\x87\xa2\x0b\x14\
\x15\x14\x6d\x7f\xba\xa6\xfa\x0e\x44\x76\xd5\xaa\x55\x2c\x58\xb0\
\x80\xdb\x6e\xbb\x8d\xa5\x4b\x97\x22\xa5\xe4\x91\x47\x1e\x21\x9f\
\xcf\x13\x0e\x87\x83\xee\x0e\xa5\x52\x89\xe5\xcb\x97\xbf\x2b\xd5\
\xbf\x0c\xb5\x4b\x29\x39\xec\xb0\xc3\x58\xb2\x64\x09\xba\xae\x73\
\xe2\x89\x27\xb2\x70\xe1\x42\xee\xbd\xf7\x5e\x2a\x2b\x2b\xa9\xaf\
\xaf\x27\x9f\xcf\xb3\x67\x8f\xdf\x71\x63\xeb\xd6\xad\x57\x27\x12\
\x89\xc7\x00\x66\xcc\x98\x51\xb8\xe4\x92\x4b\x22\xa3\xa3\xa3\x5c\
\x7e\xf9\xe5\xdc\x7b\xef\xbd\x9c\x7f\xfe\xf9\xb4\xb7\xb7\x33\x67\
\xce\x1c\x9e\x7b\xee\x39\x66\xce\x9c\x49\x43\x43\x03\xc9\x64\x92\
\x55\xab\x56\xd1\xdb\xdb\x1b\xb8\xe7\x1f\xfb\xd8\xc7\x58\xbb\x76\
\x2d\xbb\x77\xef\xe6\xb0\xc3\x0e\x63\xd7\xae\x5d\x1f\x8c\x40\xca\
\x5e\x48\x2e\x97\xa3\xa1\xa1\x01\x21\x04\xb3\x66\xcd\xe2\xd5\x57\
\x5f\xa5\xb1\xb1\x91\xeb\xaf\xbf\x9e\x07\x1f\x7c\x90\xc4\x8e\x04\
\x8a\x69\x12\x8e\x39\x34\xce\xf6\x4b\xaa\xcb\xcc\xf1\x72\xd3\xe2\
\x77\xab\x1d\x0f\x87\xc3\xdc\x7c\xf3\xcd\xef\xfa\xd9\x65\x1a\x6b\
\xb9\x9b\x36\x10\x60\x45\x6f\xbf\xc6\x2b\xae\xb8\x82\x33\xcf\x3c\
\x93\xe5\xcb\x97\x07\xde\x61\xb9\xe1\x81\x94\x92\xd1\xd1\x51\xf6\
\xec\xd9\x13\x08\x74\x6c\x9e\xa0\xc2\xa8\xb2\xb2\x52\xbc\xfe\xfa\
\xeb\x9c\x7b\xee\xb9\xdc\x70\xc3\x0d\x58\x96\xc5\xe6\xcd\x9b\xe9\
\xea\xea\xa2\xae\xae\x8e\x5c\x2e\xc7\xde\xbd\x7b\xe9\xec\xec\xc4\
\xb2\x2c\x0a\x85\x02\x52\x4a\x8e\x38\xe2\x08\x1e\x7b\xec\x31\x74\
\x5d\x27\x1a\x8d\x52\x51\x51\x41\x22\x91\x38\x28\xd4\xf7\x7d\x55\
\x27\xb6\xb5\xb5\xc9\xa6\xa6\x26\x26\x4f\x9e\xcc\xc0\xc0\x40\xe0\
\x31\xc5\xe3\x71\x1e\x7c\xf0\x41\xde\x78\xe3\x0d\xbe\xfa\xd5\xaf\
\x72\xfb\xed\xb7\x33\x38\x38\x48\x24\x12\xa1\xbe\xbe\x9e\x95\x2b\
\x57\xd2\xdc\xdc\xcc\xae\x5d\xbb\x18\x1d\x1d\xa5\xae\xae\x8e\x81\
\x81\x01\x76\xef\xde\x7d\x51\x22\x91\xb8\x7f\x6c\x51\x66\x56\x55\
\x55\xcd\x14\x42\xbc\x97\x4e\x6d\x9e\xeb\xba\x5d\x83\x83\x83\x6f\
\x96\x6d\xc8\xe1\x87\x1f\x5e\x68\x6e\x6e\x3e\xa0\xcb\xf6\x1f\x2b\
\x77\xdb\x5f\x0d\x6f\xd8\xb0\xe1\xcb\xc9\x64\xf2\x3e\x80\x13\x4e\
\x38\xa1\x58\x28\x14\xc2\x65\x0c\x6d\x7f\xd2\xf4\xfe\x5f\xdf\x4e\
\xa5\x0d\x85\x42\x41\xcc\x54\xee\xef\x55\x6e\x3a\xbd\x65\xcb\x96\
\x2f\x25\x12\x89\x07\x0e\xb9\xca\xca\x64\x32\x74\x74\x74\x04\x79\
\xeb\x32\xd7\xf7\xe9\xa7\x9f\xe6\xea\xab\xaf\xe6\xb5\xd7\x5e\x63\
\xc7\x8e\x1d\x81\x5d\x28\x43\x0a\xa1\x50\x28\xc8\xe2\xbd\xdb\x43\
\x1c\xb3\xd9\xec\xf6\x6c\x36\xbb\xfd\xfd\x5c\x57\x7d\x7d\x7d\x99\
\xdd\x61\x0e\x0d\x0d\x2d\xee\xe9\xe9\x79\xaf\x53\xd8\x35\x35\x35\
\x9b\xcb\x0c\xfe\xe1\xe1\xe1\xe7\xa3\xd1\xa8\x5a\x2c\x16\x8f\x55\
\x14\x65\x3c\x80\xab\x42\xdd\x14\x8d\xe1\xad\x05\xa4\x2b\x29\xd9\
\x25\xc2\x5a\x05\x20\xc7\xf0\xbb\x12\x4d\x4d\x4d\x07\x08\x6f\xff\
\xb6\xeb\x42\x08\xfd\x90\x9f\x90\xfa\xfa\xfa\xdd\xab\x56\xad\x3a\
\xac\xbd\xbd\x9d\xa3\x8f\x3e\x9a\x27\x9f\x7c\xb2\x8c\xe6\x06\x39\
\xec\x70\x38\xfc\x0e\xaf\xe2\xdd\x76\xaa\x61\x18\x6c\xdb\xb6\x2d\
\x38\x21\xff\x2f\x8e\xf1\xe3\xc7\x3f\x1a\x0a\x85\x2e\x04\xc8\x55\
\x78\x9c\xf1\xcd\x46\x22\x06\xe4\x87\xab\xf8\xfa\x79\xff\xc6\xed\
\x2b\xaf\x81\x92\xce\xc0\x86\x22\x6f\x3d\x96\xf5\x1f\xb0\xe9\x37\
\x46\x7b\xa8\xbf\xbf\xff\x8b\xfb\xa9\xc1\xd2\x21\xf5\xb2\xf6\xc3\
\xb3\xe4\x4d\x37\xdd\xc4\x2d\xb7\xdc\xc2\x4b\x2f\xbd\x44\x6f\x6f\
\xaf\xff\x28\x87\x74\x1a\x5d\xd7\x83\x27\xcd\x1c\x4c\x72\x3f\x9b\
\xcd\xfe\xc9\xd6\x1a\xff\x8f\x0c\x25\x70\xf3\x6d\x81\xe7\x49\x4a\
\x49\x41\x72\x47\x9c\xcb\xbe\x7c\x15\x99\x2e\x2b\xd8\xdb\x46\x24\
\x14\xe4\xed\x85\x28\x3f\x68\xe3\xe0\x1f\x36\xf9\xbe\x73\xea\xed\
\xed\xed\x2c\x5e\xbc\x38\x68\x3e\x2c\xa5\xa4\xba\xba\xfa\x8f\x16\
\x5c\xc6\x62\x31\x76\xee\xdc\x79\x6c\x7f\x7f\xff\x7a\xfe\x9b\x8e\
\x53\x4e\x39\x85\xae\xae\x2e\xf6\xdc\x99\xc3\x73\x25\x52\x1a\x08\
\xa9\x53\x5d\x1d\xf1\x0b\x78\xc6\x6c\x46\x39\x3c\xb0\x6d\xfb\x3d\
\xb7\xbc\x7e\x5f\x02\x31\x4d\xf3\x65\xd7\x75\xb7\x95\xbd\x93\x83\
\xe9\x10\x94\x4a\xa5\x6c\xcf\xf3\x46\xff\x3b\x0a\x42\x4a\x69\x5e\
\x72\xc9\x25\x9c\x72\xca\x29\xb4\xb4\xb4\x70\xc7\x1d\x77\x90\x4a\
\xa5\x68\x68\x68\xa0\xb7\xb7\x97\x9a\x9a\x1a\xda\xda\xda\x58\xb1\
\x62\x45\x90\xa2\xd8\xb0\x61\x03\xb6\x6d\xaf\xfb\xc0\x05\x52\x5d\
\x5d\xcd\xf0\xf0\xf0\x65\xc3\xc3\xc3\xfc\xff\x68\xc8\x32\x0e\x15\
\x0e\xfb\x7d\x80\xcb\x7d\xbd\xa6\x4c\x99\xc2\xe6\xcd\x9b\x99\x30\
\x61\x02\x53\xa7\x4e\xa5\xa7\xa7\x87\xee\xee\xee\x32\xb9\xc1\xfe\
\xc0\x05\x72\x28\xdb\x6a\xff\x77\x19\x9a\xa6\x71\xff\xfd\xf7\xf3\
\xc2\x0b\x2f\x90\xcd\x66\x83\xb8\xa5\x1c\x78\x4a\x29\xe9\xee\xee\
\x0e\xbe\xf7\x3c\xef\x7d\x37\xe8\xd7\xf8\x68\xfc\xd9\xe1\x38\x4e\
\xc8\x30\x0c\xd2\xe9\xf4\x01\x1e\xe2\x9f\x8a\x69\xc6\x82\xe7\xf7\
\xec\xad\x88\x8f\x96\xfb\xcf\x8f\x48\x24\xd2\x54\x2a\x95\xaa\xde\
\xe3\xba\xba\x75\x75\x75\x83\xa3\xa3\xa3\x85\x8f\x56\xf0\xa3\xf1\
\xd1\xf8\x68\x7c\x34\x3e\x1a\x1f\x8d\x8f\xc6\x47\xe3\xa3\xf1\xd1\
\x38\x60\xfc\x7f\xa7\x8c\x4b\xc9\xd0\xc3\x6c\x1c\x00\x00\x00\x00\
\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x1a\x78\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x69\x08\x06\x00\x00\x00\xcc\x7c\x86\x8a\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x88\x26\x00\x00\x88\x26\x01\
\xac\x91\x9d\x06\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xd9\x03\x03\
\x0e\x04\x3a\x04\x0e\x03\xc5\x00\x00\x19\xf8\x49\x44\x41\x54\x78\
\xda\xed\x9d\x7b\x74\x54\xf5\xbd\xe8\x3f\xbf\xbd\x67\xcf\x64\x92\
\x0c\x79\x12\xc2\x23\x01\x4a\x28\x8f\x4a\x90\xa2\x16\x6b\xc3\xa9\
\xa0\x02\xf5\xd4\x2e\xbd\xda\x2e\x5b\xee\xa5\x85\x7a\xb4\x75\xf5\
\xae\x16\xef\x83\xf6\xf6\x9c\x73\x5b\xd0\x7a\x7b\xaf\x4b\xaf\xbd\
\xb4\x97\xd5\x7a\x3d\x2a\xb7\xd6\x1e\x3d\xe7\xd4\x5a\xa1\x2a\x78\
\x05\x0f\x4a\x00\x51\x08\xa4\x09\xaf\xc8\x23\x04\xf2\x9a\x24\x33\
\x99\xcc\xec\xc7\xef\xfe\x31\xcc\x66\x1e\x7b\x87\x09\x4c\x12\x70\
\xe5\x3b\x6b\xaf\x64\xf6\xfe\xfd\x7e\xfb\xf7\xfb\xbe\xbf\xdf\xdf\
\x63\x04\x40\x65\x65\x65\x9d\x10\x62\x07\x63\x30\xea\xa0\x00\xaa\
\x65\x59\xda\x18\x2a\xae\x0e\xf0\x00\x85\xa6\x69\x16\xaa\xaa\x0a\
\x80\x94\x12\xcb\xb2\xc6\x30\x33\x42\x20\x84\x40\x51\x94\x14\x82\
\x94\x1b\x86\x51\xe6\xf5\x7a\x01\xc8\xcf\xcf\x67\xe5\xca\x95\x63\
\x98\x1a\x21\xd8\xbf\x7f\x3f\x7b\xf7\xee\x4d\x21\xc8\x44\xc3\x30\
\x2a\xd3\x0b\x9a\xa6\x89\xae\xeb\x63\x18\x1b\x46\xc9\xf0\xf9\x7c\
\x08\x21\x32\x54\xd6\xf8\x0b\x97\x0d\x52\x4a\xfa\xfb\xfb\x69\x6b\
\x6b\x1b\xc3\xdc\x30\x41\x5e\x5e\x1e\x53\xa6\x4c\x41\x4a\x99\x41\
\x90\x80\x65\x59\x81\xf4\x0a\xc3\x65\x4b\x84\x10\x28\x28\x08\x40\
\x20\x88\x19\x26\x31\xcb\x40\xf5\x9b\xf1\x3b\x31\x0f\xaa\xa5\x82\
\x00\xa1\x48\x10\x20\x91\x9f\x38\x82\xa4\x13\x22\x99\x20\x5e\x21\
\x84\xe6\x54\x30\x5d\x9c\x72\x01\x41\xa3\x8f\x93\xfa\x39\xba\x8d\
\x10\x03\x32\xc6\x67\x6e\xc8\x67\x76\x6d\x01\x49\x76\x8d\xfe\x1e\
\x89\x6c\xcf\x27\x76\xce\x87\xd5\xee\xc7\x33\x90\x17\x27\x90\xdd\
\xb1\x4f\x36\x41\x14\xcb\xb2\xd4\x64\x22\x58\x96\x95\x53\x09\x11\
\x40\x8f\x19\xa6\x61\xe0\x04\x1d\x66\x0f\x42\x2a\x4c\x9d\x9e\xc7\
\x8a\xa5\x65\xe4\xe5\x29\x98\x26\x48\x09\xe2\x82\x34\x14\x94\x0a\
\x28\xeb\x47\xcc\x09\x83\x02\xb1\x01\x89\xec\xd3\x30\x7a\x35\x64\
\x9f\x86\x19\xf2\x60\x85\x34\xac\xb0\x07\x2b\xa4\xa2\xe8\x9a\x2d\
\x4d\xf1\x4b\x5e\x13\x04\x49\xe0\x39\x9d\x20\xc2\x49\x12\xdc\x28\
\x78\x39\x10\xb2\x22\xfc\x6b\x7f\x03\x52\x4a\x54\xa9\x52\x31\xc9\
\xcb\x17\xbf\x54\x8a\xb4\xc0\x30\x24\x42\x08\x12\x5d\x10\x71\x1d\
\x05\x52\x20\x11\x60\x48\x34\x55\x81\x62\x13\x6f\xb1\x09\x0c\x10\
\xef\xda\x85\x72\xc0\x40\x48\x62\x9c\xcf\xc3\x68\xf7\xa1\xb7\xf9\
\xf0\x84\xf3\x6d\x4e\x10\x57\xa9\x44\x0d\x26\x21\xae\x94\xcb\x85\
\x84\x18\xd2\x64\x57\xff\xa1\x0b\x12\x20\xf0\xe5\x09\x6e\xbb\xab\
\x0c\x69\x65\xa9\x16\xed\xe7\xc2\xf1\x96\x04\xfc\x45\x02\x8a\x07\
\x10\xb3\x07\x40\x01\x3d\x66\x61\xf6\x7a\xb0\x7a\xbd\x18\xbd\x2a\
\xb2\xcf\x8b\x19\x56\x91\x21\x0f\x46\x48\x41\x89\x7a\x11\x42\xc6\
\xc3\xe2\x51\x92\xa8\xc1\x24\xc4\xb5\x42\x2e\xa4\xe4\xa4\x7e\x0e\
\x43\x9a\x28\x17\xb0\x58\x33\xb7\x00\x45\x11\xc8\x1c\x31\xae\xdd\
\x86\x14\x48\x13\xa4\x09\x1e\xa1\xe0\x29\x92\x50\x14\xbd\x30\x96\
\xfe\x14\x89\xd2\xa3\x92\x81\x36\x0d\xeb\x9c\x9f\x48\x9b\x07\x4f\
\x77\xbe\xad\xee\x46\x4a\xa2\xb2\x96\x90\x64\x62\xe4\x82\x20\xed\
\x46\x4f\x8a\x14\x54\x4e\xf6\x0e\xab\x5d\x16\x4e\xff\x25\x4b\x94\
\x94\x78\xfd\x02\xef\x0c\x1d\x51\xa3\x53\xa2\x48\x4c\xab\x83\x58\
\x8f\x02\x3d\x5e\x62\x3d\x2a\xf4\x7a\x31\xc2\x0a\x84\x35\x62\x21\
\x81\x12\xd1\x10\x6a\x6e\x25\xc9\x0d\xbf\xc3\x4e\x90\x90\x8c\xa4\
\x7c\x2f\x2c\x52\x47\x3d\x20\x03\xc0\x8a\x4b\xa9\x34\x40\x11\x0a\
\x79\x85\x12\x0a\x63\xe4\x4d\x06\x49\x04\x64\xc2\x96\x81\x65\x40\
\xd7\x9f\x4b\x90\x5d\x79\x39\x27\x48\x56\x2a\x2b\x97\x04\x31\x30\
\xe3\x86\xfa\x02\xf8\x7c\xa3\x4b\x10\x57\x02\xa5\xcb\x56\xc2\x63\
\x93\xa0\xfa\x40\xd1\xc0\x90\x72\xe4\x09\x92\x4b\xef\x8a\xf8\x78\
\x6c\xfb\x01\xa0\x88\x6b\x2c\x8c\x10\x49\x03\x19\x01\x18\x54\x42\
\xc6\xb2\xbe\xc3\x97\xb9\x18\x92\x0d\xc9\xb5\xa4\x64\xb6\x25\x87\
\x50\xf7\x82\x5a\x11\xf1\x7a\x71\xc4\x48\x5b\xdd\x08\x2e\xc6\x30\
\xc3\x4a\x90\x1c\x8b\xc8\x90\xbc\xac\x04\x27\xe4\x82\x23\x84\x88\
\x1b\xcd\x54\x50\xb2\xaa\xab\xa8\xd0\xd5\xae\x73\xea\x64\x1f\x35\
\x15\x73\xb8\x61\xc6\x4d\x54\x96\x4e\x60\x7a\xe5\xa7\x50\x15\xe8\
\xec\x6f\xe3\x64\xf0\x2f\xb4\x85\x3e\xe6\x78\x77\x03\x8a\x50\x1d\
\xde\x95\x33\x0c\xe6\x5c\x42\x86\x1c\x87\x0c\xb7\x63\x7a\x29\xc9\
\x78\xfb\x4f\xdd\xf4\xb4\xc3\xab\x3f\xfb\xbf\xcc\x9f\x3e\x1f\x8f\
\xea\x71\xec\xa7\x29\x0d\xce\xf4\x1e\xe5\xd7\x7b\xff\x96\xf3\xa1\
\x93\xa8\x8a\x73\xb9\x44\x46\xc0\x92\x56\x5c\xf2\x10\x59\xe7\xeb\
\xe4\x30\xa8\x40\x27\x3c\x2b\xd9\x78\x01\x57\x72\x5d\x0e\xc4\x06\
\x2c\x5e\x7d\xb1\x0d\x2d\x56\xc6\x47\xcf\xec\x60\x61\xcd\x42\x54\
\x45\x75\xf5\x92\x3c\x8a\xc6\xd4\xe2\x39\x6c\xb8\xed\x65\x6e\xfd\
\xd4\x7d\x18\x96\xee\x58\x2e\xd4\x6b\xb0\xed\x4f\xed\x4c\xb6\xea\
\xb8\xbf\x76\x2d\x4b\x3e\xf5\x55\xe6\x8c\xff\x5c\xd6\x0c\x92\x2b\
\x9c\x0c\x86\x97\x61\x77\x7b\x11\x82\xf4\x66\x06\x6b\xd7\xb2\xe0\
\xf5\x7f\xec\x24\xe0\x2f\xe5\xfd\x5f\xbe\x89\xdf\xeb\xcf\x2a\xc5\
\x92\x90\x80\xfb\x6b\xff\x23\xc5\x79\x15\xfc\x63\xc3\xff\xc4\xab\
\xfa\xec\xe7\xaa\x47\xd0\x72\x64\x80\xe6\xc3\x3d\xdc\xff\xf7\x7f\
\x43\xcd\xc4\x99\x00\xb4\x74\x1f\xe6\xf0\xf9\xf7\xb3\xa0\x88\x95\
\x53\xad\xe1\xa4\xae\x06\x55\x59\xb9\xf3\xf3\xe3\xae\x6e\x32\xa7\
\x2a\x8a\x3b\x72\x0f\x7f\xd0\x47\xa4\xdf\xe4\xe7\x0f\x3e\x62\x13\
\x23\x1d\xa2\x46\x84\xf7\xff\xb2\x1b\x4d\xf8\xb9\x61\xd6\x02\xbc\
\x1e\x6f\x0a\x61\x56\x7c\x7a\x15\x4d\x1d\x7b\x69\xee\xfc\xe0\x22\
\x41\x54\xc1\xc9\x63\x03\x54\x56\x94\x50\x33\x71\xa6\x4d\x40\x04\
\x29\x73\xda\x57\xaa\x6a\x73\xee\xf6\xe6\x42\xe5\x5c\xb6\x1f\x23\
\x05\x87\x3f\x0c\x51\x5e\x12\xe0\xdf\xde\x76\xbf\x63\xa9\x73\xa1\
\x53\xfc\xb7\x9d\xab\xe9\xe9\x0b\xb3\xf5\xe5\x4e\xaa\x27\x54\xf1\
\xde\x2f\xde\xca\x08\xf4\xfe\xcd\x67\xfe\x3d\x8f\xbd\xb3\xca\xfe\
\x1e\x09\x5b\xf4\xf5\x98\xdc\x3c\x6f\x61\x76\x49\xcd\x61\xca\xed\
\x5d\x4a\x0b\x29\x83\x89\x93\x65\x59\x57\x7c\x25\x5c\xc6\xc4\x27\
\xfd\x7b\xe2\xa3\xa8\x82\x86\x7d\x7d\x74\xf7\x84\xf9\xdf\x8f\x3c\
\xe1\x6a\x04\x5f\xf8\xf0\x51\x22\x46\x98\xbc\x7c\xc1\xed\x77\x97\
\x70\xf0\x48\x13\x77\xfe\xf8\xde\x0c\x04\x57\x15\xcd\xa4\xa6\x74\
\x3e\xa6\x65\x20\x91\x74\x77\xe8\x48\x24\xd7\xd7\x5c\xe7\xe8\xd2\
\x5e\xf2\x93\x23\x7c\x24\x2e\x37\x2f\x4b\x71\x32\x7e\xb9\xe4\x06\
\x91\xf6\x71\xba\x27\x10\x28\x42\xa1\xf9\x60\x3f\x15\xe5\x45\x2c\
\xb9\x7e\x89\xa3\x51\xee\x8e\x9c\xa3\xb1\xbd\x1e\x81\x40\x5a\x50\
\x50\xa8\x32\x73\xf6\x38\xde\xfb\x70\x3f\xef\x37\xd6\x67\xd4\x59\
\xb3\xf0\xa7\x58\xc4\xd0\x54\x85\x73\x6d\x61\xf4\x98\xc1\xbc\x4f\
\xcd\xc9\x70\x9f\x84\x14\x5c\xea\x93\x6b\x37\x2b\xeb\xc0\x30\xd9\
\x1d\xcb\xdd\x8c\xa1\xc8\x20\x51\x3a\x74\xb4\xc5\xd0\x63\xb0\xe2\
\xe6\xbf\x72\x8d\x25\xde\x39\xf1\x4f\xa8\xc2\x13\xaf\x7f\xa1\x89\
\x09\x93\x7d\x9c\x3c\x16\xe5\x9d\x8f\xfe\x95\x45\x73\x6e\x4a\x19\
\x47\xb1\x7f\x3c\x4d\x3b\x4b\x68\x3a\x76\x8a\xed\xbf\x78\x99\xfe\
\xaf\xc7\x98\x33\x25\x95\x20\x01\x5f\x09\x9f\xab\x5a\xce\x80\x11\
\x41\x37\x23\xb4\x87\x5b\xe9\xe8\x3f\xe3\x28\x45\xb9\x8e\x43\x46\
\xc7\xcb\xca\x12\xba\xda\x75\x84\x80\x05\x33\xe7\xb9\x96\x69\xee\
\xfc\x00\x55\xa4\xba\xbf\x25\x65\xf1\xe5\x00\x07\x8e\x35\x38\x26\
\x0d\xe7\x55\xcd\xa7\xe1\x2f\x1f\x33\xa9\x64\x32\x25\x53\xca\x33\
\xda\x2c\xcb\x9f\xc8\xaa\x05\x7f\x1b\x4f\xec\x4a\x8b\xff\x77\xe2\
\x65\x5e\x39\xf4\xf4\xd5\x65\x43\x86\x23\x0f\x94\xfa\x72\x99\x71\
\x85\x7a\x4d\xa4\x94\x54\x55\x4c\x74\x6d\xe7\x74\x4f\x33\x42\x28\
\x29\xf5\x0a\x02\x2a\x52\x4a\xce\x76\xb6\x3b\xd6\x99\x3b\xf5\x33\
\x59\x05\x76\x82\x78\x46\x41\x49\x6b\xff\xe2\x75\x15\x24\x17\x73\
\xc1\x11\x8a\x48\x55\x51\xc2\xc5\x7d\x0c\xf7\x59\x08\x21\xa8\x99\
\x3c\xd3\xb5\x2d\xc3\x32\x33\x8c\xb7\x3f\x5f\x41\x08\x41\x6b\xc7\
\x19\xc7\x3a\xd3\x26\x4d\xc6\x32\xa1\xc0\xe7\x4f\x89\x57\x06\x57\
\xaf\xc2\xd9\xf0\xcb\x51\x9a\xa0\x4a\xcf\x69\x5d\x79\x2c\x22\x2e\
\xe9\xd3\x87\x43\x71\x64\x57\x96\x95\x39\x47\xef\x66\xd4\x11\x91\
\xaa\x27\x9e\xfe\x08\x47\x23\xce\x12\x32\xb3\x9a\xaf\xfd\x4d\x25\
\x13\xef\x9b\x45\x59\x51\x11\x47\xfe\xe1\x50\xaa\x1b\xdd\x77\x8a\
\x7f\x3e\xf8\x0c\x9a\xe2\xa3\xc0\x57\x48\x47\xf4\x63\x67\x82\x58\
\x57\x81\x0d\x19\x49\xd0\x63\xf1\xc1\x96\x14\x16\x3b\x3f\x37\x07\
\x5c\x93\x4c\x9a\x4f\x10\x89\x39\x3f\x2f\xce\x1b\x8f\xc7\xa3\x90\
\xe7\xc9\xc7\x32\x32\x35\x74\xc4\xe8\xe3\x40\xc7\x9b\xa9\x32\xe2\
\xb4\x0a\x67\xd8\xd5\xf8\x20\x5e\x56\x6e\xd7\x65\x65\x17\x80\x99\
\x46\xbc\x63\xf9\xde\x80\xab\x84\xb8\x81\xd7\xab\x10\xee\x77\x5e\
\x87\xec\x55\xf3\x2e\xa6\x0c\x84\x73\xf7\x44\x36\x19\xe2\xd1\xcc\
\xf6\x26\x62\x91\x5c\xac\x5c\x8c\xe3\x41\xa4\x7d\x77\x20\x88\x39\
\x78\xf4\xac\x9b\x51\x57\xd2\xfa\x7c\x0a\xba\x61\x3a\x3e\xd3\x54\
\xef\xc5\xf5\x73\x0e\xed\x0b\x57\xab\x96\x8d\xea\xcd\xb5\x2a\x1f\
\x64\x3e\x64\xa4\x67\x0c\x0d\x43\x0e\x3a\xdf\x1e\x1b\x54\x65\x29\
\x18\xba\x85\x69\x99\x19\x59\x61\x4d\xf1\x5d\x94\x84\x2b\xc8\x47\
\xe5\x1a\x1f\xc9\x11\xfb\x88\x67\x7b\xb3\x51\xc0\x8a\x10\xf6\x4c\
\xe0\x50\xb8\x53\x42\xea\xaa\xc7\x61\xe2\xea\x38\x3e\x86\x3f\x52\
\x57\x46\x42\x34\xb3\x01\x55\x15\xe8\x86\x3b\x07\xda\xb6\xc0\x81\
\xde\x7a\xcc\x42\x55\xe3\x3b\x91\xd2\x07\x69\x5a\xc6\x05\x0c\x08\
\x57\x09\x1b\x2d\xc8\x5a\x65\xb9\xcd\x66\x5d\x49\x2e\x2b\xcd\x8a\
\x3a\xbb\xaf\x03\xee\x8c\xa0\xa9\x3e\xc7\x7a\x08\x81\x11\x13\x68\
\x9a\xc7\x71\x90\xba\x15\x03\x71\x31\x8b\xe6\x4a\xd5\x51\xca\xf6\
\x66\x6d\x43\x92\xb3\xb5\x23\x25\x21\x08\x88\x9a\x11\x7c\x6a\xe6\
\x3c\x88\x6d\x0b\x5c\x5c\x66\xaf\xe6\x71\x71\x06\x62\xb9\x41\x20\
\x16\xb9\x44\x47\xd6\xd9\xde\x5c\x4a\x87\x9b\x4d\x72\x4a\x6f\xfb\
\xfc\x71\x75\x73\xae\xfb\xbc\x8b\xca\xf2\x39\xd6\x13\x02\x62\x31\
\x49\x9e\xe6\x75\xce\x00\xc4\x82\x17\xd3\xfe\xd2\x0d\xd9\x97\xfe\
\x0c\x47\xb6\x77\x48\x81\x61\xee\x88\x92\x39\x43\xa8\x38\x98\xae\
\xfc\x02\x15\x45\x51\xe8\xea\x09\x52\x5d\x3e\xd5\x51\x65\x09\xe9\
\x10\xb4\x49\xe2\xa9\x91\x7c\x67\x1b\xd3\x1b\xed\x8e\xef\xd8\x12\
\x02\xb7\x89\x41\x25\x8b\x94\x9e\xcc\xf1\x14\xee\x90\x92\x8b\x39\
\x9d\x0f\x11\xd9\x39\x0c\xfe\xfc\xb8\xbb\xda\xda\xd9\x36\x88\x1d\
\xf1\x66\x1a\x6d\x23\xfe\x8e\xc2\xbc\x80\x0b\x41\x3a\x93\xe2\x53\
\x71\x45\x4a\x2b\xd7\x30\x6a\xb9\xac\x6c\xa0\x70\x5c\x9c\x20\xc7\
\x5b\x5b\x5c\xcb\x54\x17\xcf\xe2\x48\xc7\x47\x29\x29\xf8\x70\x28\
\x1e\x10\x56\x8f\x9f\xe2\x58\xa7\x2d\xd4\x92\x23\xe4\x31\xba\xb9\
\x2c\xb7\x55\x11\x97\x3d\x9a\x34\x9d\x9d\x0e\x25\xe5\x1a\x48\xc9\
\xfe\xa3\x07\x5c\x9b\x99\x51\x32\x9f\xa6\xf6\x0f\x52\x54\x4c\x7f\
\xc8\x02\x29\x19\x5f\x52\xea\x58\xe7\x6c\xdf\x09\xdb\x06\x38\x8e\
\x27\xcb\x4d\xa5\x72\x98\x56\x9d\x64\x6d\xd4\x73\x16\x8b\x08\x01\
\x8a\x62\x5f\x82\xf8\x2e\xdc\xf4\x4f\x69\xb9\x06\x8a\xc2\xa1\x13\
\xcd\xae\x1c\xf4\xb9\xaa\xe5\x58\xd2\x4c\xa9\x17\xec\x34\x40\x51\
\x58\x38\x6b\x81\xe3\x38\x4e\x06\x9b\xe2\x04\x54\x04\xc2\x61\xb5\
\x8b\xa6\xe6\xa1\xc8\xe4\x16\x9d\xfb\x37\x12\x69\x93\x41\x6d\x48\
\x32\x05\x47\x62\xa1\x9c\xd7\x27\x98\x32\xcd\xc7\xc1\xa3\xcd\x1c\
\x3d\xdb\x9c\xd1\x61\x29\x25\x13\x0a\xab\xa9\x2c\x9c\x9a\x82\xf0\
\xf3\xad\x31\x2c\xcb\xa2\xae\xf6\x73\x19\x03\x6e\x09\x1e\xe6\x7c\
\xe8\x54\xdc\xad\x56\x04\xb1\x68\x66\x02\x72\x42\x41\x15\xba\x15\
\x45\x37\x74\x4e\x9f\xee\xa3\x37\xe8\x9c\x13\x93\x96\xcc\xe9\x42\
\xb9\xac\x25\xc4\x2d\x1e\xb9\xa2\x55\x27\xe9\x04\x72\x98\x90\x33\
\x0d\x58\xb0\x28\x80\x47\xd5\xf8\xbb\xff\xf3\xb8\x2b\x47\xdd\x32\
\xf5\x2e\x4c\xcb\x44\x51\x05\xbd\x41\x83\xa3\xcd\xbd\x2c\xbd\x71\
\x11\xb5\xd3\x6b\x33\xea\xbc\xd6\xf8\x1b\x3c\x8a\x06\x12\xbc\x3e\
\x85\x48\x34\x13\xd9\xaa\xe2\xe1\x3f\xd7\x3d\xcb\xd4\xe8\xdd\xac\
\xb9\xfe\x67\xac\xff\xeb\x67\x1d\xfb\x37\x1c\xab\x4e\xb2\xb6\x21\
\x09\x24\xe6\x26\xdb\xeb\xb0\x3a\x5d\x38\xc7\x02\xf9\x85\x2a\x53\
\xa7\x17\xb0\x6d\xdf\x7b\x1c\x6d\x3d\x4a\xcd\xa4\x9a\x8c\x72\x77\
\xd4\xac\xa4\xab\xff\x2c\xbf\xdd\xf9\x02\x3b\xdf\xe8\xe3\xcb\x75\
\xb7\xf3\xcc\x7f\xf8\x5f\x19\xe5\x4e\x74\x1f\xa2\xa9\x73\x9f\xbd\
\xce\xb7\xb8\xd4\x43\x4f\xb7\x41\xd3\xe9\x26\x66\x4d\x99\x95\x32\
\xce\x69\xa5\xb3\x59\xf7\xb5\xd9\x00\x9c\xea\x69\x1a\x24\xa0\x1f\
\xfe\x94\x92\x32\x98\x77\x95\x2b\x09\x89\x13\x45\x5c\xdc\x56\xe0\
\x70\xd9\x8b\x12\x6e\x28\x04\x09\x3f\xfb\xed\x53\xae\xb6\xe4\xfe\
\xf9\xff\x89\x8d\x5f\x7b\x9d\xc3\xcf\xed\x64\xf3\x0f\x7f\x8d\x4f\
\xf3\x65\x38\x0c\x6f\x1c\x7d\x21\xe5\x7d\x53\x6b\xf2\x40\xc2\x3f\
\xed\x7c\x6d\x50\x5b\x29\x50\x1c\xfb\x97\x58\x75\x32\xe2\xeb\xb2\
\x46\xc3\xe5\x4d\x86\xf2\x4a\x8d\x1b\x6e\x19\xc7\x2b\xdb\xff\xcc\
\xaf\xfe\xb4\x29\x13\x61\x17\xbe\x57\x8f\xaf\x62\x52\x49\x55\xc6\
\x80\x04\x82\x57\x0e\xfd\x82\x03\x6d\xef\xa6\xdc\x9f\x54\xed\xe3\
\xc6\xc5\xe3\x78\xea\xa5\x67\xf8\xf5\x96\xe7\x1c\x6d\xd4\xbb\x2d\
\x7f\xe0\xf9\x0f\x1f\xcd\xc6\x51\xcc\x99\xdb\x9b\x55\xfa\x3d\x97\
\xfb\x43\x86\xec\x0e\x9a\x92\x9a\xb9\x7e\x06\x06\x8a\xf9\x2f\x9b\
\xfe\x3b\x05\xbe\x00\xff\xee\xb6\xfb\x5d\x83\xba\x64\xc4\x1a\x96\
\xce\x96\xe6\x7f\x60\xdb\xf1\xdf\xc5\x6d\x47\xca\x98\x24\x33\x66\
\xfb\x99\x3e\xd3\xcf\x96\x8f\x7f\xc1\x9f\x36\x6e\xe4\xf3\x73\x6f\
\xa4\xbc\xa8\x9c\x96\xee\xc3\x9c\xed\x6b\xc1\xa3\x78\xf0\x28\x5e\
\x67\xd5\x34\x4c\x33\x86\x23\x1e\x87\x88\xcb\xb0\x39\x52\xc2\xdc\
\xeb\xf3\x99\x5c\xed\xe3\xd1\x17\x1f\xe7\xb7\x6f\xfc\x0b\xcf\xae\
\x7b\x8a\x89\xa5\x93\x5c\xeb\xed\x6f\x7d\x9b\xd7\x9b\x9f\xe5\x6c\
\xa8\x25\x83\x18\xc9\xed\x0a\x05\xa6\x54\x15\x02\x92\x93\xe1\x83\
\x9c\x0c\x4b\x04\xf1\x39\xf7\xc1\x6c\x84\x35\x42\xab\x4e\x94\x6c\
\xf3\x2d\x23\xb9\x3f\x24\x81\xc0\xe2\x32\x0f\x7f\xfd\xb5\x32\x2a\
\xae\x3b\xc3\xc2\x87\x3f\xcf\xb2\x75\x77\xd3\xda\x75\xd6\xb1\xfc\
\xef\x0e\x3e\xc1\xb9\xf0\xc9\x21\xec\xa0\x12\x17\xd6\x61\xa9\x17\
\xed\xdb\x10\x82\xdb\x2b\xc5\x99\x5b\xc4\x7e\xc9\x19\x43\xd3\x34\
\x87\x1c\xfe\xa7\xa4\xbf\x0d\x23\x65\xbf\xbd\x69\x19\x08\x4b\xb8\
\xe7\x8b\x64\x6a\xe6\xc8\x30\xa1\xb0\x04\x66\xce\x2e\x66\xf7\x9e\
\x8f\x78\x63\xef\x36\xbe\x79\x47\xe6\x89\x77\x9f\xa9\x58\xc4\xfb\
\xa7\x5e\xbf\x44\xe6\x49\x66\x9d\xaa\x92\x69\x37\x23\xfd\x11\xfa\
\xfb\x8d\xdc\x79\x53\x8a\x92\xbd\xca\xb2\x2c\x0b\x5d\xd7\x09\x87\
\xc3\x57\x2c\xa6\x51\xdd\x40\x58\xa9\xf3\x13\xf2\x32\xdc\xc7\x92\
\xf1\x60\x18\x06\xef\x35\xec\xe5\x9b\x77\xac\x8c\xa7\xde\x93\x14\
\xe2\x8a\x4f\x7f\x93\xe6\x8e\x0f\xe8\x8b\x06\x31\xa4\x8e\x31\xc8\
\x2a\x95\x6c\x74\x6b\xfa\x84\x56\xb4\xdf\xc0\x30\x72\x47\x10\xc3\
\x30\x18\x18\x18\xc8\x38\xb5\xcf\x51\xbe\x63\xb1\x18\xb1\x58\x2c\
\x67\x36\x24\xd9\xed\x4d\xec\xeb\x1b\xea\x35\xb1\xca\x87\xd7\xab\
\xf0\xcf\x17\xdc\xd6\x74\x8e\x1e\x5f\x30\x85\xbf\xbb\xf5\xb7\xfc\
\xd7\x25\xbf\xe3\xce\x4f\xaf\xc6\xb0\x62\x43\x7b\x47\xc6\x2a\x7d\
\x71\x31\x1e\x53\xa0\xaf\x2d\x77\xc4\x18\x72\x1c\x62\x9a\x66\x4e\
\xf7\x87\xe4\x2a\x3f\x79\xcb\xed\xc5\xb4\x77\xf5\xf1\xf7\x2f\x6c\
\x70\xd4\xf9\x5e\x4f\x1e\x25\xf9\x15\x14\xfa\x8a\x73\xb6\x8d\x59\
\x51\x05\x67\x0f\x45\x31\x42\x66\x4e\xe3\x10\xb7\x88\x5d\x19\x2e\
\x63\x6e\x1b\xaf\x1c\x05\xb7\x96\x25\x99\x54\xed\xa3\xf6\xb3\x25\
\xfc\x8f\xcd\x9b\x78\x6b\xff\xf6\xac\xed\xd8\x15\x31\x82\x80\xc3\
\x2f\xf6\xa2\x78\x73\x8b\x97\xcb\xf2\xb2\xae\x26\x48\x48\xc4\xe7\
\x97\x14\x31\x77\x41\x80\x6f\x3c\xb6\x9a\x97\x77\xbd\x44\x7b\xef\
\x79\x42\x03\xbd\xe8\x66\x94\xa8\x11\x21\xa2\x87\xe9\x8f\xf5\x72\
\xa5\x9c\x90\x60\xa6\x13\x6f\xf7\x13\x0b\x9a\x23\x36\x4e\x8f\x7b\
\xfe\x49\xe4\x26\x77\x93\x63\x0e\xb6\x2c\x58\x78\x4b\x80\x79\x0b\
\x0b\xf8\x7d\xf3\x63\xfc\xf9\xf4\x46\x4a\x03\x25\xf8\x7d\x7e\x4c\
\xcb\x24\x66\x46\x08\xc7\x7a\xd1\x14\xef\x15\xbd\x47\xf3\x2b\x9c\
\xd8\xd5\xcf\x91\x7f\x09\x0d\x4b\x0e\xcb\x0d\xbf\x1e\xae\x41\x90\
\x16\x68\x5e\x85\x12\x6f\x1e\x10\x25\x18\x6b\x23\x18\xbb\xcc\x44\
\xe0\x85\x55\x49\x52\x82\x11\x95\x98\x31\x0b\x23\x22\x39\xb6\xbd\
\x9f\x33\x3b\x22\xa8\x9a\x18\x56\x89\x77\x3a\xb7\x77\x78\x25\xe4\
\x6a\x52\x7b\x0a\x08\x35\x6e\xa8\x0d\x43\xd2\x73\x46\xa7\xb7\xc5\
\xa0\xe7\xb4\x4e\xff\x19\x13\xbd\x5f\xa2\xf7\x5b\xc4\xfa\x2c\xa4\
\x89\x4d\x8c\xe1\xc2\x43\x62\x0d\xf5\x35\x2f\x21\xd9\xc6\x13\x66\
\x4c\xda\x57\x34\x64\x11\x3c\x65\xd0\x73\x4c\xa7\xfb\x63\x9d\xc8\
\x19\x23\xce\x74\x4a\x9c\x50\xe9\xd6\x54\x0c\xb3\x75\x1d\x35\x09\
\x91\x62\xf8\xce\x98\xb4\x39\x5e\x11\x98\x52\xd2\x7b\xd6\xa0\xaf\
\xc5\xa0\xe7\x8c\x41\xf8\xb4\x41\x2c\x2c\x31\xc2\x16\xb1\x7e\x89\
\x19\x89\xaf\xe1\x4a\xa8\x28\x4f\xde\xe8\xfb\x33\x59\xdb\x90\x5c\
\x8a\x68\x7e\x97\x42\xa4\xd8\xe2\xb2\xd7\x9e\x27\x73\xbc\x2e\xb1\
\x62\x12\x53\x97\xc4\xfa\x25\xc1\xd3\x3a\xbd\xc7\x0c\xba\x3e\xd6\
\xe9\x3f\x69\xc6\xc3\x39\x9b\xe3\x45\x4a\xf4\xa8\x5c\x65\xba\xc0\
\x8d\xe9\x3d\x43\xa5\xe0\x50\x61\xdc\x79\x15\x8f\x2e\xe8\xad\x30\
\x11\x52\x64\xc5\xf1\x42\x11\x48\x24\xbd\xe7\x4d\x42\x1f\x27\x38\
\xde\x24\xd6\x67\xd9\x1c\x6f\xf4\x4b\x7b\xa9\x95\x10\xe0\xf1\x89\
\x1c\xe4\x9a\x47\x1f\x5c\x93\x8b\xb9\x5b\x75\x02\x05\xdd\xf1\xb3\
\xdc\x43\x65\x16\xc9\x7b\x2a\x2d\x23\xae\xdf\x2d\x5d\xa2\x0f\x48\
\x7a\x5a\x8d\x38\xc7\x9f\xd4\x09\xb7\x98\x60\xe2\xaa\xe3\x15\x95\
\x6b\x1e\xb2\x5e\x28\xe7\x66\x70\xae\x84\x28\x85\xdd\x2a\xaa\x2e\
\x68\xff\x4b\x8c\x70\x9b\x49\xf8\x94\x49\xb4\xcf\x42\x0f\x49\xf4\
\xb0\x85\x1e\x96\x71\x5b\xa3\xc4\x39\x5e\xf5\x88\x4f\xa2\xcb\x91\
\xa1\xb2\xb2\xf6\xb2\x72\xed\xea\x49\x01\xfe\x90\xca\xa1\x5f\x87\
\x3e\xb1\x1c\x9f\x0b\xc3\x3e\xb2\x71\x88\x00\x45\x13\x8c\x81\x7b\
\xa4\xae\x8c\xa1\xe6\x1a\x30\xea\x39\xb7\x21\x63\x70\x65\x6e\xaf\
\xc8\x72\x8b\x17\x80\xaa\xaa\x68\x9a\x86\x65\x59\xf6\x74\x6f\x2e\
\x37\x5a\x5e\x6d\x5e\x50\x2e\xeb\x0d\x29\xb9\xe8\xa6\xe3\x2c\xcb\
\x62\x60\x60\x80\x3b\xef\xbc\x93\x3b\xef\xbc\x93\xf1\xe3\xc7\x13\
\x08\x04\x88\xc5\x62\x04\x83\x41\x76\xef\xde\xcd\xee\xdd\xbb\xd1\
\x34\x2d\xc5\x8b\x70\xfa\x9b\xed\xb3\xa1\x96\x4b\xf4\x33\x19\x41\
\x6e\xe5\xae\xf4\x5d\x4e\x7f\x13\x53\xe0\xb1\x58\xcc\x95\x39\xdd\
\xee\x7b\xb2\x49\x78\x25\xc3\x67\x3f\xfb\x59\x7e\xf9\xcb\x5f\x52\
\x96\x76\x26\x49\x22\x76\xb9\xf5\xd6\x5b\xe9\xed\xed\xe5\xe9\xa7\
\x9f\xe6\xe8\xd1\xa3\x19\x03\x4f\xb4\x9d\x3e\x5b\x96\x3e\xd8\xc1\
\xbe\x67\x5b\x6e\xb0\xff\xdd\xbe\x5b\x96\x95\x72\x88\xdb\x60\xfd\
\x75\xeb\x97\xc7\xe3\xc1\xeb\xf5\xe2\xf7\xfb\xe9\xef\xef\xb7\x67\
\x60\xb3\x91\x24\xe5\x52\xfa\x2d\x71\x19\x86\xc1\x8f\x7e\xf4\x23\
\x5e\x7a\xe9\x25\x9b\x18\xe9\xcb\x45\x13\x10\x08\x04\x58\xb7\x6e\
\x1d\x5f\xff\xfa\xd7\x5d\x17\x06\xe4\xe7\xe7\x53\x57\x57\x47\xe2\
\x07\x2d\xaf\xc6\x20\x2d\x5d\x33\xdc\x7a\xeb\xad\x8c\x1f\x3f\x3e\
\x2b\x35\x24\xa5\x44\x51\x14\x0a\x0a\x0a\xc8\xcb\xcb\x73\x9d\xcb\
\x1f\xb2\x97\x95\xa8\xf4\xfd\xef\x7f\x9f\x55\xab\x56\xd9\x1d\xaf\
\xaf\xaf\x67\xf5\xea\xd5\x6c\xdd\xba\xd5\x2e\x6b\x18\x06\x3f\xff\
\xf9\xcf\x59\xb3\x66\x0d\x07\x0e\x1c\xe0\x8e\x3b\xee\xe0\x9e\x7b\
\xee\x71\x1c\xec\x8a\x15\x2b\xf8\xce\x77\xbe\xc3\x4d\x37\xdd\x34\
\xea\x84\x50\x14\x85\x69\xd3\xa6\x0d\x5a\x6e\xee\xdc\xb9\x6c\xd8\
\xb0\x81\x87\x1e\x7a\x68\xc8\xf6\xc1\xe7\xf3\xe1\xf3\xf9\x2e\xcf\
\xcb\xba\x78\x02\xb4\xb0\x7f\x20\x6c\xce\x9c\x39\x7c\xef\x7b\xdf\
\x4b\x79\xd1\xb9\x73\xe7\xf0\xf9\x7c\x4c\x99\x32\xc5\xae\xf7\xf4\
\xd3\x4f\xd3\xd8\xd8\xc8\xda\xb5\x6b\x99\x37\x2f\x7e\x32\xdc\x3d\
\xf7\xdc\xc3\xfe\xfd\xfb\x69\x69\x69\x49\xe9\xe4\xce\x9d\x3b\x89\
\x46\xa3\x1c\x3c\x78\x70\x54\x09\x12\x8d\x46\x79\xe8\xa1\x87\xb8\
\xf9\xe6\x9b\x79\xe0\x81\x07\x5c\xcb\x1d\x3f\x7e\x9c\x27\x9f\x7c\
\x92\x03\x07\x0e\x5c\x56\xe0\xe7\xf3\xf9\x30\x0c\xc3\x79\x01\xfa\
\x50\x24\xc4\x30\x0c\x36\x6d\xda\x94\x71\xff\xdc\xb9\x73\x48\x29\
\xa9\xac\xac\x44\x4a\xc9\x0b\x2f\xbc\x40\x53\x53\x13\xab\x57\xaf\
\x66\xde\xbc\x79\x29\x52\xb1\x76\xed\xda\x94\x05\x77\x7e\xbf\x9f\
\x50\x28\xc4\x5b\x6f\xbd\x45\x38\x1c\x76\x7c\xaf\xa6\x69\x68\x9a\
\x86\x94\x92\xbc\xbc\x3c\x02\x81\x00\xf9\xf9\xf9\x59\x9e\xb1\x1b\
\x7f\x47\x20\x10\xa0\xa0\xa0\xc0\x9d\x1b\x3d\x1e\xaa\xab\xab\x59\
\xb9\x72\x25\xcd\xcd\xcd\xe4\xe7\xe7\xe3\xf7\x67\xee\x91\xf7\xf9\
\x7c\x78\xbd\x5e\xb6\x6c\xd9\xc2\xf1\xe3\xc7\x07\x95\x36\x55\x55\
\x29\x2c\x2c\x64\xdc\xb8\x71\x14\x16\x16\xda\x2a\x59\x08\xe1\xd8\
\x76\x56\x46\x3d\xf9\x5a\xb4\x68\x11\x13\x26\x4c\xc8\x48\x38\xb6\
\xb5\xb5\x21\x84\xa0\xa2\xa2\x82\x17\x5f\x7c\x91\x37\xdf\x7c\x93\
\x07\x1e\x78\x80\xba\xba\xba\x0c\x5d\x5a\x52\x52\xc2\xdc\xb9\x73\
\x69\x6e\x6e\xa6\xa4\xa4\x84\xa7\x9e\x7a\xca\x26\xc4\xb7\xbf\xfd\
\x6d\x47\x3b\x72\xef\xbd\xf7\xe2\xf1\x78\xe8\xea\xea\xe2\xab\x5f\
\xfd\x2a\x7e\xbf\x9f\x48\x24\xc2\x9b\x6f\xbe\xc9\xb3\xcf\x3e\x3b\
\x28\x92\x1f\x7e\xf8\x61\x6e\xb9\xe5\x16\x2c\xcb\xc2\xe3\xf1\xb0\
\x67\xcf\x1e\x1e\x7d\x34\x75\x55\x7b\x2c\x16\xe3\xc1\x07\x1f\xe4\
\xde\x7b\xef\xb5\x55\xe8\xb2\x65\xcb\xa8\xaf\xaf\xe7\x87\x3f\xfc\
\xa1\x5d\x2e\x12\x89\xb0\x71\xe3\x46\x6a\x6a\x6a\x88\x46\xa3\xfc\
\xf8\xc7\x3f\x66\xff\xfe\xfd\x19\xef\xd5\x75\x9d\xeb\xae\xbb\x8e\
\x47\x1e\x79\x84\xda\xda\x5a\x2c\xcb\x42\x55\x55\x36\x6c\xd8\xc0\
\x1f\xff\xf8\x47\x9b\x58\xaa\xaa\xda\x8e\xc3\x65\x49\x48\x42\xc7\
\x27\x57\x94\x52\xd2\xda\xda\x4a\x75\x75\x35\x3b\x76\xec\x60\xfb\
\xf6\xed\xdc\x77\xdf\x7d\x2c\x5e\xbc\x38\x73\x7b\xc0\x85\x7a\xb3\
\x67\xc7\x37\xc4\x74\x77\x77\xb3\x6a\xd5\x2a\xf2\xf3\xf3\xe9\xec\
\xec\x74\xfd\xbd\xdd\xea\xea\x6a\x96\x2c\x59\xc2\xf2\xe5\xcb\x79\
\xf0\xc1\x07\x59\xbe\x7c\x39\x67\xce\x9c\xe1\xae\xbb\xee\xc2\xe3\
\xf1\xb8\x7a\x2d\x6b\xd7\xae\xe5\xb6\xdb\x6e\x63\xe3\xc6\x8d\x7c\
\xe5\x2b\x5f\x61\xf3\xe6\xcd\xd4\xd5\xd5\xb1\x64\x49\xea\xd1\xb3\
\x5e\xaf\x97\x4d\x9b\x36\xb1\x79\xf3\x66\x00\xd6\xad\x5b\xc7\xd2\
\xa5\x4b\x59\xb7\x6e\x5d\x86\xa4\x3d\xfc\xf0\xc3\xd4\xd7\xd7\x53\
\x5c\x5c\xcc\xd9\xb3\xce\x6b\x8b\x67\xcf\x9e\xcd\xf3\xcf\x3f\x4f\
\x51\x51\x11\x2b\x56\xac\xa0\xb6\xb6\x96\x65\xcb\x96\xd9\x2a\x2e\
\x81\x87\x4b\x39\x31\x97\xf4\xb2\x26\x4c\x98\x90\xf1\x3c\x18\x0c\
\xd2\xd7\xd7\xc7\xc0\xc0\x00\x9b\x37\x6f\x66\xd9\xb2\x65\xdc\x75\
\xd7\x5d\x83\xfa\xd7\x25\x25\x25\x36\xd2\x4a\x4b\x4b\x51\x55\x95\
\x8f\x3e\xfa\xc8\xb5\x83\x93\x26\x4d\xa2\xa0\xa0\x80\xf5\xeb\xd7\
\x13\x0e\x87\xf1\x7a\xbd\xec\xde\xbd\x1b\x80\xb2\xb2\xb2\x0c\x82\
\x18\x86\xc1\x82\x05\x0b\x58\xbc\x78\x31\x7f\xf8\xc3\x1f\xd8\xb2\
\x65\x0b\x3e\x9f\xcf\x56\x31\x45\x45\x45\x8e\x04\xac\xae\xae\x06\
\xe0\xe4\xc9\x93\x83\xaa\xa2\xa9\x53\xa7\x62\x9a\xa6\x63\x39\x29\
\x25\x3f\xf8\xc1\x0f\x6c\xf5\xdc\xd9\xd9\x49\x61\x61\x21\x5d\x5d\
\x5d\x9c\x38\x71\x22\x15\xe1\x8a\x32\x74\x2f\x2b\xb9\xe0\xb8\x71\
\xe3\x32\x9e\x1f\x3a\x74\x28\xbe\xd1\xb2\xae\x8e\x68\x34\x3a\xe8\
\x82\xec\x04\x24\x74\xb9\x69\x9a\xd4\xd6\xc6\xf7\x03\x1e\x3c\x78\
\x30\x83\x20\x09\x44\x4f\x9c\x38\x91\x96\x96\x16\x4e\x9c\x38\x61\
\x3b\x17\x55\x55\x55\x00\xb4\xb7\xb7\x67\x1e\x32\xa3\xeb\xb6\xe3\
\xf1\xcc\x33\xcf\x50\x58\x58\x48\x5e\x5e\x1e\xb7\xdf\x7e\x3b\x96\
\x65\xd1\xd8\xd8\xe8\x38\xce\xaa\xaa\x2a\x2c\xcb\xe2\xd4\xa9\x53\
\x83\x06\x71\xd3\xa6\x4d\xe3\xd0\xa1\x43\x19\x63\x95\x52\x52\x54\
\x54\xc4\x8d\x37\xde\xc8\x87\x1f\x7e\x48\x73\x73\xb3\x6d\xe7\x14\
\x45\xc9\xb0\x79\xc9\xef\xb8\xac\x19\xc3\x60\x30\x98\x71\xbf\xb1\
\xb1\x11\x55\x55\x59\xba\x74\x29\x07\x0e\x1c\x60\xdb\xb6\x6d\x2c\
\x5d\xba\x94\xf2\xf2\x72\xd7\xb6\x42\xa1\x90\x4d\x90\x84\x07\x96\
\xdc\xf9\x74\x6e\x04\xd8\xb1\x63\x07\x9a\xa6\xd9\xf5\xae\xbf\xfe\
\x7a\x82\xc1\x60\x4a\xf0\x96\x80\xc9\x93\x27\x53\x51\x51\x41\x24\
\x12\xe1\x27\x3f\xf9\x09\x81\x40\x80\xea\xea\x6a\xda\xda\xda\xd8\
\xb0\x61\x03\x0d\x0d\x0d\x78\x3c\x9e\x8c\xd8\x62\xfa\xf4\xe9\x34\
\x36\x36\xa2\xeb\xba\xfd\xae\xf4\x32\x09\x29\xaa\xaf\xaf\xcf\x60\
\x20\xc3\x30\x58\xbe\x7c\x39\x00\x2f\xbd\xf4\x12\x89\xdf\xa5\x1f\
\x4c\xda\x06\x4b\x2d\x79\x2e\x15\x24\x39\xe9\xcc\xc3\x87\x0f\x53\
\x59\x59\x89\xaa\xaa\x2c\x5e\xbc\x98\xe6\xe6\x66\xde\x7e\xfb\x6d\
\xee\xbb\xef\x3e\xd7\x17\x76\x74\x74\xd8\x03\xbc\xee\xba\xeb\xe8\
\xea\xea\xc2\x34\xcd\x0c\xc4\x26\x10\x0f\xa4\xa8\xb4\xd2\xd2\x52\
\xc6\x8d\x1b\x47\x43\x43\x83\x63\x36\x21\xc1\x0c\x67\xce\x9c\x61\
\xd7\xae\x5d\x74\x74\x74\xd0\xd0\xd0\x40\x30\x18\xc4\xe3\xf1\x64\
\x10\x43\x4a\xc9\xc4\x89\xf1\x33\x82\xf7\xec\xd9\x93\xf1\x3c\xb9\
\x3f\x8b\x16\x2d\x02\x60\xf7\xee\xdd\x19\x04\x31\x4d\x93\x19\x33\
\x66\x00\x70\xe2\xc4\x89\x4b\x7a\x81\xc9\x7b\x1b\x2f\x2b\x52\xaf\
\xaf\xaf\x4f\x69\x28\x12\x89\x10\x0e\x87\xed\xc1\xdc\x74\xd3\x4d\
\x04\x02\x01\xde\x79\xe7\x1d\x0c\xc3\x70\xdc\xbb\x97\x20\x62\x42\
\xff\xe7\xe7\xe7\xd3\xda\xda\xea\x8a\x80\xf9\xf3\xe7\x03\xa4\xa8\
\x91\xc9\x93\x27\xdb\x08\x1f\x6c\xa0\x47\x8e\x1c\xe1\xb5\xd7\x5e\
\xe3\xbd\xf7\xde\x23\x1c\x0e\x3b\x72\x7d\xfa\x7b\xf6\xec\xd9\xe3\
\x6a\xcb\x0c\xc3\xb0\x1d\x9b\xa6\xa6\xa6\x8c\x72\x42\x08\xa2\xd1\
\xa8\xad\x36\x5d\x4f\xbe\x4b\x4b\xcd\xb8\xd9\x5b\xe5\x52\xe9\xf7\
\x7d\xfb\xf6\x71\xe4\xc8\x11\xfb\x5e\xc2\xdd\x4d\x36\xf6\xab\x56\
\xad\x22\x18\x0c\xf2\xfb\xdf\xff\xde\xb1\xad\xd3\xa7\x4f\xd3\xd4\
\xd4\x64\x1b\x6b\x37\xc9\x4b\xc0\x9c\x39\x73\x6c\x82\x25\xde\x9b\
\x08\x40\xdd\x08\x72\xfe\xfc\x79\x5b\x05\x79\xbd\x5e\x54\x35\x7e\
\xba\x50\x79\x79\x39\x85\x85\x85\x99\x87\xd6\x98\x26\x73\xe7\xce\
\x4d\x41\xb4\xdb\x8f\x34\x2f\x58\xb0\x80\xd6\xd6\x56\xc7\xd5\xfc\
\xaa\xaa\xda\xc1\xed\x97\xbe\xf4\x25\x3b\x6f\xa5\x69\x1a\xaa\xaa\
\xa6\x48\x44\xe2\xd9\x60\x46\xfd\x92\x33\x86\x7e\xbf\x9f\xef\x7e\
\xf7\xbb\x6c\xdd\xba\x15\x55\x55\x39\x75\xea\x14\x91\x48\x84\x8a\
\x8a\x0a\xbb\xc3\xb5\xb5\xb5\x4c\x9f\x3e\x9d\xad\x5b\xb7\xb2\x64\
\xc9\x12\x2a\x2b\x2b\x53\x74\xf0\x13\x4f\x3c\x61\xeb\xd6\x04\x62\
\xdd\x08\x32\x69\xd2\x24\x54\x55\xa5\xad\x2d\xf5\x54\xa0\x84\x41\
\x77\x23\x48\x7b\x7b\x3b\xcf\x3d\xf7\x1c\xdf\xfa\xd6\xb7\x78\xfc\
\xf1\xc7\x39\x73\xe6\x0c\xe5\xe5\xe5\xcc\x9e\x3d\x9b\x35\x6b\xd6\
\x64\x72\xa2\xa2\xd8\x1e\xd8\x4f\x7f\xfa\x53\x0c\xc3\xe0\xf5\xd7\
\x5f\x67\xe7\xce\x9d\x29\xea\x6b\xca\x94\x29\xf8\x7c\x3e\x4e\x9f\
\x3e\xed\x1a\xf7\xbc\xf1\xc6\x1b\x7c\xe3\x1b\xdf\x60\xcd\x9a\x35\
\xcc\x98\x31\x83\x48\x24\xc2\xc2\x85\x0b\x59\xbf\x7e\x3d\xbb\x76\
\xed\xb2\xdd\x74\x5d\xd7\x6d\x95\xe6\x46\x10\x15\xb8\x41\xd3\xb4\
\xea\xbc\xbc\xbc\xda\x44\x47\xa7\x4d\x9b\xc6\xc0\xc0\x00\x3d\x3d\
\xf1\xdf\xb1\x0d\x87\xc3\x74\x76\x76\x52\x57\x57\x47\x69\x69\x29\
\x5f\xf8\xc2\x17\x98\x35\x6b\x96\x9d\x62\x07\x58\xb8\x70\x21\x75\
\x75\x75\x04\x02\x01\x3b\x6f\x23\xa5\xe4\xf9\xe7\x9f\xa7\xb1\xb1\
\xd1\x2e\x57\x54\x54\x44\x5f\x5f\x1f\xf5\xf5\xf5\xf4\xf6\xf6\x66\
\x74\x28\x10\x08\x20\xa5\xa4\xbe\xbe\x3e\x05\xf9\xa5\xa5\xa5\x74\
\x74\x74\xf0\xee\xbb\xef\xda\x2a\x22\x9d\x53\x0f\x1d\x3a\xc4\xbe\
\x7d\xfb\x28\x2b\x2b\x23\x10\x08\xd0\xd0\xd0\xc0\x63\x8f\x3d\x46\
\x5f\x5f\x5f\x66\x00\xa6\x28\x34\x36\x36\xd2\xd7\xd7\x47\x55\x55\
\x15\x67\xcf\x9e\x65\xfb\xf6\xed\x0c\x0c\x0c\xa4\x94\x4d\xb8\xcb\
\x3b\x76\xec\xa0\xa5\xa5\xc5\xd5\xf9\x79\xe5\x95\x57\x08\x87\xc3\
\x4c\x98\x30\x81\x68\x34\xca\xaf\x7e\xf5\x2b\xf6\xee\xdd\x6b\x4b\
\x89\xae\xeb\x29\x52\xaa\x69\x1a\xa5\xa5\xa5\x04\x83\x41\xba\xbb\
\xbb\x2f\xb6\x05\x3c\xe8\xf7\xfb\xbf\x50\x5c\x5c\xbc\x32\x41\xf1\
\x2f\x7e\xf1\x8b\x04\x83\xc1\x14\x9f\xdb\x30\x0c\xee\xbe\xfb\x6e\
\xd6\xaf\x5f\x9f\x75\xd2\xee\x37\xbf\xf9\x0d\x3b\x76\xec\xb0\x3b\
\x25\xa5\xc4\x30\x0c\x4c\xd3\x4c\x51\x11\xc9\x69\x6c\xd3\x34\x6d\
\x4e\x4a\xae\xa7\xeb\xba\x1d\x79\x3b\xa5\xc7\x93\xf7\x44\x26\x72\
\x46\x42\x08\x9b\xdb\xdd\xd2\xed\xb1\x58\xcc\xb6\x7d\x89\x54\x4d\
\x72\xfb\x96\x65\x11\x8b\xc5\x52\xfa\xe3\x96\x7e\x8f\xc5\x62\x29\
\xef\x4d\xd8\x1b\x5d\xd7\x6d\x07\x26\x39\xdb\x5d\x53\x53\xc3\xf1\
\xe3\xc7\x53\xd2\x31\x9e\x6c\x53\xd2\x1e\x8f\x87\x57\x5f\x7d\x95\
\xfa\xfa\x7a\x36\x6e\xdc\xc8\xf4\xe9\xd3\x1d\x0d\xa1\x65\x59\xb4\
\xb6\xb6\xf2\xe4\x93\x4f\xd2\xd9\xd9\x99\x51\x26\x91\x3e\x70\xdd\
\xb0\xa2\x28\x78\xbd\x99\x07\x95\xa5\x23\xd6\x35\xd2\x4d\xaa\x9f\
\xcd\xc9\x78\x9a\xa6\xa5\x44\xfe\x4e\x99\x06\x9f\xcf\x97\xd5\xae\
\xe2\x64\x97\x37\x79\xa2\x6a\x28\x27\xf4\x0d\x69\xc6\x50\x08\xc1\
\xd9\xb3\x67\xf9\xf2\x97\xbf\x4c\x6d\x6d\x2d\xb5\xb5\xb5\x4c\x9a\
\x34\x89\x8a\x8a\x0a\x42\xa1\x10\xed\xed\xed\x1c\x3b\x76\x8c\x63\
\xc7\x8e\x39\xba\x99\x83\xcd\xd4\x39\xfd\xbd\xd4\x0c\xa3\x5b\xbb\
\x43\xbd\x7f\xb9\xe5\x2e\x35\xbb\xe8\x34\x57\x74\xd9\x04\x71\x6b\
\x28\xc1\xe1\xcd\xcd\xcd\xb6\xe7\x94\xf3\x33\xb6\x3e\x01\x0b\x18\
\x2e\x85\x8f\x21\x4d\xe1\x66\xbb\x2e\x6b\x8c\x00\x57\x4e\xb8\x21\
\x67\x7b\xc7\x60\x78\xa5\x68\xc8\x71\xc8\x18\x8c\x2c\x51\xc6\x24\
\x64\x4c\x42\xc6\x60\x4c\x42\xae\x21\x4f\xcc\x55\x42\xc6\x8d\x1b\
\xc7\xac\x59\xb3\xc6\x30\x37\x4c\x90\x98\x39\x74\x54\x59\xe9\x01\
\x8f\x53\xae\x68\x0c\x72\x0b\x96\x65\x11\x8d\x46\x33\x16\x12\x7a\
\x00\x4b\x88\x8b\xfb\x64\x0d\xc3\x60\xdb\xb6\x6d\x63\x18\x1b\xa5\
\x58\xc4\x03\xe8\x40\x2c\x5d\x9c\xc6\x60\x94\x54\x19\x10\x16\x42\
\x0c\x8c\x79\x54\xa3\x0f\x52\xc6\x77\x16\x2f\x05\x26\x6a\x9a\x56\
\x21\xa5\xf4\x70\x2d\xee\x25\xbe\x76\xc0\x14\x42\xc4\x80\x5e\x45\
\x51\x3a\x54\x55\x3d\xa7\x69\x5a\x50\xd3\xb4\x88\xc7\xe3\x31\x00\
\xf9\xff\x01\xfa\x90\x4b\xa0\xc0\x4f\x7e\x35\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x3f\xd1\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x50\x00\x00\x00\x50\x08\x06\x00\x00\x00\x8e\x11\xf2\xad\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\x01\
\x95\x2b\x0e\x1b\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xd9\x03\x03\
\x0e\x36\x08\x21\xa9\x06\x34\x00\x00\x20\x00\x49\x44\x41\x54\x78\
\xda\x9c\xbc\x69\x98\x24\x57\x79\xe7\xfb\x3b\xb1\xe6\x9e\x59\x99\
\x59\x5b\xd7\xd2\x55\x5d\xbd\xaa\x17\x49\x2d\xb5\xf6\xd6\x0e\x12\
\x12\x18\x8c\x6d\x60\x7c\x19\x6c\x23\x1b\xcc\xb0\x08\xcf\x18\xdb\
\x63\x3f\x9e\xe5\xde\x67\xbc\x8c\xf1\x78\x7b\xbc\x8c\x3d\x36\xc6\
\xf6\x18\x06\x63\x1b\x0c\xd8\x80\x05\x5a\x11\x12\xdd\x48\xa8\x5b\
\xea\x7d\x51\x55\xd7\xbe\xe4\x16\xfb\x7a\xee\x87\xac\xcc\xae\x06\
\xc6\x73\x9f\x9b\x1f\xba\xb3\x9e\x38\x71\xe2\x8d\x13\x27\x22\xce\
\xef\xfd\xff\xdf\x14\xef\x7a\xd7\xbb\xa4\xe3\x38\xd4\x6a\x35\x5e\
\x7f\xfd\x75\x7c\xdf\x67\x61\x61\xc1\xfa\xe8\x47\x3f\x5a\x2c\x97\
\xcb\x84\x61\xc8\x8b\x2f\xbe\x88\xaa\xaa\x84\x61\x48\x18\x86\x74\
\x3a\x1d\xd6\xd7\xd7\xa9\xd5\x6a\x9c\x39\x73\x66\x75\x68\x68\xa8\
\xf8\xfe\xf7\xbf\x3f\x5b\xab\xd5\xf8\xf4\xa7\x3f\xcd\xf0\xf0\x30\
\x17\x2f\x5e\x64\x68\x68\x88\xf9\xf9\x79\x4c\xd3\xc4\x34\x4d\x4e\
\x9e\x3c\x19\x25\x49\x12\xfc\xe2\x2f\xfe\x62\x61\x78\x78\x98\x67\
\x9e\x79\x06\xc7\x71\xb8\x72\xe5\x0a\x83\x83\x83\x34\x1a\x0d\x86\
\x87\x87\xd9\xd8\xd8\x60\x6e\x6e\x0e\xdb\xb6\x3b\xb7\xdf\x7e\xbb\
\x78\xc3\x1b\xde\x50\xac\xd7\xeb\x3c\xf3\xcc\x33\xac\xac\xac\x90\
\xa6\x29\x8e\xe3\x50\x2a\x95\xb8\x7c\xf9\x32\x51\x14\xe1\xba\x2e\
\xf3\xf3\xf3\xb3\xef\x79\xcf\x7b\xb6\x3f\xf0\xc0\x03\x7c\xe9\x4b\
\x5f\xe2\xca\x95\x2b\x14\x0a\x05\x82\x20\x20\x8a\x22\x66\x67\x67\
\xa9\x56\xab\x34\x9b\x4d\x2e\x5d\xba\x74\xee\xae\xbb\xee\xda\xf9\
\xb3\x3f\xfb\xb3\xca\x73\xcf\x3d\xc7\x4b\x2f\xbd\xc4\xf4\xf4\x34\
\x97\x2f\x5f\x46\xd3\x34\x6c\xdb\x66\x72\x72\x92\x73\xe7\xce\xb1\
\xb8\xb8\xb8\xb4\xb2\xb2\xb2\xfc\xeb\xbf\xfe\xeb\x37\x66\xb3\x59\
\x4e\x9c\x38\xc1\xfa\xfa\x3a\xb6\x6d\xa3\x0c\x0d\x0d\x91\xcb\xe5\
\x10\x42\x30\x39\x39\x89\xa6\x69\x08\x21\xc8\xe7\xf3\xfc\xc8\x8f\
\xfc\x08\x83\x83\x83\x54\xab\x55\xa6\xa6\xa6\xd8\xb1\x63\x07\xc5\
\x62\x91\x52\xa9\x84\xae\xeb\x8c\x8d\x8d\x01\x10\x86\x21\x0f\x3c\
\xf0\x00\xd7\x5f\x7f\x3d\xa6\x69\x32\x35\x35\xc5\xe0\xe0\x20\xdb\
\xb7\x6f\xa7\x5e\xaf\x63\x9a\x26\xfb\xf7\xef\x47\x08\x81\xef\xfb\
\xec\xdf\xbf\xbf\xbf\x6d\x6c\x6c\x8c\x52\xa9\xc4\xbe\x7d\xfb\xc8\
\xe7\xf3\x14\x0a\x05\x76\xee\xdc\x49\xa1\x50\x40\x55\x55\x56\x57\
\x57\xb9\xfd\xf6\xdb\x29\x95\x4a\xa4\x69\x4a\x26\x93\xa1\x54\x2a\
\x11\x04\x01\xba\xae\x23\x84\xa0\x5e\xaf\xa3\x69\x1a\x52\x4a\xf6\
\xee\xdd\xcb\x7d\xf7\xdd\x47\xb1\x58\x64\x70\x70\x90\xe9\xe9\x69\
\x06\x06\x06\x98\x9c\x9c\xc4\x30\x0c\x86\x87\x87\xc9\xe5\x72\x00\
\x68\x9a\xc6\xbe\x7d\xfb\x30\x0c\x83\x5c\x2e\xd7\xdf\x5e\x28\x14\
\x30\x0c\x83\x28\x8a\x28\x16\x8b\xa8\xaa\x8a\x10\x82\xc1\xc1\x41\
\x6e\xbe\xf9\x66\xc6\xc6\xc6\xd8\xbb\x77\x6f\x77\xdc\x0e\x1f\x3e\
\x2c\x01\xa4\x94\xfd\x00\x3b\x9d\x8e\x95\xcf\xe7\x8b\x99\x4c\x06\
\xd7\x75\x11\x42\x20\xa5\xa4\xf7\x91\x52\x22\x84\x40\x08\x41\xa7\
\xd3\x59\x1d\x1c\x1c\x2c\xa6\x69\x9a\x05\x88\xa2\xa8\xbf\x3d\x4d\
\xd3\xfe\xbe\x52\x4a\xe2\x38\x8e\xa2\x28\x0a\xf2\xf9\x7c\xa1\x58\
\x2c\x62\xdb\xf6\xf7\xf4\xbb\xf5\x7f\x4d\xd3\x3a\xed\x76\x5b\x94\
\x4a\xa5\x62\x92\x24\x28\x8a\x72\xcd\xf1\xb7\xf6\x5f\xad\x56\x79\
\xed\xb5\xd7\x66\xaf\xbb\xee\xba\xed\x8e\xe3\x90\xa6\x69\xff\x9c\
\x7a\x6d\x7a\xfb\xe8\xba\xce\xec\xec\xec\xb9\x3d\x7b\xf6\xec\x6c\
\xb7\xdb\x8a\xa6\x69\xd7\x1c\x77\xeb\x47\x4a\x89\xa2\x28\x4b\x6b\
\x6b\x6b\xcb\x43\x43\x43\x37\x9a\xa6\x89\xef\xfb\x08\x21\xba\x31\
\xa6\x89\x6c\x08\x01\x08\x50\x14\x05\xcf\xf3\x52\xcb\xb2\x2e\x67\
\xb3\x59\xdd\xb6\x6d\x04\x82\x34\x95\x80\x40\x08\x09\x9b\x6d\x7b\
\x07\x8b\xe3\x78\xa5\xd3\xe9\xec\xd1\x34\xad\x24\x10\x9b\x07\xdd\
\xfc\x47\x00\x9b\xfb\x6c\x1e\x30\xf4\x3c\x6f\x31\x93\xc9\x28\xed\
\x76\x1b\x05\xe5\xfb\xf6\xdd\x0b\xae\xd3\xe9\x34\x92\x24\xd1\xd3\
\x34\x2d\x0a\x04\x71\x9c\x76\x8f\x20\x05\x42\xa4\xdd\x38\xe8\xc6\
\xb1\xbe\xbe\x1e\x03\x8d\x76\xbb\x5d\x4d\xd3\x34\x12\x88\xee\x96\
\x14\xa4\x90\x88\xcd\xb6\x42\x08\x92\x24\x51\x84\x10\xad\x8d\x8d\
\x8d\xbf\x35\x0c\xc3\x90\x69\x4a\x9a\x00\xdd\x50\xba\x31\x28\xb2\
\x7b\x0a\x42\x08\xd7\x75\x97\xc2\x30\x2c\x48\x29\x35\xcf\x75\x13\
\xa4\x42\x2a\x53\x40\xa0\xed\xb9\x7b\xb4\x9a\xac\x99\xc8\x50\x41\
\x28\x82\x28\x8a\x08\xc3\x30\xbb\x6d\xdb\xb6\x7c\x1c\xc7\xc4\x15\
\x87\xc2\x64\x44\xe2\x81\x6c\xe6\x88\x1b\x3a\x32\x50\x51\x0c\x89\
\xa2\x28\x44\x51\xb4\x32\x33\x33\x53\x52\x55\x35\x1b\x11\x11\x4f\
\x34\xa9\xd6\x35\x22\x4f\x22\x5a\x05\xa2\x0d\x1d\x02\x15\xa1\x82\
\xeb\xba\xe1\xdc\xdc\x5c\x6e\x64\x64\xa4\xa4\x28\x0a\x6b\xe6\x3a\
\x3b\xae\xd3\x08\xda\x02\xd9\xca\x10\x37\x0d\x64\xa0\xa0\x18\xdd\
\x19\xdb\x6a\xb5\xbc\x34\x4d\xe3\xed\xdb\xb7\x17\x9d\xc8\xa3\x7c\
\xd8\x41\x20\x49\x5c\x05\xda\x39\xa2\x75\x1d\x22\x05\x54\x89\xa1\
\x1b\xac\xac\xac\xfc\xf3\xae\x5d\xbb\x8a\x32\x96\xac\x15\x97\x99\
\xdc\xab\x13\x78\x09\x58\x19\xd2\xa6\x49\xdc\xd2\x11\x0a\x18\x86\
\x4e\xa7\xd3\xb9\x65\x64\x64\xe4\xfa\x8c\x99\x31\x37\x42\x8b\xa1\
\xeb\x43\x14\x14\xe2\xa6\x4e\xda\xcc\x90\xb6\x55\x84\x01\x42\x01\
\xcb\xb2\x08\x82\xe0\x89\xd1\xd1\xd1\x83\x41\x18\x11\x8d\xb6\xa9\
\x8d\x2a\x44\x36\x68\xf9\xbb\x56\x21\x51\x69\x7f\xad\x4a\xb8\x6a\
\x20\x14\xd0\x34\x2d\xcd\x66\xb3\xd8\x9e\x8b\x32\x1d\x92\xbf\xa9\
\xc3\x9e\xea\x11\x4e\x2f\x1e\xc3\x4c\xf3\xd0\xc9\xd1\xfc\xe7\x01\
\x54\x55\x45\xd3\x34\xd1\xbb\x75\x6d\xcd\xc1\xd8\xb5\xc1\xd4\xae\
\x03\x8c\x57\xb7\xf3\xe4\xe9\xcf\x53\xd2\xca\xd8\x4f\xd7\x09\x16\
\x75\xa2\x28\x12\x9a\xa6\x29\xa6\x69\x22\x84\xc0\x38\xe0\x33\x70\
\xab\xc6\xb6\xdc\x1e\x2e\x2f\x9f\x45\xa4\x90\xce\xd6\xb0\x5e\xce\
\x23\x63\xc8\xe5\x72\x51\xa7\xd3\x11\x69\x9c\xb2\x21\x5b\x6c\x3b\
\xec\x30\x6a\xec\x43\xd3\x53\xe6\xd6\x2e\x10\x77\x34\xdc\x63\x55\
\xfc\x05\x0d\xd3\x30\x91\x52\x66\xd2\x34\x45\x06\x12\x6b\xef\x2a\
\xc5\x43\x75\x0e\x97\x8f\x70\x6e\xfd\x18\xad\xe6\x32\xb9\x76\x85\
\xc6\xd7\x4a\x28\x8a\xe8\x3d\xe7\x43\x53\xd5\xcd\x97\xc3\x59\x0e\
\xdf\x5f\x24\x8d\x15\x0a\x6a\x9e\xc5\x8d\xd7\xc9\xca\x2a\xd6\xf3\
\x65\xc2\x15\x83\x5c\x2e\x01\x28\x9b\xa6\x49\x33\x08\x98\xba\x1b\
\xcc\x41\x9b\x1d\x85\x9b\x50\x7e\x74\xdf\xaf\x70\xc3\x8e\x3b\x89\
\x65\x80\xe7\xf9\x04\x41\x80\x94\x12\xdf\xf7\xf1\x83\x80\x50\xba\
\xdc\x51\xf9\x71\xee\xdb\xfe\x4e\x7e\xe5\xc1\x2f\x71\xf3\xe4\x0f\
\x50\x1b\xeb\x3e\xc4\x83\x20\x20\x49\x12\xc2\x30\xc4\xf7\x7d\xa2\
\x24\xa4\xb1\x16\xf2\x8e\xeb\xfe\x1d\x39\x6f\x86\xf7\xdf\xfc\x9b\
\xbc\xe7\x9e\x9f\x21\x90\x6e\xbf\x7d\x9a\xa6\x5b\xf6\x8d\x79\x74\
\xfb\x87\xd9\xad\x3f\xcc\x8f\xdd\xf2\x4b\xdc\xbf\xe3\x31\x44\xd1\
\x27\x08\x7c\x7c\xdf\x27\x8e\x63\xa2\x38\xc2\x73\x5d\xa2\x24\x66\
\xbc\xb0\x9b\xa3\xa3\xef\xe4\x07\xa7\x7e\x81\x01\xef\x76\xee\xbf\
\xe1\xcd\xb8\x49\x1b\xdf\xf3\x89\xa2\x08\x00\x3f\xf0\x71\x5d\x17\
\x5d\xd3\x98\xd2\x6f\xe7\x81\xf1\x9f\xe0\x86\xf2\xdb\xf9\xaf\x6f\
\xf9\x12\xb2\xe0\xe1\xfb\x3e\x9e\xeb\x91\xa6\x69\x37\x6e\x2f\x20\
\x11\x09\xae\x1b\xf0\xa1\x83\x7f\xc2\x1b\x76\xbd\x9b\x8f\xde\xfe\
\xe7\x4c\x4c\x8f\x12\xe2\xe1\x7b\x3e\x61\x18\xf6\xc7\x24\x08\x03\
\xa4\xf0\xb9\x5d\x7b\x9c\x5b\x27\xde\x80\xa2\xc4\x3a\x6e\xe0\x10\
\x04\x11\xbe\xe7\xf5\x1b\x7b\x9e\xd7\x7d\xfd\x27\x01\xfb\x87\x6e\
\xe3\xb3\xaf\xfd\x2e\xf3\x4b\xeb\x1c\x1c\xb9\x15\x05\xa5\x3b\xc0\
\xbe\x4f\x92\x24\x44\x51\x84\xe7\x79\x84\x41\xc8\x80\x3e\xc1\xb2\
\x35\xcb\x8b\xb3\x4f\x70\xc7\xf4\x43\x78\x91\x43\x14\x84\xfd\xfe\
\x92\x24\xe9\xef\x6b\xaa\x79\x06\x72\x75\x9e\x5f\xfa\x1b\x6e\xa8\
\xbf\x81\x5a\x6e\x84\x92\x5e\xc7\xf3\xaf\x0e\x60\x12\x27\x78\xbe\
\x4f\x1c\x45\xe4\xf4\x02\x4a\xc6\xe5\x5b\xe7\xbe\x85\x48\x55\x6c\
\xbf\x49\x1c\x26\x78\x7e\x77\x60\x00\x02\x3f\xc0\x75\x3d\x34\x55\
\xc5\x69\x09\xbe\x7e\xfe\xf3\x28\x71\x9e\x6a\xa5\x40\x92\x24\x78\
\x9e\x87\xe7\x79\xf4\xee\x1a\xcf\x73\x51\x15\xc1\xc5\xb3\x16\x03\
\x43\x2a\x1b\xce\x32\xd7\x4f\xde\x4c\x35\x33\x46\x10\x76\xe3\x88\
\xa2\xa8\x7b\xe1\x37\xbf\xb7\x3a\x1e\x3f\xf5\xc8\x63\xfc\xd3\xf9\
\x4f\xa0\x24\x32\x25\xa3\x94\x88\xe2\x88\x20\x08\xfa\x03\xd8\x9b\
\x25\x0a\x2a\x7e\xe2\x50\xcf\x8e\x33\x50\xa8\x32\xb7\x3e\x8b\x84\
\xef\x19\xc0\x30\x0c\x89\x93\x18\x3f\xb1\x19\x2c\x6c\x43\x13\x59\
\x5a\xde\x1a\x59\x2d\x47\x14\xc7\x84\x61\x78\x35\x90\x20\xc0\xf7\
\x7d\x24\x29\x0a\x1a\x9e\x03\xa9\xea\xd1\x72\xdb\xac\xdb\x2b\x84\
\x9b\xdb\x93\x24\x21\x8e\xe3\xcd\x81\x4f\x49\x64\x42\x1a\x19\xcc\
\x8c\xee\xa2\xe3\xd8\x94\x33\x43\x84\x71\xd0\x8f\x15\x20\x08\x43\
\x7c\xdf\x43\x51\x14\xf2\x45\x9d\xbd\x43\x37\x62\xf9\x4d\x32\x4a\
\x09\x4d\x18\xdd\xb6\x61\xf7\x2e\x8b\xa2\x08\xcf\xf7\x91\x52\x50\
\x19\x30\x49\x02\x1d\x29\x05\x48\x85\x86\xbb\x4c\x12\xa7\xfd\x01\
\x94\x52\xe2\xf9\x3e\x61\x14\x62\xe8\x1a\x73\xcb\xcb\x18\x4a\x16\
\xed\x58\xfb\xd3\x78\x4a\x03\xdf\xf3\xb0\xec\x88\x6c\x9a\x41\x4a\
\x89\xe3\x38\xf8\x71\x44\x11\x83\x2f\xbf\xfe\x47\xbc\xf7\xd0\xaf\
\x72\xb2\xf3\x0f\x5c\x72\x4f\x10\x8b\x00\xc7\x71\x50\x55\xb5\x3f\
\x78\xae\xeb\x92\x1a\x30\x52\x57\xf8\xc6\x99\xe7\xf8\xf9\x47\xff\
\x1f\x7e\xeb\x9b\x1f\x64\x7c\x74\x9c\x34\x49\x71\x9c\xcd\x19\xb5\
\x39\x0b\xa4\x94\x0c\x68\x82\xa7\x2e\xfd\x1d\x3f\xff\xc6\xdf\xe4\
\x53\xaf\xfd\x1a\x96\xef\x50\xca\x54\x70\xdd\x79\xe2\x40\x62\x18\
\x06\x61\x18\x62\x5b\x16\xbe\x1e\xb3\x64\x5f\x62\x89\x45\xf6\x6f\
\xaf\x21\x07\x2f\x70\xa9\xd1\x5d\x20\x5b\x96\x8f\x61\xea\x00\xb8\
\x8e\x83\xdb\x71\xd1\x52\x9d\xa5\xe8\x3b\xdc\x9b\x7d\x2b\x51\x71\
\x8e\x8f\x3f\xfd\x41\x14\x15\x6c\xdb\x46\xd3\xf4\xab\x93\xc4\xf1\
\x89\xf2\x31\xdb\xb7\x0f\xf2\xd4\xa9\xaf\x32\xb3\x7d\x86\xdf\x7b\
\xf1\x71\x32\x46\x86\x30\x0e\xb0\xed\x18\xdd\xd4\x48\xd3\x14\x77\
\x73\x4c\xca\xf9\x22\xbf\xf5\xcc\xe3\x7c\xec\xc1\x3f\x40\x5b\x5c\
\x9b\xa5\xd1\x59\x63\xf9\x6c\x9b\xc8\x4d\x91\xa4\xa4\x69\x4a\xab\
\xd5\x22\x48\x22\x8a\x41\x1e\x57\x5b\xe4\xf7\xbe\xf5\x38\x8f\x5e\
\xff\xc3\x9c\x5d\x3d\xc6\xea\x0b\x92\x76\xdb\x46\x51\x14\xd2\x34\
\xc5\xb6\x6d\xda\xed\x36\x52\x15\x08\xa5\xc2\xf3\x8d\x4f\x30\x3e\
\x37\xca\x6a\x67\x8e\xc5\xd5\x39\x16\xce\x34\xf1\xda\x49\x7f\x0d\
\x16\x86\x21\x69\x9a\x52\x48\xf2\xcc\x06\xdf\xe2\xd9\x85\xcf\x62\
\xf9\x4d\x4e\x2f\xbc\xc2\xec\x31\x9f\x8d\x15\x07\x04\x94\x4a\xa5\
\x2e\xf9\x44\x29\x41\x26\x25\x51\x4a\x3c\xb3\xfa\x57\xb4\xd3\xa3\
\xf8\x9e\xc5\x4b\x17\x4f\xb0\x72\xda\xc1\x77\x62\x32\x59\x13\xe8\
\x0e\x90\x6d\xd9\x14\x92\x02\x8e\x58\xe5\x8b\xb3\xbf\x8d\x65\x59\
\x2c\xaf\x2d\xb0\xf2\x4a\x40\xbb\x61\x61\x64\xb5\xfe\x24\xb1\x1a\
\x6d\x9c\xa2\x24\x91\x15\xbe\xd5\xfc\x14\xcb\xfe\x8d\x5c\x6e\xbd\
\x8c\xe3\xb8\x5c\xfa\x66\x03\x99\xa6\xe4\x92\x1c\x69\x9a\xd2\x68\
\x34\xe9\xc8\x84\x30\x32\x71\x33\x97\xf9\x95\xaf\x7c\x04\x31\x3d\
\xb1\x53\x0a\x04\x66\x5e\x23\x8c\x42\xb2\xd9\x2c\x51\x14\x59\x69\
\x9a\x16\x6b\xd5\x3a\x72\xac\x43\x69\x58\x67\xe5\x3b\x1e\xeb\x17\
\x5d\x4c\xc3\x44\xa8\xa0\x1a\xdd\x37\x59\x10\x04\xab\x03\x03\x03\
\x45\x45\x51\xb2\x52\x4f\x29\xdd\x2c\x89\x97\x52\x16\x5e\xe9\x80\
\xa7\x21\x14\x81\x6a\x76\xd7\x60\x69\x9a\x46\x42\x88\xc0\x30\x8c\
\x42\xa5\x52\x41\xee\xb1\x28\x19\x3a\x57\x8e\x5b\x38\x4b\x09\x9a\
\xa6\x81\x2a\x51\x75\x41\x92\x24\x14\x0a\x85\x4e\xb3\xd9\x14\x3b\
\x67\x76\x16\x1b\x9d\x06\xa3\x37\x1b\xcc\xbf\x68\x63\x37\x42\x34\
\x45\x43\x92\xa2\x67\x54\xe2\x24\xa6\x52\xa9\x30\x3b\x3b\xfb\xec\
\xce\x9d\x3b\x8f\x2a\xa8\x24\x43\x16\x6a\x2a\x58\x7e\xd5\x21\x8d\
\x25\xaa\xa2\x22\x34\x50\xba\x6b\x66\xc2\x30\xa4\x5c\x2e\x5b\x02\
\x51\x6c\xe2\x73\xe3\x5b\x0a\x2c\x1c\x77\x59\x3f\xe7\x21\x43\x15\
\x64\x8a\x96\xed\x4e\x10\xc3\x30\xe8\x74\x3a\xc7\xf2\xf9\xfc\x11\
\x33\x5b\x64\xfb\x23\x12\x67\x21\x61\xfd\x9c\x8f\x76\xc7\xdd\xb7\
\xd2\x6c\x36\x19\x1d\x1d\xe5\xc2\x85\x0b\x78\x9e\xc7\xca\xca\x0a\
\x3f\xf6\x63\x3f\xc6\xbe\x7d\x7b\x99\xbb\x3c\xcf\xcb\x2f\xbf\x42\
\x7d\xbb\x24\x9d\xec\xbe\x00\x3c\xcf\xa3\xd1\x68\x50\x2c\x16\xb9\
\x74\xe9\x12\x9a\xa6\xf1\xd3\x3f\xfd\xd3\x0c\x54\x06\xf8\xab\x4f\
\xfe\x35\x95\x6a\x85\xf2\x9e\x25\x6a\xb5\x1a\x0b\x0b\x0b\x18\x86\
\x81\xa2\x28\x5c\xb8\x70\x81\x30\x0c\xf9\xc0\x07\x3e\xc0\xc1\x83\
\x07\xf9\xe4\x9f\xfe\x05\xaa\xd4\x30\x46\x97\x19\xbc\xbe\xce\xc6\
\xc6\x06\x03\x03\x03\x34\x9b\x4d\x16\x16\x16\x08\x82\x80\x3b\xef\
\xbc\x93\xc7\x1e\x7b\x8c\x30\x0c\x79\xe6\xc9\xe7\x18\xbd\x71\x19\
\x55\x53\xe9\x74\x3a\x14\x0a\x05\x16\x16\x16\x70\x5d\x97\x24\x49\
\x50\x55\x95\x87\x1e\x7a\x88\xb7\xbf\xfd\xed\x7c\xe1\x73\x5f\xe4\
\x95\x13\x27\x98\xbe\x53\x23\x9f\xcf\xb3\xb0\xb0\x80\xe3\x38\x64\
\xb3\x59\x9a\xcd\x26\xcb\xcb\xcb\x54\xab\x55\x3e\xf2\xf8\x47\xb8\
\x7c\xf1\x12\xcf\x3e\xf5\x4d\xf6\x0c\x64\xd9\x7d\x5b\xca\xfa\xc6\
\x3a\x85\x42\x01\x21\x04\x4b\x4b\x4b\x58\x96\x45\x92\x24\x7c\xe0\
\x03\x1f\x60\x6a\x6a\x3b\x4f\x7f\xed\x1b\x34\xd3\x06\x83\x13\x2e\
\xda\xe0\xe0\x20\x8e\xe3\x60\x18\x06\x53\x53\x53\x9c\x3b\x77\x8e\
\x24\x49\x98\x99\x99\xe1\xe8\xd1\xbb\x39\x55\x3b\xc5\x5a\x63\x95\
\xc1\xc1\x41\x14\x45\xe1\x95\x57\x5e\x61\x74\x74\x14\x80\xe1\xe1\
\x61\x2e\x5d\xba\x44\x9a\xa6\xdc\x76\xdb\x6d\x64\xb3\x59\xbe\xfc\
\x95\x2f\x73\xfd\xf5\xd7\x73\xe6\x4c\x16\xc3\x30\xfa\x57\xfb\xc0\
\x81\x03\x5c\xbc\x78\x11\x80\xc3\x87\x0f\x33\x3e\x3e\xce\x2d\x77\
\x1c\x61\x65\x65\x05\x2f\xb2\xd9\xb1\x63\x07\x69\x9a\x32\x34\x34\
\xc4\xae\x5d\xbb\xf8\xca\x57\xbe\xd2\x9b\x25\x94\x4a\x25\x86\x86\
\x86\x78\xf5\xd5\x57\x11\x9a\x24\x9f\xcf\x73\xfc\xf8\x71\xc6\xc6\
\xc6\x68\x36\x9b\xe4\xf3\x79\xd6\xd6\xd6\x10\x42\x50\x2a\x95\xd8\
\xbd\x7b\x37\x03\xf5\x0a\x95\x6a\x89\x72\xb9\x8c\x94\x92\x5a\xad\
\x86\x65\x59\x0c\x0d\x0d\x11\x04\x01\x42\x08\x0a\x85\x02\x37\x5c\
\x7f\x03\x19\x33\xc3\x99\xb3\x67\xd9\xbb\x77\x2f\x6b\x6b\x6b\x20\
\xa0\x50\x28\x30\x3a\x3a\x4a\x36\x9b\xe5\xe4\xc9\x93\x00\xdc\x71\
\xc7\x1d\x94\x4a\x25\x3a\x1d\x0b\xcf\xf3\x38\x79\xf2\x24\xe2\xc6\
\x1b\x6f\xec\x03\x60\x8f\x85\xdb\xed\xb6\x55\x2a\x95\x8a\x41\x10\
\xf4\xf9\xf3\xfb\x7d\x14\x45\xa1\xd3\xe9\xac\x0e\x0d\x0d\x15\x1d\
\xc7\xc9\xf6\x10\xec\x7f\xf7\x89\xe3\x38\x0a\xc3\x30\xc8\xe7\xf3\
\x85\xef\xc7\x9d\xdf\xcd\xda\x8a\xa2\x74\x3a\x9d\x8e\x28\x95\x4a\
\xc5\xff\x53\xfb\x72\xb9\xcc\xa9\x53\xa7\x9e\xdd\xb9\x73\xe7\x51\
\xcf\xf3\xf8\xdf\xc5\x22\xa5\x44\xd3\x34\x36\x36\x36\x18\x1c\x1c\
\xb4\x82\x20\x28\xfe\x4b\x71\xf7\x62\x59\x5b\x5b\x3b\x36\x34\x34\
\x74\xa4\xc7\xd6\xbd\x8f\xb6\x35\xae\x4d\x16\xc6\xb2\xac\x20\x9f\
\xcf\x67\x15\x45\x91\x48\x90\x89\xd8\xc2\xb4\x9b\x8d\x05\xa4\x69\
\x2a\xe2\x38\x8e\x3b\x9d\x0e\x3d\x20\x47\x0a\x64\x17\x53\x41\x91\
\xfd\xb6\x9b\xc1\x48\xdf\xf7\xe3\x4c\x26\x13\x2b\x8a\x22\x85\x14\
\x5d\x06\x15\x12\x94\xcd\x66\x5b\x58\xd8\xb2\x2c\xdf\xf7\xfd\x0d\
\x21\x44\x19\x90\x24\xa2\xcb\xd9\xca\xb5\xb1\x48\x29\x85\xe7\x79\
\x91\x94\x72\x61\x63\x63\x63\x5d\x4a\x19\x90\x8a\x2e\x27\xa7\x02\
\x94\x2e\x0b\x6f\xc6\x21\x36\x9f\xdd\x0d\xcb\xb2\xb6\x9b\xa6\x19\
\x0b\x90\x69\xb2\xd9\xd9\x26\x37\xb3\x25\x96\x20\x08\x94\x24\xe9\
\x36\x50\xc4\x26\xbf\xcb\xcd\x64\xc2\xf8\xae\x21\xa4\xa7\x76\x0f\
\xd4\x1d\x14\x96\x96\x96\x2e\xed\xd8\xb1\xe3\x80\xe7\x7a\x69\x98\
\x73\xc9\xef\x08\x10\xae\x46\xd2\x36\x49\x2d\x1d\x19\x75\x77\x56\
\x55\x55\x00\x72\x78\x78\x58\x64\x32\x19\x02\x11\xc2\x74\x8b\x52\
\x41\x25\xee\xe8\xd0\xce\x90\x5a\x1a\x32\x51\x40\x82\xe7\x79\x72\
\x71\x71\x31\x1a\x1b\x1b\x0b\x14\x45\x91\x8d\xcc\x06\x63\xbb\x35\
\xa2\xa6\x86\x6c\x9b\x24\x8e\x0a\x49\xff\xea\x8a\x76\xbb\x1d\x04\
\x41\x30\x36\x39\x31\x59\xf2\x93\x10\x75\x47\x9b\xac\xae\x93\x34\
\x0d\x12\x4b\x47\x06\xdd\x7e\x01\x4c\xd3\xe4\xc9\x27\x9f\xbc\x70\
\xe8\xd0\xa1\xba\x8c\x25\xab\xb9\x65\xb6\x1f\xd0\x08\xda\x20\xdb\
\x19\x92\x8e\x01\xbe\x8a\x4c\xc1\x30\x0c\x8e\x1f\x3f\xbe\x6d\x62\
\x62\x62\xde\x34\x4c\xa5\x15\xda\x72\xe0\x90\x8f\x96\xea\xc4\x0d\
\x9d\xb4\xd3\x65\xf2\x6e\x92\x03\x61\x59\x56\x9c\x24\xc9\xc4\xf4\
\xf4\x34\x96\xef\x93\xd9\x63\x51\xa8\x28\x04\x9d\x14\xad\xfe\x03\
\x1b\xe8\x8a\x49\xf3\xc9\x2e\xf7\x19\x86\x40\x55\xd5\x01\xd3\x34\
\x73\x9e\xe7\xa3\x6e\xf7\xa9\xdc\xee\xa0\x87\x45\xc2\x78\x0d\x25\
\x36\x90\x81\x46\xe3\x8b\xdd\x1c\xdc\xe6\x2d\xae\x26\x49\x82\x9d\
\xb1\x19\xbc\xbe\xc3\xee\xf1\xfd\x90\xa6\xcc\xad\x5c\x44\x55\x54\
\x3a\xcf\xd4\x09\x17\x75\xd2\x34\x55\x54\x55\xcd\x69\x9a\x96\x17\
\x42\xc0\x1e\x8b\x81\xdb\x35\x72\x69\x9d\x56\x7b\x1d\x21\x15\xc2\
\x73\x15\xac\x13\x39\x84\x90\x68\x9a\x96\x7a\x9e\x07\x52\xb2\x21\
\x5b\xec\xbf\xdf\x46\x4b\x32\x04\x6e\x03\x3d\xcd\x10\xb9\x02\xfb\
\x78\x99\x60\x51\x47\xc9\x2a\x48\x29\x37\x1f\xba\xe0\xee\x5d\xa7\
\x74\xc3\x00\x55\x75\x8a\x75\x6b\x0e\xcb\xb2\xd1\x3a\x55\x1a\x4f\
\x14\xe9\xa5\xc6\x0c\xc3\xa8\xe8\xaa\x9e\xbf\x98\xac\xf2\xe6\xbb\
\x73\x44\x81\x4f\x96\x12\x1d\x7b\x03\x43\xe4\xe8\x7c\xa3\x4c\xb8\
\x6c\xf4\x72\x8d\x91\x4c\x25\x56\xea\x30\x7a\x38\xc0\x53\xda\xbc\
\x71\xe7\x0f\xa2\x3c\xba\xfb\x71\x8a\xd5\x22\xb1\xec\xf2\xec\x26\
\x0b\x4b\xdf\xf7\xf1\x7c\x9f\x54\x84\x4c\x72\x37\x1f\xbc\xf3\xbf\
\xf2\xaf\xaf\xfb\x0d\xf6\x0c\x1d\x65\x78\xdb\x60\x9f\x26\x92\x24\
\x91\xbd\x4c\x75\x9c\x46\x24\xcd\x0a\xf7\x8e\xbf\x93\x43\x85\xb7\
\x72\xcf\xf6\x77\xf3\xa6\x5b\xde\x41\x78\x2d\x0b\xcb\xde\x71\xe2\
\x34\xe1\xd1\xed\x1f\x61\x52\x79\x80\x07\x0f\xbc\x8b\xa3\x33\xef\
\xc2\x11\x1b\x5b\x29\x47\xc6\x71\x8c\xe7\xfa\x84\x49\xc8\x48\x76\
\x9a\x47\x26\x3e\xca\xcf\xdd\xfd\x49\xbc\x66\x9d\x1d\xd3\x53\x84\
\xd2\xed\xf3\x6a\x8f\x85\x3d\xcf\xc5\x30\x54\xa6\xd5\xbb\x78\xcb\
\xd4\xe3\x1c\xaa\xbc\x95\x5f\x79\xcb\xdf\x93\xe6\xec\xcd\x55\x84\
\x8f\x94\x92\x30\x0c\xa5\xef\x7a\x44\x32\xc2\x73\x23\x3e\x7c\xfd\
\x27\xb8\x7f\xf7\xbb\xf8\xd0\x1d\x7f\x48\x65\x34\x47\x24\x83\x3e\
\x67\x4b\x29\xf1\x3d\x8f\x38\x89\x68\x76\x5a\xfc\xee\x43\x4f\xb3\
\xb4\xbe\x86\x32\x5e\xd8\xc3\xc1\x91\x3b\x37\x07\xa4\xcb\xc2\x42\
\x08\x3c\xcf\xc3\x0f\x7c\xc2\x38\xe0\xba\xa1\x5b\x79\x7e\xe1\xef\
\x79\xe1\xe4\x09\x6e\x9f\x7a\x08\x4d\xd1\xfb\x4c\x99\x24\x09\x41\
\x10\x74\x59\x38\x0a\x19\x2b\xee\x66\xb1\x73\x91\xf3\x6b\x27\xb9\
\x6d\xfa\x01\x92\x34\x21\x0e\x63\x7c\xbf\x7b\x92\x5b\xdb\x67\xd4\
\x3c\x95\xcc\x20\xaf\x7b\x2f\xb0\xbf\x7c\x3f\x3b\x87\x0e\x50\xd2\
\xeb\xf8\xbe\x8b\xe7\x79\xc4\x71\x4c\x1c\xc7\xf8\x7e\xf7\x7b\x29\
\x53\x21\x57\x4c\x78\xf6\xc4\xf3\x1c\x1c\xbd\x83\xdd\xb5\x9b\x70\
\x03\xe7\x7b\x59\xd8\xf3\x51\x55\x15\xa7\x2d\x78\x71\xee\x9f\xc9\
\x8a\x2a\x43\x03\x43\xa4\x49\x17\x41\x03\xdf\xef\x27\x13\x3c\xcf\
\x43\x51\x15\x2e\x9f\xb3\x18\x1a\xc9\xe2\x47\x2e\x37\x4c\xdc\xc2\
\x64\xe9\x3a\xfc\xb0\x1b\x47\x0f\x41\x3d\xdf\x27\x0a\x23\x34\x99\
\x61\xb8\x3a\xc4\x46\x7a\x06\xcd\x09\x6c\xce\xae\xbc\x4a\x14\xc5\
\xf8\x5e\xda\x7d\xa0\x43\x77\xc6\xf8\x01\x86\x14\xb8\xa1\xc5\x60\
\x69\x02\x59\xc8\xf3\xfa\xfa\xb9\xfe\x76\x55\x55\xfb\x2c\x1c\xc7\
\x31\x69\x92\xd2\xf4\xd7\x18\x2d\x3f\x4c\x73\x3d\x65\xc5\x9d\xc5\
\x54\xb3\x44\x51\x44\x10\x24\x7d\x02\xe9\x65\x74\x75\x52\x14\x45\
\xc1\x77\x25\x46\xce\x67\xfe\xd2\x02\x1b\xf6\x0a\x81\x6f\xf6\x5f\
\x3c\x71\x1c\x77\x93\x09\x5a\xd2\x7d\x46\x27\x0a\x23\x03\x23\xbc\
\x7c\xf9\x18\xb2\xa2\x40\xd2\x1d\xb4\x1e\xc3\x87\x61\x48\xe0\xfb\
\x18\x42\xa5\x50\x30\x98\xa9\xed\xe7\xcc\xc2\x59\x4c\x91\x45\x11\
\x4a\x3f\x23\xd4\xeb\x3b\xf6\x23\xc8\x40\xa9\x62\x92\x84\xda\x66\
\xf6\x1a\x96\x3a\x97\x49\xe2\x6e\x06\x46\x8a\xb4\x8f\x7e\xb1\x1a\
\x23\x45\x42\xe0\xc7\x04\xae\x8e\xf2\xcd\xf5\xbf\xc2\xc8\x4a\x82\
\xd0\xef\xb2\xde\xe6\x95\xec\xcd\x30\x91\x6a\x1c\x6f\xfc\x2d\x39\
\x6f\x86\xfc\xf8\x3a\x67\xed\x67\x71\x93\x36\x8e\xe3\xe0\x38\x4e\
\xf7\x04\x3d\x0f\xdb\xb6\x09\xfc\x90\x38\xbf\xc8\x2b\xe7\xcf\xf0\
\xc0\xf5\xf7\xf1\x77\x67\x7e\x9b\xb3\xad\x6f\x11\xc6\x21\x96\x65\
\xe1\x38\x4e\x3f\x1b\xe3\xba\x2e\x8a\x96\xf0\xb5\x8b\x9f\xe1\x67\
\xee\xff\x35\x3e\xf3\xea\x6f\x73\x39\x78\x8e\xac\x56\xc0\x71\x5d\
\x1c\xc7\xe9\x5f\x18\xcf\x75\x09\xc3\x88\x65\xfb\x75\xd6\x5b\x2d\
\x28\xb4\x48\xab\xb3\x04\x69\x17\xee\x1d\xc7\xe9\x67\x58\x1c\xc7\
\xc1\xea\x58\x24\x69\xca\x7c\xf4\x6d\x34\x91\xc1\xcf\x5d\xe4\x77\
\xbe\xf9\x33\xa8\x9a\x8a\x65\x59\x58\x1d\xab\x3f\xd8\xae\xe3\x12\
\xc7\x29\x53\x53\x15\xbe\xfc\xca\xe7\x19\x2a\x8d\xf0\x3b\x2f\xfe\
\x9b\xae\x26\x12\xfb\x38\x8e\xd3\xcf\x22\x59\x9d\x0e\xae\xeb\x91\
\xcf\x65\xf9\xb5\xa7\xdf\xcf\x2f\xdd\xf3\xe7\x68\xae\xb2\xce\xdc\
\x85\x39\x96\xce\xb5\x08\xac\x84\x84\x04\x80\x66\xb3\x49\xab\xd3\
\x21\xef\xe6\x09\x55\x95\xbf\xbb\xfc\x6b\xbc\xe5\xf0\x3b\x99\xbb\
\xf8\x1a\x6b\x2f\x49\xda\xed\x4e\x7f\x8d\xd8\x9b\x85\x94\x14\x14\
\xd5\xe0\x78\xe7\x53\xd4\xd7\x14\xda\xd1\x32\xf3\x5f\xd9\x60\xfe\
\x44\x9b\xc8\x4e\xbf\x2f\x0b\x2f\xf8\xdf\xe1\xb9\xa5\xff\x45\xa0\
\xd9\x5c\x7a\xfd\x3c\x57\x4e\x3b\xb4\x9a\x1e\x42\xe9\xb2\xb0\xef\
\xfb\xc4\x41\x44\xa0\x4b\xfc\x34\xc7\xb3\x4b\x7f\xcd\xbd\x7b\x7e\
\x00\x99\x6b\xf1\x9d\x6f\xaf\xb0\x7c\xce\xc2\xe9\x44\xe8\x66\x77\
\xf6\xd8\xb6\x8d\xdd\xb1\xc9\xa5\x05\x7c\x75\x9d\x63\xf6\x5f\xd3\
\xf1\x2c\x5e\xfe\xce\x2c\xcb\xaf\x04\x74\x5a\x0e\xba\xa9\xf6\x19\
\xde\x6a\xb6\xb1\x4b\x92\x58\x96\xf9\x8e\xfd\xf7\x74\x16\x6f\x60\
\xae\xf3\x2a\xeb\xb3\x0e\x97\xbf\xd5\x20\x89\x52\xb2\xd9\x2c\x69\
\x9a\xd2\xb1\x2c\xbc\x44\x92\x92\x65\x39\x7e\x95\x3f\x7c\xf9\xe7\
\x10\xe3\x43\x53\x52\x48\x05\xbd\xd0\x5d\xf7\xe4\xf3\x79\xd6\xd7\
\xd7\xcf\x55\xab\xd5\xdd\xaa\xa2\x62\x4e\x47\xd4\x76\x1a\x6c\x9c\
\x0e\x58\x3e\xe5\x90\x76\x34\xf4\x9c\x82\x96\xed\xb2\x70\x1c\xc7\
\xb6\xaa\xaa\x66\x2e\x97\xd3\x63\x23\x62\xec\x1e\x83\x60\x21\x61\
\xe9\x94\x83\x73\x05\xb4\x8c\x40\xcf\x8a\x3e\x0b\x2b\x8a\x12\xa8\
\xaa\x5a\xc8\xe5\x72\x18\x87\x22\x6a\x15\x9d\xc5\x93\x0e\x1b\xa7\
\x62\x44\x22\xd0\x73\x02\x61\x80\x4c\x25\x85\x42\xa1\xb3\xb1\xb1\
\x21\x86\x86\x86\x8a\x9d\xd8\xe6\xd0\x0f\x17\x59\x3b\x15\xb2\x7c\
\xd2\xc6\x5f\x56\x50\x0c\x30\xf2\x5d\x7d\xa2\x5a\xad\x32\x3b\x3b\
\xfb\xec\x8e\x1d\x3b\x8e\x8a\x54\x90\x4e\x38\xe4\x32\x1a\x8b\x2f\
\x3b\x78\xeb\x09\xba\xa9\xa2\x1a\x20\xb4\xee\xda\x2e\x0c\x43\x4a\
\xa5\x92\x15\x87\x71\xb1\x63\x46\xdc\xfc\xd6\x22\xab\xaf\xf9\xac\
\x9e\xf6\xf0\x57\xba\xed\x8c\x42\x97\x85\x4d\xd3\xc4\x75\xdd\xf5\
\x7c\x3e\x5f\xcf\x97\x8b\x54\x0e\x44\x74\xe6\x23\x5a\x97\x43\xb4\
\x3b\xee\xbd\x05\xc7\x71\xa8\x56\xab\x5c\xb9\x72\xa5\xa7\x0b\xf3\
\xe0\x83\x0f\x72\xf0\xe0\x41\x3a\x2d\x8b\x63\xdf\x3a\xce\xd8\x48\
\x96\xf1\x4c\x83\x62\xa9\xc0\xd2\xe2\x12\xcd\x66\xb3\xa7\x0b\x33\
\x3d\x3d\xcd\x23\x8f\x3c\xc2\xf0\xd0\x30\xff\xf3\x2f\x3f\xc5\xde\
\xc9\x09\x4a\x13\x0b\x8c\xde\x3a\xca\xca\xca\x0a\x61\x18\x92\xcb\
\xe5\x38\x79\xf2\x24\x69\x9a\xf2\xde\xf7\xbe\x97\x83\x07\x0f\xf2\
\x99\x4f\xfd\x0d\x19\x37\x8b\x9a\x9b\xe7\xae\x1f\x9e\xdc\xd4\x90\
\x0d\x54\x55\xe3\xdc\xb9\x73\x84\x61\xc8\xe1\xc3\x87\x79\xdb\xdb\
\xde\x46\x12\x27\x9c\x78\xf9\x55\x0a\x6a\x8b\x6d\xfb\x7d\xb4\x1b\
\xbb\x69\xfc\xd7\x5f\x7f\x1d\x45\x51\x70\x5d\x17\x55\x55\x79\xe3\
\x1b\xdf\xc8\xdb\x7f\xa8\xcb\xc2\xaf\x9e\x7c\x95\xd1\x1b\x54\x86\
\x86\x07\x39\x77\xf6\x1c\xae\xeb\x92\xcb\xe5\x68\xb5\x5a\x7d\x16\
\xfe\xc9\x9f\xfc\x49\x56\x96\x57\x78\xfa\xc9\xe7\xd8\x3b\x58\x65\
\xfb\xf5\x1e\xcd\x56\x83\x72\xb9\x8c\xa0\xcb\xc2\x8d\x46\x03\x21\
\x04\x1f\xff\xf8\xc7\x11\x42\xf0\xd4\xd7\x9e\x21\x2d\x27\x5c\xc8\
\x9c\x47\x1b\x1b\x1b\xe3\xd2\xa5\x4b\x14\x8b\x45\xb6\x6d\xdb\xc6\
\xe5\xcb\x97\x49\x92\x84\x3d\x7b\xf6\xf0\xf6\xb7\xbf\x9d\xb9\xb9\
\x39\x56\xd7\x57\xa8\x54\x2a\xb8\xae\xcb\xf2\xf2\x32\x03\x03\x03\
\x58\x96\x45\xa5\x52\xe9\xeb\xab\xef\x78\xc7\x3b\x50\x14\x85\x63\
\xc7\x8f\x31\x3d\x3d\x4d\xa1\x9c\x47\xd7\x75\xb2\xd9\x2c\xcb\xcb\
\xcb\xec\xd9\xb3\x87\xd7\x5e\x7b\x8d\x28\x8a\xb8\xe7\x9e\x7b\xb8\
\xe9\xa6\x9b\x38\x7f\xfe\x3c\xcd\x66\x13\xcb\x6d\x33\x39\x39\xd9\
\xcf\x5a\x6f\xdf\xbe\xbd\xcf\xcd\xdb\xb7\x6f\xef\xeb\xbc\xae\xe7\
\xb2\xb6\xb6\x46\xa5\x52\xe1\xc5\x17\x5f\x64\x68\x70\x88\x6a\xb5\
\xda\x17\xfc\x01\x6a\xb5\x1a\x07\xf6\x1f\xe0\xf9\x6f\x3c\x4f\xb9\
\x5a\xa2\x58\x2c\x02\x30\x30\x30\x80\xeb\xba\x94\xcb\x65\x7a\xa8\
\x57\x2e\x97\x79\xd3\x9b\xde\xc4\xf1\xe3\xc7\x39\x7b\xee\x2c\x3b\
\x77\xee\xc4\x75\x5d\x2e\x5c\xb8\x40\x3e\x9f\xa7\x56\xab\x21\x84\
\xa0\xd5\x6a\x91\xa6\x29\x53\x53\x53\x84\x61\xc8\xd0\x68\x1d\x29\
\x25\x8b\x4b\x0b\x88\x1b\x6e\xb8\xe1\x1a\x16\xce\x66\xb3\xcc\xce\
\xce\x9e\x9b\x9e\x9e\xde\xdd\x4b\x7c\xaa\xaa\xfa\x3d\x9a\x6d\x0f\
\xb7\x3c\xcf\xb3\x33\x99\x8c\xa9\xaa\xaa\xbe\xb5\xcd\xa6\x7c\xd8\
\x6f\x9b\x24\x09\x52\xca\x28\x8a\xa2\x40\x55\xd5\x42\x3e\x9f\x27\
\x8e\xe3\x7e\x3f\xbd\x37\x63\xef\x99\xaa\x28\x0a\xba\xae\x77\x1a\
\x8d\x86\xa8\xd5\x6a\x45\xdf\xf7\xd1\x75\xbd\xdf\x56\x51\x14\xe2\
\x38\x46\x51\x14\x92\x24\xa1\x56\xab\x71\xea\xd4\xa9\x67\xa7\xa6\
\xa6\x8e\xb6\xdb\x6d\xb2\xd9\xec\x35\xcc\xba\xf5\xbb\x61\x18\xac\
\xae\xae\x52\xaf\xd7\xad\x30\x0c\x8b\x3d\x51\xbe\xa7\x25\xf7\x74\
\xe4\x5e\x4c\x9b\xcc\xbf\x5e\x2c\x16\xeb\x61\x18\x62\x18\x46\x5f\
\x17\xd7\x0a\x85\x62\x2a\xb6\xf0\x6a\x18\x86\x8a\x10\x42\xc9\xe5\
\x72\x64\x32\x19\xc4\x26\xdb\xf6\xf4\xd2\x4d\x61\xb6\x1f\x94\xeb\
\xba\x81\xa6\x69\x6a\x26\x93\x51\x85\x10\x90\x6e\x8a\xf0\xdf\xc5\
\xcd\x9b\x4b\x9f\x68\x7d\x7d\xdd\xaf\xd7\xeb\x79\x21\x84\xcc\x9a\
\x59\xd2\x64\x53\x0f\xde\xc2\xc2\xbd\xcf\xfa\xfa\x7a\x92\xcd\x66\
\xf5\x4c\x26\x93\x66\x32\x99\x2e\x93\xb3\x45\x3f\xde\xd2\x7e\x73\
\xad\x16\x46\x51\x44\xbe\x90\x4f\x45\xda\xe3\x6c\xfa\x4b\xb3\x5e\
\x2c\x9b\x59\x74\x99\x24\x49\x52\x2c\x16\xbb\x2a\xf6\xbf\xd0\xb7\
\x65\x59\x22\x08\x02\xaf\x54\x2a\xc5\xa6\x69\x4a\x52\x90\x49\x57\
\x6f\xd6\x46\x76\x97\x93\xc4\xd2\x20\xee\xbf\x14\x12\xdb\xb6\x6b\
\x13\x13\x13\x84\x51\x44\x52\xf4\xc8\x8d\xc4\xa4\xbe\x02\xbe\x4e\
\xea\xa8\xa4\xae\xda\xbf\xa2\xb6\x6d\xaf\xd7\xeb\xf5\x9a\x69\x9a\
\x5a\x24\x62\xc4\xa8\x4d\x2e\xab\x75\xb5\x5b\xd7\x20\xb1\x55\x44\
\xd2\x1d\x1d\xcf\xf3\x9c\x20\x08\xd6\xea\xf5\xfa\x80\xaa\xaa\x69\
\x3b\xd3\x64\x68\x52\x25\x76\x14\xf0\x74\x12\x57\x45\xfa\x2a\x3d\
\x81\x3e\x0c\xc3\x28\x9b\xcd\x16\x6b\xb5\x5a\xe2\xa4\x1e\xa5\x5d\
\x01\x24\x0a\xa9\xab\x76\xdb\xdb\x2a\x24\x0a\x20\x31\x4d\x53\x5c\
\xba\x74\x29\x3e\x78\xf0\x20\x69\x94\x26\x8d\xfc\x2a\xdb\x76\x69\
\x84\x36\xe0\x99\xa4\xae\x42\xea\xe8\xc8\x50\x60\x9a\x26\xed\x76\
\x3b\x19\x1e\x1e\x8e\x73\x99\x6c\xd2\x0c\x6d\x59\xdb\x17\x23\xd2\
\x6e\xdf\xd2\xd3\x49\x6d\x15\x19\x2b\x28\xaa\xc0\x30\x0c\xc5\xf7\
\x7d\x67\xef\x9e\xbd\x81\x1f\x86\xa9\x1c\xb6\x29\xd6\x15\x12\x4f\
\xa2\x95\x1e\x5c\xd5\x55\x55\xa5\xf1\x85\x1a\xb1\xd5\x65\x5b\x21\
\x84\x6e\xdb\x36\x96\xef\x92\x8c\xad\x91\xd9\x1f\xb1\x2d\x3f\xcd\
\x4a\xeb\x75\x34\xa9\x90\xba\x3a\x8d\x2f\xd6\x7a\x6f\xb3\x2c\x90\
\x49\xd3\x54\xef\x18\x16\x43\x47\x9a\x8c\x56\x07\x51\x11\x34\x3a\
\xcb\xe8\x19\x95\x8d\x2f\xd6\x88\x9b\x1a\x49\x92\x64\x85\x10\x85\
\x34\x4d\x55\x40\xf5\x77\xac\x53\xbe\x25\x4b\x4d\x1f\x67\xa9\x79\
\x05\x45\xa8\x78\xa7\x0b\xd8\xaf\x14\x00\x89\xae\xeb\x39\x40\x13\
\x09\x2c\x69\xeb\x6c\xbf\x4b\x62\xa4\x25\x64\x9a\x10\x04\x6d\xd2\
\x40\xa1\xf3\x62\x89\x60\xde\x40\xcd\xa9\x24\x49\x92\xf3\x3c\x8f\
\xd4\x4d\xf5\xd5\xa9\x39\xa6\x0f\x0e\x30\xae\x4d\xb3\x62\xcd\x11\
\x79\x29\x71\x5b\x63\xe3\xcb\x15\xc2\x30\x42\x08\xa1\xab\xaa\x1a\
\x93\xa2\x9d\xf2\x17\x78\xeb\xd1\x3c\x22\x36\x30\x44\x86\xb6\xbd\
\x8e\xae\x99\xb4\x9e\x2a\x13\x2e\x19\xbd\x5b\xba\x1e\x45\x51\x7e\
\xd5\xea\x30\xfd\x88\x8d\x5a\xf6\x98\x2a\xed\x46\x79\x64\xe7\xe3\
\x0c\x54\x86\x08\x93\xb0\xbf\xa2\x07\x64\x10\x04\x84\x41\x48\xac\
\xfa\xdc\x5e\x7b\x37\x77\x6f\x7f\x27\xff\xe9\x0d\x7f\xcb\x74\xe5\
\x36\x06\xea\xa5\xfe\x83\xbb\xc7\x94\x41\x10\x10\xcb\x90\xd6\x1a\
\xbc\xeb\xe0\xcf\xf3\xd0\xc4\xe3\x1c\x9a\x38\xca\xdd\x07\x1f\xc6\
\x0f\x9d\x5e\x7b\xd9\xd3\x91\xc3\x30\x24\x8c\x23\xde\x3c\xf3\x41\
\x86\xd2\xdb\x79\xd3\x0d\xef\xe2\xc8\xe8\x0f\xe1\xd2\xc2\xdf\x8c\
\xa3\xc7\xc2\xbe\x1f\x90\xc8\x88\x89\x81\xdd\xfc\xc8\xde\x7f\xcf\
\xbf\x3b\xfa\xdf\x59\x9e\xd7\xd9\x31\x39\x43\x94\xfa\x04\xfe\x55\
\x55\xae\xcb\xc2\x1e\xba\xa1\x32\x63\x1c\xe5\x8d\x13\xff\x86\xbd\
\x85\x87\xf8\xc5\x37\x7e\x92\xd8\xb0\xfb\x9a\xf3\xa6\x2a\x27\x7d\
\xcf\x27\x25\xc6\x0f\x42\xde\x77\xf0\x0f\xb9\x77\xe7\x3b\xf9\xe0\
\xad\x7f\x40\xa9\x96\x23\x96\x5d\x1a\xbb\xaa\x26\xfa\x04\x51\x48\
\xa2\x7a\xdc\x5b\xfa\x59\xf6\x0e\xde\x8a\xb2\x7f\xe4\x30\x23\x95\
\xb1\x2e\x1d\x78\x2e\xfe\x26\x27\xf6\xde\x88\x71\x1c\xb0\xb7\x7e\
\x84\xaf\x5c\xfc\x73\x2e\x5f\x59\xe4\xd0\xb6\xdb\x50\x64\x77\xd9\
\xd0\xe3\xd5\x3e\x0b\x87\x21\xbb\xaa\x37\x71\x7e\xfd\x15\x8e\xcd\
\x3d\xc9\x1b\xa6\xff\x35\xba\x92\x21\x0c\xbb\xf6\xb3\x5e\xdf\x3d\
\x12\xc9\xe9\x45\x8a\x66\x95\x0b\xce\xb3\x5c\x57\xb8\x9f\xd1\xca\
\x24\x06\x19\x3c\xcf\xed\xaa\x7c\x69\xda\xd7\x6e\xd3\x24\x25\xa3\
\x64\x49\xf4\x16\x4f\x7c\xfb\x49\x0e\x8d\xdc\x41\x2d\xb3\x0d\x3f\
\xf0\xfa\x71\xf7\x58\xd8\x73\x5d\x34\x55\xc5\x6a\xc2\x4b\x8b\x4f\
\x52\x54\x87\x19\xa9\x0e\x41\x2a\xf0\x3d\x1f\xdf\xef\x0a\xeb\x41\
\x18\xe0\x79\x2e\x42\x11\x5c\x3c\x63\x31\x34\x6a\xe0\x86\x1d\x0e\
\x8e\xdf\x44\xc5\x1c\xee\x8a\xf4\x5b\x58\xd8\x75\x5d\xc2\x30\xa0\
\xdd\xf6\xf8\xf1\x87\xde\xcd\x4b\xcb\x5f\x41\xf1\x43\x1f\x3f\xec\
\x66\x1c\xc2\x4d\x1f\xdd\x56\x5d\x18\x54\xfc\xd8\x61\xa8\x30\xc1\
\x40\x7e\x80\xa6\xdd\x04\xa1\x74\x99\x73\x13\x71\xba\xac\x1b\x90\
\xc6\x29\x4e\xd8\x61\x20\x37\x48\xd1\xac\xd2\x8e\x56\x51\x95\xab\
\xbe\xc2\x28\x8a\xfa\xc9\x84\xee\xec\x4d\x11\x08\x94\x24\x4b\xa2\
\xdb\xb8\x5e\x80\x17\xba\x84\xc1\xb5\x7d\xf7\x66\x8c\x50\x54\x34\
\x91\x65\x64\x60\x94\x75\x6b\x0d\x2f\xb2\x49\xe2\x84\x30\xb8\xba\
\x8c\x09\xc2\x00\xcf\xf7\x51\x84\x4a\xbe\xa0\x33\x53\x3f\x84\x15\
\x36\x50\x31\x00\x05\x7f\x73\x06\x02\x44\x61\x57\x17\x46\x42\xa9\
\x6c\x92\x04\x1a\x9a\xa2\x23\x53\x08\x22\x97\x24\x4e\xfa\x62\x7a\
\x6f\x4c\xa2\x28\xc2\xd4\x35\xae\x2c\x2f\x51\xcc\x54\xd1\x3e\x77\
\xf1\xe3\x78\xda\x3a\x41\xe0\xe3\xba\x29\x19\x99\x5e\x75\x26\xc4\
\x11\x65\x25\xcb\x17\x2e\xff\x2e\x3f\x7d\xf8\xe3\x3c\xb9\xf8\xdf\
\x99\xeb\x9c\xc7\x4f\xec\x2e\xcb\x6e\x2e\x21\x7a\x59\x16\x19\x43\
\x98\x9f\xc3\xee\x24\x1c\xd9\x73\x23\x7f\x73\xea\xb7\xa8\x0c\x16\
\x88\xa2\x18\xd7\x0d\x89\xe3\xb8\x6f\x91\x00\xc8\x18\x0a\x4f\x5c\
\xf8\x5f\x7c\xe4\xde\xff\xc2\x5f\x9c\xfa\x45\xe2\x58\xa2\xa2\xe1\
\xba\x5d\x59\x53\xd7\xf5\x6e\xf0\x91\x24\x4d\x25\x0b\xed\x0b\x2c\
\x8a\x55\x4a\x25\x03\x6d\x78\x8e\x85\xb6\x4e\x1c\x77\x6d\x19\x99\
\x9c\xd9\x8d\xdb\xf5\x70\x2d\x17\x43\x6a\x5c\x09\x5f\x62\xc6\xbb\
\x81\x96\xf6\x1a\xbf\xf9\x8d\x0f\xa0\xeb\x1a\xb6\xd3\x46\x57\xf5\
\xbe\xb0\x1e\xba\x01\x69\x51\x32\x35\x35\xc0\x57\x4f\x7e\x81\x99\
\xc9\x51\x7e\xfd\xf9\x1f\x27\x10\x36\x51\x1c\xe1\xba\x11\xaa\xae\
\x5c\xbd\x2b\xe3\x88\x72\xa1\xcc\xef\x7c\xf3\x71\x3e\x72\xf4\xd7\
\xd1\x6c\x7d\x99\xf5\xc5\x16\x2b\xaf\x37\x89\x83\x94\x5c\xd4\xd5\
\x40\x9b\xcd\x26\x7e\x12\x91\x77\x73\x84\xea\x3a\x7f\xf0\xed\x8f\
\x72\xc3\xcc\x8d\xb4\x5a\x57\x38\xff\x4f\x01\xad\x96\xdb\xd7\x85\
\xdb\xed\x36\x71\x1c\x43\x4e\x20\x45\x95\xcf\x5f\xfe\x6f\x3c\x9c\
\xf9\x57\x34\xd3\x79\x16\x8f\xe9\xac\xce\x36\x89\x9c\xee\xba\x4a\
\xd3\xb4\xbe\xff\x26\x17\xe5\x58\x8e\x5e\xe5\xef\xce\xfe\x0e\x8e\
\x6c\xd2\xf6\x9a\x2c\x5d\xb4\x68\x6e\x61\x61\xcf\xf3\x48\xfc\x98\
\x70\x50\xe0\xcb\x0c\x5f\x5f\xf8\x53\x0e\x6b\x47\x49\x0b\x1d\xe6\
\x2f\x84\x2c\x9c\x6e\xe1\xb5\x63\x34\xa3\xbb\x32\xb0\x2c\xab\xcb\
\xc2\x71\x81\x40\x59\xe3\x45\xfb\x2f\x48\x8c\x80\x85\x53\xeb\xbc\
\xfe\xb4\x4b\xa7\xe9\x7e\x17\x0b\x77\xe8\x14\x52\x62\x4a\x1c\x6b\
\x7e\x86\xf9\x78\x27\x4d\xed\x32\xeb\x97\x5c\x2e\x7c\xa3\x2b\x30\
\x65\x32\x19\x92\x24\x61\x63\xa3\x41\x47\x26\x04\xa1\x8e\xa5\xbc\
\xce\xc7\x9f\xfe\x08\x62\xfb\xc4\xb4\x4c\x3c\xc8\x54\xba\x9d\xe6\
\x72\x39\xa2\x28\xb2\xa4\x94\x45\xdd\x30\x30\x27\x13\x8a\x83\x2a\
\xf6\x52\x44\x63\xd6\x23\x6a\xa8\x68\x86\x82\x9a\xe9\x2f\x90\xe7\
\x34\x4d\x1b\x30\x4d\xb3\x18\xeb\x11\x43\x87\x0d\xe2\x46\xc2\xc6\
\x15\x07\x7f\x59\x25\xf1\x25\x66\x59\x25\x4d\x13\x00\x0b\x68\x1a\
\x86\x31\xa9\x69\x1a\xe6\x9e\x84\x72\x51\xa7\x31\xe7\x61\xcf\x27\
\xf8\x1b\xa0\xe7\x15\x14\xbd\x6b\x6f\x2b\x16\x8b\x96\xe7\x79\xc5\
\x8c\x61\xe2\x19\x01\x33\x77\xe6\x68\xcf\x85\x34\xe7\x5d\xfc\x55\
\x95\xd8\x97\x64\x4a\x57\x59\x78\x6e\x6e\xee\xd9\x89\x89\x89\xa3\
\x49\x94\xa2\x8c\x85\xe4\x73\x2a\x8d\xd7\x03\xac\x85\x10\x81\x8a\
\x66\x8a\x4d\x5b\x9e\x4a\x10\x04\x94\xcb\x65\x2b\x0a\xc2\xa2\x65\
\xc4\xec\xbf\xa7\x40\x73\x36\xc0\x5a\x0a\x70\x16\x24\x69\x04\x66\
\x45\x21\x4d\xba\xba\xb0\xe7\x79\xeb\x86\x61\xd4\x51\x75\x26\xef\
\xd6\x88\xec\x14\x77\x2d\x46\x3b\x72\xcb\x4d\x34\x5b\x4d\x0a\x85\
\x02\xed\x76\x9b\x20\x08\x58\x58\x58\xe0\xb1\xc7\x1e\x23\x9f\xcf\
\x93\x84\x29\x2f\xbd\xfc\x32\x5a\x4d\xc5\xcd\x39\x28\xaa\x82\xe3\
\x38\xac\xae\xae\x52\xad\x56\x39\x7b\xf6\x2c\x33\x33\x33\xdc\x7f\
\xff\xfd\x0c\x0f\x0d\xf3\xe9\xff\xf9\x19\x76\x4d\x4d\xb0\xe0\x2e\
\xb0\xed\xe0\x28\x1b\x8d\x0d\x6c\xdb\xa6\x52\xa9\xf0\xca\x2b\xaf\
\x20\xa5\xe4\x7d\xef\x7b\x1f\xf5\x7a\x9d\x27\xbe\xf2\x35\x74\x0c\
\x32\xc6\x22\xe3\xf7\x8c\xb1\xba\xb6\x82\xd1\xb5\xa9\xf5\xfd\xda\
\x77\xde\x79\x27\x37\x1e\x3e\x8c\xa1\xe9\x7c\xfb\x5b\x2f\x33\x5c\
\x0a\x68\x8f\xb7\xd1\xa6\x55\xaa\xd5\x2a\x67\xce\x9c\x21\x93\xc9\
\x6c\x5a\x36\x34\xee\xbb\xef\x3e\x1e\x7d\xf3\xa3\xbc\xf0\x8d\x17\
\x79\xe9\xa5\x97\x98\xda\x97\xa1\x7c\x7b\x89\xd9\xd9\x59\x2c\xcb\
\xa2\x5c\x2e\xb3\xbe\xbe\xce\xd2\xd2\x12\xd5\x6a\x95\x77\xbf\xfb\
\xdd\x34\x36\x36\x78\xe6\xa9\xe7\xd9\x57\x1f\xc0\xcf\x7a\x34\x06\
\x1a\x0c\x0e\x0e\x12\x86\x21\x2b\x2b\x2b\xb4\xdb\x6d\x3c\xcf\xe3\
\xc3\x1f\xfe\x30\x99\x4c\x86\x57\x5f\x39\x4d\x4b\x69\xe2\x97\x3d\
\xb4\xa9\xe9\x29\xdc\x53\x2e\x9a\xa6\x31\x33\x33\xc3\x99\x33\x67\
\x48\xd3\x94\x5d\xbb\x76\xf1\xc8\x23\x8f\xf0\xe2\x8b\x2f\xb2\xde\
\x5c\xa3\x56\xab\x21\xa5\xe4\xfc\xf9\xf3\xd4\xeb\x75\x3c\xcf\x63\
\x60\x60\xa0\x6f\x9b\x7d\xec\xb1\xc7\xf0\x7d\x9f\x13\x27\x4f\x30\
\x33\x33\x43\xa1\x94\xa7\x5c\x2e\x53\x1f\xac\x73\xfa\xf4\x69\x6e\
\xba\xe9\x26\x4e\x9c\x38\x41\x10\x04\xdc\x7f\xff\xfd\x4c\x4d\x4d\
\xd1\x6e\xb7\xd9\xd8\xd8\xc0\x0b\x1d\xf6\xee\xdb\x0b\x02\x5c\xd7\
\x65\xf7\xee\xdd\xcc\xcf\xcf\x03\xb0\x6d\xdb\x36\xde\xf9\x8e\x77\
\xe0\x79\x1e\x17\x2e\x5e\x40\xd7\xab\xec\x1f\xb8\x8e\x6f\x7f\xfb\
\xdb\xd4\x6a\x35\xea\xf5\x3a\xba\xae\xf7\xb3\xe3\xbb\x77\xef\xe6\
\xbe\x7b\xef\xe3\xe4\x89\x93\xd4\x06\xab\x98\xa6\x49\xb9\x5c\xa6\
\xd5\x6a\xf5\x2f\xa4\x65\x59\x00\x94\xcb\x65\xde\xf6\xb6\xb7\xf1\
\xc2\x0b\x2f\x70\xfe\xc2\x05\xf6\xee\xdd\x8b\x65\x59\x5c\xbc\x78\
\x91\x81\x81\x01\x4a\xa5\x12\xa6\x69\xf2\xda\x6b\xaf\x21\xa5\xe4\
\xc8\x91\x23\x4c\x4c\x4c\xf4\x66\x24\xaf\xbd\xf6\x5a\xd7\x23\xdd\
\x63\xbf\x28\x8a\xc8\x66\xb3\x58\x96\x65\x65\x32\x99\xe2\xb6\x6d\
\xdb\x58\x5a\x5a\x42\xd3\xb4\x1e\xcb\xf6\x59\xb4\xb7\x8f\xeb\xba\
\x73\xd9\x6c\x76\x60\x64\x64\xa4\xd8\x4b\x3e\xf6\xb6\xa7\x69\xda\
\xff\xbe\xe9\xdf\xb3\xc2\x30\x6c\xe6\xf3\xf9\xc9\xf1\xf1\x71\x16\
\x17\x17\x7b\x0b\xf7\xee\x33\x74\x93\xa3\x7b\x8c\xab\x69\x9a\xe5\
\xba\x6e\x71\x66\x66\xa6\xef\x70\x48\xd3\xb4\xcf\xca\x3d\x96\x4e\
\x92\x84\x6a\xb5\xda\x67\xe1\x9e\x51\x60\x2b\xe3\xf7\x72\x97\xbd\
\x9c\x64\xa3\xd1\x60\x68\x68\xc8\xca\xe5\x72\xc5\xad\x2b\x84\xad\
\xb1\xf7\x58\x5e\x51\x14\xda\xed\xf6\x7a\xb5\x5a\xad\xd7\x6a\x35\
\x9a\xcd\x66\x9f\x99\xb5\x6c\x26\x1b\xf4\x98\x6f\xf3\xf9\x27\x6d\
\xdb\x6e\xd4\xeb\x75\xa3\xd1\x68\x90\xcd\xe6\x20\x05\x5d\xdd\xc2\
\xc1\x5b\x18\xdd\xb6\x6d\x3f\x93\xc9\x48\xc7\x71\x10\x42\x90\x35\
\x73\x7d\xdf\x72\x9f\x27\xaf\xb2\xb0\xf0\x3c\x8f\x4c\x26\x13\xae\
\xaf\xaf\xcb\x6c\x26\x4b\x9a\x82\x40\xa2\x6b\xc6\x35\xcc\x2a\x13\
\x6a\x82\x26\x00\x00\x1f\x45\x49\x44\x41\x54\x84\x60\x75\x75\xd5\
\xca\xe5\x72\x99\x76\xbb\xad\x17\x8b\xc5\x2e\x93\x7f\x9f\x7e\x7b\
\x03\x93\xa6\x69\x1b\xe8\x14\x8b\xc5\x10\x49\xbf\xfd\x77\xf3\xb0\
\xaa\xaa\x04\x41\x10\x85\x61\xa8\xab\xaa\x6a\x02\x52\x53\x74\x34\
\x45\xbf\xf6\x1c\x05\x08\x04\xb6\x6d\x8b\x24\x49\x56\x72\xb9\x5c\
\xd9\xb6\xed\xd4\xd0\xcd\xbe\x9c\xaa\x8d\xec\x2d\x99\xf1\x86\x81\
\x48\x05\xa8\x5d\x9d\xe0\xf2\xe5\xcb\xcd\xb1\xb1\xb1\xed\x9e\xe7\
\x93\x16\x3d\xb2\x63\x11\x69\x92\x82\xab\x93\xb4\x32\x24\x2d\xad\
\xef\x91\x76\x1c\xa7\x30\x31\x31\xa1\xaa\xaa\x4a\xac\x24\x24\xdb\
\xda\x94\xcb\x2a\x49\x24\x51\x82\x0c\x51\x43\x43\x3a\x3a\x02\x81\
\xe3\x38\xba\xe7\x79\xa5\x7a\xbd\x6e\xa8\xaa\x4a\x33\xdb\x60\x62\
\x87\x4e\xe4\x27\xc8\x4e\x96\xa4\xad\x21\x5d\xb5\x9f\xf4\x74\x5d\
\x37\xc9\x64\x32\xf6\xf6\xc9\xc9\x01\x37\x0e\xc8\xef\x75\x50\x84\
\x20\x75\x55\xd2\x8e\x49\xd2\xd4\x21\xa5\xeb\x91\x36\x0c\xce\x9c\
\x39\xa3\x4f\x4c\x4c\x94\x64\x22\x69\xe6\x36\x18\x99\xd6\x88\xfc\
\x14\x6c\x93\xa4\xad\x93\x74\xba\x71\x1b\x86\xce\xca\xca\x0a\x43\
\x43\x43\xcd\x5c\x36\x67\xac\x07\x1d\x86\x0f\x74\x0d\xec\xa9\xab\
\x20\x2d\x93\x68\xa3\x1b\xb3\xa2\x77\x97\x53\xed\x76\x7b\x6e\x68\
\x70\x70\xbf\x17\x46\x88\x6d\x36\xc5\xaa\x42\x1c\x26\x68\xe5\x37\
\xae\xa2\x48\x9d\xd6\xd7\xab\x44\x2b\x46\xaf\xf6\x22\xaf\x69\x1a\
\x71\x9a\x20\x47\x6d\xb4\xc3\x2d\xf6\xd5\x6e\x66\xc5\xbe\x8c\xed\
\x36\x50\xbc\x0c\x8d\x2f\x0c\xf6\xec\x17\x5a\x1c\xc7\x4a\x18\x86\
\x58\x9a\x83\xb9\x63\x89\xfd\x3b\x0f\x32\x59\x9d\xe6\xe9\x73\xff\
\x40\x25\x57\x63\xfd\xab\x39\x82\x05\x13\xd7\x75\x15\x45\x51\xb4\
\xde\xed\x14\xef\x6c\x52\xbc\x39\xc3\x74\xe9\x00\xe7\x57\x4e\x92\
\xa6\x92\xe8\x74\x15\xfb\x44\xbe\x97\x20\x50\xa3\x28\xd2\x7d\x37\
\x60\x21\x5d\xe1\xe0\x4d\x0e\x25\x31\x8c\x24\xc0\xf6\x56\x89\x2c\
\x81\x73\xac\x4a\xb0\xa0\xf7\x6e\xd1\x9c\xaa\xa9\xc8\x50\x62\x4f\
\xad\x31\x70\x7b\x99\xaa\x36\xc1\xba\x33\x8f\x65\x3b\x18\xd6\x00\
\x8d\x7f\x2e\x03\xa2\x97\x2e\xd3\x55\x29\x38\x1f\x2e\xb2\xff\x68\
\x19\x2d\xc9\x92\x33\xb2\xcc\x6f\xbc\x4e\x45\x14\xe8\x3c\x3b\x40\
\xb8\x64\xf4\xd2\x68\x75\x10\xb4\x02\x8f\x1d\xb7\xfa\x50\x6c\x73\
\xd3\xf0\x3d\x28\x6f\xde\xf1\xb3\x8c\x0c\x4e\x11\xa7\x61\xdf\x23\
\x0d\x48\xdf\xf7\xf1\x3d\x9f\x98\x80\x5d\xc6\x03\xfc\xab\x1b\x3e\
\xc6\x8f\x5d\xf7\x9b\x5c\x3f\xf8\x28\xa3\x63\xa3\x7d\xed\x36\xde\
\x74\x9f\x06\x41\x40\x9c\x44\xac\xaf\x06\xfc\xe8\xc1\x9f\x43\xb6\
\x07\xf9\xa9\x23\xbf\xce\x0f\x1d\x79\x1f\x41\xe2\x5f\xe3\x50\xf5\
\x3c\xaf\xef\xfc\x7c\xf3\xcc\x87\xd8\xa1\x3f\xc8\xdd\x3b\x7f\x90\
\x37\xed\xfc\x20\xa1\x66\xe3\x79\x57\xfb\xee\x89\x56\x51\x1a\x31\
\x9c\x9f\xe2\xa1\x89\x0f\xf1\x73\x77\xfd\x19\xf6\xd2\x20\x37\xee\
\xb9\x95\x20\xb5\xaf\xd5\x85\xfd\x00\xd7\x75\xd1\x74\xc1\x8e\xdc\
\xed\x7c\xf8\xc8\x1f\x70\xa0\xfa\x46\x7e\xe9\xbe\x4f\x11\x67\xba\
\x6d\xb7\x78\xa4\xa5\xeb\x7a\x24\xa4\x78\x81\xcf\xfb\x0e\xfc\x01\
\xf7\xed\xf8\x51\x7e\xe6\x96\x4f\x32\x36\xbe\x8d\x30\xbd\xd6\x23\
\xed\xf9\x1e\x7e\x10\x10\x0b\x97\xbb\x73\x1f\xe3\xc0\xf0\x9d\x28\
\x35\x73\x9c\xa1\xfc\x58\x9f\x67\x7b\xce\xa5\xfe\x49\xc6\x01\xd7\
\x0d\x1f\xe1\x89\x4b\x7f\xc1\x97\x5f\x78\x8a\xbd\x43\x37\xf4\x6c\
\x1a\xfd\x37\x5f\xdf\x23\x1d\xfa\xd4\x8c\x49\xe6\xdb\x17\x78\x75\
\xe5\x05\x6e\x99\x78\x00\x3f\xb6\xfb\xfa\xeb\x56\x8f\xb4\xe7\x79\
\x64\xf5\x22\xe5\x4c\x95\x67\x16\x3e\xcd\x91\x91\x47\x29\x64\x4a\
\x68\xa9\x89\xe7\x79\xb8\xae\xbb\xc5\xc7\xec\x11\x47\x11\xe5\xec\
\x00\x4a\xa1\xcd\x17\x5f\xfc\x12\x3b\x6b\x07\x29\x68\x15\xfc\x20\
\xc0\xf3\xbd\xab\x16\x5f\xdf\xbf\xea\x91\x6e\xe8\x7c\xe6\xe5\x3f\
\x24\x9f\x8e\x33\x33\x3e\x09\xa9\x8a\xe7\x5f\xf5\x48\x77\xe3\xea\
\x7a\xa4\x2f\x9c\xb6\xa8\x0d\x2b\xac\x3b\x8b\xec\x9f\x38\x44\x4e\
\x2d\x6f\xb2\xb2\x77\xcd\x85\x8f\xc2\x90\x76\xdb\xe3\xa7\x1e\xfd\
\x71\xbe\x7a\xe9\x93\x28\x61\x1c\x63\x79\x9d\x4d\x16\xbe\xea\x76\
\xef\x79\x89\x91\x02\x27\xb4\x19\x2e\x6c\x67\xa8\x32\x44\xcb\x6d\
\xf5\xf5\xd5\xad\xbc\x1a\x86\x21\x71\x94\x12\xa4\x0e\xb5\xdc\x08\
\x05\x7d\x80\x8e\xdf\x24\x6f\x94\xfb\x09\x87\x9e\x1a\xd7\x9b\xb1\
\xa9\x4c\x10\x28\x68\x69\x9e\x58\xb3\xf0\xbc\x08\x3f\xf2\x09\x43\
\xbf\xdf\x77\x92\x24\x04\xa1\x4f\x12\xa7\xa4\x52\x22\x63\x9d\xb1\
\xda\x04\xab\xad\x35\xc2\x34\xd8\xe4\xd5\x2d\x1e\xe9\x4d\x83\x80\
\xa2\xa8\x64\xb3\x1a\xe3\xe5\x19\x2c\xbf\x89\x21\xf2\x48\xb9\xe9\
\xcf\x0e\xba\x49\x8d\x5e\xa6\x47\x4a\x28\x57\x4c\xe2\x40\x43\x57\
\x0c\x04\x0a\x61\xe2\x13\x47\xc9\x35\x1e\xe9\x1e\x17\x1b\xba\xc6\
\xdc\xd2\x12\x45\xa3\x8a\x72\xac\xf9\x69\x7c\xd1\xda\xf4\x39\x7b\
\xdf\x57\x17\xfe\xc6\xca\xa7\xd9\x6e\xdc\xce\xf8\x4e\x85\x13\xcd\
\x7f\xc2\x89\xdb\xb8\x6e\x37\x63\xd2\xbb\x85\x5d\xd7\x25\x08\x43\
\x0a\xb5\x90\x97\x5f\xff\x36\x8f\x3f\xfc\x4b\x7c\xfc\xf9\xf7\xf1\
\xed\xc5\x27\x49\x93\xf4\x9a\x6c\x4c\xaf\x6f\xd5\x48\xf9\xfa\x85\
\xcf\xf2\xf8\xbd\xbf\xc2\xe7\x4e\xfd\x01\x67\xec\xaf\xa1\xa2\xe2\
\xba\xd7\xce\x6e\xd7\xf5\x88\xe2\x88\x25\xeb\x12\xed\x8e\x4d\x6d\
\xd8\x40\xd6\x2f\xb2\x61\xaf\x10\x27\xdd\x4c\x4f\x3f\x9d\xe5\xfb\
\xb8\x8e\x8b\x94\xd0\xe0\x2c\x7b\x86\x6e\xc0\xcb\x5f\xe4\x77\x9e\
\xf9\x18\xba\xa6\x5f\x33\xbb\xbb\x71\x3b\x24\x49\xca\xd4\xd4\x00\
\x4f\x9d\xfe\x67\xc6\x6b\x93\xfc\xc6\x73\xef\x07\x91\xf4\xfb\xee\
\x5d\x78\xd7\xeb\xde\xc2\x95\x7c\x99\xdf\x78\xe6\x83\x7c\xf0\xa6\
\xdf\x41\x6b\x27\x8b\xb4\xa3\x15\x96\xce\x35\xf0\x3b\x29\x71\x9a\
\x47\x08\x41\xb3\xd9\xa4\xdd\xb1\x28\xb8\x39\x84\xa9\xf2\x47\x2f\
\x3f\xce\x1b\xf6\xfe\x10\xab\xf1\x79\xd6\xbf\xd3\xe5\xdf\x5e\x32\
\xc1\xb6\x6d\x5a\xad\x56\x37\x87\x2f\x2a\x3c\xbb\xf6\x67\xa8\xaf\
\x25\xb4\xc3\x65\x5a\x97\x57\x59\x38\xd3\xc0\x6f\xa7\xdf\xc3\xc2\
\xf9\x28\xc7\x42\xf8\x32\x9f\x3f\xf3\x87\x58\x41\x93\x96\x7f\x9e\
\xd5\x79\x9b\x56\xd3\x05\x21\xb7\xe8\xc2\x31\x81\x96\x12\x93\xe7\
\x2b\xaf\xff\x0f\xee\xd1\xde\x02\x19\x87\xb3\xf3\x27\x58\x39\xdb\
\xd5\x85\x35\x43\x05\xa0\xdd\x6e\x63\x37\x6d\x32\x51\x81\x66\xf2\
\x3a\x5f\xb8\xf4\xbb\xb8\x71\x93\x8b\xce\x4b\x6c\x2c\xf8\x6c\x2c\
\x37\x31\xf2\x5d\x0d\xc4\xb6\x6d\xac\x46\x07\xbb\xd8\xd5\x85\x9f\
\x5f\xfb\x4b\x2e\xd9\x7b\x58\x8b\xcf\x11\x58\x3e\xe7\x9f\x5e\x43\
\xd1\xe9\xeb\xc2\xcd\x66\x93\x4e\x9a\x10\x44\x06\xe6\xc0\x02\xbf\
\xfc\xa5\x9f\x40\xec\xd8\xb9\x43\xc6\xb6\xc4\x2c\x68\x24\x69\x42\
\x2e\x97\x63\x7d\x7d\xfd\xdc\xe0\xe0\xe0\x6e\x5d\xd3\xd1\xb6\x85\
\xe4\xea\x82\x8d\xb3\x3e\x76\x23\x40\x7a\x3a\xc8\xae\xde\xbb\x99\
\x91\x5e\xad\x54\x2a\x45\x20\x8b\x2e\xc9\x5f\x97\x12\x2d\xa7\xb4\
\x96\x1c\x12\x57\x23\xf1\x40\xcf\x0a\x52\x99\xf6\x3d\xd2\xa6\x69\
\x16\x72\xb9\x1c\xca\x8c\x4f\x5e\x55\x59\x39\xef\x10\x75\x20\xf1\
\xe8\x56\x11\xe9\xdd\x75\x5d\x3e\x9f\x5f\x6c\xb5\x5a\xa5\x91\xe1\
\x91\x82\x15\x5a\xcc\x3c\x98\x63\xf9\x84\x47\x6b\xc9\x45\x09\x0c\
\x02\x27\xc1\xc8\x77\x39\xfb\x1a\x16\x0e\x12\xf4\xbd\x31\x85\xac\
\xca\xca\x69\x9b\xd0\x92\xa4\x9e\x82\x4c\x41\xcb\x82\xd8\x4c\xc7\
\x55\x2a\x15\x4b\xa6\xb2\xd8\xc1\x67\xef\xbd\x79\x56\x4e\xba\xd8\
\xab\x21\xa9\xaf\x10\x3a\x29\x66\x49\x25\x89\x93\x9e\xe9\xf4\x58\
\x3e\x9f\x3f\x62\x98\x59\xc6\xee\x56\xb0\x97\x23\xda\x73\x21\xda\
\xa1\x03\x87\x7a\x86\x6e\xe6\xe6\xe6\x48\xd3\x94\x85\x85\x05\x1e\
\x7e\xf8\x61\x0e\x1c\x38\x40\x6b\xa3\xcd\x73\xcf\x7e\x83\xc9\x5d\
\x19\x36\x1a\x1b\x14\x8b\x05\x36\x36\x36\x68\x34\x1a\x54\xab\x55\
\xce\x9d\x3b\x87\xa6\x69\xbc\xef\x7d\xef\xa3\x90\x2f\xf0\x99\x4f\
\x7d\x96\xda\xb6\x2a\x57\xd2\x39\x86\x86\x87\x58\x5a\x5a\xc2\x34\
\xbb\x0e\xfa\x8b\x17\x2f\x12\x86\x21\x8f\x3f\xfe\x38\xf5\x7a\x9d\
\x2f\x7e\xfe\x1f\xbb\x25\x57\x43\xcb\x8c\x1c\x1a\x61\x75\x75\x95\
\x6c\x2e\xdb\xd7\xa6\x3d\xcf\xe3\xd6\x5b\x6f\xe5\x91\x47\x1e\xc1\
\xd0\x0d\x9e\x7d\xea\x79\x0a\x03\x0d\xe2\x52\x44\x14\xc7\x0c\x0e\
\xd6\xb9\x70\xe1\x02\xaa\xaa\x76\x4d\x42\x8a\xc2\xbd\xf7\xde\xcb\
\xfd\xf7\xdf\xcf\xd3\x5f\x7f\x86\xd3\xa7\xce\x50\x9b\xd4\x31\x32\
\x26\xcd\x46\x03\xdb\xb5\xc9\x66\xb2\x74\x3a\x1d\x56\x56\x56\xa8\
\xd5\x6a\xbc\xef\xfd\xef\x63\x7e\x6e\x9e\xe7\x9e\x7a\x9e\xeb\x46\
\xf3\x44\x83\x01\x8d\x66\x93\x5a\xad\x4a\x14\x45\x2c\x2f\x2f\xf7\
\x2d\x29\x1f\xfa\xd0\x87\xa8\xd7\x6b\x7c\xeb\xf9\x97\x58\xd5\x57\
\x88\xb6\x07\x68\x3b\x76\xec\xe8\x83\x75\xa1\x50\xe0\xf4\xe9\xd3\
\x24\x49\xc2\xde\xbd\x7b\xb9\xfb\xee\xbb\x09\x82\x80\x95\xf5\x65\
\x06\x07\x07\xd9\xd8\xd8\xe8\xdf\xba\x96\x65\x5d\xe3\xa9\x7e\xe0\
\x81\x07\x50\x55\x95\xaf\x3f\xf5\x75\x6e\xbb\xf5\x36\xf4\xe3\x1a\
\xa3\xa3\xa3\xfd\x5b\xf6\xe6\x9b\x6f\xe6\xd2\xa5\x4b\x24\x49\xc2\
\xad\xb7\xde\x4a\xa1\xd0\xbd\x10\xcb\xcb\xcb\xf8\x91\xcb\xee\x3d\
\xbb\x31\x33\x26\x8e\xe3\x70\xd3\x4d\x37\xf1\xb9\xcf\x7d\x0e\xdf\
\xf7\xd9\xb1\x63\x07\x87\x0e\x1d\x62\xdf\xbe\x7d\xb4\xda\x2d\xd6\
\xd7\xd7\x29\x16\x8b\x3c\xf7\xdc\x73\x8c\x8d\x8d\xd1\xe9\x74\xd0\
\x75\x9d\x2b\x57\xae\xa0\x28\x0a\xd3\xd3\xd3\x3c\xfa\xe8\xa3\x34\
\x9b\x4d\x6c\xcf\xa2\x56\xab\x01\xb0\x52\xc8\x73\xee\xdc\x39\x66\
\x66\x66\xb8\x70\xe1\x02\xab\xab\xab\x14\x0a\x05\xee\xb8\xfd\x0e\
\x4e\x16\x4e\x72\xf6\xfc\x59\x0e\x1d\x3c\xc4\xfa\xfa\x3a\x97\x2e\
\x5d\xa2\x5e\xaf\x53\xad\x56\xb9\x78\xf1\x22\x27\x4e\x9c\x00\xd8\
\x52\xb7\xdc\x4d\xae\xbe\xf4\xd2\x4b\x57\x3d\xd2\x3d\x1d\xb4\xa7\
\x0b\xcf\xcc\xcc\xec\xce\xe7\xf3\x7d\xee\xdb\xaa\xdb\x6e\xf5\x09\
\x5b\x96\xb5\x3a\x38\x38\x58\xcc\x64\x32\xd9\x5e\x8e\x6d\x6b\x6d\
\xee\xd6\xbe\x93\x24\x89\xc2\x30\x0c\xaa\xd5\x6a\xa1\x52\xa9\xf4\
\x8d\xe1\x5b\x75\xe1\xad\x35\xbe\x42\x88\xc5\x4e\xa7\x53\xda\xb5\
\x6b\x57\xa1\xd5\x6a\x5d\xc3\xcb\x5b\x75\xe7\x34\x4d\x19\x18\x18\
\xe0\xd4\xa9\x53\xcf\x1e\x38\x70\xe0\xa8\xa2\x28\xdd\x67\xf2\x16\
\x9f\xf3\x56\x8e\xd7\x34\x8d\x66\xb3\xc9\xd0\xd0\x90\x95\xc9\x64\
\x8a\xbd\xaa\xa8\xad\xbc\xdf\xd3\xa7\x7b\xe8\xb7\xb6\xb6\x76\x6c\
\x64\x64\xe4\x48\xb9\x5c\xee\xd6\xc4\xf4\x6a\x9a\xd3\x44\x6e\x28\
\xaa\x10\x42\x88\x1e\x23\xca\x34\x4d\xd7\x3d\xcf\x2b\xb6\xdb\xed\
\x54\x11\x8a\x90\xa9\xb8\xea\x1d\xde\xc2\x9f\x42\x08\x91\x24\xc9\
\x7c\xa7\xd3\xa9\xd8\xb6\x9d\xef\x5a\x8c\x95\x6e\x9d\xad\x72\x0d\
\x33\xf7\x4e\xc4\x0f\xc3\xd0\x75\x5d\xb7\xe6\x38\x8e\x54\x85\x4a\
\x9a\xf2\x7d\xfb\xde\xac\x17\x5e\x54\x14\x45\xac\xac\xac\x44\x02\
\x21\xb7\xb2\xad\x10\xb2\x3f\xf8\xaa\xaa\x62\xdb\xb6\x04\x56\x1a\
\x8d\xc6\x46\x92\x24\xc1\xbf\xd4\xf7\xe6\x52\xca\x71\x1c\x27\xef\
\xfb\x7e\xac\x20\x64\x2a\xaf\xd6\x0b\xf7\xda\xab\xaa\x0a\x20\xa2\
\x28\x0a\xe3\x38\x76\xd3\x34\x6d\x6f\x6c\x6c\x24\x48\x01\x12\x54\
\x4d\xd5\xb4\xf1\xbd\x83\xb5\xa4\xa9\x23\xd4\x6e\x6d\x6c\x77\x6d\
\xe4\x8f\x0e\x0e\x0e\xc6\x49\x9c\x10\x16\x1d\x72\xdb\x03\x68\x65\
\x48\xd6\x4d\x92\x8e\xd6\x7d\xd9\x1a\x12\x40\x49\x92\xc4\xa9\xd5\
\x6a\x59\x4d\xd3\x32\x31\x09\xd1\x78\x83\x81\x5c\x96\x68\xcd\x20\
\xd9\x30\x91\x91\x82\x62\x76\x5d\xe7\x41\x10\xb8\xc0\xda\xe0\xe0\
\xe0\x80\xa2\x28\xe9\xba\xba\xce\xe4\x75\x2a\xc9\x86\x49\xbc\x62\
\x92\x3a\x3a\x42\x93\x08\xad\x3b\x63\x72\xb9\xdc\x36\x45\x51\x46\
\x2b\x95\x0a\x7e\x1c\x92\xb9\xae\x83\xa1\x2a\xc4\x2d\x83\xb4\xd1\
\xe5\x5b\xa1\x4b\x50\x24\xba\xd6\xbd\x8d\x47\x46\x46\x22\x99\xa4\
\xc9\x9a\xb2\xc1\xf6\xeb\x15\xc2\x55\x83\x68\xd5\x44\xda\x3a\x42\
\x91\x08\xbd\xeb\xd2\x6f\x34\x1a\xb9\xc9\xc9\xc9\x11\x24\xca\x62\
\xd0\x60\xea\x66\x20\x12\x24\x4d\x9d\xa4\x91\x21\xb1\x34\x54\x53\
\x22\x14\x41\xb3\xd9\x5c\x0f\xc3\x70\xdf\xb6\x6d\xdb\xca\x8e\xef\
\x63\xee\xb4\xc9\x55\x14\xc2\x0e\x68\xf5\xb7\xae\xa3\xa5\x19\xda\
\x4f\x55\x89\xd6\x34\x0c\xc3\x40\xd7\xf5\xc1\x72\xb9\x5c\xb0\x3d\
\x0f\x65\x8f\xc5\xe8\x6d\x11\x78\x0a\x9e\xdf\x22\x27\xca\xa4\x6d\
\x93\xd6\x53\x15\x14\x45\xa1\xd1\x68\xa4\x85\x42\xa1\xaa\xeb\x7a\
\xbe\x6d\xb4\xd9\x76\x9f\x4a\x21\x17\x90\x84\x1e\x7a\x9c\x27\x49\
\x13\x9c\xa7\x46\x88\x3b\x0a\xaa\xaa\x56\x2d\xcb\x2a\x17\x8b\xc5\
\x31\x21\x04\xde\x75\xab\x8c\xde\x2e\x49\x5d\x41\xea\xc7\xc8\x58\
\x12\x9f\xaf\xe1\x9c\xc9\xf4\x3c\x37\x51\x18\x86\x64\x74\x93\x0d\
\xdd\x62\xc7\x83\x21\x39\x59\xc5\xf7\x3d\x48\x23\xbc\x86\xc4\x7f\
\xa9\x8a\xbf\xa4\x53\x2c\x16\x10\x42\x8c\x54\x06\x2a\x23\xd2\x95\
\x34\xf7\x2e\xb2\xed\xee\x0c\x8d\x8d\x0d\x0a\x4a\x85\xc0\x0b\xd1\
\xdb\x55\x5a\xdf\x28\x62\x9a\x26\x9a\xa6\x91\xcb\xe5\xc2\xc8\x8f\
\x0c\xbb\x6a\x33\x7e\x6f\x91\xd8\x57\x29\x68\x05\xe6\xd6\x2e\x52\
\x52\x86\xd8\xf8\xc7\x32\x32\x50\x31\x0c\x43\x13\x42\xa4\xa6\x61\
\xd0\x4c\x5c\x26\xef\x04\x4f\x5f\xe3\xd6\xd1\x7b\x51\xde\xb6\xeb\
\xe7\x19\x1f\x9b\x21\xc6\xc7\xf7\xaf\xf5\x31\x7b\x81\x47\xa4\x78\
\xec\xd2\xdf\xc4\x2f\x3c\xf0\xa7\xfc\xc2\x83\x9f\x60\x62\xf0\x20\
\xe5\x09\xf3\xbb\x3d\xd2\xa9\xef\xfb\x04\x49\x40\xa7\x19\xf1\xe1\
\x5b\x7f\x9f\x8f\xde\xf1\xa7\x4c\x8c\xec\xe6\xae\x5b\xee\xc1\x4b\
\xec\x1e\xb9\xa4\x49\x92\x5c\xf5\x48\x13\x71\xef\xc8\x7b\x79\xd7\
\xae\x5f\xe5\xd6\xbd\xf7\x73\x64\xe6\x51\x3c\x63\x03\xdf\x0b\x7a\
\x04\x20\xbb\xb5\xc8\x01\x51\x12\x32\x94\x99\xe6\xd1\xc9\x9f\xe1\
\xe7\x8f\xfe\x05\x71\x73\x82\x9b\xf6\x1f\xc1\xc7\xea\x7b\xbb\xbb\
\x0c\xdf\x95\x35\xd5\x0c\x4c\x67\x6e\xe7\x4f\xde\xf6\x1d\xae\x1b\
\x3d\xca\x8f\xdf\xf9\x9f\x89\x2b\xad\x2e\x0b\x7b\x5e\xbf\x6a\xd4\
\xf3\x7d\x34\x1d\xe6\xe7\x3a\xfc\xf2\x5d\x9f\x61\x6f\xf5\x28\xff\
\xe5\x4d\x9f\x23\x3b\x92\x12\xc4\xdd\xed\x71\x1c\xcb\xae\x47\xda\
\x27\x4c\x43\x3a\x5e\x87\x5f\xb8\xf1\xb3\x8c\x99\x07\x51\x46\xf2\
\xd3\x4c\x0d\xec\x25\x08\xc2\x6b\x0a\xae\xbb\x2c\x1c\x10\xc6\x3e\
\x77\x6c\x7f\x94\xdf\x7f\xe1\x63\x84\x56\x86\x1f\xb9\xe1\x03\x64\
\xd4\x5c\x9f\x95\x7b\x32\xa5\xbf\x59\x0a\x3a\x5d\xb8\x99\x53\x6b\
\xcf\xf3\xe5\x93\x7f\xc3\x3b\xf7\xfe\x7b\x26\x2a\x7b\x88\xa3\xb8\
\xcf\xc2\x3d\x5d\xd8\xf3\x3c\x4c\x25\xc7\xc4\xc0\x0e\x3e\x77\xfe\
\xb7\xb9\xa1\xf8\x83\x3c\xb0\xf7\xad\x0c\x66\x27\x71\xbd\xab\x9a\
\x73\x4f\x17\x8e\xa3\x84\x72\x76\x00\x91\xed\xf0\xc4\x4b\x4f\xb0\
\xb3\x76\x3d\xa3\x85\x1d\x78\x81\xb7\xd5\x1c\x4f\xe0\xfb\x38\xae\
\x87\xae\x29\x04\xad\x3c\x9f\x3c\xf6\x1b\x4c\x9a\x47\x78\xf0\xba\
\xb7\x60\xaa\x79\x5c\xdf\xc5\x73\xb7\xb2\xb0\x07\x48\xf2\xe1\x14\
\xeb\xe9\x19\x2a\xf9\x12\xdb\x4a\x3b\xa8\x67\xc7\xf1\x7d\xaf\x8f\
\x6f\x5b\x3d\xd2\x44\x1a\x37\xee\x3e\xc4\xf3\xeb\x9f\x42\x73\x7c\
\x97\xb9\xe6\x25\xa2\x28\x26\xf0\x23\x24\x7a\x5f\x7a\x0c\xc3\x80\
\x2c\x0a\x71\x12\x52\xd0\x2b\xe8\x8a\xc9\x8b\x17\xbf\x42\x9c\x76\
\x1d\xfd\x5b\x65\x4d\x80\x24\x9f\x10\x24\x1e\x05\xa3\x8c\x2a\x3a\
\x38\x2c\xd3\x70\x97\xf1\x7d\x97\xc0\xd7\xae\xa9\x17\x06\x30\x00\
\x55\xa8\x04\x5e\x42\x75\x50\xe5\xc5\xb3\xc7\x98\x6b\x9c\x25\x08\
\xba\xd6\x0e\x4d\xd3\x7a\xcf\x64\x12\x23\x46\x00\x32\x51\xa8\x97\
\x07\x79\xe9\xf2\x4b\xc4\x25\x1f\x91\x76\x8b\xbf\x33\x81\xb9\xe9\
\x4c\x08\x08\xbc\x00\x43\x68\x98\x19\x9d\x4a\x66\x88\xe5\xf6\x3c\
\x31\x1e\x4e\x60\x11\xf8\x06\x32\x95\x57\xb5\x6f\xdf\x47\x20\xf0\
\xd2\x0e\x25\x7d\x90\x86\xf3\x3c\x0a\x0a\xeb\xce\x12\x51\x18\x11\
\xf9\xd1\x35\x72\x6c\xac\xc5\xa4\xa4\x90\x82\x63\xa5\x28\xc7\x5b\
\x9f\xc5\xc8\x48\x82\xc0\xc3\x71\x9d\xfe\xf4\xee\x95\xfc\x6b\x22\
\xcb\x57\x5f\xff\x33\xde\x73\xf0\x3f\x73\xca\xfd\x12\x1b\xf1\xe5\
\x9e\x51\xa8\xcf\xab\xbd\x92\xff\x24\x4a\xe9\x68\x17\xc8\xc8\x41\
\xee\x3b\x78\x2f\x9f\x3f\xfb\x47\xf8\xa9\x4d\x12\x5f\x4d\x61\x6d\
\x2d\xb8\x16\x5a\xc2\x8b\x73\x4f\xf0\x33\xf7\xfe\x06\x7f\x7f\xfa\
\xf7\xb1\xb8\x42\x35\x3b\x8a\xe7\x5d\x75\xc7\xf7\x06\x30\x8a\x62\
\x96\xad\x39\x9c\x4e\xc2\xe8\xb6\x02\x71\x61\x96\x91\xe2\x76\xbc\
\xa0\x3b\xa3\xae\x26\x13\x7c\x7c\xcf\x45\x22\xb0\xb5\xcb\x1c\x1e\
\x3f\x4a\x47\x3f\xc3\x17\x4f\xfe\x15\x05\xb3\xd8\xcd\xc6\xb8\x5e\
\x7f\x40\xba\x3e\xef\x94\xfa\x64\xc4\x4b\xe7\x5e\x65\xa2\x3e\xce\
\x9f\xbc\xfc\x0b\x54\x8b\x75\xdc\xc0\xc1\xb6\x9d\xbe\xee\xed\x38\
\x0e\x81\xef\x53\x2a\x64\xf9\xbf\xff\xf1\x23\xfc\xe6\xa3\x5f\x44\
\x6b\xf9\x8b\x2c\x2e\xcd\xb2\x7c\xb1\x8d\xdf\x8e\x89\x93\x3c\x52\
\x4a\x36\x36\x36\xf0\xe3\x88\x82\x97\xa7\xa5\x9e\xe3\xb7\x9f\xfc\
\x25\x7e\xe2\xc1\xc7\xf8\xc7\x8b\xff\x83\xd6\x49\x95\x66\xb3\xeb\
\x91\x96\x52\x5e\xf5\x48\x67\x04\x46\xb6\xc6\xef\x3e\xf7\x6f\xf9\
\xa3\x1f\xfd\x27\xce\x36\xbe\xc9\x95\xd5\xf3\xac\xce\x37\xf0\x5b\
\x57\x3d\xd2\x3d\xa7\x41\x2e\xc9\x73\xb2\xf5\x55\x0e\x78\xb7\xe1\
\x26\x1d\xce\x2e\x7d\x89\xb9\x4b\x0e\x8d\x75\x17\xa1\x42\xa5\x52\
\xe9\x9e\x60\x10\x13\x69\xe0\xc9\x0c\x4f\xcc\x7d\x92\xf7\x8c\x7c\
\x0c\x51\x6a\xf0\xf5\x57\xbe\x48\x63\xde\xa1\xd5\x0c\x51\xf5\x6e\
\x92\xb6\xd9\x68\x62\xaf\xdb\xe8\x51\x91\xa5\xe0\x14\xc7\xd6\xfe\
\x81\x62\x31\xcb\x17\xce\xfc\x31\x8d\xc5\x90\x8d\x95\x26\x7a\xa6\
\x2b\xe1\x5a\x96\x45\xb3\xd9\xea\x96\xe5\xaa\x79\x9e\x5c\xfe\x63\
\x26\x9d\x43\x9c\x59\xf9\x06\x7e\x27\x61\xe1\xd5\x26\x7a\x01\x4c\
\xb3\xab\x0b\x37\x9b\x4d\xfc\xbc\x24\x4e\x4d\x16\xd2\x17\xf8\xcd\
\xaf\xfd\x1c\x62\xfb\xd8\x0e\x29\x10\xa8\x99\xae\x3f\xae\x57\x2f\
\x1c\x45\x51\x31\x9b\xcd\x91\xbf\x21\x66\x6c\x6f\x8e\xd5\x93\x1e\
\x57\x5e\xea\x80\x65\xa2\xe8\xa0\x9a\xe2\x1a\x5d\x58\xd7\xf5\x62\
\x92\x8f\xd8\xfd\xf6\x02\xe1\x7c\xca\xe5\xe3\x1b\xf8\xb3\x5d\x9d\
\x43\xcb\xf4\xcb\xc8\x2c\x45\x51\x9a\x52\xca\xc9\x7c\x3e\x4f\xfe\
\x68\xca\xd8\x68\x86\xd9\xe3\x6d\xd6\xbf\x93\x90\x86\x02\xd5\x00\
\x45\xeb\x2e\xc2\x2b\x95\x4a\xd8\x6e\xb7\x8d\x5c\x36\x87\xad\xb8\
\xdc\xf5\xc1\x1a\x2b\x27\x7c\x66\x8f\xb5\x09\xe6\x75\x10\x29\x5a\
\x46\x45\x72\x55\x17\x1e\x1d\x1d\x3d\xea\x5b\x01\x95\x37\x2a\x4c\
\x4e\xe5\x98\x7d\xb1\xcd\xda\x89\x08\xe9\x75\x7f\x7a\x45\x35\xe9\
\xeb\xc2\xb5\x5a\x2d\x94\x89\x34\xa2\x91\x88\xe9\x1b\x33\x2c\x1e\
\x77\x68\x5e\x0c\x20\x56\x91\x48\xb4\xac\x20\x4d\x52\x74\x5d\x6f\
\x7a\x9e\x27\x33\x99\x4c\x35\x57\x2e\xb0\xed\x5e\x89\x75\x25\x66\
\xe3\x9c\x8f\x76\xe4\xf6\xc3\x58\x56\x17\x79\x66\x67\x67\x09\xc3\
\x6e\xd9\xc0\x5b\xde\xf2\x16\x76\xed\xda\xc5\xc2\xdc\x22\xa7\xbf\
\x79\x96\xba\x22\x29\xed\xf4\x51\x54\xa5\x5f\x2f\x5c\x28\x14\xb8\
\x72\xe5\x0a\xd5\x6a\x95\x77\xbc\xe3\x1d\x68\xaa\xc6\xe7\xff\xf6\
\x8b\x54\x6b\x03\x4c\xe4\x1b\x54\x8f\x0e\xd0\x6e\xb7\x09\xc3\x10\
\xd3\x34\xb9\x70\xe1\x02\x71\x1c\xf3\xde\xf7\xbe\x97\x1d\x3b\x76\
\xf0\x0f\x7f\xf7\x05\x94\x15\x8d\xaa\xbd\xce\xce\xdb\x06\x68\xb5\
\x5b\x18\x86\x41\x92\x24\x2c\x2e\x2e\x22\xa5\x64\xdf\xbe\x7d\x3c\
\xf2\xe8\x23\xf8\xae\xcf\x77\x9e\x7a\x95\x9c\xd5\x66\xa6\xea\xa3\
\x8f\x6a\x64\xb3\x59\x16\x16\x16\xfa\xd4\x20\x84\xe0\x8e\x3b\xee\
\xe0\xce\xbb\xee\xe4\xb9\xa7\xbe\xc1\xe5\xaf\xce\x52\xd7\x87\x99\
\xbc\x21\x4b\xb3\xd5\xec\xfe\xd0\x85\x69\x62\x59\x16\x2b\x2b\x2b\
\x8c\x8d\x8d\xf1\xe1\x0f\x7f\x98\xe7\x9e\x7e\x8e\x57\x4f\x9e\xe2\
\xfa\xf1\x32\xd6\x40\x87\x30\x0a\x49\xe2\x84\x62\xb1\xc8\xfc\xfc\
\x3c\xcd\x66\x13\x80\xff\xf8\x1f\xff\x23\x00\x2f\x3c\x77\x0c\x37\
\x63\xb3\x3c\xb2\x8c\xb6\x6d\xdb\x36\x66\x67\x67\x19\x18\xe8\xfe\
\x0e\xcc\x85\x0b\x17\x08\x82\x80\xfb\xee\xbb\x8f\xc3\x87\x0f\x33\
\x3b\x3b\x4b\x2c\x23\x86\x86\x86\x30\x4d\x93\x6f\x7e\xf3\x9b\x8c\
\x8e\x8e\x62\x9a\x66\xdf\x12\x3b\x3e\x3e\xce\x43\x0f\x3d\xc4\xd0\
\xd0\x10\x17\x2f\x5d\xec\x0e\xfc\x66\x32\x60\x6c\x6c\x8c\xa5\xa5\
\x25\x46\x46\x46\xb8\x7c\xf9\x32\x00\x0f\x3f\xfc\x30\x7b\xf6\xec\
\xa1\xd3\xe9\xb0\xb6\xb6\xc6\xb9\x73\xb0\x6f\xdf\x3e\x2e\x5f\xbe\
\x8c\xef\xfb\x8c\x8f\x8f\xb3\xb2\xb2\x42\x1c\xc7\xdc\x7c\xf3\xcd\
\xdc\x79\xc7\x9d\xec\xdc\xb9\x93\xbf\xfc\xcb\xbf\x64\x7e\x61\x9e\
\xd1\x91\x51\x9e\x78\xe2\x09\xea\xf5\x3a\x00\x6b\x6b\x6b\x44\x51\
\x84\xa6\x69\x1c\x3c\x78\x90\x77\xff\x5f\xef\xc6\x34\x4c\x9e\x78\
\xe2\x09\x86\x86\x86\x48\x92\x84\xf9\xf9\x79\xae\x5c\xb9\x42\x4f\
\xaa\xed\xb1\xf0\x2d\xb7\xdc\xc2\xdc\xdc\x1c\x4b\xab\x4b\xdc\x78\
\xe3\x8d\xbc\xf2\xca\x2b\x54\x2a\x15\xce\x9c\x39\xc3\xc1\x83\x07\
\xc9\x64\x32\x1c\x3f\x7e\x9c\x34\x4d\x79\xf8\xe1\x87\xb9\x70\xf1\
\x42\x3f\xb9\xf0\xfc\xf3\xcf\x7f\xff\x7a\xe1\x4e\xa7\x63\xe5\x72\
\xb9\x62\xad\x56\xa3\xdd\x6e\xf7\xb7\xf5\x98\x72\xab\x17\xda\xb6\
\xed\xb9\x7c\x3e\x3f\x50\xad\x56\x8b\x5b\xcd\xde\x5b\xf9\xb9\xf7\
\x3d\x8e\x63\x2b\x0c\xc3\x66\xb1\x58\x9c\x1c\x18\x18\xa0\xd3\xe9\
\x5c\xc3\xab\x3d\x1d\xb9\xf7\xb7\x61\x18\xa1\xe3\x38\xc6\xf0\xf0\
\x30\xb6\x6d\x7f\x8f\x77\x79\xeb\xef\x63\x55\x2a\x15\x4e\x9f\x3e\
\xfd\xec\xee\xdd\xbb\x8f\x6e\xb1\xfc\x5e\xb3\x4f\xef\x7f\x4d\xd3\
\x58\x5b\x5b\x63\x7c\x7c\x3c\xb4\x6d\xdb\xf8\x97\x6a\xa2\x37\xcf\
\xb7\x69\x59\x96\x2c\x97\xcb\xd5\xef\xde\xae\xa5\x49\x1a\x6d\xf5\
\x31\x3b\x8e\x83\x65\x59\x5e\xb5\x5a\xcd\xb4\x5a\xdd\xf4\xbd\x4c\
\x05\x8a\x10\x08\xe5\x7b\x78\x55\x58\x96\x95\x68\x9a\xd6\xcd\x5e\
\x23\xd0\x84\xde\xd5\x85\x95\xab\x35\x19\x5b\xfc\xd7\xd2\x75\xdd\
\x50\xd7\x75\x1c\xc7\x41\x57\xf5\x6b\xea\x85\x15\x5d\x5c\xd3\x7f\
\xa7\xd3\x91\x8a\xa2\x44\xbe\xef\x6b\x9a\xa6\x09\x12\x81\x14\xa0\
\x6a\x12\x45\xb9\xb6\xed\x26\xf8\xcb\x24\x49\x64\x9a\xa6\xb1\x82\
\xd2\x35\x5e\x23\x51\x94\x3e\xd7\xf6\xab\x0a\xe2\x38\x4e\x2c\xcb\
\x4a\x55\x55\x15\x02\xf8\x17\xea\x85\xc5\xe6\x7a\x37\xca\x66\xb3\
\x71\x9a\xa6\x52\xc8\xcd\xba\x65\x40\xdb\x36\x39\x12\xc8\x48\xd9\
\x2c\x90\xee\x16\x2f\xaf\xae\xae\x5e\xaa\xd7\xeb\x07\x7d\xcf\x97\
\xa1\xd1\xd5\x85\x71\x4d\xd2\x4e\xb7\x46\x57\x46\x0a\x42\x93\xbd\
\x3c\x9c\x51\x2a\x95\xf4\x4c\x26\x43\x44\x44\x3a\xde\xa6\x98\x35\
\x48\x5a\xbd\xba\xdb\xab\x81\x87\x61\x18\xb8\xae\xbb\x5a\x2c\x16\
\x67\x74\x5d\x17\x2d\xb5\xc5\xf0\x1e\x48\x6d\x8d\xb4\x65\x22\x1d\
\x6d\xd3\x48\xde\xbf\xfa\x9e\xeb\xb9\xb2\x5a\xad\x0e\x04\x49\x88\
\xb2\xa3\x4d\x4e\x33\x88\x5b\x3a\x69\x5b\x47\x46\x6a\xf7\xc7\xc5\
\x90\x64\x32\x19\x2e\x5e\xbc\x58\x19\x1e\x1e\x0e\x7c\xd7\x8f\xdb\
\x99\x16\x23\x13\x2a\xe1\x86\x41\x6a\x6b\xdd\x1a\xe7\x44\x20\x54\
\xd0\x75\x5d\x18\x86\xe1\x8f\x8c\x8c\x04\x19\x33\x53\xfe\x3f\xd4\
\x0b\x63\xdb\xb6\x7e\xe5\xca\x95\xb5\x72\xb9\x5c\x0c\xe3\x38\x0d\
\xca\x16\x95\xba\x4a\xda\xd1\xd1\x46\x7e\xa4\x55\xd0\x35\x9d\xe6\
\x53\x15\xa2\x55\x1d\x45\x15\x08\x21\x6a\xa5\x52\x29\x9f\x4a\x50\
\x76\x39\x8c\xde\x11\x11\x05\x2e\x22\xd2\xd0\x45\x86\x34\x50\x68\
\xfe\x63\x1d\x55\x55\xd1\x75\x5d\x98\xa6\xa9\xea\xba\x8e\x63\xb8\
\x0c\xde\xe5\x53\x2f\x6a\x10\x87\x84\x9e\x0b\xba\xa4\xfd\xe5\x21\
\xe2\x96\x8a\x94\x52\x68\x9a\xa6\x1b\x86\x21\x34\x4d\xc3\x1d\xeb\
\x30\x7c\xbf\x46\x51\x8e\xb2\xda\x58\x41\x93\x1a\xf1\x42\x91\xce\
\xf1\x02\x32\x06\xd3\x34\xd5\x3e\x0b\xab\x1d\xf6\xbf\x29\x42\x4b\
\x54\x7c\xc7\xc1\x24\x47\xe4\x82\x75\xac\x44\xb0\x68\xf4\x7e\x03\
\xb1\x52\x2e\x97\x33\x41\x94\x90\xb9\xc5\x65\xdb\x21\x13\xab\x6d\
\x91\xa1\x40\x10\x06\xe8\x4e\x99\xc6\xd7\xcb\x98\x86\x81\xa6\x69\
\x79\xd3\x34\xed\xff\x2f\xf5\xc2\xaa\xaa\xa2\xaa\x6a\xb5\x90\xcf\
\xe7\x96\xfd\x16\x13\x6f\x94\x14\x06\x7d\x22\xcf\x47\xb9\x63\xea\
\x1d\x88\x3c\xc4\xd2\xff\x2e\xa6\xec\xfe\x9d\xaa\x01\x35\x79\x80\
\xff\xf0\xe0\xa7\x78\xef\x91\x5f\xa5\x56\xdc\x41\x7d\xdb\xc0\x35\
\x35\xbd\x7d\x94\x4b\x03\xec\x35\x9d\x77\x1f\xfa\x65\xde\xbc\xe3\
\x63\x1c\x9c\xb8\x87\x7b\x6e\x78\x23\x5e\xe4\x5c\xf3\xdb\x59\xbd\
\xe3\xf8\x49\xc0\x7d\x23\x3f\xc1\xb0\x72\x23\x8f\xdd\xf1\x1f\xb8\
\x65\xf2\xed\x78\x7a\xa3\xef\x22\x4d\x92\x44\x76\x51\x31\x24\x4c\
\x23\x6a\xe6\x36\x1e\x19\xfb\xb7\xfc\xa7\x87\x3f\xcd\xeb\xf3\x1e\
\xa3\xe3\xc3\xc4\xe2\xaa\x8f\x59\x76\xa7\x2c\x9e\xe7\x63\xe4\x04\
\xdb\x94\xdb\xf9\xbd\x1f\x7c\x9a\x4a\x66\x86\x5f\x7e\xdb\x1f\xe3\
\x65\x1a\x04\x9b\x75\xd0\xff\x7f\xea\x85\x3d\xcf\x27\x88\x02\x94\
\x4c\xc0\xee\xf4\x9d\xfc\xe8\x4d\x3f\x87\x72\x78\xf8\x41\xee\x99\
\xf9\x81\x4d\x16\xbe\x56\x17\xf6\x3c\x9f\x28\x0a\x38\x3c\x7a\x3f\
\xff\xfc\xfa\x27\x78\xe1\xe5\xd3\xbc\xfd\xc0\xfb\x31\x14\xb3\x4f\
\x22\x3d\x55\xae\x27\xff\x4d\x57\x0e\x71\xb9\xf9\x1a\x27\x16\xbe\
\xc9\x43\x33\xef\xa1\x9c\x19\x24\xde\xd4\x76\xb7\x92\x88\xe7\x79\
\xa8\xe8\xd4\xf3\x63\xf8\xb9\x8b\xec\x1f\xb8\x97\x89\xca\x2e\x86\
\x72\x93\xf8\x5b\xfa\xee\x09\xeb\x71\x14\x51\xcb\x0f\x93\xad\x44\
\x7c\xe5\x85\xaf\xf3\x96\x7d\x3f\xc1\x8d\xa3\xf7\x62\x7b\x9d\xfe\
\xc5\x04\xfa\xb5\xc3\x42\x40\x31\xdd\xce\x27\x9e\xff\x6f\xfc\xbf\
\x6d\x9c\x6b\x8c\x5c\x65\x01\x86\x9f\x73\xbe\x73\xce\x5c\x76\xe7\
\xbe\xd3\xdd\x2e\xdd\xb2\xbd\xb0\xad\x6d\xb7\x58\x4b\xa9\x6c\xb1\
\x36\xa4\x0d\x26\x1a\x8c\x25\x26\x18\x2d\x58\x8c\x3f\x08\x5a\x34\
\x81\x5f\x68\x22\x09\xff\x88\x09\x09\x4a\x82\x09\x68\x34\x40\x0c\
\x8a\x60\x0c\x72\x91\x16\x50\xda\x02\x52\x2b\x4b\xaf\x3b\xed\x76\
\x2f\xdd\xeb\xcc\xec\xcc\xec\xcc\xb9\x5f\xfc\x71\x66\x4e\x97\xe2\
\xef\x99\x39\xf9\xe6\x3b\x97\xef\xbc\xdf\xfb\xbc\xef\x57\x6e\xf8\
\x36\x6b\x33\x43\xc4\xe5\x64\x5b\x67\xeb\x11\x4c\x65\x9a\x06\xe2\
\x73\x79\xe1\x5d\xdc\x98\xde\x82\x69\x1b\xd7\x3a\xbc\xda\x8d\x4b\
\x8e\xe3\xb0\x54\xd3\x79\xf8\xe0\x23\xbc\x56\xfa\x0d\x4a\xb5\x59\
\xe1\xa3\xb9\x77\x71\x1d\x17\xcb\x0c\x08\xb8\x96\x8d\xb5\x6d\x0b\
\x15\x70\x7c\x9b\x94\x56\xc0\x50\x54\xce\xcc\xfd\x1b\x9f\xd0\xdb\
\x15\x42\x44\x20\xb8\xe3\x38\x78\xdd\x5e\xc8\x48\x27\x7a\x58\x10\
\x26\x35\xf7\x2a\xb6\xdf\x6e\x81\xb3\x44\x98\x29\x6e\x4f\x60\xf8\
\x50\x0f\xda\x09\x4b\x07\x4f\x36\xa9\x2c\x2f\x32\x5b\x9f\xc4\xb2\
\x63\xf8\x76\x54\xf0\x88\x69\x99\x78\x8a\x87\x24\xc9\x48\xbe\x20\
\x9f\xca\xf3\xf6\xe8\x1b\x94\x63\x2d\x64\x5f\x44\x57\x77\xb8\x99\
\x60\x61\x5b\x16\x42\x56\xf0\x71\xe9\xe9\xea\x65\xb1\x35\x83\xe5\
\xe9\x18\x4e\x0b\xdb\x4e\x46\xab\xb1\xe3\x38\x04\xb6\x07\x71\x48\
\x74\xa9\x04\x6e\x48\xb9\x4a\xc8\x8c\xd7\x3e\x05\x2f\x9c\x07\x1f\
\xaf\x3d\x27\x26\x8e\xec\xa0\x08\xc1\x52\xbd\x81\x2a\x62\xc8\xa7\
\xea\x2f\x93\xee\x4e\x61\xbb\x76\x44\xda\xaf\x34\xa8\x45\xa0\x72\
\x72\xfe\xcf\x6c\x49\x1d\xa0\x6f\x83\xcb\x44\xeb\x54\x38\x98\xff\
\x93\x58\x77\x2c\x17\x23\x3e\xc1\xd2\x92\xc9\xce\xa1\x61\x5e\x39\
\xff\x2b\x26\x97\xcf\xe2\xd8\x4e\x94\xe9\x5d\x49\xe9\xab\x9a\xe0\
\xed\xcb\x2f\x72\xf8\x4b\x8f\xf1\xfb\xd1\xc7\xb8\x6c\x1e\x27\x9d\
\xc8\xa3\x5f\x97\x17\x36\x8d\x70\xd7\x67\x6e\xf9\x0a\xf5\x86\x45\
\xae\x28\x90\xf2\x33\xa8\x52\x02\xdb\xb5\x3e\x33\x6e\xd3\x34\x30\
\x6d\x0b\x09\x41\x5d\xb9\xc8\x70\xff\x97\x99\xf2\x4e\xf0\xd7\x33\
\xcf\xd1\xa5\x75\x87\xe3\xd6\x8d\x08\xb9\x33\x4c\x13\xcf\x87\x8d\
\x1b\xf3\xbc\x7f\xee\x7d\xfa\xb2\xfd\x3c\xfd\xe1\x23\x64\xe2\x85\
\x68\x4e\xae\x91\x09\x61\x8b\x5c\x21\x93\xe1\x89\xa3\x3f\xe5\x81\
\x9d\xbf\x44\x69\x2a\xd3\xcc\x2e\x4d\x33\x5b\xaa\xa0\xd7\x3c\xba\
\xdd\xd0\x17\x2e\x97\xcb\xd4\x1b\xcb\xc4\x9b\x49\x6c\x15\x9e\x3a\
\xf1\x30\xf7\xec\xb9\x8f\xe9\xb9\xd3\x2c\x9d\x81\x6a\xb5\x1e\x69\
\xe1\xce\x15\x48\x52\x42\xa8\x05\xfe\x78\xe6\x09\x7e\xb1\xfe\x19\
\x66\xec\xb3\xcc\x8d\x66\x29\x5f\xad\x61\xd5\xfd\xce\xbb\x5d\x84\
\x56\x74\xdb\x31\xaa\xfe\x25\x5e\x3d\xfb\x0c\x6e\xa2\xce\xe5\x85\
\xb3\x94\x3e\x30\xa8\xce\x1b\x91\x2f\x6c\x18\x06\x8e\xe9\x60\xc9\
\x3e\x86\x1f\xe3\xd8\xe4\x1f\xd8\xbf\xf9\x6e\xc8\x54\x38\x37\x39\
\xcf\xfc\xe5\x3a\x8d\x25\x07\x49\x84\xef\x6c\xe5\x72\x99\x4a\xbd\
\x41\xbf\x9d\x63\xd2\xf8\x98\x77\x27\xff\x44\x90\x68\xf2\xaf\x99\
\x97\x59\x98\x32\x29\xcf\xd7\x50\xe3\x4a\xa4\x85\x97\xab\x0d\x1a\
\x09\x1f\x2f\x48\xf1\xce\xdc\x73\x6c\xf5\xf7\x30\xee\x7c\x40\x63\
\xd1\xe0\xca\xa9\x32\x8e\xee\x13\x4f\x84\x5a\xb8\x5c\x2e\x63\x24\
\x7c\x4c\x5b\x50\xe9\x1e\xe5\xb9\xe3\x4f\xa2\x3c\xff\xc3\xd3\x98\
\x8b\x10\xcb\x48\xa8\xaa\x12\x35\xd6\xca\xb2\x4c\x4c\xd3\x70\x17\
\x02\xe6\xff\xe3\xd1\x98\x9a\xe3\xc7\x4f\x3d\x8a\x3e\xae\x41\x10\
\x10\x4b\x2b\x91\xb1\xd3\x6e\xc0\xc0\xb7\x7c\x96\xce\xb9\xc8\xad\
\x6e\xee\x7b\xe9\xfb\xe8\x25\x0d\xb3\x5e\x26\x91\x15\x88\x70\x75\
\x8f\x8e\x9d\xcf\xe7\xb1\x67\x5b\x4c\xbd\x1d\xf0\xe1\xe8\x9b\x2c\
\x8e\xda\xf8\x96\x84\x92\x04\x2d\xa6\x46\x50\x64\x10\x04\x68\x09\
\x0d\xcf\xb0\x99\x3f\xed\xb2\x3c\x3b\xc3\xb1\x5f\x3f\x8e\x7e\x59\
\xc5\x5c\x72\x89\x67\x15\x14\x45\x44\x81\x48\xa1\x28\xc4\x64\x15\
\x7d\xdc\xa7\x5a\x87\x17\x2f\xbe\xc4\xd2\x79\x9f\xc6\x15\x1f\x2d\
\x29\xa3\x25\xd5\xa8\xd2\x59\x08\x81\x1a\x53\xe9\xd6\x7d\x16\x47\
\x1d\x96\xe7\x17\x39\x7e\xee\x05\x96\xce\x05\xb8\x7a\x40\x3c\x2b\
\x50\x54\x39\x82\x33\x25\x49\x22\x21\x69\xd4\x2e\xb8\x34\x15\x9f\
\xb3\x93\xef\xa0\x6c\xda\xb0\x05\xe9\x26\xd0\xd4\x30\xc1\xe8\xfb\
\x3e\x33\x33\x33\xec\xd9\xb3\x87\x81\x81\x01\x2c\xdd\xe6\xf4\x07\
\x9f\xb0\x2a\x19\x43\x4a\x2c\x52\xdc\xd7\xc3\xfc\xc2\x3c\x8d\x7a\
\x83\x42\xa1\xc0\xd8\xd8\x18\xbd\xbd\xbd\xec\xdb\xb7\x8f\x54\x77\
\x8a\xbf\xff\xed\x0d\x7a\xfb\x7a\x59\xe5\xcc\xd3\xbb\x7b\x15\xf5\
\x46\x8d\x56\x4b\x27\x9d\x4e\x73\xe1\xc2\x05\x0c\xc3\xe0\xfe\xfb\
\xef\x67\xe7\xce\x9d\xbc\xf5\xfa\x3f\xb0\xaa\x36\x92\x32\xc3\xb6\
\x03\x39\x9a\xcd\x26\xb9\x5c\x8e\x5a\xad\xc6\xf4\xf4\x34\xad\x56\
\x8b\xe1\xe1\x61\x46\x46\x46\x88\x69\x31\x3e\x39\xf5\x29\x9e\xd1\
\xe2\x06\xa9\x45\x6c\xab\x46\x26\x97\xe1\x52\xe9\x12\xf1\x78\x3c\
\x4c\x27\xa9\x2a\x23\x23\x23\xdc\x7c\xf3\xcd\x9c\xf8\xe7\x49\x16\
\x2f\x56\x28\x6a\x82\xbe\x41\x19\x77\xad\x43\xb5\x52\xed\x10\xb8\
\x51\x6c\xf6\xd0\xa1\x43\x2c\x55\xaa\x7c\x7c\xec\x34\x99\x54\x8a\
\x78\xcc\x24\x7f\xd3\x32\xb9\x7c\x0e\xcb\xb2\xa8\x54\x2a\xb4\x5a\
\x2d\x74\x5d\xe7\xc8\x43\x47\x68\x35\x9b\x8c\x97\x26\x59\xaa\xd5\
\xc8\x3a\x16\xca\xf0\xf0\x36\xce\x9f\x3f\xcf\xe0\xe0\x20\xa9\x54\
\x2a\x12\xfc\x7b\xf7\xee\xe5\xf6\xdb\x6f\x0f\x21\x1f\x7e\x47\x2e\
\x97\xc3\xb6\xed\x08\xcb\xb5\x4c\x8b\x62\xb1\xc8\xd8\xd8\x18\xf9\
\x7c\x9e\x43\x87\x0e\x91\xcf\xe7\xb9\x3a\x73\x95\xa1\xa1\x21\x26\
\x26\x26\xe8\xd4\x28\x5f\xb9\x72\x85\xad\x5b\xb7\x52\x2a\x95\x30\
\x4d\x93\x3b\xee\xb8\x83\x4d\x9b\x36\x31\x31\x31\x11\x2e\x00\x9e\
\xc1\xf6\xed\xdb\x39\x7d\xfa\x34\xe9\x74\x9a\xfe\xfe\x7e\xe6\xe6\
\xe6\x00\xd8\xb1\x63\x07\x07\x0f\x1e\x64\xfd\xfa\xf5\x3c\xfb\xec\
\xb3\x2c\x2c\x2c\x50\x28\x14\x38\x79\xf2\x24\x7d\xbd\x7d\xed\xc6\
\xca\x90\xd1\x51\x14\x85\x91\x91\x11\xee\xbd\xf7\x5e\x1c\xc7\xa1\
\x54\x2a\x31\x34\x34\xc4\xa5\x4b\x97\xc8\x66\xb3\x1c\x3d\x7a\x94\
\xb5\x6b\xd7\x32\x35\x35\x45\xb9\x5c\x26\x97\xcb\x71\xf8\xf0\x61\
\x46\x47\x47\x29\x57\x2b\x6c\xde\xbc\x19\x5d\xd7\xb9\x70\xe1\x02\
\x99\x4c\x86\x62\xb1\x48\xa9\x54\x8a\x7c\xe1\x23\x47\x8e\x70\xee\
\xdc\x39\xc6\xc7\xc7\x99\x9d\x9d\x0d\x19\xe9\x5d\xbb\x76\x05\x1d\
\x9d\xe8\x38\x0e\xc9\x64\x92\xc9\xc9\xc9\x8b\x6b\xd6\xac\x19\xda\
\xbc\x79\x33\x63\x63\x63\xd1\xce\xf3\xf5\xdd\xcc\xed\xf8\x53\x53\
\xd3\xb4\xd8\xd6\xad\x5b\xd5\x6a\xb5\xca\xf2\xf2\x72\xa4\x55\x3b\
\x5e\x6b\x27\x62\x2f\x49\x52\xd9\x75\xdd\x71\x60\x57\x36\x9b\x8d\
\x68\x82\x95\xbb\x29\x2b\xfd\x67\x55\x55\x1b\xb3\xb3\xb3\xd2\xfe\
\xfd\xfb\x53\x63\x63\x63\x91\x36\xed\x3c\x36\x3a\x15\x2d\xae\xeb\
\x76\x18\xe9\x89\x91\x91\x91\x1b\x6d\xdb\x8e\xbc\xdb\xce\x6e\xf2\
\xca\xa6\x8e\x95\xbe\x70\x87\xed\x5e\xb9\x58\xac\x1c\xbb\xef\xfb\
\x08\x21\xa8\xd7\xeb\xe5\x62\xb1\xd8\x33\x38\x38\xc8\xe4\xe4\x64\
\x74\x4b\x2b\xa9\xee\x74\xa4\x29\xa5\x76\x96\xd6\x75\x5d\xaf\xbd\
\xbf\x46\x32\xd1\x15\x62\xb4\xda\xf5\x5c\x72\x88\xf8\x5a\x96\xe5\
\xa6\x52\x29\x75\x76\x76\x16\x59\x96\xdb\x8c\xf4\xf5\x1c\x73\xd0\
\x99\x44\x79\x66\x66\x46\x5a\xbd\x7a\xb5\x27\x84\x90\x62\x5a\xec\
\xb3\x59\x64\xae\x79\xbd\xed\x1a\xe4\xa0\x58\x2c\x2a\x53\x53\x53\
\x24\x12\x09\x3a\x5a\x58\x6a\x7f\xb7\xab\xab\x3b\xfa\x4d\xfb\x8f\
\x9a\xba\xae\x63\x3b\x36\x71\x2d\x8e\xef\x43\x5c\xfb\xfc\xf1\x85\
\x10\x94\xcb\x65\x84\x10\x52\xbd\x5e\x47\x42\x42\x15\x31\x34\x25\
\xb6\x82\x91\x96\x22\x4d\xdc\x68\x34\xf0\x7d\xbf\x99\x4e\xa7\x7b\
\x16\x16\x16\xe8\x4a\x74\x11\xf8\x61\x7d\x96\x92\x1b\x8c\x47\x5a\
\xb1\x73\x36\x65\x59\x76\xf3\xf9\x3c\x86\x6e\xe0\x74\x99\x24\xfb\
\xed\xb0\x94\xd0\x10\xf8\xba\x1a\xe6\x85\xdb\x98\x6c\xa5\x52\x69\
\x15\x0a\x05\x55\x08\x11\xf3\xf0\xf0\x56\x2d\xd3\x9d\x52\x42\xd6\
\xb8\x93\xe9\x75\xe5\x4e\x0d\x7c\x22\x16\x8b\xe5\x93\xc9\xa4\xa3\
\x28\x8a\x68\xca\x4d\x72\x03\x12\xbe\x29\x11\xb4\x54\x7c\x5d\x21\
\xb0\xe4\xd0\xeb\x0d\xaf\x02\x21\x84\x48\xe6\x72\x39\x5a\xbe\x41\
\x7a\xa3\x05\xae\x4c\xa0\x2b\x04\xa6\xd2\xee\xda\x92\x91\xa4\x10\
\x6b\x53\x14\x65\xb2\xa7\xa7\x67\x93\xa9\x9b\xd4\xe2\x55\xfa\x06\
\x05\x4e\xb3\x9d\x73\xd6\xc3\x9c\x73\x27\xb1\x2e\xcb\x32\x7d\x7d\
\x7d\x6a\xe0\x07\x94\x9d\x3a\x7d\x5b\x20\xb0\x24\x7c\x43\x21\x30\
\x04\x7e\x33\xe4\xaf\x65\x21\x77\x56\xf7\x4b\x99\x4c\x66\xd0\xb2\
\x1d\xfc\x55\x4d\x52\x05\x19\x57\x07\x25\x7b\xa0\x8c\x2c\x04\x4b\
\x6f\x65\x71\xe6\xb5\x4e\x0b\x5b\x5c\x08\x81\xe3\x7b\xf8\xfd\xcb\
\x24\x77\xb7\xe8\x4d\x0c\xd0\x34\xeb\x21\x74\x69\x2b\x54\x5e\x29\
\x46\x77\x9a\xa2\x28\xb2\xe7\x79\xe8\xaa\x41\x6c\x7b\x95\xe2\x9a\
\x1c\x29\x35\xcb\x4c\x75\x8a\x58\x5c\xa5\x7e\x2c\x8f\x3d\x13\xc3\
\xf7\x7d\x59\x51\x94\x64\x2c\x16\xd3\x14\x45\x91\xe7\x56\x4f\xb3\
\xe5\x4e\x8d\x44\x50\x60\xa9\x59\x41\xc8\x02\x77\xaa\x9b\xe5\x8f\
\x52\xf8\x6e\xd8\x20\x62\x59\x16\xb2\x2f\x05\x73\x4a\x45\x1a\xdc\
\x1b\x10\x0f\xb2\x58\x76\x0b\xd7\xb1\x09\x0c\x95\xfa\xf1\x0c\xce\
\xa2\x8a\xa6\x69\x04\x41\x10\x97\x65\x19\xd9\x95\x59\x5e\xb7\xc0\
\xe6\x3d\x69\x12\x41\x9e\xa6\x5e\xc3\xb6\x1d\x68\x24\xa9\xbe\x99\
\x5d\x59\xaf\x27\x39\xb6\xc3\x7c\x72\x91\xe1\xbd\x49\x02\x2b\x8e\
\x2a\x4b\x34\x5a\x15\x54\x45\x63\xf1\x95\x3c\xbe\x11\xed\xe4\xa4\
\x09\xa0\x6a\xe8\xac\xbb\x4d\x47\xc9\x18\x0c\xa4\x86\x90\x0f\xac\
\x7b\x80\x64\x57\xbb\x3b\xcb\xba\x96\x17\x0e\x2b\x92\x2c\x3c\xd9\
\x66\x7d\xec\xab\x7c\x6f\xc7\xa3\x7c\xe7\x0b\x8f\xb3\x2e\xb3\x3b\
\x2a\x31\xec\xd8\x9a\x51\xfe\xd7\x73\x58\xa8\x34\xf8\xd1\xee\x27\
\x19\xce\xde\xc5\x37\xb7\x3f\xc0\xb7\x6e\xfd\x01\xa6\xdb\x5a\x49\
\xa8\x06\x1d\x59\x67\x05\x06\xbb\x7a\xef\x66\x77\xf1\xbb\xdc\xb3\
\xfb\x41\xbe\xb8\xea\x1b\xe8\xa2\x82\x71\xad\x47\xba\x93\x17\x96\
\xbc\xc0\x61\x6d\x6e\x13\x5f\x1f\x7c\x88\x9f\xdf\xf9\x5b\x32\xe6\
\x6d\xac\xea\xed\xc1\xf6\xcc\x28\x45\x19\xf5\x5f\x1b\x26\x8a\x26\
\xb1\x31\xb9\x87\x07\x6f\x79\x9a\xa1\xfc\x5e\x7e\xb2\xf7\x19\x6c\
\x6d\xf9\x33\x34\x6b\xc7\x73\x96\x55\xb8\x32\x51\xe3\xd1\x7d\xcf\
\xb3\x21\xbb\x9b\x9f\xed\x7f\x01\x27\xd9\x88\x3e\xbf\x96\xd6\x6c\
\xe7\x85\x65\x83\x7d\xa9\x76\x5e\x78\x4d\x6a\x13\xeb\x0b\xdb\xb0\
\xed\x50\x79\x74\xb4\x70\x28\xf8\x4d\x5c\xd7\x64\x53\xe1\x16\x8e\
\xcf\xfc\x85\xf7\x4e\x7d\xc4\xce\x35\xfb\xc2\x64\xf9\x0a\x6d\x1b\
\x69\x61\xdb\x22\x2b\x06\x98\xaa\x95\x38\x71\xf9\x75\xbe\x36\x74\
\x08\x81\xc0\x75\xbd\x48\x67\x77\x1a\x83\x0c\xc3\x20\x70\x15\x06\
\x73\x37\x71\xd6\x7c\x95\x1d\xf9\xbb\xd8\x58\xdc\x46\x5f\xd7\x7a\
\xcc\xeb\x7c\x61\xd3\x34\xc2\xbc\xb0\x92\xa4\xea\x4c\xf3\xdf\xd2\
\x28\xb7\xae\xdb\x4b\x3e\xbe\x1a\xd3\x32\xae\x53\x22\xa1\xe7\xac\
\x08\x41\x73\x49\xf0\xda\xd9\xe7\x59\xad\x6d\x63\xfb\xda\xed\x68\
\x52\x22\xd4\xca\xc6\x0a\x4d\x6e\x1a\x04\x81\x47\xd6\xdb\x40\xd5\
\x2b\xd1\x97\xed\xa7\xb7\x7b\x80\xde\xe4\x40\x14\xde\xee\x48\x50\
\xbd\xcd\x48\x37\xda\x79\xe1\x33\x95\xf7\xf8\x1f\xcb\xb6\x8e\xf6\
\x8f\xf1\xef\x16\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\
\x00\x00\x2a\x0f\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\x01\
\x95\x2b\x0e\x1b\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xd9\x03\x03\
\x0e\x1c\x2b\xec\xa5\xbb\x6e\x00\x00\x20\x00\x49\x44\x41\x54\x78\
\xda\xed\x9d\x79\x94\x5d\x55\x9d\xef\x3f\xfb\x0c\x77\xae\x39\x35\
\xa5\x06\x2a\x21\x21\x09\x14\x15\x84\x88\xd1\x00\x09\x08\xad\x08\
\x1d\x5b\xec\x36\x60\x0b\x48\xd3\xf0\x1c\x68\xc4\xf5\xda\xd5\x4f\
\x78\x48\x83\xda\x93\xad\x48\xab\x4b\xb4\x15\x51\x40\x9f\xd8\x34\
\xad\x08\xd8\x0e\xc8\x20\x24\x81\x8c\x18\x92\x9a\x92\xaa\x54\xaa\
\xea\xd6\x70\xab\xea\xce\xc3\x99\xf6\xfb\xe3\xd6\x3e\xb9\xc9\x13\
\x03\x61\xce\xcb\x5e\x2b\x8b\x5b\xdc\xe1\x9c\xbb\xbf\xfb\x37\x7d\
\x7f\xc3\x85\xe3\xeb\xf8\x3a\xbe\x8e\xaf\xe3\xeb\xf8\x3a\xbe\x8e\
\xaf\xe3\xeb\xf8\x3a\xe6\x97\x78\x33\xdd\x4c\x75\x75\x75\x57\x26\
\x93\x31\xa5\x94\x5e\x5d\x5d\xdd\xfb\xd6\xac\x59\xf3\x39\xd3\x34\
\x5d\xd7\x75\xe5\xfe\xfd\xfb\x5b\x56\xac\x58\x81\xe7\x79\x24\x93\
\x49\x5a\x5b\x5b\x29\x95\x4a\x68\x9a\xc6\xae\x5d\xbb\xb0\x6d\xbb\
\xfc\x85\x84\x40\xd3\xb4\x83\x5f\x50\x08\x0b\x70\xd5\x63\xc3\x30\
\xfa\x00\x0d\x20\x1e\x8f\x7f\x30\x1e\x8f\x8f\xa8\xd7\xb6\xb6\xb6\
\x12\x8f\xc7\xdf\xd0\x3d\x30\xde\xc8\x8b\xb7\xb5\xb5\x2d\x9d\x9b\
\x9b\x7b\xa7\x94\xd2\x03\x58\xbe\x7c\xf9\xb7\xdb\xdb\xdb\xc3\x52\
\x4a\x86\xf6\x0d\x11\x0c\x06\xa5\xa6\x69\xc2\x95\x2e\x86\x61\xe0\
\x38\x0e\x48\xc9\x44\x3e\x41\x87\xd1\x41\x26\x93\x01\x5d\x90\x76\
\xf2\x9c\xbd\xea\x9d\x00\x4c\x27\xa6\xd1\x84\x86\xae\xeb\x00\xe8\
\xba\x1e\x90\x12\x44\xf9\xe8\x85\x81\x33\xd5\xf5\x1b\x1b\x1b\x77\
\xae\x5c\xb9\xd2\x11\x42\xb0\x67\xcf\x1e\x86\x87\x87\x1b\xdf\xe8\
\x43\xf9\xba\x03\x52\x55\x55\x65\x66\x32\x19\x1d\x60\x76\x76\xf6\
\xba\x8f\x7e\xf4\xa3\xd7\xab\xe7\xfa\xfb\xfb\xcb\x27\x5c\x08\xf2\
\x94\xb0\x4a\x96\x10\x9a\x20\xe7\x14\xf0\x3c\x0f\xcb\xb2\xc0\x83\
\x9c\x5b\xc4\xb2\x2c\x2c\xcb\xc2\xd6\x5d\x1c\xd7\x26\x9d\x4e\x03\
\x70\x60\x7a\x9c\xd3\x4e\xea\xa6\x58\x2a\xa1\xeb\x3a\x3b\x76\x3f\
\x4f\x55\x93\xc0\x9b\x0b\x21\x84\x00\x49\xf9\x9f\x00\x04\xb5\xea\
\xda\xd1\x68\x94\xee\xee\xee\x3e\x40\xb8\xae\x6b\xee\xd9\xb3\x67\
\xd1\xff\x17\x80\xb4\xb5\xb5\x3d\xb8\x78\xf1\xe2\x8b\xa4\x94\xf4\
\xf7\xf7\x33\x33\x33\x53\x3e\xf9\x40\xc9\x2e\x6f\xb2\x00\x8a\x9e\
\x85\xed\xd8\x08\x21\x70\x1c\x07\x29\x25\x96\x65\x21\x3d\x89\x94\
\x1e\xb6\x6d\x63\x59\x16\x8e\x21\x91\xe0\x7f\x86\xf4\x3c\x6c\xdb\
\xc1\xb6\x6d\xa4\xe7\x11\xa9\x8a\xb2\xb8\x61\x31\x62\x81\x20\x91\
\x99\xc3\x3a\x69\x94\x05\x75\x61\x9c\xb1\x28\xf6\x58\x08\xe1\xea\
\x95\x8a\xfb\x24\x29\x25\x52\x4a\x92\xc9\xe4\xba\x0a\xb5\x3e\x1a\
\x8f\xc7\x07\x8e\x19\x40\x1a\x1b\x1b\x3f\x58\x28\x14\x22\x80\x15\
\x0c\x06\x97\xc7\x62\x31\xa4\x94\x98\x01\xd3\xdf\x58\x00\xc7\x73\
\x28\x95\x4a\xfe\xc6\x5a\x96\x55\x06\x42\x5a\x78\x9e\x57\x7e\xce\
\x03\x29\x25\xa5\x52\xa9\x0c\x88\xe7\x95\x01\x2c\x16\x01\x70\x1d\
\xd7\xff\x4c\x57\xd3\x91\xea\x7d\x40\x7f\x6f\x1f\xe7\x37\x9e\x47\
\x6e\x34\x87\x29\xa0\x14\x4d\xb3\xa0\x43\xc3\x89\x87\x90\x39\x13\
\x74\x09\x9a\x44\x68\xc8\xb3\xce\x3a\xeb\xb7\x00\xa6\x69\xf2\xe8\
\xa3\x8f\xde\x09\x7c\xfc\x98\x01\x64\xc1\x82\x05\xf7\xbf\xed\x6d\
\x6f\xd3\x34\x4d\x63\xef\xde\xbd\x94\x4a\xa5\xf2\xa6\xba\x36\xb6\
\x65\x57\x80\x50\xde\x68\x81\x40\x4a\xcf\x07\xca\x96\xb6\x2f\x21\
\x78\x12\xcf\x93\x07\x37\x7d\xfe\x44\xab\xd7\xba\x5e\x59\x7a\x4a\
\xa5\x12\xba\xa6\xfb\xcf\x09\x21\x90\x9e\x87\x87\x87\x2b\x5d\x12\
\xb3\x33\x88\x25\x33\xa4\xa4\x09\x0b\x25\xd2\x16\x38\xc9\x00\x6e\
\xca\xc0\x4b\x1b\xa2\x6c\xd5\xc0\x30\x0c\xea\xea\xea\x4e\x6f\x6b\
\x6b\xbb\x45\x08\xc1\xec\xec\xec\xf3\x63\x63\x63\x0f\xbe\xe5\x00\
\xe9\xe8\xe8\x38\xa3\xa6\xa6\x86\x52\xa9\x84\xe7\x79\x5e\xa1\x50\
\xd0\x74\x5d\xc7\x75\xdd\x83\x00\x48\x0f\xcb\xb6\x7c\x0f\x49\xd9\
\x09\x21\x04\x9e\x47\x59\xed\x48\x89\x8d\x8d\x37\xbf\xd1\xd2\xf5\
\x90\x42\xfa\x9b\xee\x49\x81\x40\x1c\x04\xc4\x3d\x28\x21\xba\xa6\
\xf9\xd2\x24\x84\xf0\xc1\xb1\x6d\x9b\x03\xc3\x23\x9c\xbb\xe8\x5c\
\x64\x52\xe2\x49\x49\x36\x92\xa2\x61\xa9\x8e\x37\x17\x42\xda\x02\
\x69\x69\x48\xab\xec\xad\x75\x76\x76\x9e\x09\x9c\x69\x18\x06\x3b\
\x76\xec\xb8\xa7\x12\x90\x57\xdb\x33\x7b\xad\x00\x09\x34\x37\x37\
\x6f\xe9\xe9\xe9\xc1\x71\x1c\x36\x6f\xde\x8c\x6d\xdb\xb8\xae\x7b\
\x50\xf5\xcc\x03\x60\x5b\x07\x55\x96\xe7\x79\x58\xae\x5b\x36\xbe\
\xa0\xc0\xc4\x16\x0e\x42\x42\x3a\x9d\x2e\x4b\x51\xb0\x44\xb1\x58\
\x24\x9f\xcf\x23\x1d\x81\x44\x56\x7c\xc6\x41\x40\x34\xa1\xe1\x79\
\x15\x12\x32\x0f\x88\xfa\xdc\x54\x2a\x85\xe7\x79\x4c\xa7\x66\xd1\
\x16\x4d\x32\x33\x72\x58\x14\x60\x78\xd8\x89\x00\xd6\x81\x30\x42\
\x2b\xbb\xd4\xb3\xb3\xb3\x2b\x5b\x5b\x5b\x6f\x06\x74\xe0\xe1\x78\
\x3c\xfe\xdc\x9b\x56\x42\xda\xda\xda\x9a\xc7\xc6\xc6\x6a\x80\xa0\
\xfa\xc2\xae\xeb\xfa\x1b\xa1\x69\x1a\xae\xeb\xfa\x9b\x27\x3d\x89\
\xed\x1c\x04\xc4\xc5\x23\x9b\xca\xa0\x09\x0d\xdb\xb3\xc8\xd8\x19\
\x1c\xc7\xc1\xd1\x3d\x62\x66\xc4\x57\x59\x8e\x61\x93\xcd\x66\xc9\
\xe5\x72\x48\x47\xe0\x78\x2e\x73\x73\x73\x00\xe4\xbd\x22\xb6\x6d\
\x51\x28\x14\xd0\x84\x40\x9a\x07\xc1\x92\xe0\x7b\x67\xbe\xd7\x06\
\xe4\xe6\xd2\x2c\x6b\x3c\xcd\xb7\xe1\xe3\xc1\x71\x16\xad\x08\xe2\
\x4c\x86\x71\x73\x41\x64\xa7\x31\x6f\x5b\x24\x8b\x16\x2d\xea\x01\
\x7a\x34\x4d\x63\xfb\xf6\xed\x89\x37\x35\x20\x85\x42\xe1\x9e\x4b\
\x2f\xbd\xf4\x82\x5c\x2e\xc7\xe8\xe8\x28\xb6\x6d\xfb\x1e\x92\x0a\
\xe2\x2a\x37\xc2\x93\x92\x5c\x2e\x47\x2e\x97\x2b\x8b\x55\x38\xe8\
\x9f\x66\x57\x73\x31\x4c\x1d\xcf\xf3\x30\xe4\xc1\x80\xaf\x50\x2a\
\x10\x4c\x6b\x18\x55\xe5\x5b\x17\x36\xe8\x9e\x40\x33\xca\xea\x45\
\x77\x05\x99\x4c\x96\x74\x3a\x8d\x40\x10\xaa\x8f\x50\xb4\x8b\xf3\
\x12\xe2\xf9\xce\x80\xef\xb5\xc9\xb2\xfa\x2b\x96\xca\xaf\xd9\x9f\
\x9d\xa2\xed\xe2\x14\xd4\x07\x30\x3a\x25\x86\x06\x5e\x51\xc3\x4d\
\x1a\x38\x33\x01\x32\xcf\x55\x81\x26\xd1\x34\x8d\x7c\x3e\x1f\x01\
\x6a\xe7\xf7\x31\xf1\xa6\x03\xc4\xf3\x3c\x2d\x18\x0c\x52\x28\x14\
\x7c\xd5\xe4\x38\xce\x61\xb6\xc1\xa3\x50\x28\xcc\x3f\x76\xfd\x13\
\x0b\x10\x8c\x84\xd0\xe6\xf5\x7e\x20\xaf\x21\xea\x34\xce\x3d\xf7\
\x5c\x66\x67\x67\xe9\xeb\xeb\xe3\x82\x0b\x2e\xe0\xf2\xcb\x2f\xe7\
\xce\x3b\xef\x64\x70\x70\x90\xcb\x2f\xbf\x9c\x58\x2c\xc6\x9d\x77\
\xde\x89\xa6\x69\x74\x76\x76\x72\xe0\xc0\x01\x74\x43\xc7\x30\x0c\
\x04\xe0\x14\x6c\x72\xb9\x4c\x59\x3a\x1d\xd7\x57\x91\xea\x9e\x3c\
\xcf\xf3\xed\x13\x80\x91\xb4\xf1\x1e\x5d\x44\x42\x96\x25\xca\xd3\
\x9d\xb2\x2d\xb2\x34\x04\x02\xdd\xf4\x7c\x37\xb9\xa7\xa7\xe7\x9f\
\x4f\x3d\xf5\xd4\x7f\x2e\x95\x4a\xfc\xea\x57\xbf\x12\x6f\x0a\x40\
\x9a\x9b\x9b\x9b\x84\x10\x0f\x01\x05\xcb\xb2\x7a\xd4\x06\x2b\xa9\
\xa8\x54\x59\x2a\xa6\xc8\x66\xb3\x68\x9a\x86\x65\x59\x18\xba\x81\
\x61\x94\x6f\x43\x93\x82\xf7\xbc\xe7\x3d\xdc\x74\xd3\x4d\x5c\x7b\
\xed\xb5\x9c\x7b\xee\xb9\xbc\xfd\xed\x6f\xa7\xbb\xbb\x9b\x75\xeb\
\xd6\x71\xcb\x2d\xb7\xf0\xfc\xf3\xcf\x73\xe5\x95\x57\xf2\x9d\xef\
\x7c\x87\x77\xbc\xe3\x1d\x5c\x70\xc1\x05\xdc\x7f\xff\xfd\xdc\x78\
\xe3\x8d\x74\x76\x76\xf2\xcc\x33\xcf\x70\xcf\x3d\xf7\x50\x53\x53\
\x43\x34\x1a\x65\x7a\x7a\x1a\xd7\x30\xd0\x34\x0d\xcd\x12\x64\xb2\
\x19\xb2\xd9\xac\xaf\x3a\x3d\xcf\xf3\x1f\x3b\x8e\x43\x38\x12\x26\
\x18\x31\xd1\x34\x8d\x67\x8b\x7b\x78\xef\x35\x31\x04\xe5\x80\xd2\
\x4a\x98\x38\x13\x61\xac\x91\x10\xee\x6c\x60\xde\x31\x91\x04\x83\
\xc1\x37\x8f\xca\x9a\x9c\x9c\x5c\xf4\xfe\xf7\xbf\xff\xcc\x58\x2c\
\xc6\xe6\xcd\x9b\x29\x16\x8b\xb2\x54\x2a\x09\x05\x82\xeb\xba\x38\
\x8e\xc3\xdc\xdc\x1c\x42\x08\x74\x7d\xfe\xf4\x0a\x81\xe6\x69\x48\
\x24\x77\xdd\x75\x17\x89\x44\x82\x5b\x6f\xbd\x95\xab\xae\xba\x8a\
\xbe\xbe\x3e\xce\x39\xe7\x1c\x76\xec\xd8\xc1\x65\x97\x5d\x86\xe7\
\x95\x7d\xd0\x6f\x7d\xeb\x5b\xdc\x70\xc3\x0d\x7c\xe1\x0b\x5f\x20\
\x91\x48\xd0\xd3\xd3\x43\xb1\x58\x64\x66\x66\x86\x93\x4f\x3e\x99\
\xde\xde\x5e\x3a\x3b\x3b\x59\xbc\x78\x31\x9f\xfa\xd4\xa7\x58\xb5\
\x6a\x15\x67\x9f\x7d\x36\xae\xeb\xf2\x9e\xf7\xbc\x87\x87\x1e\x7a\
\x88\x52\xa9\x74\xd0\x59\x38\xcc\xd1\x70\xe7\x1d\x0a\xdb\xb6\x49\
\x26\x93\x94\x92\x73\xec\xb8\x37\x87\xe7\x49\x14\xfd\x22\xca\x11\
\x3e\xd6\x8c\x81\x35\x11\xf0\xf9\xb3\xea\xea\xea\xaf\x4b\x29\x09\
\x87\xc3\xcf\x4d\x4d\x4d\x7d\xff\x75\x05\xa4\xb9\xb9\x99\xc9\xc9\
\x49\xf5\x67\xde\xb6\xed\xca\x53\x27\xd4\xc9\x53\x5f\x58\x79\x50\
\x0a\x90\xaa\xaa\x2a\x2e\xbd\xf4\x52\x7e\xfc\xe3\x1f\xf3\xde\xf7\
\xbe\x17\x21\x04\x17\x5d\x74\x11\xdf\xf8\xc6\x37\x78\xf0\xc1\x07\
\xf9\xc8\x47\x3e\xc2\xf6\xed\xdb\xe9\xef\xef\xe7\xea\xab\xaf\xa6\
\x58\x2c\xa2\x69\x1a\x0f\x3e\xf8\x20\x3f\xfc\xe1\x0f\xf1\x3c\x8f\
\x45\x8b\x16\x71\xf1\xc5\x17\x63\x9a\xe5\xd3\x7c\xc5\x15\x57\x70\
\xd9\x65\x97\xf1\xe8\xa3\x8f\xb2\x60\xc1\x02\xaa\xaa\xaa\x08\x85\
\x42\xe4\x72\x39\xee\xbd\xf7\x5e\x22\x91\x08\x23\x23\x23\xec\xdf\
\xbf\xdf\x77\xa7\x93\xc9\x24\x8e\xe3\xf8\xae\xb2\xeb\xba\x68\x9a\
\x46\xa9\x54\xe2\xc0\x81\x03\xac\x3f\xff\xa2\x32\x18\x48\xa6\xf4\
\x04\x9d\x2b\x4c\xbc\x94\x89\x9b\x31\xf1\xaa\x4d\x64\x87\x86\xd0\
\x24\x18\x52\x22\xf9\xa4\x61\x18\x6c\xdd\xba\xf5\x47\xaf\x3b\x20\
\x93\x93\x93\x34\x36\x36\xfe\xd5\x89\x27\x9e\xf8\xdd\x7c\x3e\x8f\
\xeb\xba\x12\x10\x95\x7e\x7e\xa5\x97\x25\x84\x40\x08\x81\xb2\x2f\
\xb7\xdf\x7e\x3b\xd9\x6c\x96\xb5\x6b\xd7\xf2\xf4\xd3\x4f\xb3\x7e\
\xfd\x7a\xf6\xec\xd9\x83\xe3\x38\x3c\xf6\xd8\x63\x3c\xfa\xe8\xa3\
\xe4\xf3\x79\x5a\x5a\x5a\xfc\xd3\x5c\x28\x14\x58\xb0\x60\x01\xa1\
\x50\x88\x42\xa1\x40\x2a\x95\x22\x1a\x8d\xfa\x6a\x23\x10\x08\xf0\
\xc0\x03\x0f\x30\x37\x37\x47\xa9\x54\x92\x37\xdd\x74\x93\x08\x04\
\x02\x65\x6f\x4b\xd3\x48\xa5\x52\xe8\xba\xce\xc2\x85\x0b\xf9\xb7\
\x7f\xfb\x37\x3e\xf3\x99\xcf\x90\x4c\x26\xfd\xf7\x2b\x69\x0e\x04\
\x02\xbe\xb4\xcc\xcc\xce\x20\xa5\x24\xe7\x96\x08\x5f\x18\x47\x76\
\x04\xd1\x84\x86\xa6\x79\x48\xcd\xc3\xc9\xe8\xd8\xe3\x21\x52\xbf\
\xae\x13\xe8\x12\xc3\x30\x90\x52\xda\xaf\x24\x46\x39\x6a\x95\x65\
\x59\x56\x57\x5b\x5b\x1b\x73\x73\x73\xd8\xb6\x2d\x94\xf1\x56\x31\
\x80\x94\xd2\x3f\x7d\xae\xeb\x72\xdd\x75\xd7\x71\xf5\xd5\x57\xb3\
\x61\xc3\x06\x76\xed\xda\xc5\xfa\xf5\xeb\x79\xf2\xc9\x27\x71\x1c\
\x87\xeb\xaf\x2f\xf3\x8b\x13\x13\x13\x48\x29\xfd\x6b\xec\xdb\xb7\
\xef\x90\x6b\x1e\x38\x70\x40\x6d\xa0\xed\x38\xce\x01\x29\x25\xba\
\xae\x5b\xf9\x7c\xfe\xf2\xb1\xb1\xb1\x29\x80\x40\x20\x70\x49\x73\
\x73\xf3\xed\xb9\x5c\x8e\x42\xa1\x40\x2c\x16\xe3\xda\x6b\xaf\xa5\
\xa3\xa3\x83\x6d\xdb\xb6\xf1\xdd\xef\x7e\x97\x91\x91\x11\x9a\x9a\
\x9a\x7c\x42\xd2\x71\x1c\x92\xc9\x24\xae\xeb\xd2\xd0\xd0\x70\x30\
\x20\x9d\x97\xfc\xfd\x07\x46\x30\xa7\x1c\x06\x83\xd0\x12\x5c\x4a\
\x20\x28\x38\x90\x1e\x44\x22\x11\x1e\x94\xe2\x63\x88\x79\xc6\x3f\
\x9f\xcf\xff\x49\x73\x73\xf3\x63\x9a\xa6\x69\xf1\x78\x7c\xdd\xeb\
\x66\x43\xa4\x94\x56\xa9\x54\xc2\xb6\x6d\x3f\xbe\x80\x72\xf0\x96\
\x4e\xa7\x31\x4d\x13\x21\x04\xa5\x52\x89\x70\x38\xcc\xdf\xfe\xed\
\xdf\xf2\xf4\xd3\x4f\xd3\xdc\xdc\xcc\x8f\x7e\xf4\x23\x1e\x7f\xfc\
\x71\x06\x07\x07\x0f\x01\x20\x12\x89\xfc\xe1\xa4\x8d\x10\x58\x96\
\xb5\x4f\xd3\xb4\x21\x40\x3a\x8e\xb3\x2b\x1e\x8f\x7f\xfa\x0f\xbd\
\x36\x12\x89\x7c\x7b\xeb\xd6\xad\xff\x09\x48\xdb\xb6\xb3\x8b\x17\
\x2f\x1e\x6d\x68\x68\x88\x28\x09\xb9\xef\xbe\xfb\xe4\x97\xbe\xf4\
\x25\xf1\xbd\xef\x7d\x8f\x0f\x7c\xe0\x03\x7c\xe8\x43\x1f\xe2\xf3\
\x9f\xff\x3c\xdb\xb7\x6f\xf7\x3d\xae\x4a\xa7\x24\x91\x48\x70\xf6\
\x9a\xb3\x70\x1c\x07\x81\x60\xd4\x1c\xe3\x84\xf3\xa0\x6d\x7a\x39\
\x4e\x22\x84\x1d\x0f\x21\x5b\xe6\xe3\x14\x43\x02\xb4\x18\x86\xd1\
\xb2\x71\xe3\xc6\xa3\x8a\xe0\x5f\x36\x20\x75\x75\x75\xd7\xae\x59\
\xb3\xe6\x1f\x07\x07\x07\x43\x8e\x53\x66\x55\x75\x5d\x3f\x24\xde\
\xb0\x2c\x8b\x40\x20\xc0\x0d\x37\xdc\xc0\x8a\x15\x2b\xb8\xe6\x9a\
\x6b\xb8\xfe\xfa\xeb\xb9\xf0\xc2\x0b\x19\x1e\x1e\x66\x3e\x39\x44\
\x2c\x16\x3b\x12\xe8\x2e\x20\x85\x10\x64\x32\x99\x6f\xc4\xe3\xf1\
\xaf\x1c\xe9\xfe\xd2\xe9\x74\x1e\xf0\x93\x4e\xa3\xa3\xa3\xeb\x47\
\x46\x46\xcc\x79\xe9\xf9\xc7\xf1\xf1\xf1\xd3\x2e\xb8\xe0\x02\x12\
\x89\x04\xb7\xdd\x76\x1b\x13\x13\x13\x2c\x5e\xbc\x98\xe7\x9f\x7f\
\x9e\x7c\x3e\x8f\x6d\xdb\x14\x0a\x05\xd4\x77\x73\x5d\x97\x42\xa1\
\x50\xfe\x4e\xc2\xa0\xd0\x33\x81\xb6\xa0\x16\xad\x36\x47\xf4\x14\
\x0d\xa9\x49\xa4\x0d\x4e\x32\xc0\xec\xcf\xeb\xf1\x8a\x9a\x52\x5d\
\x1c\x8d\xea\x32\x8e\x42\x55\xb5\x2d\x5c\xb8\xb0\x7e\x6c\x6c\xcc\
\xb7\x17\x95\x34\x87\x4a\x0c\x01\xac\x5f\xbf\x9e\xbe\xbe\x3e\x4c\
\xd3\x64\xf3\xe6\xcd\x3c\xf5\xd4\x53\x04\x83\x41\x82\xc1\xa0\x4f\
\x8f\x1c\x8e\x81\xb2\x45\x55\x55\x55\x6c\xd9\xb2\xa5\x3d\x99\x4c\
\x4e\xbc\x12\x9d\x9c\xc9\x64\x7e\xa3\x1e\x87\xc3\xe1\x4b\x0e\x1c\
\x38\x60\x00\x9e\xeb\xba\x27\xde\x70\xc3\x0d\xd1\xaf\x7c\xe5\x2b\
\xf4\xf6\xf6\xd2\xd1\xd1\xc1\x9d\x77\xde\xc9\xfa\xf5\xeb\x29\x14\
\x0a\xbe\x5d\x71\x1c\x07\xc7\x71\x98\x9d\x9d\xc5\x2e\x59\x94\x4a\
\x3a\x2f\xf4\xe7\x79\xfb\xc2\x0b\x89\x85\xa3\x3c\xbe\xef\x3f\x08\
\xe9\xd5\xd8\x64\xc9\x1f\xb0\x91\x0e\x68\x9a\x86\x69\x9a\xf4\xf4\
\xf4\xdc\x9d\x4c\x26\x37\x8d\x8c\x8c\xdc\xf9\x9a\x49\x88\x52\x43\
\x95\xbe\xbb\xd2\xc1\xb6\x6d\xb3\x70\xe1\x42\xee\xb9\xe7\x1e\x3e\
\xfc\xe1\x0f\xf3\xaf\xff\xfa\xaf\xfc\xdd\xdf\xfd\x1d\xc1\x60\xb0\
\xcc\x3b\xcd\x4b\x90\xe7\x79\xa8\xbc\x83\x52\x13\xf3\xf6\x40\xa4\
\x52\xa9\x87\x80\x84\xae\xeb\xe9\x70\x38\x6c\x2b\xc3\xab\xa4\xea\
\x95\xac\x44\x22\x71\x6d\x22\x51\x0e\xa8\xeb\xeb\xeb\x9f\x8c\xc5\
\x62\x67\xdf\x7c\xf3\xcd\x8c\x8f\x8f\x73\xfb\xed\xb7\xb3\x73\xe7\
\x4e\x02\x81\x80\x4f\x5c\x56\x7e\xbf\xe9\xe9\x69\xba\xbb\xbb\xcb\
\x81\xab\x2b\x79\x7c\xfc\x71\xba\xcf\x09\xd2\xae\xb5\xe2\x4e\x84\
\xf0\x52\x0b\xa1\xed\x10\x72\x15\x5d\xd7\xaf\xec\xef\xef\xd7\x5f\
\x2b\x40\x42\xef\x7b\xdf\xfb\xf6\x1e\x38\x70\xa0\x5a\x89\xb2\xa2\
\x46\x94\xeb\x29\xa5\xe4\xd3\x9f\xfe\x34\x4f\x3d\xf5\x14\xf3\x5c\
\x0f\x1f\xfc\xe0\x07\xa9\xa9\xa9\x79\x49\x17\x30\x4d\x93\x2d\x5b\
\xb6\x7c\x3e\x93\xc9\xbc\xaa\xfc\xd0\x8b\x48\xfa\x8d\x03\x03\x03\
\xf5\x9a\xa6\x59\x52\xca\xff\xf5\x4f\xff\xf4\x4f\x6b\x6f\xba\xe9\
\x26\x2c\xcb\x62\xdd\xba\x75\xec\xdb\xb7\x8f\xb1\xb1\x31\x5f\x8d\
\x39\x8e\xe3\xb3\xc6\x39\xaf\x44\xdb\x7b\x4a\x84\xdb\xa1\xd0\x3c\
\x8b\x01\x04\xb5\x08\x6e\x09\x28\x98\x4c\xfd\xa4\x11\x61\x78\x18\
\x86\xe1\xc7\x50\xaf\x05\x20\xf5\xd5\xd5\xd5\x0b\x75\x5d\xf7\x25\
\xa4\x50\x28\x50\x2c\x16\x09\x04\x02\x84\xc3\x61\x34\x4d\xe3\x27\
\x3f\xf9\x09\x57\x5c\x71\x05\xb5\xb5\xb5\x14\x8b\x45\xdf\x5b\x39\
\x5c\x1a\xd4\xdf\xba\xae\x93\x4e\xa7\x77\x1a\x86\xf1\xb8\xeb\xba\
\x4e\x24\x12\x89\x67\x32\x99\xd7\x14\x0c\x4d\xd3\xc8\x66\xb3\xbf\
\xcb\x66\xb3\xea\x34\x5f\x61\x18\x06\xb7\xdc\x72\x0b\x86\x61\xc8\
\xbb\xee\xba\x4b\xac\x5e\xbd\xda\x7f\xbd\x72\xe3\x95\x26\xc8\x14\
\x73\x44\xb6\xe7\x98\xfa\x7d\x9a\xcb\xcf\xf8\x5b\x5c\xbd\xc0\x43\
\xbd\xdf\xa6\x29\xdc\xc5\x44\x66\x88\xb9\xb1\x22\xc2\x28\x5f\x67\
\x66\x66\x66\x51\x34\x1a\xfd\xcb\x5c\x2e\x77\xdf\xab\x02\x48\x85\
\xde\x76\x95\xaf\xae\x44\xd9\xb2\x2c\xf2\xf9\x3c\xed\xed\xed\x3c\
\xf4\xd0\x43\xbc\xff\xfd\xef\x67\xdf\xbe\x7d\x5c\x77\xdd\x75\x04\
\x02\x01\x34\x4d\xfb\x43\x55\x20\x87\x7c\x7e\x38\x1c\x26\x1e\x8f\
\x3f\x35\x33\x33\x73\xc3\xeb\x95\x46\x3e\xfc\xd4\xa6\xd3\xe9\xcd\
\x3b\x76\xec\x08\x7a\x9e\xa7\xb7\xb4\xb4\xbc\xff\xe9\xa7\x9f\x46\
\xd7\x75\x84\x10\x84\xc3\xe1\x43\x54\x97\x69\x9a\x9c\x7d\xda\x1a\
\xa4\x07\xae\x74\xd9\xd6\xbb\x11\xd3\xd0\x59\xea\x9c\x83\x3b\x23\
\x58\x6a\x2f\x86\xd5\xaa\x9e\x47\x02\xac\x29\x14\x0a\x6b\xee\xbe\
\xfb\xee\x57\x07\x90\x78\x3c\x4e\x6b\x6b\xeb\x59\xb6\x6d\x7f\x40\
\xc5\x15\x2a\x12\x37\x8c\x32\x0f\x55\x53\x53\xc3\xe6\xcd\x9b\x31\
\x4d\x93\x9d\x3b\x77\x1e\x02\xc0\x4b\x4a\x9e\x04\x02\x6f\x68\x39\
\x52\x2a\x95\xba\x03\xb8\x03\x08\x2c\x5e\xbc\xb8\xf4\x37\x7f\xf3\
\x37\x4c\x4e\x4e\xb2\x61\xc3\x06\xba\xba\xba\xf8\xee\x77\xbf\x4b\
\xb1\x58\x2c\xb3\xc2\xc5\x22\xf1\x78\x1c\x21\x04\x1b\x53\xbd\xfc\
\xd9\xa7\xab\xf1\x5c\xf0\x7c\xb0\xc1\x1a\x0b\x52\xe8\x8d\x61\x8d\
\x05\x01\xf9\x62\x0e\xcc\xd1\xab\xac\x50\x28\xf4\xd7\x67\x9e\x79\
\xe6\x95\x96\x65\x49\xd7\x75\x85\xba\x39\xcf\xf3\x38\xef\xbc\xf3\
\xd8\xbd\x7b\x37\xdf\xf8\xc6\x37\xe8\xea\xea\xf2\x6d\x8b\x0a\xb8\
\x0c\xc3\xf0\xa3\x60\x29\xcb\xd1\xac\xa2\x42\xb6\x6d\xdb\x16\x01\
\x0a\xbc\x79\x96\x4c\x24\x12\x39\xd3\x34\xa5\x6d\xdb\xe1\x0d\x1b\
\x36\xe8\x3f\xf8\xc1\x0f\x7c\x49\xaf\xe4\xe6\x00\x2c\xcd\x25\x54\
\x25\x88\x26\x4f\xe7\x86\x0b\x3f\xcb\x23\x83\xdf\x61\x60\x6c\x90\
\xd2\xa9\x93\x4c\xe4\x0b\xe4\x86\x34\x10\xf2\x65\x1d\xd0\x97\x04\
\x5d\x57\x57\xd7\x7d\x2b\x56\xac\xf8\xb0\x6d\xdb\xec\xdf\xbf\x9f\
\x68\x34\x4a\x3c\x1e\xe7\x93\x9f\xfc\x24\x1d\x1d\x1d\xfc\xcb\xbf\
\xfc\x0b\xe1\x70\xd8\x57\x49\xca\xc0\x7b\x9e\x57\x66\x59\xe7\xf3\
\x20\x95\x2e\x71\x3e\x9f\x67\xef\xde\xbd\xe1\x7c\x3e\x5f\xe4\x4d\
\xb8\xea\xea\xea\x1e\x3b\xe5\x94\x53\xce\x5d\xbe\x7c\x39\x5b\xb7\
\x6e\x65\x7a\x7a\x9a\xd6\xd6\x56\xa4\x94\x64\x32\x19\x16\x2d\x5a\
\x84\x8e\x86\x53\x82\xa2\x57\xc4\xa5\x84\x69\x47\xd0\x8d\x79\x35\
\x6d\x82\x30\xfc\x78\x4a\x9a\xa6\x29\xb6\x6f\xdf\xbe\x7a\x6c\x6c\
\x6c\xf3\x2b\xb5\x21\x31\xcf\xf3\xaa\xd4\xe9\x70\x5d\xd7\x67\x6b\
\xe3\xf1\x38\xe7\x9c\x73\x0e\x37\xdd\x74\x13\xdb\xb6\x6d\x23\x97\
\xcb\x31\x37\x37\x47\x2a\x95\x22\x10\x08\xd0\xda\xda\xca\xd8\xd8\
\x18\x63\x63\x63\xac\x58\xb1\x82\x6d\xdb\xb6\x51\x2c\x16\x7f\x35\
\x3e\x3e\x7e\x6b\x2c\x16\x4b\xd7\xd7\xd7\x97\xf2\xf9\xfc\x9b\x11\
\x0f\x84\x10\x01\xdb\xb6\x79\xe2\x89\x27\xe8\xee\xee\x96\x97\x5c\
\x72\x89\xb8\xeb\xae\xbb\xa8\xad\xad\xc5\xf3\x3c\x9f\x47\x1b\xd5\
\x67\x68\xff\x8b\x59\x16\x34\x06\x70\xf2\x56\x39\xcb\x38\x15\xc2\
\x9a\x08\x22\x0b\x3a\x5e\x5e\x07\x10\xd1\x68\x14\xc3\x30\x9c\x57\
\xac\xb2\xea\xeb\xeb\x6f\x6f\x6c\x6c\xfc\x53\xe5\xea\x2a\x40\xba\
\xba\xba\x78\xf6\xd9\x67\xd9\xb4\x69\xd3\x8b\xbe\x77\x74\x74\xd4\
\x97\x9a\xde\xde\x5e\x62\xb1\x18\x81\x40\x60\x72\xff\xfe\xfd\x4f\
\xcf\xcd\xcd\xf9\x69\xd7\x37\xe3\x72\x1c\xe7\xf3\x03\x03\x03\xcd\
\xb6\x6d\xaf\x59\xb3\x66\xcd\xb5\x43\x43\x43\x18\x86\x71\x48\x7e\
\x07\x20\x65\xe5\x38\xb3\x39\x4a\x21\x2f\x79\xd7\x09\xef\xa3\xaf\
\x6a\x33\xe9\x8e\x19\x40\x50\x18\x36\x99\xf9\x45\x0d\xe8\x65\xf2\
\xf3\x55\xb1\x21\xae\xeb\x46\x94\xfb\xaa\x0a\x0b\xde\xfd\xee\x77\
\x33\x3a\x3a\xca\x63\x8f\x3d\x46\x38\x1c\xf6\xcb\x3c\x15\xbd\xae\
\x0c\x7e\x85\x6b\x5b\x2e\x09\x75\x5d\xd2\xe9\x74\x94\x37\xf9\xaa\
\xae\xae\x26\x9d\x4e\xff\xf7\x3c\x37\xe6\x0e\x0d\x0d\x5d\x9b\x4c\
\x26\x51\x55\x33\x9e\xe7\xd1\xdf\xdf\x4f\x55\x55\x15\x86\x67\xb3\
\xed\x6b\x36\x94\xa2\xcc\xd5\xed\x44\x0a\x93\x78\xca\x21\x60\xea\
\x14\x0b\x19\x9c\x54\x39\x3d\x3d\x35\x35\x45\x2c\x16\xfb\x61\x53\
\x53\xd3\x0d\x53\x53\x53\x8f\x1e\x35\x20\x52\x4a\x57\x49\x86\xb2\
\x07\x13\x13\x13\xdc\x78\xe3\x8d\x7c\xf5\xab\x5f\xe5\xf6\xdb\x6f\
\x67\x78\x78\x98\x93\x4f\x3e\x99\x99\x99\x19\xe2\xf1\x38\x35\x35\
\x35\x3c\xff\xfc\xf3\x68\x9a\xc6\x49\x27\x9d\xc4\x96\x2d\x5b\xc4\
\xe8\xe8\xe8\x13\xd3\xd3\xd3\xeb\x00\x16\x2e\x5c\xc8\xf8\xf8\xf8\
\x9b\x16\x10\xc5\x02\xcf\x4b\xb7\xa9\xb2\x9c\xab\x56\xad\x92\xa3\
\xa3\xa3\x22\x99\x4c\xd2\xd0\xd0\xc0\xc9\x27\x9f\x5c\x26\x23\x3d\
\x1b\x4e\x9a\xa1\xa6\xae\x80\x97\x0e\x50\x93\x5a\x84\x97\x36\x71\
\x53\x06\xb4\x80\xd0\xa5\x22\x3e\x4f\xba\xf7\xde\x7b\x1b\x8e\x5a\
\x42\x9a\x9a\x9a\x16\xbb\xae\xfb\x6e\xe5\x39\x05\x02\x01\x02\x81\
\x00\x43\x43\x43\x6c\xd8\xb0\xc1\xf7\x9a\x00\x7e\xff\xfb\xdf\xfb\
\x39\xf3\xca\x78\x63\xd3\xa6\x4d\x84\x42\x21\xff\x75\xc0\x9b\x1a\
\x8c\x3f\x10\x44\x3e\xba\x65\xcb\x96\xb5\x81\x40\xe0\xea\xf3\xce\
\x3b\xef\x8a\x1f\xfc\xe0\x07\x7e\x6a\x21\x93\xc9\x20\x3d\x8f\x31\
\x99\xe0\x8c\xb7\x39\xd8\x8e\x4b\x55\xa8\x86\x82\x9b\x41\xe8\xe5\
\x82\xbe\xfc\xae\x18\xe9\x67\x6a\x40\x1c\x74\x72\xfe\xe8\xf5\x8e\
\x40\xcc\xad\x89\x44\x22\x0b\x5d\xd7\x25\x99\x4c\x22\xa5\xe4\xfa\
\xeb\xaf\xa7\x58\x2c\x12\x0e\x87\x89\x44\x22\x3e\x48\xa1\x50\x88\
\x60\x30\x48\x38\x1c\x26\x1c\x0e\x13\x0a\x85\x08\x85\x42\xc4\x62\
\xb1\x43\xbc\xab\xb7\xda\xca\x64\x32\x93\xa5\x52\xe9\x49\x4d\xd3\
\x46\xbe\xf5\xad\x6f\x1d\x22\x3d\x7e\x32\x4e\xba\x80\xc6\x62\x79\
\x11\x9f\x59\x7d\x37\x7f\x7f\xee\x8f\x18\xde\xe3\xd2\x11\x5b\x8e\
\x08\x96\x6b\xcf\x14\x7b\x7c\xa4\x98\xe4\x8f\x3e\x1b\x0e\x87\xaf\
\x5c\xb2\x64\xc9\xdd\x00\xc9\x64\x92\xa6\xa6\x26\x7e\xf2\x93\x9f\
\x30\x3d\x3d\xcd\x43\x0f\x3d\xc4\xfe\xfd\xfb\x59\xb7\x6e\x1d\x8f\
\x3c\xf2\x08\x2b\x57\xae\x24\x91\x48\x30\x38\x38\x48\x4f\x4f\x0f\
\x8d\x8d\x8d\xfc\xf0\x87\x3f\xa4\x58\x2c\xce\xbc\xf0\xc2\x0b\x8d\
\xcd\xcd\xcd\x81\xc9\xc9\xc9\xd2\x5b\x15\x98\x9a\x9a\x9a\x2f\xac\
\x5e\xbd\xfa\xa6\xd1\xd1\x51\x92\xc9\x24\xa1\x50\x88\xd3\x4f\x3f\
\x9d\x40\x20\x80\x04\x02\x86\x86\x74\x02\x38\x5e\x09\xc3\x30\x71\
\xa4\x8d\xa6\x97\x6b\xcf\x64\x05\x31\x30\x3c\x3c\x1c\x7f\xfa\xe9\
\xa7\x17\x1e\x95\xca\xd2\x34\x2d\xac\x54\x90\xae\xeb\x84\x42\x21\
\x3e\xf9\xc9\x4f\xd2\xd9\xd9\xc9\x0b\x2f\xbc\x80\x94\x92\xde\xde\
\x5e\x74\x5d\xf7\xf3\x1c\x00\x8f\x3c\xf2\x08\xae\xeb\xaa\x82\x06\
\x09\xc8\xb7\x32\x18\xf3\x74\xcb\xcf\x9e\x79\xe6\x99\xfd\x0b\x17\
\x2e\xbc\xbd\xbd\xbd\x3d\x3a\x3a\x3a\xea\xf3\x79\x1e\x1e\xdb\xeb\
\xfb\x38\xeb\x2f\x22\x18\x19\x13\x37\x6d\xa0\xa7\x82\xb8\x29\x03\
\x37\x6d\x60\x8d\x85\x10\x02\xc5\x68\x9b\x47\x6d\x43\x42\xa1\xd0\
\xa7\x65\xd9\xaa\x8b\x25\x4b\x96\xe0\x38\x0e\xf9\x7c\x9e\x3d\x7b\
\xf6\xf8\xd1\xab\xe2\x7c\x82\xc1\xa0\x9f\x94\x51\x65\x9b\x80\x4c\
\xa5\x52\x36\xc7\xc0\xca\x64\x32\xcf\x02\xcf\x1a\x86\xf1\x8f\x23\
\x23\x23\x51\x4d\xd3\x0e\x26\xe5\xa4\x43\xfd\x42\x03\xe1\xea\x2c\
\x6a\x3c\x85\x9a\x45\x55\xbc\x30\xb5\x89\x58\xa0\x96\xbc\x3e\xc5\
\xc8\x57\x5b\x11\x5a\x99\x50\x55\x99\xd5\xa3\x02\x44\x4a\x29\xa4\
\x94\xa2\x54\x2a\xf1\x89\x4f\x7c\x02\xc3\x30\xb8\xe8\xa2\x8b\xf8\
\xf2\x97\xbf\x8c\x69\x9a\x18\x86\x41\x2a\x95\x62\x64\x64\x84\xee\
\xee\x6e\x86\x87\x87\x49\xa7\xd3\xf4\xf7\xf7\x93\xcd\x66\xc9\x64\
\x32\x7f\x33\x30\x30\xf0\x0d\x8e\xa1\xa5\x3c\x4d\x28\xe7\xf8\xa5\
\x94\xb8\x48\x3c\xf2\x3c\xbb\xab\xc8\xa2\x77\x76\xf1\xf0\xf3\x4f\
\x10\xab\x8f\x12\x88\x2e\x66\xd7\xcc\x5e\x52\xa3\x36\x9a\x56\x3e\
\xa4\x85\x42\xa1\xee\xa8\x6d\x48\x7d\x7d\x7d\x7f\x53\x53\xd3\x52\
\xd7\x75\xa9\xab\xab\xe3\x8a\x2b\xae\xa0\xb7\xb7\xd7\x2f\x36\xf0\
\xdb\x0a\x4a\x25\x42\xa1\x10\xa5\x52\x89\x6c\x36\x4b\x34\x1a\x25\
\x95\x4a\x91\xcd\x66\xaf\x3b\xd6\x00\x69\x69\x69\xf9\x8c\x10\xe2\
\xb3\x42\x88\xba\x95\x2b\x57\xfa\xb9\x9e\xa2\x67\x11\x3a\x39\x85\
\xe1\x18\x78\xd3\x11\x9c\xc9\x30\x68\x1e\x42\x13\x2a\xd7\x8e\x94\
\x92\xe9\xe9\x69\x1e\x7b\xec\x31\x71\xb4\x71\x88\x50\x74\xb5\xae\
\xeb\xdc\x75\xd7\x5d\x87\x24\xa3\x2a\xdd\x5b\x15\xb9\x9a\xa6\x89\
\x6d\xdb\x32\x16\x8b\x89\x74\x3a\xad\x1f\x4b\x60\x54\x57\x57\x33\
\x31\x31\xf1\xa5\xd6\xd6\xd6\x6b\x81\x3a\xcf\xf3\xca\x44\xa9\xd0\
\x78\xac\xb8\x9d\xbf\x5c\xd5\x84\x23\x6d\x2c\xb7\x44\xcc\x08\xe3\
\x66\x74\xbc\x54\x90\xdc\xce\x18\x56\x3c\xf0\x07\xa9\xff\x97\xe5\
\xf6\x02\xa6\x6d\xdb\x5c\x70\xc1\x05\x34\x36\x36\x72\xf5\xd5\x57\
\xd3\xd4\xd4\x84\x69\x9a\x2c\x5e\xbc\x98\x8e\x8e\x0e\x4c\xd3\xa4\
\xb1\xb1\x91\x64\x32\x49\x2e\x97\x43\xd7\x75\xe2\xf1\xb8\xd8\xbd\
\x7b\xf7\x2f\x32\x99\xcc\x2f\x8f\x25\x40\x94\xcb\x2b\xe6\x4f\xa1\
\x4a\x45\x58\x96\x85\xd0\xca\xe5\xa6\xef\x8c\xfd\x35\x5f\x5c\xf7\
\x30\xcb\x6a\xcf\x64\x79\xc3\xd9\x44\x3b\x25\xc4\x4a\x07\x1b\x8c\
\x5e\x89\x0d\x01\xc4\x7c\x76\x8d\x7b\xef\xbd\x97\xa1\xa1\x21\xf6\
\xec\xd9\x83\x69\x9a\xd4\xd7\xd7\x93\xcd\x66\xd1\x75\x9d\xce\xce\
\x4e\x6a\x6b\x6b\xd9\xbb\x77\x2f\x3d\x3d\x3d\x64\x32\x19\x0a\x85\
\xc2\xe6\x78\x3c\xde\xcb\x31\xb8\x1c\xc7\x91\x7e\x57\x30\xa0\x21\
\xd0\x43\x82\x7d\x7b\x93\xfc\xcb\xff\xb8\x9c\xeb\x1e\x7c\x37\x77\
\x5f\xb6\x99\xc9\xb9\x69\x9e\x4b\xfc\x17\xdf\xfb\xd5\xbd\x58\x96\
\xe6\x07\x94\xaf\x88\xcb\xd2\x34\x8d\x81\x81\x01\x2e\xba\xe8\x22\
\x0a\x85\x82\xaf\x0b\x2b\xcb\x5c\xfa\xfa\xfa\xfc\xc7\x4f\x3f\xfd\
\xf4\x21\x51\xf9\xb1\xb8\x66\x66\x66\xde\xd5\xd2\xd2\x32\xa5\x78\
\x2d\x81\x40\x84\x05\x85\x9c\x47\x38\x74\xd0\xab\x75\x5c\x87\xc7\
\x87\xfe\x03\xcf\x95\xbe\xbd\x7d\xa5\x12\x42\x20\x10\xa0\xbe\xbe\
\x9e\x40\x20\xa0\xaa\x14\x09\x06\x83\x64\x32\x19\x6a\x6a\x6a\xfc\
\xbc\xba\xca\xa9\x67\x32\x19\x2c\xcb\xb2\x81\xb1\x63\x15\x10\xd7\
\x75\xb3\xf3\x92\x52\xa6\x43\x28\xe7\x80\x62\x31\x13\xdb\x02\xd7\
\x9b\xef\x08\xc6\xc5\xf5\x1c\x3c\xd7\xc5\xb2\xbc\x57\x0e\x88\xe7\
\x79\xac\x5b\xb7\x8e\x2b\xaf\xbc\x92\x33\xce\x38\x83\x87\x1f\x7e\
\x98\xdf\xfd\xee\x77\xe4\xf3\x79\x26\x26\x26\x38\xe3\x8c\x33\x98\
\x9c\x9c\xa4\xaf\xaf\x0f\xcf\xf3\x68\x6c\x6c\xe4\x85\x17\x5e\xc0\
\xf3\xbc\x7d\xf1\x78\xfc\xdf\xdf\x0c\x93\x11\x5e\xab\x35\xcf\x64\
\x4b\xcb\xb2\x84\x40\x20\x34\x9d\xa5\xcb\xea\x79\x64\xeb\xcf\xf9\
\xcc\x39\xdf\xe2\xbf\xf6\x7c\x13\x27\x17\xe6\xbc\xc5\x1b\xe8\x75\
\xbe\x43\xa9\xe4\x1c\xa2\x55\x8e\x5a\x42\x36\x6e\xdc\xc8\xf4\xf4\
\x34\x35\x35\x35\x0c\x0c\x0c\xf8\x28\x4b\x29\xf9\xe9\x4f\x7f\xea\
\x57\x92\x78\x9e\xc7\xd4\xd4\x94\x2a\xae\x16\xc0\x31\x0b\x06\x60\
\x15\x0a\x85\xd9\x42\xa1\x50\x9f\x4c\x26\xd1\x84\x46\xbe\xd9\xa3\
\x68\x55\xf1\x1f\x43\x7f\xcf\x27\x63\xdf\xe3\x40\xae\x9f\xe7\x27\
\x7e\x87\x11\xd0\x98\x19\x4b\x31\x37\x57\xae\x43\x50\x99\xd5\xa3\
\x06\xc4\x30\x0c\x46\x47\x47\x19\x1e\x1e\x46\xd7\x75\xdf\xe5\xd5\
\x34\xcd\xff\x5b\x45\xed\x2a\x0f\x32\x39\x39\xd9\xd2\xda\xda\xfa\
\xbe\x78\x3c\xfe\xc8\xb1\xaa\xb5\x84\x10\x96\x4a\x57\xab\x86\x1e\
\x23\x28\xa8\x22\xc2\x1d\x8f\x5d\x4b\xb2\xcf\x63\x66\x6f\x81\x52\
\xd2\x23\x37\xe9\x22\x34\xe1\xef\xd3\x51\x03\x12\x8d\x46\xf9\xc8\
\x47\x3e\x42\x7f\x7f\x3f\x17\x5f\x7c\x31\x4f\x3c\xf1\x04\x43\x43\
\x43\xd4\xd5\xd5\xe1\x38\x8e\xef\x06\x8e\x8c\x8c\x90\x4c\x26\x69\
\x69\x69\x61\x72\x72\x92\x5c\x2e\x57\x4d\xb9\x18\xe6\x98\x03\xa4\
\xa5\xa5\x45\x55\xe9\x4b\x75\x08\x6b\x6b\x6b\x31\x87\x8b\x3c\x71\
\x63\x9a\x42\xc2\x2d\x77\xec\x6a\x20\xf4\xb2\x0f\x66\x98\xf8\x07\
\xf6\x48\xf4\xfb\x1f\x05\x24\x12\x89\x30\x35\x35\xc5\xa7\x3e\xf5\
\x29\xa4\x94\x6c\xdc\xb8\x91\xba\xba\x3a\x6a\x6b\x6b\x71\x5d\x97\
\xd1\xd1\x51\xba\xba\xba\xfc\xc6\x97\xb6\xb6\x36\x72\xb9\x9c\x02\
\xca\x3b\x16\x45\x63\x62\x62\x82\xf6\xf6\xf6\xef\x06\x02\x81\x46\
\xd7\x75\xb9\xe3\x8e\x3b\x58\xb7\x6e\x1d\x5f\xfb\xda\xd7\x38\x70\
\xe0\x00\x9e\xe7\x91\xc9\x64\x30\x0c\x83\xd9\xd9\x59\xaa\xab\xab\
\x09\x85\x42\x0c\x0f\x0f\x53\x28\x14\x38\x52\x0d\xc1\x91\xd8\x5e\
\x7e\xf1\x8b\x5f\xf0\xf3\x9f\xff\xfc\x45\x5f\x53\x69\x27\x76\xed\
\xda\x45\x24\x12\x79\x49\xc6\xeb\xad\xbc\x02\x81\xc0\xda\x55\xab\
\x56\x19\xbf\xff\xfd\xef\x59\xb1\x62\x05\x7d\x7d\x7d\xac\x5d\xbb\
\x96\xef\x7f\xff\xfb\xf4\xf4\xf4\x30\x36\x36\x46\x57\x57\x17\x8f\
\x3c\xf2\x08\xd5\xd5\xd5\xb4\xb7\xb7\x73\xda\x69\xa7\x71\xdf\x7d\
\xf7\x1d\x31\x37\xf4\x47\x15\x5a\x6b\x6b\xeb\x70\x55\x55\xd5\x09\
\x95\x84\x5a\xa9\x54\x22\x12\x89\xf8\x95\x25\x91\x48\x84\x5c\x2e\
\x87\xe7\x79\xd4\xd5\xd5\xf9\xd2\xd2\xde\xde\x6e\xcf\xcd\xcd\xdd\
\xb3\x67\xcf\x9e\xab\x8f\x35\x40\x16\x2d\x5a\x34\x78\xea\xa9\xa7\
\x9e\x38\x31\x31\xc1\x92\x25\x4b\x58\xb3\x66\x0d\xbf\xfc\xe5\x2f\
\x29\x95\x4a\x04\x02\x01\xb2\xd9\x2c\xe1\x70\x98\x74\x3a\xed\xef\
\x9b\xe7\x79\xe4\x72\x39\x5c\xd7\x65\xe7\xce\x9d\x47\xc7\x65\x45\
\xa3\x51\xeb\x9b\xdf\xfc\x26\xa9\x54\x8a\x0f\x7c\xe0\x03\xdc\x7f\
\xff\xfd\xf4\xf6\xf6\xd2\xd0\xd0\x40\x55\x55\x15\xcf\x3d\xf7\x1c\
\x55\x55\x55\xf4\xf5\xf5\x31\x31\x31\x41\x57\x57\x17\x23\x23\x23\
\x0c\x0f\x0f\xe3\x38\x8e\x29\x84\x30\x8f\x41\x30\xfe\x2a\x10\x08\
\xd4\xcd\xcc\xcc\x20\x84\x60\x68\xdf\x10\x03\x03\x83\x18\xc6\xa1\
\x27\xdf\x1f\x86\x33\x1f\x77\x08\x21\x08\x85\x42\xfe\xff\x3f\x2a\
\x40\x74\x5d\xe7\x73\x9f\xfb\x1c\x5f\xfc\xe2\x17\xb9\xf5\xd6\x5b\
\xd9\xb1\x63\x87\x5f\x75\x91\xcf\xe7\xd1\x75\xdd\xaf\x46\x71\x1c\
\x87\xe1\xe1\x61\x3f\x8a\x9f\x77\x8d\xbd\x79\x49\x3b\x66\x5c\xe0\
\xb6\xb6\xb6\x7f\x5c\xba\x74\x69\x3d\xc0\x8e\x5d\xcf\xe3\x35\xa5\
\x89\x04\x4c\x64\x26\x80\xb4\x34\x3c\x4b\x43\xda\x1a\x42\x48\x5f\
\xff\x48\x29\xc9\xe7\xf3\x94\x4a\x25\x7f\x48\xc2\x2b\x8a\xd4\x6f\
\xbe\xf9\xe6\x17\xe3\x74\x7c\x5b\xa3\x7a\xb5\x43\xa1\x90\x7f\x32\
\x34\x4d\xbb\x72\xe9\xd2\xa5\x17\x0d\x0c\x0c\x34\x1e\x2b\x12\x62\
\x59\x96\xae\x5a\xbc\xc7\xab\xf3\xfc\xf9\x27\x1a\x70\x0f\xa3\xa7\
\x3c\x07\xb2\x5b\xaa\xc9\xef\x89\xa2\x6a\x7b\x67\x67\x67\xc9\x66\
\xb3\x94\x4a\xa5\xc4\x51\xb3\xbd\x63\x63\x63\xf7\x37\x35\x35\x71\
\xe9\xa5\x97\x72\xd3\x4d\x37\x71\xd6\x59\x67\xd1\xda\xda\x8a\x2a\
\xd5\x3f\xef\xbc\xf3\x68\x6f\x6f\x07\xca\xad\xd2\x4b\x96\x2c\x61\
\x72\x72\xd2\x77\x8b\xab\xab\xab\xe9\xe8\xe8\x38\x56\x24\x23\x02\
\x2c\xcc\xe7\xf3\x7a\x32\x99\x2c\x37\x28\x15\x2c\x32\x13\x25\xc4\
\xd0\x32\x3e\xdc\xf6\x45\x4e\x64\x0d\xda\x44\x3b\x6e\xca\x24\x53\
\x4a\x91\x9c\x2b\x33\xe0\xaa\xb7\xe4\xa5\xd4\xf8\xfe\x51\x09\xb1\
\x6d\xbb\xdf\xb2\x2c\xd6\xae\x5d\xcb\xb2\x65\xcb\x18\x1f\x1f\xa7\
\x54\x2a\x31\x37\x37\x47\x73\x73\x33\xe9\x74\x9a\xda\xda\x5a\x52\
\xa9\x14\x93\x93\x93\x54\x57\x57\xa3\xeb\x3a\xaa\x87\xc4\xb6\x6d\
\x4c\xd3\x94\x15\x4e\xc2\x5b\x56\x75\xe5\xf3\xf9\x8f\xfd\xe9\x9f\
\xfe\xe9\x97\x4d\xd3\x04\x29\xc9\xd8\x45\xde\xbd\x68\x39\xa9\x9f\
\x19\xcc\x15\x53\xfc\xc3\xc3\x5f\xc6\xd0\x0d\x34\x43\x20\x58\x80\
\xae\xc1\x82\x46\x9d\xa1\xa1\x21\x1a\x1b\x1b\x29\x16\x8b\x04\x83\
\x41\xe9\x79\x5e\xf8\x15\xd1\xef\x63\x63\x63\x6c\xd8\xb0\x01\xc3\
\x30\xfc\xfc\xb9\x5a\x83\x83\x83\xbe\xc1\x02\x78\xee\xb9\xe7\x30\
\x4d\x93\x4a\xe2\xcd\x75\xdd\xaa\x95\x2b\x57\xde\x35\x35\x35\x35\
\x18\x8f\xc7\xff\xe1\xad\x2a\x21\x9e\xe7\xd5\x29\x16\xbb\x20\x6d\
\xec\x35\x23\x2c\xe8\x89\xe1\xe2\x22\x6d\x70\x27\xa2\x58\x93\x01\
\x9c\xc9\x20\x56\xdc\xf0\xab\xde\x55\x43\x6c\x2a\x95\xa2\xb9\xb9\
\x59\x14\x8b\xc5\xab\xda\xda\xda\x18\x1b\x1b\x7b\xf9\x2a\x4b\xd3\
\x34\x4f\xa9\x9e\x48\x24\x42\x30\x18\xf4\xeb\xb0\x54\xd7\xd4\xe1\
\x35\x58\x6a\x7a\x82\x6a\x03\x03\x42\x3d\x3d\x3d\x57\x01\x7f\x72\
\x0c\xd8\x8f\x72\xff\xa1\x65\x51\x57\x13\xa2\x54\x70\x38\x77\xc1\
\x5f\xf3\xde\x25\x57\x10\x68\xb5\xa8\x7f\x9b\x43\xd5\x79\x09\x4a\
\x79\xfb\xe0\x4c\xc8\xf9\x7a\x2c\x55\xa0\x2e\x84\x08\xbe\x18\x18\
\x47\x94\x90\xaa\xaa\xaa\x7b\xc7\xc6\xc6\xd6\x7f\xf6\xb3\x9f\xfd\
\xf3\x9a\x9a\x1a\x2e\xbd\xf4\x52\x6e\xbb\xed\x36\x92\xc9\x24\x2b\
\x57\xae\xc4\xf3\x3c\x7e\xf9\xcb\x5f\xb2\x72\xe5\x4a\x76\xec\xd8\
\x01\x40\x30\x18\xa4\x58\x2c\x92\xcd\x66\xfd\xc1\x01\xf3\xae\x9e\
\xfb\x56\x54\x5d\xa1\x50\xe8\x2f\xd6\xaf\x5f\x7f\xff\x7c\x25\x8d\
\x74\x5d\x29\xc2\x55\x3a\xde\x2f\x1a\xc8\x08\xc9\x03\xda\x6f\x40\
\x80\xa6\x75\xfa\xef\xa9\xae\xd7\xd8\xbc\x79\x33\xcb\x96\x2d\xf3\
\x5b\xe1\x94\x2a\x3f\x52\xd0\xfc\x47\x01\x99\x9a\x9a\x92\x8b\x16\
\x2d\xb2\x1e\x78\xe0\x01\x3e\xfe\xf1\x8f\x73\xf3\xcd\x37\xd3\xd7\
\xd7\x87\xa6\x69\xc4\xe3\x71\x3f\x25\xb9\x65\xcb\x16\x0a\x85\x02\
\xc9\x64\x92\xe6\xe6\x66\x9f\x7c\x54\xee\xaf\x6d\xdb\x84\xc3\xe1\
\x33\x2e\xbc\xf0\xc2\x67\x06\x06\x06\xfe\x7b\x70\x70\xf0\xd6\xb7\
\x90\xaa\x0a\x29\x9b\x68\x08\x5d\x0c\x9d\xb0\x8b\x77\x9c\x5f\x85\
\xeb\xb9\xd8\x29\x0d\xd2\x11\xdc\xb4\x81\x9b\x34\x29\x0e\x86\x91\
\xae\x00\xb7\xac\xde\x2b\xa7\xd7\x05\x02\x01\x12\x89\xc4\xf3\x86\
\x61\xec\x7b\x45\x6e\xaf\x94\x52\x73\x5d\x97\x2f\x7d\xe9\x4b\x7e\
\xbd\x95\xf2\x16\xa4\x3c\xb4\x5d\x4b\xd3\x34\xa6\xa7\xa7\x7d\xf1\
\x54\xdd\x46\xf3\x11\x6c\x4d\x7d\x7d\xfd\x3b\x6b\x6b\x6b\xf7\xbe\
\x45\x48\xc4\xda\xfa\xfa\xfa\x25\x89\x44\x62\xd1\x7c\xd2\x0d\x5d\
\x6a\xe8\x96\x20\x35\x24\xa9\xd3\x4e\xa4\xb5\xae\x9a\x01\x6f\x3b\
\xa2\x4a\x43\x54\x0b\x72\x2f\x54\xe3\x15\x35\x7f\x22\xc5\xc1\xd9\
\x92\x52\xb5\x32\x7c\x73\x6a\x6a\xea\x99\x57\x04\xc8\xec\xec\xec\
\x57\xb3\xd9\x6c\xe7\x35\xd7\x5c\xf3\xae\x42\xa1\xc0\xbb\xde\xf5\
\x2e\x1e\x78\xe0\x01\x3c\xcf\x63\xc1\x82\x05\x8c\x8f\x8f\x13\x0e\
\x87\x89\xc5\x62\x3c\xf3\xcc\x33\xb4\xb7\xb7\xd3\xd2\xd2\x42\xb1\
\x58\xf4\x49\xb6\xca\x51\x15\x2f\xb7\x4d\xf8\x8d\x5a\xc5\x62\xf1\
\xe3\xa7\x9f\x7e\xfa\x3f\x28\x43\x5e\x92\x16\xc1\x06\x87\xa6\xd9\
\xb7\x23\x9e\x13\xe4\x80\x1c\x0e\x01\x4e\xf5\xdf\xd3\xde\x64\xb0\
\x7b\xf7\x6e\xbf\x15\x43\x55\xe2\xa8\x34\x05\xe5\x39\x8d\x47\xaf\
\xb2\x0c\xc3\x20\x9d\x4e\x6f\x5e\xb8\x70\xe1\xd0\xbe\x7d\xfb\xde\
\xf5\xef\xff\xfe\xef\xf4\xf7\xf7\xb3\x74\xe9\x52\x0c\xc3\xf0\xdb\
\xa3\x0d\xc3\x20\x1a\x8d\x72\xca\x29\xa7\x30\x37\x37\x47\x43\x43\
\xc3\x21\x05\x65\xaa\x49\x74\x9e\x07\xfb\xe0\x25\x97\x5c\x72\xfe\
\xcc\xcc\x8c\xd8\xbe\x7d\x7b\x77\x3a\x9d\x4e\xbc\x59\x40\xa8\xb4\
\x6d\x9e\xe7\x99\xf9\x7c\x1e\x21\x04\x79\xb7\x04\xe7\x1e\x60\xf1\
\xb2\x30\xd2\x03\x69\xcf\x47\xe4\x96\x86\x3d\x6d\x92\x7a\xbc\x0e\
\x29\x5c\xbf\xa1\x47\x1d\xc0\x64\x32\x49\x30\x18\x44\xd3\x34\x19\
\x08\x04\x84\x65\x59\xe6\x2b\x02\xa4\x22\x12\x17\x7d\x7d\x7d\xac\
\x5b\xb7\xce\x37\x50\x87\x97\x8d\x2a\xf5\x25\x84\xe0\xd7\xbf\xfe\
\xf5\xc1\x21\x65\x9a\x46\xb1\x58\xf4\x01\x11\x42\x84\x0d\xc3\x08\
\xd7\xd6\xd6\x62\x59\xd6\x9b\x8a\x16\x8e\xc7\xe3\x9c\x72\xca\x29\
\xff\xa7\xb9\xb9\xb9\xb3\x54\x2a\x75\x64\xd2\x69\xf0\x34\x1c\xcf\
\xc3\x78\xb2\x96\xa1\xcd\x82\xf9\x56\xe7\x0a\x5e\xd6\x25\x3b\x33\
\x4d\x75\x4d\x95\xdf\x61\xe6\xcf\x1b\xb6\x6d\x42\xa1\x10\xba\xae\
\x8b\x4d\x9b\x36\x9d\x58\x5f\x5f\x3f\xf1\x8a\x00\x51\xab\x54\x2a\
\x4d\x99\xa6\x39\xaa\xeb\x7a\x7b\x53\x53\x13\x86\x61\xf8\xdc\x8c\
\x9a\x5f\x15\x0a\x85\x7c\x23\xaf\x8a\xe9\xd4\x00\x80\xd9\xd9\x59\
\x7f\xca\x8e\x9a\xdb\xeb\x38\x0e\x5d\x5d\x5d\xd7\x04\x02\x81\xec\
\xc4\xc4\xc4\xf3\x53\x53\x53\x4f\xbe\xd1\x52\x31\xef\x59\x5e\xb0\
\x60\xc1\x82\x7a\x80\x84\x9d\xa6\x7a\xcd\x2c\xf5\xd5\x21\x9c\xc9\
\x30\xce\x64\x08\x59\xd2\xf1\x4a\xe5\xb9\xbe\x78\xe5\xc3\x18\x6d\
\x29\x77\x07\x54\x4e\x5d\x55\xd9\x53\x55\x74\x2e\xa5\xdc\x37\x33\
\x33\x73\xc4\xfb\x79\x49\x95\x85\xf9\x7c\xfe\xbf\xc7\xc7\xc7\x7f\
\xdc\xd4\xd4\xf4\x3f\xef\xbe\xfb\x6e\x56\xad\x5a\xe5\xab\xa3\x0f\
\x7d\xe8\x43\x4c\x4e\x4e\xb2\x7c\xf9\x72\xba\xba\xba\x98\x99\x99\
\x61\xed\xda\xb5\xac\x5e\xbd\xda\x1f\x4a\xa6\x5a\xdb\x22\x91\x88\
\x3f\x24\xcc\xf3\x3c\x5a\x5b\x5b\xcf\xef\xea\xea\x7a\xdf\xee\xdd\
\xbb\xf3\xc5\x62\xf1\x0d\xc9\x2e\x66\xb3\x59\x5a\x5b\x5b\x3b\xab\
\xaa\xaa\xde\x56\x5d\x5d\xdd\x68\x9a\xe6\x47\xa5\x94\xc1\x42\xa1\
\xc0\xae\xe4\x10\x4b\x4f\x35\x28\x16\x2c\x1c\x23\x87\x57\x9b\xc2\
\x6b\x48\x22\x9b\x67\x29\x58\x79\x72\xe3\xf8\x55\xed\x2a\x05\x91\
\x4e\xa7\x7d\xb2\x55\x0d\xda\x89\xc5\x62\x0c\x0f\x0f\xbf\x24\xcf\
\xf2\x65\x15\x50\x85\x42\x21\xae\xbd\xf6\x5a\x6a\x6a\x6a\x18\x1f\
\x1f\x47\x4a\xc9\xee\xdd\xbb\xf1\x3c\x8f\x7d\xfb\xf6\xf9\x0d\x29\
\x3f\xfd\xe9\x4f\x7d\x35\xa6\x69\x9a\x6f\x6f\x94\x28\x2b\x5e\xa7\
\x62\x60\x66\x8d\xa6\x69\x4b\x01\xb1\x70\xe1\xc2\xf8\xe8\xe8\x68\
\xe6\xf5\x04\x25\x14\x0a\xdd\xb2\x6a\xd5\xaa\xbf\x2a\x47\xca\x1a\
\x33\xcc\xb1\xa0\x5d\x67\xdd\x64\x0f\xf6\x6f\xcc\x72\x4b\x9a\x06\
\x68\x07\x35\x6c\x58\xc0\xc4\xec\x10\x4d\x4d\x4d\xfe\x58\xc0\xca\
\x31\xb8\xaa\xa0\x21\x9f\xcf\x3f\x3e\x3d\x3d\x7d\xdb\x4b\xbd\x97\
\x97\x05\x88\xda\xd4\x44\x22\x21\x3d\xcf\x13\xf3\x5c\x15\xba\xae\
\x53\x2c\x16\x0f\xe9\x51\x57\x34\x8b\x4a\x68\x25\x93\x49\x66\x66\
\x66\xa8\xa9\xa9\xc1\xb2\x2c\x4c\xd3\x54\x80\xc8\xce\xce\xce\x8f\
\x2c\x59\xb2\xe4\x23\xba\xae\xf3\xab\x5f\xfd\xea\x2a\xe0\xee\xd7\
\x41\x55\xb5\xa5\xd3\xe9\xf3\x3c\xcf\x2b\xe5\x72\xb9\xe5\xaa\x63\
\x38\x65\xe7\x09\xaf\x4c\xa1\x05\x83\xd0\x25\x91\x96\xc0\x4d\x99\
\x38\x49\x03\x37\x69\x10\xf6\x6a\xa9\xae\xa9\x46\x7a\xd2\xb7\x8b\
\xba\xae\xfb\x83\xdc\x94\x33\x54\x31\x28\xe1\xc0\xe8\xe8\xe8\x6f\
\x5f\x0b\x40\xe2\x5b\xb7\x6e\x0d\x06\x83\xc1\x5b\x3e\xf1\x89\x4f\
\xdc\x78\xf6\xd9\x67\x73\xfe\xf9\xe7\xf3\xed\x6f\x7f\x9b\xed\xbb\
\x25\x5f\xa5\x00\x00\x09\x83\x49\x44\x41\x54\xdb\xb7\xf3\x8e\x77\
\xbc\x83\x74\x3a\xcd\xe0\xe0\x20\x35\x35\x35\x3e\xaf\xb5\x7a\xf5\
\x6a\x9e\x7b\xee\x39\x1f\xac\xc3\x67\xf8\x5a\x96\x25\x94\x73\x30\
\xdf\x22\xd7\xd1\xdd\xdd\x7d\x86\x94\x92\x99\x99\x99\x3d\x13\x13\
\x13\xaf\x49\x23\x7b\xa1\x50\xf8\xcb\xcb\x2e\xbb\xec\x9f\x55\xbb\
\x72\x51\x2b\x12\xad\x13\x60\xe9\x78\x25\x0d\x99\x2e\xdb\x88\xb2\
\x62\x97\xc8\x06\x30\x9a\x0d\xb6\xef\xd8\x46\xc8\x3a\x98\x62\x50\
\x87\x2b\x99\x4c\x22\x84\xf0\x0f\x9c\x65\x59\x04\x83\x41\x6c\xdb\
\x7e\x59\x73\x46\x5e\x0e\x20\x92\xf2\xcf\x4d\x38\xdb\xb7\x6f\x47\
\x4a\xc9\x8f\x7f\xfc\x63\xa6\xa7\xa7\xf1\x3c\x8f\x07\x1e\x78\xc0\
\x4f\x5e\x55\x8e\xca\xdb\xb4\x69\x93\x5f\xfe\xa2\x4e\x4e\xa1\x50\
\x38\xc4\x45\xac\x9c\xd5\x78\xea\xa9\xa7\xde\xd6\xd1\xd1\x71\x5b\
\x24\x12\xe1\xe1\x87\x1f\x5e\x05\x6c\x7d\x15\xa5\xe2\x03\xc0\x69\
\xe5\xbd\x74\xdf\xbd\x73\xe7\xce\xf2\xc8\x0c\xcf\x41\x9c\x90\xa6\
\xa6\x74\xa8\x57\x6a\x27\x75\xec\xf1\x20\x26\x21\x9a\x9b\x9b\x39\
\xfc\xc7\x04\x0e\x3f\x5c\xca\x33\x8d\xc5\x62\x64\xb3\x59\x06\x07\
\x07\xfb\x75\x5d\x1f\x78\xad\x00\xf1\x1d\x81\x50\x28\xc4\xb6\x6d\
\xdb\xfc\x69\xa4\xaa\x5d\x41\x75\x54\x55\x26\xf2\x95\x64\xa8\xea\
\x8b\x5c\x2e\x47\xb1\x58\xf4\xd9\xe3\x4a\xcf\x4b\x4d\x11\x4d\xa7\
\xd3\x78\x9e\x47\x47\x47\xc7\x7f\x9e\x74\xd2\x49\x45\xd7\x75\xf5\
\xde\xde\xde\x3b\xe2\xf1\xf8\xd7\x5e\xce\x8d\x1e\x36\xce\x96\x6c\
\x36\x7b\xcd\x65\x97\x5d\x76\x61\x36\x9b\x45\x08\x41\xa2\x76\x5c\
\x76\x2e\x33\x85\x15\x0f\xe3\xcc\x04\x91\x19\xd3\xf7\x68\xa5\xe7\
\x21\x35\x49\x78\x69\x98\x8d\x9b\x36\x52\x5b\x5b\xeb\x03\x52\x39\
\xe4\x53\x4d\xe9\x56\x71\x57\x30\x78\x70\x5c\xfa\xf4\xf4\xf4\xb2\
\x97\xcb\xdd\xbd\x6c\x40\x74\x5d\xbf\xfb\xc9\x27\x9f\x7c\x4a\xd7\
\xf5\x2b\x2f\xba\xe8\xa2\xcb\xd6\xae\x5d\x4b\x5b\x5b\x1b\x9b\x36\
\x6d\x22\x9d\x4e\x93\x4c\x26\x99\x9b\x9b\x43\xd7\x75\x4e\x38\xe1\
\x04\x86\x87\x87\x99\x98\x98\xe0\xf4\xd3\x4f\xe7\x85\x17\x5e\x38\
\x04\x34\xdb\xb6\xfd\x21\x36\xa5\x52\x89\x60\x30\xe8\x7f\x61\xdb\
\xb6\xa9\xab\xab\xeb\x54\x3f\xfe\xb2\x6b\xd7\xae\x75\xe1\x70\x78\
\x06\xd0\x82\xc1\x60\x21\x99\x4c\x3e\x70\xa4\x7b\x9d\x9c\x9c\xe4\
\xe4\x93\x4f\x7e\x7c\xd1\xa2\x45\x6b\x95\x5d\xdb\x37\xb6\x57\x06\
\x82\x9a\x90\x05\x03\x7d\x26\x22\x0e\xf4\x83\xd0\x6c\x84\x66\xe3\
\x7a\xae\x1f\x3f\x00\xd4\xd4\xd4\xa0\xe9\x07\x8b\x3b\x2a\x01\x51\
\x01\xaf\x2a\x5c\x38\xf1\xc4\x13\x89\xc7\xe3\x94\x4a\x25\xaa\xaa\
\xaa\x0e\x99\x52\xf1\x72\x88\xd4\x97\x0d\xc8\xdc\xdc\xdc\x20\x30\
\x58\x53\x53\x73\xd6\xce\x9d\x3b\x59\xbe\x7c\x39\x03\x03\x03\xfe\
\x54\x4f\x25\xca\xae\xeb\x32\x36\x36\xe6\x73\x5d\x3b\x76\xec\xf0\
\xdb\xe0\x54\x10\x39\x37\x37\xc7\xe1\xe3\x65\x2b\xb8\x2f\x7f\x13\
\x00\xd9\xdd\xdd\x7d\xc9\x8a\x15\x2b\x2e\x01\xd8\xbf\x7f\x7f\x7e\
\x74\x74\xb4\x65\x5e\x45\xe8\x53\x53\x53\x4f\xba\xae\x1b\x9f\xe7\
\x8d\x16\xb5\xb5\xb5\x6d\xac\x2c\xd9\xac\xdc\x10\xe9\x49\xa1\xa4\
\x40\x49\x29\xc0\xf4\xf4\x34\x3d\x3d\x3d\x7e\x2c\x31\x3b\x3b\x5b\
\xe6\xaf\xe6\x27\x53\x54\x8e\x16\x51\x80\x54\xd2\x22\x4b\x96\x2c\
\x61\x7a\x7a\x9a\x52\xa9\x34\x37\x35\x35\x35\x52\x2c\x16\x9d\xa3\
\x51\xab\xaf\xa8\x6f\xa0\xad\xad\x8d\x8d\x1b\x37\x02\xfc\xc1\x59\
\x1e\x55\x55\x55\x87\xe7\x57\x08\x85\x42\xb8\xae\xcb\xd4\xd4\x94\
\x1f\xd9\x2b\x5d\xac\x86\x84\x29\x09\x51\x7f\x03\x22\x97\xcb\x31\
\x31\x31\xa1\xf4\x74\x64\xc5\x8a\x15\x5f\x57\xc4\xdd\xd6\xad\x5b\
\xa9\xae\xae\xf6\x83\xd0\xe9\xe9\xe9\x3f\x58\xb2\xe9\x38\x0e\x17\
\x5e\x78\xa1\xef\x11\x8d\x8f\x8f\xd3\xd8\xd8\xe8\xdf\xbf\x72\x5d\
\x95\x3a\x52\x3f\xb1\xa1\xa6\xe2\x29\x57\x5d\x55\x8e\xa8\x61\x97\
\xba\xae\x33\x32\x32\xa2\x00\x7a\x78\xf7\xee\xdd\x97\x1f\xed\x9e\
\x1e\x35\x20\x9e\xe7\x7d\xed\xd9\x67\x9f\xfd\xb1\x65\x59\x2b\x7b\
\x7a\x7a\xee\xbd\xf5\xd6\x5b\x69\x6b\x6b\xe3\x67\x3f\xfb\x19\x3b\
\x76\xec\x40\x08\x41\x36\x9b\x65\xd9\xb2\x65\xf4\xf6\xf6\xfa\xcd\
\x3d\x8d\x8d\x8d\xec\xdb\xb7\xcf\xd7\xbb\x8a\xda\x3e\x3c\xd2\x55\
\x29\x60\xe5\x4e\xab\x0d\x52\x41\xa6\xda\x14\x29\x25\xad\xad\xad\
\xb4\xb5\xb5\x21\x84\x60\x72\x72\x92\xd6\xd6\x56\x3f\x0e\xd8\xbb\
\x77\xaf\x9f\xd7\xdf\xb5\x6b\x97\xcf\xbf\xa9\x93\x7f\xb8\x81\x56\
\xb4\x90\xf2\x9e\x0e\xf7\x0c\xd5\xf5\x01\x9f\x48\x1d\x1c\x1c\x64\
\x6e\x6e\x8e\xaa\xaa\x2a\x2a\x87\x76\xbe\xae\x80\x64\x32\x99\x29\
\x60\x4a\xd7\xf5\x2a\x4d\xd3\xf8\xdc\xe7\x3e\x47\x43\x43\x03\x89\
\x44\xc2\xff\x52\x52\x4a\x26\x26\x26\x0e\xa1\xeb\x15\x58\xb1\x58\
\x8c\xee\xee\x6e\xe2\xf1\xb8\xaf\x12\x2a\x87\x6b\xaa\x7f\x4a\x8a\
\x94\xe1\xaf\xcc\xb1\x54\x66\xf2\x14\x11\xa8\x0c\xad\x72\x34\x14\
\xeb\xac\xdc\x54\xc5\x38\x2b\xf0\xd5\xe7\xa8\x83\xa0\x3c\xc2\xca\
\xc2\x84\xf9\xa1\x39\xfe\xf7\x50\x0e\xcc\xb2\x65\xcb\xe8\xeb\xeb\
\xc3\x30\x0c\xe2\xf1\xf8\x1d\x42\x08\xaf\x50\x28\x3c\xf3\x86\x00\
\x52\xa1\x86\x9c\x52\xa9\x54\x0c\x87\xc3\xc5\x64\x32\x19\x8d\x46\
\xa3\x66\x30\x18\x44\xd7\x75\x7f\x34\xac\x3a\x65\xca\xb0\xaa\x4d\
\x53\xd5\x8f\x2a\x9a\xd7\x75\xdd\x4f\xff\x56\x66\xd6\x54\x4f\xb8\
\x92\x10\x21\x84\x2f\x21\x95\x3a\x5e\x01\xa7\x1c\x05\xe5\xc1\xa9\
\x4d\x57\xaa\x47\x01\xa2\xc6\xc1\x56\x3e\xa7\x0e\x40\x25\x20\x95\
\x3f\xb7\x51\x55\x55\xe5\x17\x5c\xff\xf6\xb7\xbf\x25\x12\x89\x10\
\x89\x44\xd8\xb7\x6f\x9f\x3f\x33\xb2\xa9\xa9\x89\xa9\xa9\xa9\x37\
\x06\x10\xdb\xb6\x9f\xdb\xbe\x7d\x7b\x18\xa0\xbe\xbe\xfe\x37\xdd\
\xdd\xdd\xe7\x7d\xec\x63\x1f\xe3\xc9\x27\x9f\x64\x60\x60\xc0\x4f\
\x61\x2a\x43\xaf\x36\x44\xd7\x75\xb2\xd9\xac\xcf\xf7\xd4\xd4\xd4\
\xf8\x06\x5f\xc5\x33\x2a\xda\xad\x54\x1b\x2a\xd9\xa3\x00\x3b\x7c\
\xde\x7c\xa5\x24\x05\x02\x01\x7f\x63\xd5\xa6\x2b\xe6\xf9\x70\x8f\
\x49\x5d\x4f\x79\x81\x4a\x22\x54\x1b\x9f\xca\x8b\x74\x76\x76\xb2\
\x7c\xf9\x72\x1e\x7b\xec\x31\x3c\xcf\xa3\xa5\xa5\xe5\xff\xe9\x1b\
\x3c\x5a\x30\x5e\x15\x40\x0e\xcb\x2e\x8a\xba\xba\x3a\xbe\xfe\xf5\
\xaf\x2b\xf1\x9e\xaf\xda\x2f\x9f\x3a\xd3\x34\xfd\xaa\x14\xb5\x02\
\x81\x00\x96\x65\x11\x8d\x46\xfd\xd3\xaf\x3c\x17\x55\x90\x66\xcc\
\xff\x20\x8b\xda\x74\xb5\xb9\xea\xfd\x95\x9b\x5e\x19\x6c\x2a\x37\
\xfa\x70\x3b\xa1\x24\x44\x49\x9d\x7a\x4e\xc5\x27\x6a\xf3\x95\x9d\
\x33\x0c\x83\x70\x38\x8c\xe3\x38\xc4\xe3\x71\x9f\x3c\xcc\x66\xb3\
\x0f\x0d\x0c\x0c\xfc\x4f\xf3\xf0\x2f\xf5\x46\xaa\xac\xc3\xd4\xd7\
\x1d\xbf\xf9\xcd\x6f\x1e\x14\x42\xc8\x40\x20\xf0\xc5\xf6\xf6\xf6\
\xea\x8b\x2f\xbe\x98\x1d\x3b\x76\xf8\x11\xbd\x3a\x89\xea\x34\x3a\
\x8e\x43\x6d\x6d\xad\x5f\xf7\x1a\x0a\x85\x58\xba\x74\x29\xd3\xd3\
\xd3\x87\x8c\x99\x2d\x16\x8b\xbe\xc4\xd4\xd6\xd6\x1e\x12\x78\x56\
\x06\x6b\x95\x12\x52\x59\xf5\xa1\x40\x54\xaf\x53\x00\xab\xc1\x6b\
\xa6\x69\xfa\x41\x9e\x72\xcf\x55\xb0\x17\x0e\x87\x59\xbd\x7a\x35\
\xdb\xb6\x6d\xf3\x55\x5b\x5d\x5d\x1d\x85\x42\x21\x93\xcb\xe5\x5e\
\xd5\x5f\x00\x7d\x55\x01\x99\x99\x99\xf9\x69\x45\x94\xfc\xbf\x63\
\xb1\x58\xf5\x43\x0f\x3d\xe4\x7b\x2b\x95\xbf\xb0\x73\x98\xc7\x46\
\x2a\x95\xa2\xaa\xaa\x8a\x33\xcf\x3c\x93\xfd\xfb\xf7\xfb\x9b\x52\
\x39\xa5\x4e\x01\xe2\xba\x2e\xa9\x54\x0a\x28\xf7\xb0\x28\x6f\x4c\
\x6d\xb4\x02\xa4\x72\x22\xb5\x02\x24\x9b\xcd\x1e\x32\x1d\x55\x1d\
\x8c\xea\xea\xea\x43\xae\x57\x5d\x5d\x4d\x43\x43\x03\x73\x73\x73\
\xa4\xd3\x69\xa2\xd1\x28\xae\xeb\x92\xcb\xe5\x0e\xec\xdd\xbb\xf7\
\x59\x21\x84\x0c\x06\x83\xbf\x79\xb5\x39\xb6\xd7\xac\x7f\xd9\x75\
\xdd\xe1\xfe\xfe\xfe\x1c\xe0\x09\x21\x4e\xd0\x75\xdd\xac\xae\xae\
\xf6\x0d\x7d\xc5\x80\x9a\x43\x0c\x78\x22\x91\xa0\xbe\xbe\x1e\xdb\
\xb6\x89\xc5\x62\x2c\x5d\xba\x94\xdd\xbb\x77\xfb\xaa\x43\xa9\x1b\
\x15\x67\x08\x21\xfc\x71\xe0\x6a\x38\x65\x65\x85\x79\x3e\x9f\xf7\
\xd5\x92\x4a\x03\x54\xb2\xd1\x4a\xdb\x68\x9a\x46\x4d\x4d\x8d\xff\
\x39\xea\xf5\x81\x40\x00\xd7\x75\x79\xea\xa9\xa7\x30\x0c\x83\x50\
\x28\xf4\x44\x2a\x95\xba\xfc\xb5\xda\xb7\xd7\x0c\x90\x44\x22\xe1\
\xcf\xea\xee\xec\xec\x1c\xe8\xec\xec\x5c\xf2\x12\xed\x90\x4f\x3f\
\x78\x9e\xc7\xc2\x85\x0b\x19\x1c\x1c\x24\x12\x89\xf8\x53\xa6\xfd\
\xde\xbe\x0a\x1a\x46\xd9\x1d\x65\xaf\x14\x58\x87\x4f\xd6\x56\x91\
\xb9\xa6\x69\xbe\x87\xa7\x24\xa8\xb3\xb3\x13\xc3\x30\x18\x1c\x1c\
\xf4\x27\x1f\xcd\x57\xce\xa4\x67\x67\x67\x9f\x13\x42\x78\x96\x65\
\x6d\x7e\x2d\xd3\x02\xaf\x4b\x87\x7f\x2e\x97\xfb\xfe\xf8\xf8\xf8\
\x02\xca\x6d\x6e\xef\x0b\x87\xc3\xcb\x8e\xd4\xaf\x3d\x33\x33\x43\
\x20\x10\xe0\x99\x67\x9e\xf1\x37\xf0\xec\xb3\xcf\x66\xe3\xc6\x8d\
\x68\x9a\xe6\xf7\xa8\x24\x12\x09\xbf\x92\x52\xd9\x0a\xd3\x34\x0f\
\x99\xbc\x1d\x0c\x06\x7d\x4f\xaf\xb5\xb5\x95\xd9\xd9\xd9\x72\xea\
\x35\x1a\x65\xe9\xd2\xa5\x0c\x0f\x0f\x93\xcb\xe5\xd0\x34\x4d\xfd\
\x57\x0a\x21\x84\xe3\x38\xaa\x23\x6c\xdf\xd0\xd0\xd0\xf9\x2f\x96\
\xf6\x7d\x4b\x01\x32\x3f\x90\xfe\x0b\x2a\x9f\x7c\xc2\x09\x27\xb4\
\x2e\x58\xb0\x60\x59\x45\xe4\x2b\xc5\x1f\xe0\x39\x2a\xd5\x98\x8a\
\x69\x2a\xf3\xf8\xd1\x68\xd4\x1f\x31\x58\x5f\x5f\xcf\xf2\xe5\xcb\
\xd9\xb3\x67\x0f\x75\x75\x75\xe4\x72\x39\x82\xc1\x20\xb9\x5c\x8e\
\x13\x4f\x3c\x11\x21\x04\xa9\x54\x8a\x44\x22\x41\x47\x47\x07\xef\
\x7c\xe7\x3b\xf9\xf5\xaf\x7f\x4d\xa1\x50\xa0\xa1\xa1\x81\xde\xde\
\x5e\x34\x4d\x63\xff\xfe\xfd\x4a\x8d\x89\xc1\xc1\xc1\x6f\x39\x8e\
\xb3\x0f\x70\x23\x91\xc8\xc0\xe1\xc5\x10\xaf\xd5\x7a\xdd\x67\xae\
\x6b\x9a\xb6\x40\xd3\xb4\xc8\xfc\xe3\x0f\x76\x77\x77\x7f\xe5\x48\
\xf3\x3f\x94\x3e\x57\x76\x27\x10\x08\xf8\x86\xd9\xb2\xca\x3f\xb3\
\x7a\xd5\x55\x57\x71\xdf\x7d\xf7\x11\x0e\x87\xa9\xad\xad\x65\x7a\
\x7a\xda\x9f\x3e\xb1\x61\xc3\x06\x7e\xfe\xf3\x9f\x33\x37\x37\x47\
\x57\x57\x17\xdd\xdd\xdd\x3c\xfe\xf8\xe3\xaa\x3a\xdf\x57\x6d\xea\
\xf7\x15\x5d\xd7\x25\x93\xc9\x9c\x1b\x8f\xc7\x1f\x7f\xbd\xf7\xe7\
\x0d\x1d\x82\x1f\x0e\x87\x4f\x6b\x6c\x6c\xbc\x4c\xd7\x75\x07\x20\
\x1a\x8d\x7e\x2a\x18\x0c\x1e\x71\xae\xaf\x9a\x60\xa7\xd4\xd3\xd8\
\xd8\x18\xd5\xd5\xd5\x38\x8e\xe3\xe7\x24\x14\xf1\x37\x31\x31\x41\
\x5d\x5d\x9d\x3f\x73\x38\x16\x8b\xf9\x81\x61\xa1\x50\xf0\x6d\xca\
\xfc\xe8\xf3\x43\x66\xd1\xbf\x11\xe3\x6c\xdf\xd0\x29\x31\x85\x42\
\x61\xc7\xc8\xc8\xc8\x8e\x0a\xdd\xec\x45\xa3\xd1\x2a\x29\xa5\xb4\
\x6d\xbb\xcb\x30\x8c\x3f\x3b\x52\xa3\xbd\xb2\x05\xca\x63\x2a\x16\
\x8b\x7e\x90\xa9\x62\x09\xf5\x6b\x9c\xd9\x6c\x16\xcf\xf3\x76\x4b\
\x29\x75\xd7\x75\xdd\x64\x32\xf9\xf7\xe3\xe3\xe3\x3f\x79\xb1\xcf\
\x7e\x23\xc6\xd9\xbe\xa1\x12\x72\x24\xed\x06\xf8\x9c\x7e\x2c\x16\
\xbb\x5a\xd3\xb4\xd8\x8b\x81\xa2\x96\x69\x9a\x3e\x95\x21\x84\x28\
\x00\x7b\x8b\xc5\x62\x56\x4a\x29\x63\xb1\x58\x66\x76\x76\x76\xfb\
\x61\x69\xdd\x63\x79\x04\xc8\xf1\x75\x7c\x1d\x5f\xc7\xd7\xf1\x75\
\x7c\x1d\x5f\xc7\xd7\xf1\x05\xc0\xff\x05\xf4\xe6\xb0\x9e\xc6\x6e\
\xd0\xb8\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x03\x7c\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\x01\
\x95\x2b\x0e\x1b\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xd9\x03\x03\
\x0e\x1c\x0e\xa7\xa1\x6f\x29\x00\x00\x02\xfc\x49\x44\x41\x54\x78\
\xda\xed\xd6\xbf\x6f\x1c\x45\x18\xc6\xf1\xef\xcc\xed\xda\xb1\x0f\
\x9b\x93\x85\x90\x2d\xb0\x38\x12\x09\x77\x6e\xac\x14\x6e\x10\xe0\
\x8b\x90\x4b\x37\x14\xc8\x7f\x03\xfc\x01\x54\x27\x51\x42\x81\x28\
\xdd\xd0\x90\x36\x6d\x30\x8e\x70\x2a\x44\x44\x94\x20\x17\xc8\x0e\
\x06\x64\x90\xec\xe4\x02\xde\xf3\xde\x9e\xf7\xd7\xec\x50\x1c\x39\
\x89\x32\x52\xae\x58\xf1\x7c\xa4\xed\x76\xa4\x9d\x7d\xe7\x79\xdf\
\x01\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\
\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\
\x11\x79\x31\x4c\x9d\x3e\x76\x71\x71\xf1\x13\x63\x4c\xf8\x9c\xcb\
\xa6\x80\xcf\x4e\x4f\x4f\xff\x56\x41\x5e\xb0\xf5\xf5\x75\xdf\x6e\
\xb7\xf1\xde\x53\x51\x51\xbe\xf9\x94\x97\xae\x4c\x53\x3e\x99\xc6\
\x9d\xcd\x8c\x5e\xb2\x60\xac\x1f\xef\x2c\x8e\x63\xf6\xf6\xf6\x56\
\xb2\x2c\x3b\xaa\xc3\x1e\x83\xba\x45\x3a\x4d\x53\x0c\x70\x3f\xff\
\x85\xce\xf5\x19\x1a\xe1\x05\x99\xbb\xa4\x19\x5e\x81\x24\xa0\x3c\
\x9f\x22\xfb\x75\x86\xe1\xe1\x2c\x00\x65\x59\x62\x8c\xf1\x75\xd9\
\x9f\xad\x53\x31\xaa\xaa\x22\xcf\x73\xf2\xbc\xc0\x53\x61\x8d\xe1\
\xaa\x79\x9f\xcf\x3b\x77\x78\xe7\xf5\x0f\x98\xe7\x1a\x8b\x57\x5b\
\xd8\x57\x32\x8a\xa2\xa0\x28\x0a\x9c\x73\xb5\x3a\x70\xb5\x4a\x88\
\x73\x8e\x3c\xcf\xc1\x8f\x26\xc3\x60\x90\xf2\xe1\x7b\x1f\xf3\xd1\
\xad\x1b\x7c\xbd\xfd\x90\x77\x5f\x3b\xe3\x24\x7d\xc0\x17\xf7\x3f\
\x25\xcf\x73\xbc\xf7\xcf\x12\xa2\x82\x4c\x82\xf7\x9e\x2c\xcb\x30\
\x18\x98\x82\xe1\xc0\x31\xd7\x6c\x92\x57\x29\x00\x87\x7f\x1e\xf2\
\x63\x72\x0b\xe3\x2d\x59\x96\x8d\x53\xa5\x82\x4c\xb8\x65\x19\x00\
\x0f\x73\xf3\x53\x0c\x86\x43\xa6\x1b\xa3\x79\x51\xe1\x29\x5d\x81\
\xf7\x8c\x92\xf4\xec\xe6\x52\xa3\x82\xd8\x3a\x26\x24\xcb\x72\x2a\
\x3c\x2f\xcf\x37\xb9\xfd\xf3\x4d\xba\x37\x6e\xf2\xe0\xf4\x2e\xf7\
\xfe\xf8\x96\xb7\xdf\xd8\xa2\x28\xf3\x7f\xdf\xcb\xd4\xb2\x26\xa9\
\x28\x0a\xe2\x38\xc6\x1a\x4b\x32\x57\x71\x99\x35\xf9\xc1\x7d\x45\
\x75\x6f\x8e\x57\xaf\x65\x1c\x97\xdf\xf0\xfb\x4f\x7b\x44\x8f\x13\
\xce\xcf\x13\xbc\xf7\xcc\xce\xce\x62\xad\x55\x41\x26\x16\x69\x6b\
\x31\x18\x4c\x05\x8d\x69\x43\xc3\x86\x7c\xf7\xdb\x97\x5c\xdc\x31\
\x3c\x79\x94\x90\xfe\xe5\x48\x1e\x3b\x8c\x31\xb5\x4a\x46\x6d\x0b\
\xe2\x9c\x63\x61\x61\x81\xf0\x6c\xc8\xf7\xdd\x0b\x2e\x9f\x56\x18\
\x03\xc6\x8e\x1e\x6f\x0c\x41\x10\x8c\x67\x4e\x9d\xd2\x51\xcb\x5b\
\x56\xb7\xdb\x65\x63\x63\x83\xfd\xfd\x7d\x0e\x0e\x0e\xe8\xf5\x7a\
\x94\x65\x49\x55\x55\x44\x51\x44\xab\xd5\x22\x49\x12\x7a\xbd\x1e\
\x8d\x46\x83\x7e\xbf\x8f\xf7\xbe\x3e\x1d\xa0\x6e\x33\xa4\xdd\x6e\
\x73\x72\x72\xc2\xe6\xe6\x26\x51\x14\xb1\xb4\xb4\xc4\xda\xda\x1a\
\xdb\xdb\xdb\x78\xef\x69\xb5\x5a\xac\xac\xac\xd0\xe9\x74\x28\xcb\
\x92\x30\x0c\x6b\x95\x90\x5a\x35\xd9\xd5\xd5\x55\xbf\xbc\xbc\xcc\
\xd6\xd6\x16\xbb\xbb\xbb\xa3\x01\x6f\xed\xf8\xc7\x47\x51\x44\x18\
\x86\x78\xef\x49\xd3\x14\xe7\x1c\xce\x39\x8e\x8e\x8e\xde\x1a\x0c\
\x06\x8f\xd4\xb2\x26\xa0\xdf\xef\xb3\xb3\xb3\x33\x9e\x13\xff\x39\
\x5d\xc6\x50\x14\xc5\x68\x63\x41\x40\x10\x04\x14\x45\xa1\x6b\xef\
\xa4\x1c\x1f\x1f\x3f\x04\xca\xe7\x59\x13\x86\xe1\x4c\xb3\xd9\x4c\
\xe3\x38\x46\x44\x44\x44\x44\x44\x44\x44\x44\x44\x44\x44\x44\x44\
\x44\x44\x44\x44\x44\x44\x44\x44\x44\x44\x44\x44\x44\x44\x44\x44\
\x44\x44\x44\xfe\x4f\xfe\x01\xd1\xc0\x51\x25\xbd\x25\x7f\x60\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x7e\xd7\
\xff\
\xd8\xff\xe0\x00\x10\x4a\x46\x49\x46\x00\x01\x01\x00\x00\x01\x00\
\x01\x00\x00\xff\xdb\x00\x43\x00\x04\x02\x03\x03\x03\x02\x04\x03\
\x03\x03\x04\x04\x04\x04\x05\x09\x06\x05\x05\x05\x05\x0b\x08\x08\
\x06\x09\x0d\x0b\x0d\x0d\x0d\x0b\x0c\x0c\x0e\x10\x14\x11\x0e\x0f\
\x13\x0f\x0c\x0c\x12\x18\x12\x13\x15\x16\x17\x17\x17\x0e\x11\x19\
\x1b\x19\x16\x1a\x14\x16\x17\x16\xff\xdb\x00\x43\x01\x04\x04\x04\
\x05\x05\x05\x0a\x06\x06\x0a\x16\x0f\x0c\x0f\x16\x16\x16\x16\x16\
\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\
\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\
\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\xff\xfe\x00\
\x10\x54\x69\x6d\x65\x20\x66\x6f\x72\x20\x4c\x75\x6e\x63\x68\xff\
\xc0\x00\x11\x08\x02\x00\x02\x00\x03\x01\x22\x00\x02\x11\x01\x03\
\x11\x01\xff\xc4\x00\x1b\x00\x00\x02\x03\x01\x01\x01\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x02\x03\x01\x04\x05\x00\x06\x08\xff\
\xc4\x00\x44\x10\x00\x02\x01\x02\x04\x04\x04\x03\x07\x02\x04\x05\
\x04\x01\x05\x01\x01\x02\x03\x00\x11\x04\x12\x21\x31\x05\x41\x51\
\x61\x13\x22\x32\x71\x42\x81\x91\x14\x23\x52\xa1\xb1\xc1\xd1\x62\
\xe1\x06\x33\x72\xf0\x24\x43\x82\x92\xf1\x15\x53\x73\xa2\x25\x34\
\x35\x54\x63\x93\xa3\xff\xc4\x00\x19\x01\x01\x01\x01\x01\x01\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x02\x03\x04\x05\
\xff\xc4\x00\x21\x11\x01\x01\x01\x00\x03\x01\x01\x01\x00\x03\x01\
\x01\x00\x00\x00\x00\x00\x01\x11\x02\x21\x31\x41\x12\x51\x22\x32\
\x61\x71\x42\xff\xda\x00\x0c\x03\x01\x00\x02\x11\x03\x11\x00\x3f\
\x00\xfa\x73\xfc\x3c\xe0\xa6\x23\x07\x32\x67\x92\x16\xcc\xb9\xc5\
\xcd\xbf\x5a\xb1\x32\xca\xaa\x5e\x17\xce\xa0\x7a\x1f\x5b\x0e\xd5\
\x9f\x23\xb2\xc9\xf6\x98\x6e\x1c\xa8\x0c\x47\x50\x74\x3f\x3a\xbd\
\x82\xc5\x26\x29\x43\x0b\x09\x47\xa9\x3a\xfb\x57\xca\x97\xe3\xd5\
\x7f\xae\x86\x48\xa6\x8b\x35\x95\x18\x1b\x10\x4f\xe9\x44\x6e\x0d\
\x8d\xfe\xa6\xaa\x4e\xf2\x61\x84\xb0\xe5\xf2\x48\x6e\x1a\x9b\x83\
\x90\xcb\x85\xbb\x6a\x50\xe5\xbf\x5a\x74\x58\x6f\x97\xa0\xa9\x0a\
\x0d\xb5\x00\x93\x61\xae\xe6\x80\x9a\xe6\xf3\x44\xc8\x5b\x2d\xfd\
\x27\xa1\xa8\x0d\x49\xd8\xb1\xa6\x21\x1c\xd4\x30\x3b\x83\xad\xe9\
\x7e\x73\x1a\x99\x4a\x99\x2d\xe6\x20\xef\xd0\xd0\x7d\xe6\x6b\xf4\
\xfa\x54\x8d\xc3\x48\x22\x95\xf0\xb9\xf2\xe5\x6f\x21\xd8\x51\xba\
\x46\xc7\xcf\x18\xbe\xf7\x53\x63\x59\xf8\xec\xdf\x6b\x7c\xdb\x93\
\x7a\xbb\x87\xb8\xc2\x47\x98\xdc\x90\x48\x3d\xa9\x94\x9f\x11\x44\
\x16\x8d\x42\xdf\x73\xcc\xfc\xe8\x89\x04\x52\x41\xa9\x56\x07\x4b\
\xfb\x53\xa0\xdc\xce\x08\x2a\xda\x8a\xcb\xe2\x03\xc3\xc7\x4a\x8a\
\x6c\x03\x6d\xbd\x68\xc6\x0b\x38\x51\xcc\xda\xb2\xb8\x8c\x82\x4c\
\x7c\xae\xb7\xb1\x6e\x74\x72\xf0\xc0\xec\x6f\x7b\x93\xce\x8b\x0d\
\x27\x85\x88\x49\x00\x00\xab\x5e\xf6\xab\x58\x3c\x22\x18\x95\xe6\
\x04\x97\xf4\xa8\x36\xb0\xea\x6a\x31\x78\x03\x94\xbe\x1d\xf3\x00\
\x2e\x55\xb7\x1f\xcd\x67\x29\xd6\x93\x32\xba\x87\x8d\xae\x8d\xa8\
\xa5\x3b\x00\x7a\x9e\x95\x9d\xc3\x71\xde\x00\xf0\xa6\x1f\x76\x5a\
\xc4\x73\x53\xd6\xb4\xbd\x3a\x83\x70\xda\x86\x1c\xeb\x72\xeb\x38\
\x15\xb0\xf4\xef\x52\xa1\x6c\x43\x2a\xb0\x3b\xdc\x57\x7c\xbe\x94\
\x24\xf7\x1f\x5a\x90\xd4\x42\xa3\xcb\x12\x0f\x6b\xff\x00\x35\x24\
\xa9\x50\xa0\x00\x06\xc0\x0d\xa9\x57\x27\x9d\xba\x69\x44\xa4\xfc\
\xc5\x5a\x85\x1a\x90\xf7\x36\xb5\x54\xe3\x99\x1b\x18\x3c\x48\xd1\
\xfe\xed\x75\x61\xda\xae\x29\xaa\x1c\x70\xff\x00\xc6\x8f\xfe\x35\
\xfd\x2a\xe5\xe1\x9e\xab\xc6\x17\x35\x91\x42\x03\xba\x8d\x98\x7e\
\xc6\xb5\x78\xba\x80\xd1\x95\xd8\x2e\x5b\xf5\xaa\xbc\x12\x11\x28\
\x92\x47\x51\xa7\x95\x75\xd2\xe6\xac\xe3\xbe\xf3\x02\x8e\x3e\x12\
\x2e\x2f\xce\xd6\xfd\xa8\x93\xa5\x7d\x54\xb8\x03\x7a\xec\xca\x7e\
\x21\x4b\xf8\x87\x6a\x25\xbd\x0b\x13\xb9\x04\xec\x3b\x6e\x6b\x9a\
\x8a\xd4\x2c\x39\xd4\x80\x47\x31\xbd\x06\x30\xe6\xc0\xca\xed\x9b\
\x3a\x5b\xce\xbb\x81\xad\xff\x00\x6a\x61\xa8\x55\x0c\x59\x1a\xd9\
\x64\x52\xa6\xf5\x12\x7f\xc3\xae\x35\x8d\xe4\x42\xc6\x45\x60\xca\
\x2d\x98\x58\xef\xde\xb4\x25\x5b\x12\x1b\x4d\x6d\x58\x5c\x2d\x12\
\x3e\x26\x60\x9d\x6d\x73\x6c\xc7\x4d\x79\x56\xfc\x8b\x93\x10\x63\
\x72\x58\x91\x74\x66\xdc\xf6\xa3\x87\x70\x5f\x55\x9f\x43\x61\xa9\
\xed\x43\x6b\x6f\xb9\xab\x24\x03\x4b\x75\xa5\x69\x40\x51\x90\x0a\
\x65\x62\x74\x37\x52\x37\x53\xd4\x57\x05\xae\x22\xf5\x2d\x49\xb6\
\xed\x6b\x7e\x35\x1f\xa8\xe5\x43\xe2\xc4\x08\x50\xfe\x23\x1d\x95\
\x2b\x85\xd4\xdc\x13\x7a\x25\x20\x12\x42\x28\x62\x2c\x58\x0d\x6a\
\x43\x65\x2f\x13\xa8\x8a\x40\x59\x2d\xc8\xfe\x95\x9a\x3c\xad\x60\
\xac\xec\x3e\x1c\xa4\x0f\x99\x35\x78\x12\x0d\xc6\xe2\xaa\x71\x04\
\x64\x93\x30\x27\xc3\x7d\x46\xba\x03\x45\x51\xc1\xb7\xbb\x06\x62\
\x2c\x48\x16\x00\x74\x02\xac\xf0\xd9\x15\x49\x85\x8e\x8e\x7c\xbd\
\x8d\x67\x83\x46\xad\x6a\x25\x6a\xc6\xb9\x1a\xd7\x0b\x83\x71\xca\
\xab\xe1\xb1\x4b\x22\x65\x93\xd6\x07\xab\xf1\x7f\x7a\x31\x20\x2c\
\x43\xe9\x6d\x81\xad\xeb\x18\xa1\xc7\x11\x17\x1c\x4c\x7a\x19\x14\
\x35\xba\x13\x56\x38\x2c\xb7\x8d\xb0\xa7\x65\x05\x90\xfe\xb4\x9e\
\x3f\x28\x6c\x4c\x68\x2d\x71\x18\xd7\x99\xbd\x17\x04\x04\x45\x34\
\xa7\x73\x65\x1d\x7a\xd6\x3f\xfa\x6b\xe2\xe3\x9a\x84\x01\x89\x42\
\x4d\x9f\x43\x42\x58\x5e\xd7\xd7\xa5\x09\x6e\x95\xa6\x41\x26\x26\
\x31\x03\x86\x3f\x7c\x01\x51\xa6\xe7\xad\x23\x86\xe1\x8c\x83\xc6\
\x95\x88\x04\xd8\x2d\xae\x5a\x83\x8a\xa9\x13\x78\xc0\x8b\x4a\x6f\
\x6e\x95\x67\x0f\x2f\x8d\x1a\xac\x4a\xd0\xaa\x8b\x33\xff\x00\x15\
\x9f\x6f\x6d\x7c\x5c\x67\x55\xd6\x42\x16\xda\x05\x1a\x9f\xa5\x04\
\x99\xe7\x50\xa6\xf1\x45\xbe\xbb\xb5\x02\x14\x8c\x5a\x21\x6e\xad\
\xf1\x1f\x9d\x71\x90\x03\xa9\x37\xf6\xad\xeb\x23\x48\x9a\x31\xf7\
\x13\x1e\xeb\x20\xd0\xd1\x78\xae\x83\xfe\x22\x2c\xa3\xf1\xa6\xd4\
\xb5\x95\x76\xd4\x7b\x8a\x62\xb8\xea\xba\xf2\x3c\xea\x46\x73\xd0\
\xdc\x1d\x88\xe7\x52\x80\xad\xd0\x00\x5c\x1b\x12\x76\x5f\x7f\xe2\
\x94\x09\x51\x68\xe6\x55\x5e\x4b\x6b\x95\xf6\x34\x48\x6c\x81\x40\
\xb2\x8b\x9b\x53\xa8\xdb\xc9\xcb\x10\x49\xfc\x2c\xa3\x2d\x03\x62\
\x10\x3e\x49\x94\xc4\xdd\xb5\x53\x42\x5a\xba\x40\x25\x89\xa3\x6d\
\xac\x48\x3d\x2d\x56\xa3\x1a\x45\x44\x32\x31\xba\x81\x7d\x39\xd5\
\x19\xe6\x97\x13\x32\x85\x51\xd1\x54\x1d\xaa\xd4\x66\xd8\x78\xd4\
\x8f\xf9\x60\x10\x45\x70\xc8\xa6\xe9\x1a\x21\x3a\x5c\x51\x50\x62\
\xc3\x28\x60\x1d\xf3\xb3\x68\x75\xda\xf5\x54\x48\xfe\x3e\x43\x2b\
\x65\x24\xa9\x3f\x95\x5d\x8c\xfd\xea\xfb\x8a\xcc\x9c\xda\x57\xff\
\x00\x51\xa2\xd3\x1d\xc3\x6d\xf6\x93\x98\x5c\x05\x3a\x75\xa5\x63\
\x70\x46\x3b\xe2\x30\xac\x4a\x83\x7c\xbc\xd6\xbb\x86\xb9\x18\xc5\
\x5f\xc4\x32\x9f\xa5\x5e\x85\xf2\xbd\xef\xd4\x1a\xcc\xee\x35\x55\
\xa1\xe2\x90\xa2\x78\x38\xe9\x14\xc8\x79\xef\xcb\xe2\xa7\x60\xd3\
\x26\x0e\xf9\x95\x83\xbd\xee\xa7\x41\x61\xfd\xeb\x17\xfc\x49\xc3\
\x8c\x0e\x31\x50\xa9\x31\xc9\x7c\xd6\x37\xb1\xae\xe0\xd8\x99\xe1\
\xe1\xb6\x8a\x42\xa1\x24\xb6\x52\x34\x37\xd7\x5a\xcc\xe5\x65\xca\
\xb3\xae\x9b\x6e\xf6\xd1\x6c\x68\x4e\x6b\xf9\xaf\x7a\xa0\x9c\x4b\
\x12\x18\x1f\x0e\x0f\xfb\x05\x13\x71\x4c\x4b\x10\x7c\x38\x7f\xec\
\x15\xaf\xd4\x18\xd3\x43\x70\x0f\x5a\x2b\x58\xd5\x7c\x06\x26\x3c\
\x62\x15\x0b\xe1\xc8\x2e\x72\x03\x70\x45\x34\xe7\xb5\x81\xd2\xb4\
\x30\x73\x42\x93\x15\x2e\x4a\x95\xd2\xe0\x6e\x28\xa2\x8d\xa2\x4c\
\xb1\x48\x1d\x7f\x03\x8b\x7d\x0d\x04\x57\x53\xae\xa0\xfe\x54\x38\
\xfc\x60\xc3\x1f\x0e\x34\x57\x92\xda\x96\xd4\x2f\x6a\x91\xe4\x82\
\xac\x40\x2a\x40\xf3\x29\xdc\x7f\x6a\x35\x46\x71\x70\x85\x87\x61\
\x58\xd2\x71\x0c\x49\x95\x9f\xc4\xc8\xea\x97\x19\x74\x02\xda\xdb\
\xfd\xf5\xa3\xc5\x62\x67\x32\x12\xd3\x39\xb0\x07\xd5\xf3\xab\xf5\
\x17\xe6\xb4\x71\xb8\x85\xc1\xc4\x4b\x79\xe4\x20\x80\xb7\xf4\xf2\
\xd6\xb2\xf8\x72\x3e\x27\x14\x91\x90\x3c\xcd\xa9\xb5\x01\xbb\xef\
\x7d\x79\x5f\x53\x5a\x9c\x1f\x0c\x70\xf0\x99\x9a\xf9\xdc\x59\x54\
\xe9\x61\xd6\x8f\xf6\xa7\xc8\xb6\xe5\x73\x1c\xa2\xc3\x61\xed\x50\
\x09\x0d\x70\x6c\x6a\x06\xdb\x1f\xa5\x4d\xbb\x37\xd2\xb6\xca\x8f\
\x19\xc3\x02\x46\x2a\x35\x16\xd9\xc0\x1e\x93\xd6\x83\x84\xe3\x15\
\x41\x86\x66\x61\x1b\x7a\x58\x8d\x14\xd6\x88\x3b\x82\x0d\x98\x59\
\xae\x39\x56\x46\x3b\x0c\xd8\x69\xb2\xda\xe8\x7d\x27\xa8\xac\xde\
\xae\xc6\xa7\x7d\x35\x9d\x48\xb0\xb8\x20\xf3\x06\xe0\x8a\xeb\x5a\
\xb1\xf0\x38\xa9\xb0\xc0\x04\x71\x6e\x68\xfb\x1a\xbb\x17\x16\xc3\
\xb8\x1e\x2c\x45\x49\x3a\xe5\x3b\x55\x39\x41\x8b\x55\x23\x71\xdb\
\x5f\xf7\xfe\xf9\x50\xc5\x2e\x1e\x60\x0c\x53\x29\xec\x74\x34\x44\
\x32\xee\x08\x27\x5a\xd0\x48\x36\xac\xfe\x38\x6f\x8d\x5f\xfe\x35\
\xfd\x2a\xfa\x02\xcc\x00\xdc\x9a\xcd\xe2\xb2\x2c\xb8\xd3\x93\x50\
\xa0\x28\x3d\x6d\x47\x2f\x0c\xf5\x7f\x84\xa8\x5e\x1c\x08\xbd\xdd\
\xc9\x3f\x2f\xfc\xd3\xb1\x0b\x9e\x1c\x42\x87\xf4\xb6\x6b\x5b\x9f\
\x4a\x5f\x0e\x42\x9c\x3e\x3c\xc4\xf9\xae\xc0\x76\xd3\xf8\xa6\xb2\
\x33\x33\x34\x4c\x15\xdd\x6c\x41\xd8\xd6\xa7\x81\x9a\xa0\x58\x7b\
\x5e\x8d\x45\x71\x8d\x94\x90\x45\x99\x7c\xae\xbc\xd4\xd4\x81\x58\
\x29\x14\x2d\x72\x3c\xba\x5f\x99\xa2\x61\xa8\x53\xcf\x5a\x83\x50\
\x2d\x87\xbf\xd6\x81\xb5\xdf\x5e\xc6\x98\xfb\xd4\x11\x65\x66\xcb\
\x98\xa8\xbe\x51\xce\xa3\x15\x78\xa6\x1c\xe2\x62\x13\x46\x01\x96\
\x31\x66\x00\xea\xc3\x91\xab\x03\x12\x24\x68\x0b\xb1\x19\x72\x80\
\xe0\xe9\xca\xd7\xe9\xd2\x83\x0d\x24\xf2\x44\x25\xcf\x0c\x77\x24\
\x0f\xbb\xd6\xba\x7c\x1e\x21\xf0\x4c\xf0\xe4\x66\x6b\xa9\xfa\xdf\
\x4a\x3f\xec\x2d\x19\x90\xac\x84\x11\x6e\x63\xda\x82\xb3\xb8\x57\
\x11\x95\x30\x82\x2c\x4a\x67\x11\xbe\x43\x7d\xc0\xde\xb4\xe3\xc9\
\x2c\x62\x48\x5b\x3a\x1f\x91\x1e\xf4\xcb\xbe\x0b\x30\x04\x0a\x16\
\x02\xd4\xc6\x68\xc7\xc4\x5b\xba\xad\xc7\xd6\xa0\xa8\xb0\x20\x82\
\x0e\xc7\xad\x40\xa2\x05\x08\xb5\x31\x96\x84\x8b\x54\x90\x68\x5d\
\x16\x58\x8c\x6c\x6d\xcc\x1e\x95\xcd\xa6\xa6\x87\x29\x27\x36\xa0\
\xde\xf5\x15\x29\x50\xc7\x21\x46\xdd\x4d\xa8\x41\xe9\x57\xb1\x11\
\x89\xd4\x5c\x59\xc6\xc7\xaf\x6a\xa2\xc0\xab\x15\x61\x62\x37\xac\
\x59\x8d\x4a\x90\x4d\x3e\x2c\x49\xf4\xcb\xa8\xda\xe3\x71\x55\x6f\
\x5d\x7a\x34\xa7\x8f\xc8\x3e\xd0\xb6\xd4\x78\x6a\x01\x1b\x5e\xd5\
\x73\x87\x00\x38\x54\x40\xf3\x66\xfd\xab\x33\x8a\xbb\xae\x1e\x2c\
\x4a\x00\x42\xf9\x25\x16\xf5\x74\xbd\x68\xf0\xf9\x23\x6e\x15\x1f\
\x86\x49\x0a\xc7\x97\x5a\x65\xff\x00\x2a\x3e\x1a\xd6\xe5\xa1\xa8\
\x0e\x08\x25\xae\x00\xde\x96\xcf\x6e\xbf\x4a\x28\x88\xf2\x86\x1a\
\xb4\x83\xf2\xb9\xfd\x85\x68\x25\xd5\x64\x70\xf3\x29\x27\xe1\x4b\
\xe8\xa3\xa5\x10\x60\x14\x28\x00\x01\xb0\x14\xbb\xeb\x5d\x7a\xba\
\x43\x2c\x6d\xa5\x72\x30\xca\x29\x6c\x6d\xef\xca\xb8\x69\xa5\x5a\
\x8e\x06\x89\x5a\x92\x0d\x12\xb5\x29\x65\x5a\x8c\x30\x35\x5d\x4d\
\x12\x35\x4c\x9f\x5c\x40\x24\xa6\xe0\x7a\xf5\xff\x00\xeb\xfc\xd0\
\x17\x2a\x42\x8b\x67\x6f\x48\x3c\xbb\xd4\x82\x15\x02\x8f\x48\xff\
\x00\x77\xa9\x08\x9b\x9b\xd4\x11\xde\x84\x3d\xf6\xd6\xa0\xbd\xb7\
\x23\xeb\x52\x31\x48\x5b\xb1\x36\x0a\x09\x27\xa5\x63\x3b\x8b\xeb\
\x7b\x9e\xd5\x7f\x1d\x2e\x5c\x26\x50\x48\x69\x3f\x21\x59\x6e\xc1\
\x98\xef\xd0\x56\x79\x56\xb8\xc1\xe0\x64\xf0\xf1\x68\xcc\x35\x0c\
\x34\xeb\x5a\x39\x7c\xe5\x17\x5d\x6d\x59\x4e\x03\x32\xdb\x42\x35\
\xbd\x6a\xc2\x56\x58\xf3\xa9\xf5\x8b\x36\xba\x8e\xbf\x90\xfc\xea\
\xe2\x68\x64\x4f\x1a\x42\xc5\xc0\x86\xd9\x00\x23\xd6\x3a\x8a\xa7\
\x2f\x0c\xf0\x30\xe5\x70\x65\xa5\x42\xd9\x9a\xfe\xa1\xa7\x4a\xba\
\xc7\x31\xe9\xd2\xdc\xaa\x23\x2c\xb2\x7a\x8f\x63\x4d\x92\x86\x3b\
\xa3\xa7\xad\x19\x7d\xc5\xa8\x49\xb0\xbd\x6f\x78\xae\x48\x56\x39\
\xb9\xf9\x85\xff\x00\x5a\x5c\x98\x4c\x14\xda\x98\x8c\x6d\xf8\x90\
\xfe\x76\xac\xfe\x3f\x87\x59\x38\x49\x64\xc3\xe2\x04\xcb\xb8\x37\
\xb5\x6c\x61\xf1\x98\x4c\x42\x66\x0f\xe0\xb7\x30\xfb\x7c\x8d\x52\
\x9f\x86\x4c\x83\x3c\x4c\x26\x5e\x76\xdc\x7c\xaa\xab\x2b\x29\x21\
\x94\xa9\xee\x2d\x54\xde\x2b\x25\x6c\x4b\x8c\x82\x05\xbc\x72\x2c\
\xb2\x7c\x39\x76\x15\x9a\x03\x4b\x29\x21\x49\x66\xd7\x4d\xeb\xb0\
\x78\x69\xb1\x0f\x68\xd0\x91\xcc\xf2\x15\xb1\x82\x85\x30\x89\x64\
\x21\xa4\x3e\xa7\xfe\x29\x9b\xc8\x75\x08\x83\x85\x46\xb1\xb0\x9d\
\xdb\x33\xa9\xba\x0f\xad\x89\xf9\x55\xb6\x83\x0c\x14\x2f\xd9\xe3\
\x6f\x28\xd5\xb5\x3b\x51\x2f\x99\x48\xe6\xde\x50\x7d\xcd\xbf\x9a\
\x97\x20\xb9\x23\x6a\xdc\x92\x33\xb4\x2a\xb0\x2b\x02\x30\xd1\x29\
\x1b\x10\x2f\x4d\xb9\x66\x24\x9b\x93\x4b\x8f\xcc\xc4\x8e\x5b\x53\
\x29\x89\x1a\xe5\xa9\xb1\xae\x16\xe6\x6b\x98\xd9\x49\xed\xa5\x29\
\xd6\xa8\x3a\xae\x56\x50\xc3\x7b\x11\x7a\x9b\x2d\xb6\xbf\x73\x5c\
\xbb\x91\xc8\x54\x83\x68\xb6\xf0\x22\x3f\xf4\xd4\x18\xb0\xcc\x7c\
\xf8\x48\x4f\xfd\x34\xc0\xb6\xa8\x22\xf5\x62\x55\x93\x87\xe1\x1e\
\x5f\xba\xcd\x13\x5b\xad\xc7\xf6\xa0\x64\xe2\x38\x4d\x98\xbc\x63\
\x6f\x89\x4d\x5a\x37\x0d\x99\x45\x3a\x37\x36\x0c\xa4\x8b\xd1\x91\
\x6b\x2a\x4c\x76\x25\x90\xa5\xd5\x2f\xbe\x55\xb1\xaa\xca\x09\x6b\
\x0d\x49\x35\xb9\x2c\x30\x4b\xfe\x64\x22\xfd\x57\x43\x41\x87\xc2\
\x61\xe1\x93\xc4\x5c\xce\xc3\x60\xc0\x58\x51\xf9\xa7\x4e\x2a\x11\
\x55\x00\xb0\x55\x02\xdd\x2b\x80\xa9\x00\x93\x73\xb9\xa3\x51\x6a\
\xd8\x0c\xd8\x78\xe7\x00\xb1\x2b\x22\x8b\x07\x53\x62\x7b\x1a\xa2\
\xd1\x49\x1b\xb2\x4c\xcc\x59\x45\xc3\x6d\x98\x75\xf7\xad\x20\x34\
\xa9\x75\x57\xc8\x19\x43\x00\xd6\xb1\xe8\x7f\xf1\x55\xe3\xa2\x56\
\x4b\x58\x68\x05\xaa\x37\x15\xa3\x26\x16\x17\x37\x19\x93\xdb\x51\
\x48\x93\x04\xc3\xd0\xe8\x7d\xf4\xb5\x66\xf1\xa7\x54\x9c\x54\x29\
\x20\xdc\x55\x87\xc2\xce\x2f\x78\xc9\x03\x98\xda\x97\xe1\x3d\xfd\
\x24\x77\x22\xb3\x94\xe9\x4e\xa1\x73\x46\xa2\xc0\x79\xd7\x4e\x47\
\x7f\xce\xad\xe0\x98\xae\x0d\x2c\x6d\xe6\x6f\xda\xab\x61\xe1\x97\
\x13\x8a\x92\x65\x5b\x46\xab\x91\x6f\xa0\x35\x71\x63\x11\xc2\xb1\
\x83\x7b\x5e\xe7\xbd\x31\x52\x78\x86\x14\x62\x90\x98\xc0\x59\x46\
\xb6\x03\x47\xfe\xf5\x9d\x83\x90\xc3\x8b\x11\x9f\xf2\xd4\x5e\x71\
\xd7\xb5\x6c\x00\xf9\x08\x8d\x82\xbf\x22\x6b\x27\x1b\x1f\x87\x2b\
\x8c\x99\x4b\xbe\x63\xf4\x1f\xbd\xe8\xe5\xfd\x53\xf8\xd7\x39\xd5\
\x82\xa3\x10\xb6\xf2\xdb\x41\x6a\x5a\x15\x71\x29\x4b\x65\x12\x79\
\x6c\x3a\xd2\x70\x38\x94\x7c\x11\x85\x9b\x2c\x80\x64\x43\xd6\xfc\
\xbf\x5a\xbb\xe1\xd9\x02\x46\xb9\x94\x6d\x93\x51\x7a\xd4\xec\x2a\
\xc8\x72\xe9\x6b\x93\xb5\x09\x4e\xac\x6a\xc1\x85\xef\x72\x8d\x7e\
\xca\x68\x1d\x0a\x9b\x10\x47\xb8\xa3\x11\x05\x05\xef\xb9\xee\x76\
\xa8\x2b\x4d\x22\x81\x8e\xb4\x22\x8a\xd2\xe7\x84\x4c\x08\x26\xce\
\x3d\x2d\xd7\xb5\x3c\xda\xd7\xa0\x61\xa5\xf9\xd4\x65\x66\x3a\xb2\
\x39\x56\xb8\x23\x7b\xd0\x93\x57\x38\xa2\x66\x8c\x4c\x37\x1e\x56\
\xfd\xaa\x8b\x1b\x0a\xe7\x7a\x6e\x09\x54\x4b\x14\xb0\x1d\x9d\x0f\
\xc8\x8d\x69\x5f\xe1\x59\xbe\xe9\xa1\x61\xe5\x67\xca\x3b\x5c\x51\
\xc6\xc1\x23\x9a\x46\xf4\xa4\x64\x9e\xf5\x47\x81\xb6\x48\xd5\x94\
\xf9\xbc\x71\x6a\xcf\x96\x26\xe3\x1b\x1a\x82\xcc\x27\xc3\x00\x37\
\x72\x68\xa6\x53\xe3\xb2\x81\xf1\x1a\x01\x63\x2f\x8a\x0f\x96\x21\
\x95\x3b\x9e\x66\xba\x32\x33\xbf\x5a\x82\x75\xb0\xff\x00\xc5\x0d\
\xeb\x97\x46\x6d\x77\xa9\x27\xda\xbb\x9d\x75\x70\xde\xa4\x2a\x90\
\x6a\x39\x54\xd3\x10\xd0\xd3\x03\x64\x42\xf6\xb9\x1a\x28\xea\x4e\
\xd4\x90\x69\x80\xda\x4d\xae\x22\xd0\x77\x63\xfc\x0a\x52\x63\xfb\
\xbb\x82\x09\x63\xbb\x6e\x4d\x48\x24\xea\x7e\x40\xd0\x83\x7a\x9a\
\x90\x89\x07\x71\x52\x2c\x06\x77\xb2\xa8\xde\x84\x15\x55\x2e\xe6\
\xca\x39\xd5\x3c\x5e\x20\xcc\xd6\x02\xc8\x36\x14\x5b\x83\x03\x89\
\x93\xc6\x94\xb9\x03\xa0\x1d\x05\x21\x92\xcd\x75\x36\xa3\x35\x04\
\x8a\xc3\x65\x5f\xcc\xb5\x67\x0d\x2b\xc3\x2c\x65\x6d\xac\x4e\x4d\
\xc6\xf5\x5d\x56\xee\x14\x6a\x49\xb7\xbd\x4c\x72\x07\xc4\x62\x82\
\x6a\x11\x15\x45\xba\x03\x44\xa9\x7e\x3c\x5c\x0f\xeb\x56\x43\xdb\
\x51\x4f\x50\x1c\x5e\x36\x57\x1d\x8d\x64\x0a\x62\x31\x06\xe0\xdb\
\xda\xb5\x39\x0c\x69\x6c\xda\xe9\x71\x61\x44\x06\x97\xa4\x61\xf1\
\x80\x8c\xb3\xeb\xfd\x7c\xfe\x75\x64\xa8\xdc\x5a\xc4\x68\x46\x97\
\xad\xb3\x5c\xb7\x07\x43\x63\x4d\x12\x39\xd4\xe5\x3d\xca\x82\x69\
\x2c\x72\xdb\x98\xa9\x57\x61\xa9\x1e\x53\x50\xc3\x73\xb1\x16\x26\
\xc3\xa0\x16\x15\x22\x81\x5b\x37\xa4\x7c\xcd\x18\xbf\x36\x3f\x41\
\x4a\x74\x8f\x92\x48\x17\xf1\x3d\xea\x09\x62\xd6\x1a\x8e\x75\x2c\
\xa0\xcb\x19\x37\x25\x41\x35\x23\x7a\x90\xb3\x05\xd0\x2d\x16\xb7\
\xd5\x8f\xca\x84\x6d\x5c\x09\x02\xd7\x07\xde\x94\xe2\xa4\x9b\x96\
\xa8\x28\xdb\x02\x2a\x6f\xad\x10\xd4\x8d\x0d\xc9\xe9\x52\x72\xa8\
\x55\xb5\x16\x8a\x73\x3b\x2a\x29\x1b\xb1\xb5\x04\xf8\xa8\x20\x66\
\x4d\x65\x91\x77\x51\xa0\x53\xd0\x9f\xda\xb3\xb1\x12\xc9\x34\x85\
\xdd\xd2\xe7\x90\x52\x40\xf9\xdc\x55\x6e\x26\xce\x42\x40\x2b\x66\
\x07\x62\xa6\xf4\x8c\x77\x88\x90\x06\x8c\xb2\xea\x73\x5a\x83\x84\
\x43\x7c\x33\xb3\xb4\x85\x58\xf9\x47\xa7\xf4\xa7\xcb\x87\x71\x1b\
\x08\xe5\xf2\x91\xaa\xb6\xa4\x53\xec\x4a\x8b\x8e\x21\x72\xcb\x15\
\xcd\xbd\x6b\xa7\xe5\x56\x70\xe7\x34\x21\x85\xf2\x9d\x45\x72\x44\
\x90\xc2\x2d\xe6\x2c\x75\xa6\x40\xb9\x70\xca\xa3\x93\x35\xbd\xaf\
\x54\xd4\xe1\x44\x05\xeb\x82\xd1\x28\x26\xf6\xd0\x6d\x7a\xd2\x48\
\x5b\x0b\xd4\x8e\xc0\x9e\xfb\x57\x05\x51\xc8\x5f\xa9\xd4\xd4\xee\
\x69\x65\xda\xfe\x1f\xfe\xd5\xd7\xfb\xc4\x5d\x8e\x62\x6c\x7b\x0f\
\xef\x47\x6b\x0d\x28\x5e\xe6\x64\x51\xba\xdc\xb5\xb9\x02\x2a\x48\
\xa8\x22\x8f\x27\xf5\x1f\xca\xa0\xa1\xe4\x7e\xa2\xa4\x00\x0d\xe9\
\x38\xec\x49\x45\x31\x2b\x12\xe7\x43\xae\xd5\x65\x6e\x1b\x6d\x7d\
\xeb\x39\x22\x95\x66\x0e\xf0\x3b\x00\x6e\x47\x5a\xcd\xff\x00\x86\
\x1f\x02\xb2\xe1\x54\x35\xee\x4d\xcd\xeb\x88\xa3\x12\xac\xad\x65\
\xb8\x6e\x61\x86\xd5\x05\x3a\xde\xa2\x56\xec\x40\xb0\xb6\xe6\x93\
\x89\xc2\xfd\xa4\x12\x08\xf1\x53\x4d\x46\x8e\x3a\x1a\xb4\x16\xc2\
\xc0\x0a\x12\x32\xb6\x61\x7d\x4e\xb4\x62\x62\x48\x0a\xb5\x8d\xee\
\xa7\x40\x16\xc0\x1b\x5a\xfd\x4d\x44\x5e\x3d\xad\x11\x7d\x39\x29\
\x35\xbe\x58\x93\x72\x17\xe6\xa2\xa4\x3b\x0f\x4d\x97\xb8\x00\x7e\
\x95\x9f\xc1\xd6\x03\x3c\xe0\x0c\xcf\x28\xbf\x22\x48\xa6\x61\xf1\
\xf3\xc5\xe5\x73\xe2\x25\xad\x95\x8d\x6b\xc8\x24\x2d\x7b\x86\x1d\
\x1e\xc6\xdf\x5a\xcc\xe2\xf0\x2a\x05\x96\x34\x20\x31\xb3\x0e\x86\
\xb3\x65\x9d\xc3\x2e\xae\x12\xaf\x1a\xc8\x9e\x97\x17\x1d\xbb\x50\
\x30\xa4\xf0\x96\x27\x0d\x24\x7f\x81\x81\x1e\xd4\xf7\x36\x17\xa7\
\xde\xd9\xa4\xbe\x8e\xbf\x33\x42\xe6\xf4\xc6\x16\xd4\xee\x69\x46\
\xa3\x09\xc7\xe9\x82\x3d\xd8\x56\x73\x5c\xe8\x35\x26\xae\x71\x77\
\xb1\x48\x47\xc2\x2e\xde\xe6\xa9\x62\xa7\x4c\x14\x1e\x2b\x1b\xca\
\xeb\xf7\x4b\x6d\xbb\xd7\x3e\x57\xb6\xa2\xaf\x1d\x9b\xc2\x85\x70\
\x68\xd7\x6b\xde\x50\x0e\xe7\x90\xa3\xc3\x44\x62\x96\x08\x94\x5d\
\xa3\x21\x9d\xbb\x92\x0d\x67\x60\x95\xe7\xc6\x78\xb2\x02\xe1\x4e\
\x76\xef\x5b\x5c\x31\x95\xf1\xf9\xdf\x51\x18\x2d\x6f\xc4\xfc\xcf\
\xcb\x6a\xe7\x3b\xba\x6a\xd3\xc8\x71\x39\xb2\x02\x88\xc6\xee\xe7\
\x7f\x61\x52\xcc\x2c\x15\x45\x94\x0b\x01\xd2\x82\xe5\x9c\xb9\xe7\
\xb5\x41\x35\xd7\x46\x08\xb5\x85\xcd\x70\x3a\x77\x26\x80\xea\x40\
\x3c\xb5\x35\x20\xd5\xab\x0c\x06\xa6\xf4\xbb\xd4\x83\x56\xac\x30\
\x1a\x9b\xd0\x03\x5c\xc7\xc8\x6d\x48\x36\x36\xf3\x02\x3e\xa6\xa3\
\x0e\x4f\x84\x62\x63\x77\x8d\x89\x3a\xef\x7e\x74\x31\x9c\xc4\x5b\
\x9d\x4a\x10\x59\xa6\xe6\xf6\x0b\xfe\x91\xce\xa4\x60\x34\x59\x95\
\x10\xbc\x86\xca\x3f\x3a\x05\xb6\xb7\x36\x00\x5c\x9e\x95\x4b\x19\
\x3f\x8b\x2d\x97\xd0\xbe\x9a\x6d\xc4\x3c\x4e\x21\xa6\x6e\x8a\x36\
\x14\xb0\x69\x63\x7a\x9a\xe7\xad\x0c\xb5\x01\x3a\xd4\x1a\x83\xb6\
\xd5\x27\x2b\x59\xb4\xb8\x3c\xaf\x4c\xb2\x87\x18\x85\x40\xa7\x30\
\x59\x00\xf8\x81\xa1\xf0\xbc\xb6\xbe\xbd\x68\xd5\x5d\xa0\x75\x00\
\x06\xb8\x61\x7e\x76\x35\x62\x73\xa6\x59\x19\x46\xb6\x24\x5e\xba\
\xf6\xda\xb9\x87\x89\x69\xd5\x8f\x9c\x93\xa8\xdb\xb5\x72\x86\xce\
\x41\xb1\xef\x6a\x52\x41\xab\x7c\x3e\x7c\xad\xe1\x39\xf2\x36\xdd\
\x8d\x55\xd3\x99\x14\x2e\x5c\x37\x31\xd2\xad\xc5\x5b\x0e\x32\xdc\
\x30\xfa\xd1\x0b\x5a\xd5\x53\x09\x29\x9a\x1b\x31\xf3\xa0\xb6\xa7\
\x71\x56\x86\x80\x03\x5d\x18\xb0\x43\xb5\x4d\xc2\xa9\x63\xb2\x8b\
\xd4\x0a\x56\x39\x98\x61\xec\xa2\xe1\x8f\x98\xda\xa0\x6c\x0d\x9e\
\x1f\x14\xee\xe4\xdf\xb5\xb6\x14\x42\xa8\xe1\xa7\x96\x2f\x2a\xea\
\xa7\xe1\x3b\x56\x83\x03\x60\x76\xd3\x50\x29\x9d\x9a\xe0\x6a\x75\
\xa0\x37\xb5\xaf\xad\x12\x85\x03\xaf\xb9\xa4\x08\x0b\xd3\x10\x12\
\xac\x01\xb3\x5b\xca\x6f\xce\x81\x4f\x73\x44\x46\x9b\x9f\xad\x31\
\x31\xf1\x0a\xd0\x49\xe1\xce\xad\xab\x12\xb2\x01\xa1\xbe\xb6\x3d\
\x0d\x5d\xc0\x60\x1d\x8a\xc9\x88\x5c\x89\xbd\xb9\xb5\x5c\x8f\xcc\
\xb9\x5c\x93\xd6\xfa\xdb\xda\x8a\x16\x24\x34\x6e\xc1\x9a\x33\x6b\
\xf5\x1c\xa8\x9c\x61\xd1\x46\xd7\x94\x20\x00\x28\x16\x00\x72\xa7\
\x0d\x28\x10\x5b\x5b\x6b\x46\x2b\x71\x9a\x15\x48\xd5\x8b\x91\x60\
\x35\x3d\xab\x91\x6d\x1a\x8b\x58\xda\xf6\xe9\x7d\x69\x8c\xb7\x0a\
\xa7\x62\x75\xee\x00\xbf\xeb\x6a\xe6\x1a\x9d\x75\xa4\x16\xe3\xc8\
\x7a\xda\xa4\x0b\x00\x3a\x57\x38\xf2\xdb\xae\x82\x8c\x81\x7a\x92\
\x02\xde\x88\x0b\x54\xd7\x54\x9c\xc4\x2a\x96\x3b\x28\xb9\xa8\x41\
\x65\xd7\xd4\x75\x63\xd4\xd4\x4d\xfe\x4b\xdc\x5c\x65\x35\x26\xe0\
\xdb\xa6\xf5\x24\xd4\x13\x6a\x16\xbd\xea\x00\x26\xa4\xe3\xbd\x71\
\x2d\xc8\xfe\x75\x39\x4d\x2e\x50\xd2\x44\x52\x3b\x00\x77\x73\xb7\
\xb0\xeb\x52\x55\xe2\x52\xa3\x32\x84\x37\x65\xdd\x87\xe9\x44\x98\
\xd4\xca\x03\xa3\x16\xe7\x6e\x74\xa9\x30\x53\x2f\xa4\x07\x03\xf0\
\xd4\x70\xf4\x07\x17\xe6\xf8\x41\x36\x35\xcf\x6e\x95\xb5\x67\x71\
\x75\x80\x01\xcb\x3b\xd8\x9a\x0c\xe9\x98\xa4\x9f\x76\xdf\x85\xb9\
\xd3\x8e\xa6\xe6\xb9\x80\x6b\x66\x45\x6b\x6d\x71\xb5\x69\x00\x45\
\xa6\x9f\x91\xa9\x29\x6e\x67\xf2\xa9\x78\xe3\x65\x39\xa3\x5d\xb7\
\x02\xc4\x50\xc4\xd9\xe0\x46\xce\x3d\x3e\x6b\x9a\x52\x0a\xeb\xea\
\x34\xb9\x21\x57\x86\x44\x37\x62\xca\x48\xbf\x5a\x6a\xb2\x35\xc2\
\x48\xac\x57\x70\x2a\x50\x0c\xe3\xbe\x94\x16\x3f\x0a\x39\x71\x4c\
\xa6\xde\x64\x23\x5e\xb5\x66\xe0\xc9\xa6\xc2\xa8\xc8\x4c\x38\x9b\
\x8b\x82\x8f\x71\x7a\xbe\xb9\x58\x67\x8c\xdd\x48\xbd\xeb\x9c\x34\
\x12\x52\x98\x84\x46\x91\xfd\x2b\xaf\xbf\x6a\x73\x8b\xd6\x6f\x13\
\x9c\x3b\x78\x69\xe8\x53\xf5\xef\x45\xb9\x14\x21\x4f\x8d\x8c\xcc\
\xfa\xdc\x96\x3e\xc3\x5a\xc1\xe2\x18\x89\x31\x78\xb2\xe7\x5b\x9b\
\x28\xe8\x39\x0a\xdb\xc3\xb0\x13\x1c\xc6\xc0\xa9\x1f\x95\x52\x8b\
\x87\x4d\x83\x66\xc4\x4c\x97\x0a\x6d\x19\xe4\x4f\x5a\xe3\xca\x5b\
\x1b\x82\xc2\xa1\xc3\x44\xaa\x06\xb1\x83\x24\x96\xeb\xc8\x55\xbe\
\x10\xa0\x49\x3e\x97\x29\x18\x5b\xdf\xbe\xbf\x9d\x54\x04\x29\x8a\
\x36\xf5\x4a\x73\xb0\xe7\x94\x6d\xf9\xd5\xae\x18\x24\x2d\x3a\x44\
\xe3\xc4\xca\x0b\xb7\x25\x37\xda\x9e\x3e\xaa\xb2\xc0\x8f\x50\x23\
\xde\x84\xef\x40\x1a\x68\x0d\xb1\x0d\x9e\x36\x3a\x38\xd6\xd4\x63\
\x23\x1f\x2c\xa8\x49\xd8\x66\xad\x00\x8b\x8b\xe9\x7b\x9a\x91\xf4\
\xa2\x64\x65\x17\x2a\x6d\xd6\xa3\x42\x2a\x28\x6b\x01\x52\xa7\xad\
\x44\xa6\x28\x55\x5a\x5c\xde\x6d\x94\x6f\x45\x04\xb1\xcf\x71\x1e\
\x60\x47\x23\xce\xa0\x91\x6e\xf5\xc4\x12\x35\x36\xed\x53\xb5\x48\
\xda\xb4\x10\x54\x88\x88\x5d\x59\xfc\x8b\xee\x69\xae\x6e\xe6\xdb\
\x00\x05\x40\x03\xc6\x4e\xaa\x85\x80\xbf\x3b\xef\x43\x23\xac\x51\
\xf8\x8d\xaf\x41\xd6\xa4\x5f\x10\x97\x22\x78\x2a\x75\x6d\x5b\xf8\
\xaa\x60\x9a\xe7\x66\x77\x2e\xc6\xe4\x9b\x93\x52\x39\x56\x6b\x43\
\x02\xa7\x95\x08\x3a\x51\x0d\xeb\x29\x04\x50\xb0\x60\xd7\xb5\xc0\
\xa6\xd8\x74\xa8\x75\xb8\x22\xfb\xd3\x88\xef\x05\xf9\x21\x3d\xc6\
\xb5\x24\xac\x0c\x1a\x56\xb3\x0f\x4a\x0d\xc9\xac\xd7\x5c\x4c\x1e\
\x4b\x48\x8c\xdb\x00\x77\xab\x98\x18\x4e\x1d\x3c\x59\x35\x95\x87\
\x97\x36\xa5\x7b\xd3\x2a\x31\x87\x84\x8b\x13\x0b\x11\x76\x3e\xe7\
\x95\x72\x6a\x5a\xdf\x2a\xeb\xf5\xe7\xbd\x4d\xea\x4e\x51\x94\x00\
\x45\x8f\x5e\xb5\x22\xba\xbb\x2e\x9b\x9f\xad\x21\x31\xb9\x49\x43\
\x8d\xc7\x3a\xd2\x84\x89\x22\xce\x9a\x8b\xed\xcc\x56\x51\x36\x36\
\x3a\x53\x20\x91\xd1\xcb\x46\xc4\x5a\x99\x55\x8d\x4b\x54\xa1\x00\
\x12\xc7\xcb\x6b\x9b\xf3\x15\x49\x71\x92\x81\x62\x15\xbd\xc5\x41\
\xc4\xcb\x23\x58\x90\x01\xd0\xe5\x00\x56\xb5\x9c\x58\xc2\x61\x85\
\xbc\x56\x0d\x72\x6e\xa8\x3e\x1e\x97\xa7\x15\xb1\x25\x9e\xc7\x9d\
\xda\xb3\x56\x79\x64\x46\x57\x91\x89\x89\xb2\x93\xd7\xa5\x40\xd7\
\x5b\xd1\x2c\x58\xd4\x02\xfa\x06\x53\xff\x00\x50\xae\x60\x41\xda\
\xb3\x05\x36\x09\xe4\x8d\xb4\x37\x1f\x84\xed\x4f\xe9\x62\xfa\xd3\
\x06\xd4\xb8\xd8\x3a\x07\x5d\x9b\xf2\xa6\x2e\xd5\xa0\x20\x01\xde\
\xa0\x69\xc4\xd4\x2e\xc6\x31\x71\xf2\xa2\x5d\xaa\x20\xb1\xe2\x33\
\x5f\x70\x2c\xbe\xd5\xa4\xb1\xa0\x52\x49\xb0\x1b\x93\x47\x1d\x9a\
\xc5\x48\x61\xd4\x1a\x4e\x25\x4b\x61\xca\xdf\x42\xcb\x7f\xad\x48\
\xc3\xc6\x8f\x65\x2c\x17\x9a\xdf\x46\xa5\x91\xe6\x2e\xf7\x43\xa6\
\xc0\xf6\xbe\xff\x00\xef\xb5\x1e\x45\xb6\xda\xf5\xe7\x5d\x1f\xa6\
\xf6\xde\x8a\x94\x05\x8d\x43\x66\xd4\x9e\xf4\x43\x7a\x16\x34\x4a\
\x74\xd6\xa4\x9a\xea\xeb\x8a\x1c\xc2\x94\xe9\x54\xb2\x15\x07\x5e\
\x55\xc0\xb1\x17\x68\xf5\xff\x00\x58\xa1\x2c\x7a\xd4\x66\xd6\x84\
\x98\x9d\x1e\xfb\x02\x0e\xa0\x9d\xa8\xb3\x0b\x79\x6c\xdd\xf6\x1f\
\x5f\xe2\x80\xb2\x8f\x31\x03\xdf\x2d\xeb\xb3\x12\x6e\xdf\x2e\xd5\
\x21\x9b\x11\xad\xdf\xb5\xac\xbf\xef\xde\xf4\x2e\xe6\xfe\x60\x2d\
\xd6\xf5\xd7\xbd\x0b\x79\xc6\x50\x7d\xcf\x4a\x92\x6f\xad\x03\xa8\
\xfb\x44\x72\x58\x5c\xdd\x4f\x7d\x28\xc0\x00\x58\x72\xa8\x27\xfe\
\x2a\x34\xe8\xa4\x9a\xaa\x75\xaa\x40\xa3\xcb\x50\xc2\xc6\xa4\x5c\
\xc8\xcd\x19\x55\x60\xb9\xb4\x24\xf4\xa4\xc7\x87\x85\x37\x5c\xe7\
\xab\x6d\x56\x0d\x09\xd2\x8b\x09\x5e\x16\x49\x43\xc4\xaa\x0d\xac\
\xcb\x7b\x5c\x51\x33\x0c\xc0\x15\x74\x27\x6b\xda\xc7\xe6\x2a\x49\
\xa8\x60\x0a\x95\x61\x70\x68\x2a\x7c\x4f\x04\x66\x63\x34\x63\xcd\
\x6f\x32\xdb\x53\x59\x2a\xf3\xe1\xa5\x25\x4b\x46\xdb\x11\xd2\xb7\
\x46\x60\xc6\x27\x60\x40\x17\x43\x6d\xfb\x52\x78\x94\x69\x36\x19\
\xdd\x96\xef\x18\xba\xd8\x7a\x8f\x4a\xc7\x2e\x3b\xdc\x32\xa8\xcb\
\x8e\x57\x87\xc1\xb8\x49\xd8\x02\x75\xdc\x7f\x35\x9d\x38\x2a\x6c\
\x45\x88\xe5\x59\xf8\xa9\x1d\xe5\x69\x09\xb3\x5f\x97\x2a\xbd\x04\
\x87\x11\x82\x59\x08\xf3\x29\xc8\xdd\xfa\x57\x1f\xd7\xe9\xbc\xc1\
\x61\x20\x32\xbd\xce\x8a\x37\x35\x7c\xb9\x06\xc0\x0c\xbb\x65\x23\
\x4a\xe5\x8c\x43\x08\x8c\x6f\xbb\x77\x34\x27\x7a\xde\x63\x2c\xdf\
\xf1\x07\x0e\xcf\x3a\xe2\xa2\x94\x45\x1e\x50\xa7\xa8\xf6\xa6\x70\
\x22\x82\x39\xa1\x8c\x10\xa1\x6f\x73\xbb\x1b\xef\x57\x27\x8f\xc6\
\xc2\xcb\x0d\xae\x4a\xdd\x7d\xc5\x65\xf0\x79\x44\x38\xd0\x1b\x69\
\x01\x43\xaf\x5a\xc5\x92\x72\xd3\xf1\xa2\x48\xca\x54\x8b\xa9\xd0\
\x8a\x50\x83\x0a\x3f\xe4\x9f\xfb\xcd\x36\x45\x2a\xc5\x48\xb5\x42\
\x0b\xb0\x1d\x6b\x55\x17\x88\xfb\x98\x0c\xb0\xb3\x26\x52\x01\x52\
\xd7\x06\x81\x31\x98\x72\x2e\xca\xea\x7a\x0d\x41\xa1\xc5\xbb\xcf\
\x29\xc3\xc2\x9a\x29\xd7\xa9\x34\x71\x61\x22\x44\xfb\xe0\x5d\x8f\
\x20\x6c\x05\x1d\xef\x49\x59\xd9\xf1\x78\xbf\x28\xb6\x6d\x85\xf6\
\x14\xd9\x60\x6c\x32\x89\x92\x4b\xd8\xd8\xe9\x6a\x60\xc3\x45\x71\
\xe1\x33\xc6\xc0\xe8\x77\xae\xc7\x44\xde\x5f\x1b\x13\x7e\x8b\x60\
\x09\xa3\x3e\xa3\xa1\x94\x4d\x00\x93\x2e\x53\x7b\x1a\x9b\xda\xa8\
\xe0\x38\xa6\x0f\x33\x61\x56\x17\x36\x37\x52\xc6\xc4\x9a\x62\xf1\
\x34\xcc\x73\x61\x11\x11\x7d\x44\xb1\xbd\x33\x94\xfe\xac\x5a\x9a\
\xcb\x87\x33\x92\x57\xc2\x20\x83\xfb\x55\x4c\x64\xad\x24\xd7\x61\
\x6b\x01\xa7\x4a\xa9\x8d\xe3\x4d\x89\x4f\x0f\x09\x04\x6a\x11\xae\
\xaa\xe7\xd5\xdf\x5e\x74\x2f\xc5\x1d\x90\xc9\x16\x1a\x1c\xc3\xd6\
\x18\x6a\xa7\xb8\xac\xde\x70\xc9\x56\x2a\x41\xe5\x49\xc0\x71\x11\
\x89\xc4\x24\x32\xe1\xa3\x05\xcd\x8b\x2e\x96\xf9\x53\xf2\x9f\x10\
\xa2\x82\x4d\xed\xa5\x32\xcb\xe2\x12\xd1\xa8\xa7\xc1\x83\x00\x03\
\x33\x5b\xfa\x56\xad\x45\x1e\x1d\x76\x88\x1f\xf5\x1b\xd3\x38\x8d\
\x51\x14\x68\x8c\xc2\xe0\x6d\xb9\xab\x86\x08\x1e\xf6\x4c\xa6\xc4\
\xe8\x74\xdb\xa5\x67\x62\xa2\xc5\x62\x2f\xe1\x36\x74\x1b\x22\xe9\
\x6f\x95\x36\x62\xd5\xec\x3c\xb1\x4a\xf3\x0c\xa1\x4b\x1b\xc7\xdb\
\xda\xa8\xc6\xcd\x98\xa9\xdc\x1a\xb3\xe2\x69\x1e\x1c\xc5\x95\xa2\
\x6d\x5c\x0e\x55\x5d\x47\x9d\x18\x1b\xe6\x4b\x93\xd6\x8a\xa0\xc5\
\x10\xa8\x5a\x9a\x88\xb4\xb5\xed\x52\x37\xb6\x95\x0a\x68\x59\x19\
\xd8\xb0\xd8\x9d\x29\x64\x65\x14\x9b\x9b\xd7\x01\xc8\x0a\x88\x83\
\xa9\xf3\x03\x6f\xd2\x8c\x6a\x34\x37\xa5\x04\xae\xc3\xa9\xa2\x16\
\x45\xcf\x6d\x07\xa4\x75\x3d\x05\x43\x0b\x2e\x7b\x85\x09\xa9\x27\
\xa5\x70\x60\xe0\x4a\xc4\xa9\x22\xc8\xbf\x84\x75\xf7\x35\x27\x22\
\xf8\x51\xe5\x24\x66\x26\xec\x7b\xd7\x66\x3c\x87\xd6\xac\x43\x84\
\x77\x50\xc6\xc8\xa7\x9b\x6e\x69\xc9\x84\x81\x46\xa5\x98\xfd\x29\
\xca\xb5\x48\x13\xf8\x4f\xca\xb9\x58\xb3\x10\x45\xb4\xab\xb2\x61\
\x52\xc4\xc6\xc4\x11\xc8\xd5\x2d\x4c\x9a\x82\x32\xe8\x6f\x56\x25\
\xae\x1f\x29\x56\x31\x9d\x9b\x6f\x7a\xbd\x01\xba\xeb\x58\xf9\xd8\
\x3f\x90\x6c\x77\xad\xa5\xb1\x01\x80\xd0\x80\x45\xab\x5c\x45\x31\
\x05\xcd\xaa\xb9\x97\x2e\x3d\xa6\xb1\x2b\x9a\xd7\xb7\x2a\xb5\x0f\
\xa8\x77\xa1\x89\x8a\xc0\x97\x50\x57\x28\xfe\xf5\xb0\x31\x22\x3b\
\xaa\x23\x06\xbf\x98\xf6\x03\x5f\xd6\x8c\x6b\xf3\xaa\x39\xbe\xcf\
\x8b\xce\x05\xc1\x1b\x7b\xd5\xd6\x60\xa0\x1e\xa0\x10\x29\x94\x25\
\x5a\xea\x3d\xa8\xae\x6d\xbd\x2d\x76\xb7\x4d\x28\x81\xa4\x26\x88\
\x11\x6a\x1b\xd0\x96\xb5\x48\x4c\x45\x09\x34\x2c\xff\x00\x2a\x0c\
\xe3\x6b\x8f\xad\x1a\x70\xcb\xf7\xae\x2c\x05\x29\x9a\xc3\x5a\x12\
\xe4\xec\x3e\xb4\x69\xc1\xca\xcd\x75\x03\xad\xea\x73\x6b\x4b\x5d\
\xfa\x93\xce\x88\x54\x86\x4f\x97\xb5\xf5\x34\xd4\x17\x00\x2e\xdc\
\xad\x42\x8a\x58\x58\x0b\xde\x80\x2c\xda\xc5\x19\xd7\x62\xdf\x86\
\x90\x99\xa7\x0a\x08\x89\x73\x11\xbb\x72\x14\x58\x24\x60\x8d\x2c\
\x9e\xb9\x3f\x4a\x88\xb0\x8c\x8b\xe6\x90\x38\xbd\xec\x05\x81\xf7\
\xa6\xc6\x7c\xbb\x9b\x8a\x64\xbb\xda\x15\x03\xef\xbe\xd4\x67\x6a\
\x5c\x84\x09\x3c\xc7\x4b\x5c\x55\x40\x49\xb8\xb8\x04\xd0\x33\x5d\
\x8a\xda\xdf\x3a\xe9\x65\xb9\xb2\x7d\x6b\x88\x0a\x2c\x3e\x66\x83\
\x11\x41\x23\x85\x5b\xd4\xbb\x59\x49\xe9\x55\xee\xce\xf7\x35\x9b\
\x49\x81\x6e\xa4\x1d\x58\x8b\xdf\xa1\xaa\x78\xec\x40\x66\x40\x9b\
\x2f\xab\xdf\x9d\x5b\x53\xf7\x83\xde\xb2\xb1\x0d\x7c\x64\xeb\x71\
\x96\xf9\x94\x74\xbe\xf5\x9e\x57\xa3\x18\xdc\x6a\x1f\x07\x18\xf6\
\x5b\x23\x1b\xaf\x71\x45\xfe\x1c\x90\x0e\x21\xe1\xc8\x7e\xed\x81\
\x2c\x3a\x5b\x9d\x5e\xc5\xc2\xb8\xb8\x7c\x22\x40\x75\xf4\x13\xfa\
\x56\x56\x0d\x5a\x0e\x20\x61\x95\x4a\xb3\x02\xba\xf2\xbd\x70\xb3\
\x39\x6b\x7f\x1e\x8a\x60\x44\x87\x36\xf4\xa3\xbd\x55\xc1\xe3\x8a\
\x0f\x03\x11\x72\xb7\xd1\x8e\xeb\x57\x19\x48\x20\x8d\x41\xd8\x8e\
\x75\xd3\x75\x90\x83\x95\x83\x0e\x55\x9d\xc5\xb0\x6c\x92\x36\x22\
\x11\x78\x98\xdf\x4f\x84\xd6\x8b\x29\x1a\x10\x45\x72\x12\xbb\x73\
\xde\xfc\xe8\xb3\x54\xac\xfc\x16\x39\x72\x88\xb1\x37\x2a\x36\x7e\
\x62\xad\x23\xc0\xd6\x29\x89\x8c\xdf\x5d\x4d\x88\xa8\xc4\x61\x30\
\xb3\xfa\x94\xc4\xdd\x50\x68\x7e\x55\x56\x4e\x16\x6f\xf7\x58\x88\
\xdb\xb3\x79\x68\xff\x00\x28\x57\xd7\xcc\x09\x59\xa2\xb7\x36\x0c\
\x29\x12\x62\xb0\x48\x4e\x69\xcb\x11\xc9\x53\x7f\x9d\x51\x7e\x1d\
\x8b\x52\x09\x8b\x32\xdf\x52\xa6\xf5\x57\x1c\x78\x96\x1f\x10\x50\
\x61\x7e\xef\xe1\x1e\x1d\xc5\xa8\xbc\xac\xf8\xa4\x5d\xc4\x71\x57\
\x23\x26\x1a\x31\x10\x23\x56\xdd\xbe\xb5\x51\xdd\x84\xb9\x14\xe7\
\xc4\xb0\xcc\x4b\x1d\x10\x75\x34\x98\xb1\x2c\x49\x69\xf0\x91\x8c\
\xbb\xe5\x26\xff\x00\x41\x54\x31\x3c\x59\xa2\xe2\x06\x58\xf0\xec\
\x88\xde\xa2\x41\x0c\x47\x6e\x95\xca\xf2\xfb\x6b\x58\xb1\x89\xc2\
\xa6\x1d\xd5\xdf\x10\xac\xc4\xde\xda\xaf\xf2\x68\x31\xf8\xb9\x31\
\x01\x54\xbe\x60\xa3\x5b\x0b\x02\x7a\xda\x93\x2a\xf8\x89\xe3\xc4\
\xe6\x44\x6d\xcf\x31\xef\x4a\xbd\x62\xdf\xe1\xc1\x03\x56\xa3\x2e\
\x71\x38\x42\xba\xc9\x21\x2a\x6d\xbb\x27\x7a\xa8\x0d\x6d\xff\x00\
\x87\x11\x15\xd3\x13\x3a\x80\x21\xd1\x6f\xbe\xa6\xf4\xf1\x9b\x55\
\xab\x5c\x23\x87\x36\x1f\x33\x31\xb4\x8c\x37\xe4\x83\xf9\xad\x18\
\x55\x21\x5c\xb1\xdf\xbb\x1d\xcd\x40\x91\x65\x17\x8e\x55\x23\xa5\
\xec\x6a\x76\xd2\xbd\x32\x49\xe3\x14\x60\xd1\x03\x4b\x06\x8d\x01\
\x63\x60\x09\x34\x83\x22\x60\x1a\xe7\x60\x09\x3e\xd5\xd8\x13\x93\
\x04\xad\xac\x62\xec\xcd\xd6\xdc\xa9\x32\x9c\xc5\x70\xe8\x41\x67\
\x20\xc9\x6d\x40\x14\xeb\xac\x85\x93\x65\x71\x94\x76\x1c\xa9\x4a\
\x98\xb9\xd2\x46\x00\x2a\xbe\x5d\xd9\x86\xff\x00\x2a\x59\x66\x73\
\x76\x37\xa5\xa0\x14\xc1\x58\x68\x42\xa4\x50\xd7\x7c\x27\xda\xa4\
\x33\xf8\x79\x9e\x42\x8f\x9d\x00\xd0\xd8\x68\x28\x97\x6a\x60\xa2\
\xe7\x50\x45\xcf\x7a\xea\x95\x3e\x60\x06\xa7\x90\x1b\xd2\x02\xaa\
\xb2\xe2\x0c\x24\xb3\xac\x76\x39\x07\xc4\xd5\xa1\x87\xc3\xa4\x07\
\xc5\x94\x03\x21\xe5\xc9\x6a\xbc\x90\x7d\x90\x7d\xa8\x00\x25\x63\
\x62\x06\xb9\x68\xf0\xb3\x4f\x89\x3e\x75\x40\xa3\x98\x16\xb5\x33\
\xaa\xaa\xcb\x48\xa4\xdc\xb1\xd7\x99\x15\xc4\x81\xb9\x1f\x5a\x51\
\x56\x67\x0a\x4d\xc0\xfc\xaa\x7c\x21\x9a\xf7\xb0\xe9\x4b\x26\xd5\
\x7c\x72\x82\x4c\x80\x00\x6d\xaf\x7a\x70\x04\x68\x18\xfc\xea\x55\
\x41\x3e\x7d\x6f\xa1\x1d\xaa\x4c\xf1\x7a\xd2\xe1\x6d\x9b\x0c\xc2\
\xf7\x28\x74\xf6\xac\xe9\x30\xec\x98\x96\x41\x7c\xbc\xb5\xb5\xe9\
\xd8\x1c\xd0\xe2\x75\x07\x2b\x0b\x1a\xb8\xf5\x5a\xad\x50\xc5\x41\
\x3d\x05\xc5\x14\x63\x2d\xd0\xdb\xf1\x0f\xdc\x7d\x7f\x5a\x50\x0f\
\x7c\xad\x6b\x5f\x53\x45\x24\x81\x02\x3b\x1b\x0c\xc5\x49\xec\x45\
\x74\x64\xae\x27\x1d\xd2\x36\x52\x6f\xaa\xfc\xea\xc3\x00\xa7\x28\
\xdc\x00\x09\xf9\x50\x0c\xb3\x48\x1a\xf7\x8e\x3d\x8f\x26\x34\x4e\
\x75\x26\xa4\x90\x6d\x5c\x1a\x80\x1a\xea\x92\x58\x96\xbd\xc9\x02\
\xfb\x50\xb5\xed\xeb\x6f\xad\x40\x3e\x51\xdc\x57\x1a\x90\x6c\x4f\
\xa8\xde\xd5\x24\x0e\x82\xba\xf5\x0c\x68\x48\x20\x03\xa5\x75\xea\
\x2a\x54\x5c\xda\x84\x25\x37\x15\x18\xcc\x4a\xe1\x80\x45\x01\xe5\
\x22\xf6\x3b\x20\xea\x6b\x95\xd4\x11\x93\xef\x09\x3c\x81\xb0\xf9\
\xd6\x5c\xce\xcf\x33\xb3\x1b\xb3\x31\x2c\x7a\x9a\xad\xc8\xa2\xcb\
\x62\xf1\x32\x8b\x3e\x25\xb2\xfe\x18\xd7\x2d\xe8\xb0\xb8\x89\x62\
\x23\xc3\x62\x00\x37\xb7\x5a\xab\x1d\x59\xc1\x46\x65\x9d\x63\x51\
\xab\x1b\x51\xde\x9a\xdd\x27\xee\xc3\x81\xba\x83\x6a\x15\x4d\x2f\
\x7d\x4e\xa6\x9c\x40\xd8\x6c\x05\x85\x01\x16\x3a\x6d\xd2\xbb\xb0\
\x5b\xf9\x54\x9b\x6c\x2f\x4b\x68\xc1\xf5\x6a\x7a\xd3\x24\x04\xe8\
\x4e\x9d\x05\x41\xd3\x5a\x2a\x21\xa2\x50\x41\x51\xa8\xeb\x40\xf9\
\xb7\x16\x14\xe6\xa5\x3d\x66\x98\x53\x82\x7d\x76\xf6\x14\x04\xd3\
\x1c\xd2\x98\xd6\x69\x46\x62\x1a\xfd\x2b\x37\x8a\x2f\x83\x2f\x8e\
\x6f\xb8\x52\x79\x32\x9a\xd0\xaa\xdc\x4d\x5a\x54\x11\x78\x25\xd2\
\xd7\x24\x75\xac\x5f\x0c\x50\x93\x46\x20\x6d\x43\x3c\x29\x8c\x8c\
\x24\xa1\x83\x29\xf2\xc8\xa3\x51\xef\x44\xe2\x64\x16\x43\x13\x90\
\x2c\x33\x8b\x11\x55\x27\x4e\x22\xe4\xdc\x48\x47\xf4\x9f\xe2\xb9\
\xd6\x88\xc4\x1c\x46\x1d\x8a\xe2\x62\xf1\x50\x68\x24\x1b\xfd\x69\
\xf8\x3e\x27\x04\x70\x18\xb3\xba\x03\xb1\x61\x7c\xbf\x3a\x94\xc1\
\xe2\xda\x2f\xbe\x25\x54\xf2\x60\x49\x34\xb1\xc3\xb0\xef\x7f\x0a\
\x44\x27\x98\x91\x8a\x5b\xf5\xac\xf7\x3c\x3d\x35\xb0\xc6\x29\x30\
\xaa\xd1\xe2\x52\x4b\x5e\xe6\xfa\xd7\x1c\x9c\xe4\x4f\xfb\xab\xcf\
\x62\x70\xf8\x9c\x0c\x99\xd6\xe8\x2f\xa3\x2b\xdc\x1f\xca\x8a\x3e\
\x2d\x36\xd8\x80\xb3\x2f\x32\x56\xcd\xf5\xa7\xf7\x3e\x8c\x6e\xbf\
\x87\x6b\xf8\xd1\xe9\xfd\x54\x99\x24\x85\x75\x69\x47\xfd\x3a\xd5\
\x28\x24\xc3\x62\x85\xb0\xf2\x15\x7b\x5f\xc3\x7d\x3f\x3a\x19\x94\
\xa9\xb3\x0b\x11\x4d\xe5\xd2\xc5\xa6\xc5\xc2\x86\xe8\xae\xc7\xbe\
\x82\x82\x6e\x21\x24\xa9\xe1\x95\xca\x84\x58\x8c\xc4\xfe\xb5\x50\
\xd4\x56\x3f\x54\x86\x4c\x34\xa4\xfd\xd6\x3a\xea\x0e\xce\x4a\xda\
\x97\x34\x18\xd4\x5c\xca\xe9\x32\x0d\xf6\x20\x8e\xf7\xa7\x57\x2b\
\x14\x60\x54\xd8\xd1\x90\xeb\x35\x22\x84\xc8\x64\xc3\x01\x0c\x84\
\x1c\xd1\x1f\x44\x9e\xdd\x0d\x56\xc4\xc7\x97\x12\x63\x41\xec\x08\
\xd6\xb6\x71\x18\x13\x8b\x9d\x1a\x01\x95\x9f\x57\xe8\x2b\x51\x70\
\xf8\x68\xe1\x10\x01\xaa\x8b\x09\x40\xd5\x4d\x13\x86\xad\x79\xdc\
\x26\x0f\xc3\x0b\x2e\x24\x58\x9d\x55\x39\x9f\x7a\xbe\xb3\xc4\x8c\
\xa9\x3b\x64\x59\xd0\x59\xc0\xd8\x82\x77\xfa\xd0\x63\xf0\xd2\xe1\
\xe6\xb4\x87\x30\x6d\x43\xfe\x2a\x56\x2b\x0e\xf8\x98\x62\x58\x99\
\x3c\x80\xdf\x33\x58\xd1\xe7\x8b\xd6\xa6\x0b\x08\xe6\x70\xe5\xbc\
\x8b\x66\x0c\xba\xde\xae\x66\x0f\x3b\x2a\x87\x76\xbf\x9b\x28\x00\
\x2f\xcc\xd6\x3f\x0a\x3c\x4f\x02\xa5\x22\x78\x24\x43\xbc\x65\xef\
\x7f\x6a\xd6\xc0\xbc\x72\x42\x23\xb1\x8a\x76\xf3\x34\x64\xdf\xf3\
\xae\xbc\x2f\x4c\xd3\x4a\x91\x6b\x42\xef\xfe\x97\x1a\x7b\xd4\xa3\
\x5e\xe2\x42\x07\xff\x00\xd6\x87\x4f\x99\xe7\x42\x47\x6a\x95\x51\
\x7b\x90\x0d\x6c\x08\xb1\x0b\x95\x55\x54\x74\x51\x52\x80\x93\x43\
\x94\x11\x6d\x7e\xb4\x8c\x6b\x65\xfb\x95\x63\x7f\x8f\xf8\xab\x71\
\x2b\x83\xde\xd4\xc0\x53\x28\x2d\x2a\x29\x23\x66\x3a\xd2\x63\x8c\
\xbf\x94\x31\xa6\xaa\xa0\x3e\x33\x59\xf4\x01\x01\x1c\x86\x99\xab\
\x11\xa3\x4a\x84\x17\x91\x85\xb9\x65\x20\x93\xec\x2b\xac\x4a\x12\
\x30\xb3\xda\xdb\xdc\x54\x09\x5b\x93\x28\xee\x00\x15\xd7\xb9\xb9\
\x37\xfc\xe9\x06\xd9\xef\xe5\x85\x35\xe4\xf2\x58\xd4\x48\xd9\x06\
\x66\xc3\x4e\x07\x5b\x83\x40\xb7\xb0\x06\xa5\x4b\x03\x71\x71\x4a\
\xc0\x7d\xa8\xb7\x97\x0d\x03\xbb\x75\x61\x7a\xbf\xc3\x9a\x35\x16\
\x95\x44\x53\x30\xbd\x8e\xdf\xf9\xa8\xe1\xea\xce\xc5\xdf\xd2\xbc\
\x86\x97\x35\x66\x78\xa3\x9c\x5a\x54\xb9\xea\x34\x35\xa9\x2f\xa2\
\xd0\xca\xab\x88\x21\x33\x5e\x34\xd5\xcf\xe2\x3d\x05\x1a\xe8\x32\
\x8d\x14\x6c\x06\xc2\xb8\x21\x48\xc2\x20\xb2\x8d\xaf\x5d\x66\x3a\
\x11\x61\xce\x96\x51\x1f\xa3\xe7\x44\x05\x48\x15\xd6\xa9\x38\x54\
\xaa\xdc\xf4\x00\x5c\x9e\x82\x88\x21\x22\xf6\xd3\xa9\xaa\xdc\x47\
\x12\x91\x44\x61\x8d\x83\x3b\x8b\x31\x07\xd2\x29\xea\x7a\x99\x7f\
\xe2\x39\xde\x5c\x44\x6f\x1b\x11\x1b\x0c\xaa\x3a\x5a\x91\xc3\xb1\
\x72\xe1\xa5\xf5\x96\x46\xf5\x03\xaf\x3a\x2c\x59\xcf\x85\x0d\x63\
\xf7\x6c\x0f\xd7\x4a\xaa\xbe\xab\x74\xae\x36\xf7\xad\xe3\xd9\x42\
\xe2\x58\x04\x8b\xb7\xed\xca\xa2\x64\x12\xc2\x63\x26\xda\x82\x0d\
\x66\x7f\x87\xb1\x36\xc2\x80\x7f\xe5\xb1\x56\x17\xe4\x79\xd6\xa9\
\xec\x6e\x0e\xc6\xbd\x12\xec\x61\x2b\x65\x01\x57\x45\x5d\xab\x98\
\xde\xa2\xba\x94\xea\xe1\xb8\xae\xae\xa9\x05\x3d\x03\xda\xa1\x88\
\x02\xa2\x43\x96\xe7\x91\xde\x96\x5c\x6f\x63\x6e\xb4\x6a\x13\x35\
\xea\x2f\x43\x7b\xed\xad\x70\xa1\x08\x7f\x73\x40\x5c\xca\xb9\x22\
\x46\x2a\x4d\x99\xf9\x5a\x8b\x42\xac\xa4\x8f\x30\x22\x81\xa6\x8e\
\x1c\x22\xc8\xdb\x81\x60\xbd\xea\x49\xc7\x62\x57\x0d\x0e\x55\x3e\
\x72\x2c\x8b\xd0\x75\xac\xe4\x6b\xeb\x49\x9e\x57\x9a\x63\x23\x9b\
\x93\x4d\x48\xe4\x45\x0c\xc8\xc0\x75\x22\xb1\xfa\xda\xd6\x61\xa9\
\xbd\x6e\x70\x7c\x3f\x81\x17\x88\xe3\xef\x1f\x6e\xc2\xb0\xe1\xb6\
\x71\x7d\xaf\xad\x7a\x53\xea\x1d\x2c\x2d\xf4\xae\x9c\x27\x6c\xf2\
\x30\x1d\x2a\x0d\x4a\x54\x91\x5d\x59\x29\xc5\x03\xfa\x69\xac\x29\
\x6c\x39\x51\x51\x47\x7a\x5c\x83\xa9\x03\xdc\x8a\x29\xdf\xc2\x89\
\xa4\x3c\xb6\xf7\xac\xa4\xbc\xd8\x91\x9c\x17\xcc\x75\xb5\x63\x95\
\x31\x79\xd7\x4b\xdd\x6d\xfe\xa1\x49\x25\x4e\xc7\x37\xfa\x05\xff\
\x00\x3d\xa8\xca\x46\x86\xe9\x1a\x2d\xb9\xda\x85\x89\x3b\x92\x68\
\x20\x21\xef\xea\x11\x8e\xde\x66\xfe\x28\x48\xb1\x37\x79\x1b\xdc\
\x81\xfb\x51\x31\xa0\x63\x59\xa8\xb3\x06\x1b\x35\xfc\x22\x4f\x3b\
\xb5\x42\xbc\x71\xe9\x1a\xe4\xf6\x1a\xd4\xb1\xa0\x26\x82\x23\x3b\
\x6f\xe2\x3d\xfd\xcd\x04\x92\x17\x5b\x00\xbe\xf9\x05\x4e\x9d\x3f\
\x2a\x06\x00\xf2\xb7\xb5\x45\xca\xea\x17\x21\x8a\x36\x5e\x61\x96\
\xa9\x71\x0c\x0c\x0e\x9e\x26\x1f\x07\x11\xfc\x48\x14\xdc\x77\xab\
\x7a\xdb\xad\x40\x24\x1b\x8b\x82\x3a\x51\x64\xa9\x84\xcb\x00\x24\
\x1c\x32\x0e\xb6\xb8\x22\xb4\x70\x03\x0f\x89\x83\x2b\x02\x64\x5d\
\x02\x97\xd6\xdf\x4a\xb3\x89\x86\x1c\x40\x1e\x34\x76\x61\xf1\x2d\
\x81\xaa\x72\xf0\xdb\x31\x31\x62\x00\x3c\x83\x0b\x7e\x75\xcf\xf3\
\x65\x6b\x61\xcd\x86\xc3\x29\xb3\x40\xe0\x8e\xae\x69\x58\x98\x70\
\x91\x2e\x62\x18\x5f\x61\x7b\xd4\x88\xf8\xbc\x63\x2c\x72\x19\x07\
\xf4\xb0\x6b\x50\xcb\x3e\x35\x7c\x98\x8c\x1e\x7d\x39\x2d\xbf\x4a\
\x7a\xfe\x01\x61\xa3\xc2\x4b\x19\x61\x1b\x12\xbb\xdd\xad\x4e\x8d\
\x63\x12\x00\xb0\xc6\xba\xda\xe0\x6b\x55\x63\xe2\x10\x44\x32\x36\
\x19\x96\xfe\xa0\x0d\xa8\xcf\x11\xc1\x5e\xf9\x27\xfa\x8a\xb6\x21\
\xc4\xa0\x33\xce\x45\x99\xd8\x81\xfd\x20\x69\x52\x8a\xcc\xc0\x0d\
\x49\xa0\x18\xec\x26\x63\xe4\x93\x2b\xea\xb7\x61\x6c\xdd\x3b\x5e\
\xaa\x62\xf8\x94\x85\x5a\x28\xa2\x10\x83\xa1\xeb\x46\xc8\x85\xc6\
\xa6\x8d\x82\x40\x87\x31\x8c\x9c\xcd\xcb\xda\xa9\x28\xa1\x07\x5a\
\x35\x3a\x57\x3d\xde\xda\xc4\x93\x6a\xe4\x62\xae\x19\x4d\x88\xd8\
\xd7\x1a\x8e\x75\x44\xdd\xc3\x4d\xf6\x9c\x1a\x4c\x4f\x9c\x79\x5f\
\xdf\xad\x1a\x9a\xce\xe0\xb8\x85\x8e\x46\x86\x56\xb4\x72\x0d\xfa\
\x1e\x46\xb4\x8a\x95\x6b\x1a\xed\xc6\xec\x66\xa5\x9f\xc3\x85\xe5\
\xe6\xa3\x4f\x7a\xce\x04\x93\x73\xa9\x35\x77\x19\xff\x00\xed\xf2\
\x7f\xa9\x6b\x3f\x52\xb6\x1b\xde\xae\x4a\x2c\x46\xaa\x50\xeb\x9a\
\x31\xff\x00\xfd\x0f\xf1\xfa\xd1\x31\x2c\xd7\x63\x44\xe1\xcb\xf9\
\xc5\x8f\x4b\x5a\xd5\x39\x68\x41\x17\xe5\xa5\x4d\x8f\x3b\x9a\x20\
\xbd\x68\x82\xd3\x8b\x42\xa2\x8a\xd4\x40\x57\x01\x48\x5e\xc1\xae\
\x5c\x2a\xdb\xe2\x24\xd3\x00\x3a\xd8\x6b\x6d\x2f\xd6\x87\x00\x73\
\x61\xb2\xde\xe5\x0e\xdd\xa8\xe5\x0c\x19\x55\x05\xdc\xec\xbf\xb9\
\xad\xb2\xab\x84\x5c\x44\xa5\x9b\xc6\x65\x50\x75\x37\xe7\x56\x82\
\xb8\xf8\x8c\xab\xce\xfa\x30\xfe\x68\xa0\x87\xc2\x84\x25\xee\x49\
\xbb\x7b\xd1\xaa\xda\x99\x10\x14\x06\x17\x53\x7b\x6e\x2d\x62\x3d\
\xe8\x71\x32\xc7\x86\x8c\x33\x82\xcc\xde\x95\x15\x60\xda\xe1\xc8\
\xdb\x46\x3d\x8f\xf0\x6a\x97\x1e\x53\x68\x4d\xb4\x00\x8f\xce\xab\
\xd4\x4a\x38\xbc\x44\xb3\xb9\x2c\x48\x5e\x4a\x36\x15\x56\x6b\x86\
\x03\x91\xa6\xb6\xf4\x32\x8c\xc9\xa6\xe0\xd7\x2b\xdb\x63\x2b\x9a\
\x39\x13\x7c\xc8\x6c\x2b\x32\x31\x6a\xd6\xc3\xa4\x84\x24\x99\x18\
\xaf\x33\x6a\xcd\x99\x32\x4c\xeb\xd1\x88\xac\xf2\x89\x77\x80\x37\
\xdf\xc9\x17\xfe\xe2\x7e\x63\x51\x5b\x38\x2c\x46\x82\x29\x2d\x6f\
\x84\xf4\xaf\x3d\xc3\xa4\xf0\xb1\xd1\x49\x6d\x98\x56\xcc\xca\x15\
\xca\x8e\x46\xba\x70\xbd\x0a\xd2\x20\x8f\xe6\xa2\xb3\xe2\x9a\x58\
\xfd\x0e\x40\xe9\x4e\x18\xd9\x00\xf3\x22\x37\xca\xd5\xd3\xf5\x19\
\xc5\xaa\x83\xb5\x57\x18\xdd\x75\x85\x7e\x44\xd7\x1c\x62\x69\xf7\
\x47\xbf\x9a\xad\x8b\x0c\x7d\xe8\x0d\x29\xf1\x84\xe8\xb0\xaf\xd4\
\x9a\x53\xe3\xa4\x07\x41\x18\xed\x96\x8d\x87\x0f\xc8\x3b\xfd\x6a\
\x42\xad\xf6\xaa\x9f\x6c\x97\xf1\xaf\xd0\x51\x2e\x32\x41\xba\xa3\
\x0f\x6a\x36\x2c\x5b\x05\x63\x46\x91\xb4\x54\x17\xf7\xac\x8c\x4c\
\xad\x2c\xcd\x2d\xaf\x7d\xd0\x73\x1d\xbb\xd6\x9a\x62\x20\x91\x1a\
\x39\x03\x28\x75\xb1\xe9\x59\x93\xc4\x62\x90\xad\xee\xa7\xd2\x7a\
\xd6\x79\x18\xb1\xc2\x70\xcb\x2c\xa6\x56\xf3\x46\x9a\xfb\x9e\x95\
\xab\x9f\x30\x2a\xe0\x32\x9f\x84\xed\x59\x3c\x3b\x10\xd0\xc8\xc0\
\x65\xcb\x26\xe0\xec\x4f\x7a\xd5\x4b\x32\x07\x5f\x49\xfc\x8f\x4a\
\xd7\x0f\x05\x51\xc7\x41\xe0\x4a\x0a\xff\x00\x96\xfa\xaf\x6e\xd5\
\xb1\x80\x98\x4f\x83\x47\xbe\xaa\x32\xb5\x52\xc7\x28\x6c\x03\xdf\
\xe1\x21\x85\x07\xf8\x7e\x52\x27\x78\x4e\xce\xb7\x1e\xe2\xb5\x3a\
\xa2\xf8\xd8\x8d\xb5\xb1\xa6\x83\xa5\x57\x06\xfa\xd3\x11\xb4\xae\
\xac\x8d\xe9\x2f\xbd\x30\x9b\xd0\xb6\xba\x54\x95\xf1\x68\x1e\x02\
\xa4\x68\x59\x7f\x5a\x51\x45\x40\x42\x22\xa8\xec\x2a\xcc\x80\x98\
\xd8\x0d\xed\x71\xdf\x9d\x2d\xc0\x3a\x8d\x8e\xa2\xb1\x61\x55\x90\
\x69\x6a\x5b\x69\x56\x1d\x75\xa4\xc8\xa4\x56\x69\x25\xa8\x1f\x4a\
\x61\x14\xb7\x15\x84\x5b\x9d\x68\x68\xce\xf5\x0c\x2a\x32\x80\xef\
\x43\xa5\xe8\x88\xd2\x84\xd4\x50\x68\x6b\x89\xa8\xbd\x1a\x93\x42\
\xdb\x57\x72\xa8\x6b\x05\x2c\xcc\xaa\xa3\x99\x34\x20\x10\x2f\xb5\
\x48\x69\x14\x59\x64\x70\x3b\x35\x46\x68\x7f\xf7\xd0\x7b\xd1\x32\
\x90\x3b\x75\x07\x4a\x93\xbc\x57\x1b\x90\x7d\xd4\x1f\xda\x93\x24\
\x78\x69\x2e\x24\xc3\x47\xee\x83\x29\xa3\x61\xce\x85\xa8\xa6\x14\
\xd8\x4c\x13\x23\x28\x59\x10\x1d\xfc\xc1\x81\xf9\x50\xae\x0b\x09\
\x75\x52\xf2\xca\x3e\x1c\xda\x11\xfd\x34\xeb\x50\xca\xa4\x02\x14\
\x7d\xe3\x8f\x20\xef\xd6\xb3\x90\x99\x19\x48\x85\xa1\x85\x23\xe5\
\xe9\xb9\xa3\xce\x24\x19\x65\x8e\x37\x1d\x0a\x8f\xda\x97\x23\x03\
\x26\xfa\xf3\xf7\xa9\x43\xad\x86\xf5\xa0\xa7\xc5\x70\x89\x08\x59\
\xa1\x2d\xe1\xb9\x37\x07\xe1\x3d\x2a\x90\xad\x4e\x35\x2a\xc7\x85\
\x18\x60\xc0\xbb\x36\x67\x03\x90\xe5\x59\x43\x38\x37\xbe\x9d\xab\
\x9f\x39\x34\xc3\x2d\xa5\x5a\xc3\x71\x0c\x4c\x4a\x13\x30\x74\x02\
\xc1\x5c\x5c\x0a\xa6\xcc\x2e\x32\xde\xe3\xad\x19\x3a\x5f\x29\x17\
\xa2\x74\xbd\x6b\xc5\x8a\x87\x17\x87\x92\x30\x9e\x1c\x85\x7d\x37\
\xd0\xfb\x55\x58\xe3\x66\x6c\x80\x6b\xfa\x52\x38\x79\x61\x89\x89\
\x81\xd4\xb8\xd6\xb5\xf1\x50\x09\x99\xd6\x18\xec\xaa\x7c\xe8\x3e\
\x3d\x34\x1e\xdd\xab\xa4\xff\x00\x28\x3c\x5e\x6c\xad\xe4\x91\x03\
\x0e\xa7\x7a\x54\xd8\x20\x7c\xd0\xb5\xff\x00\xa4\xef\x56\x05\x88\
\x2d\x96\xc6\xe4\x10\x77\x16\xa9\xb7\x3d\x2f\x5d\x71\x86\x6b\x23\
\x23\x65\x60\x41\x1b\x83\x51\x6a\xd2\x9d\x12\x58\xfc\xe8\x73\x5b\
\x46\xb6\xd5\x9f\x30\x68\xc9\x52\x3c\xc3\x4b\x56\x6c\x6a\x22\xa4\
\x0a\x08\xcb\xde\xe7\x5f\x7a\x60\x64\x04\x29\x0e\xcc\x75\x08\xa3\
\x5a\x91\x98\x67\x68\x98\x32\x9b\x66\x60\xbf\xef\xe5\x5a\x3e\x81\
\x68\xc5\xc1\xd7\x37\x36\xee\x6b\x32\xc7\x38\x67\x00\x65\xf4\x20\
\x37\xd7\xa9\xab\x58\x09\xf4\xf0\xa4\x3a\x7c\x24\xf2\xad\x71\xa2\
\xac\x80\x48\xe9\x5d\x62\x07\x5a\x3b\x5b\x43\x5d\x5b\x08\x16\x23\
\x5d\x54\x8b\x1a\x46\x22\x29\xe6\x84\xc2\x02\xb8\x02\xea\xd7\xb1\
\x27\xa5\x3f\x28\xde\xd5\x0c\x0a\xea\x9a\x1d\xed\x45\x4c\x09\x4b\
\x47\x7f\xf8\x69\x49\x1c\x88\xfe\x2b\x94\x87\x85\x64\x00\x8b\xe8\
\x47\x42\x2b\x5f\x88\x61\x9a\x71\xe2\xc1\xfe\x67\xc4\x2f\xbf\x71\
\x54\x57\x0f\x2b\x4c\x10\xc6\x47\x8b\xdb\x98\xfe\x47\xe9\x5c\xef\
\x1c\xad\x4a\xd2\xc2\x1f\x0f\x86\x21\x51\xb4\x45\x88\xeb\xad\x60\
\x71\xa8\xc0\xc5\x78\xaa\x34\x70\x09\xf7\xb5\x7a\x0c\x53\x2a\xe0\
\xce\x5d\xb2\x84\x51\x59\xfc\x6b\x0c\x17\xc2\x4b\x82\x64\x8c\xe5\
\x27\xad\xf4\xfe\x29\xe7\x36\x08\xc4\xad\xd5\x7f\x17\x0f\x14\xa3\
\xe2\x40\x0f\xb8\xac\x47\x5f\x2e\x70\x08\x04\x90\x41\xf8\x4f\x31\
\x5a\x7c\x20\x93\xc3\x98\x1f\x86\x4d\x3e\x95\x8e\x3e\xe3\x55\x61\
\x45\xc8\x02\xa4\xa9\xcb\x7d\x0d\xba\x1b\xd0\x48\xc5\x30\xee\xe3\
\x70\x2c\x3e\x75\x21\x12\x28\xe3\x91\x17\xd3\x6c\xdf\xd4\x0d\x6c\
\x3a\xb9\xb4\x15\x2c\x32\xb1\x1d\x2a\x18\x12\x45\xad\x61\xca\xf5\
\x00\x15\x36\xd7\x5a\x19\x0a\xc7\x11\x76\x07\x28\x36\xd0\x53\x09\
\x6c\xd9\x42\xeb\xef\x43\x32\x13\x86\x93\x31\xbf\x97\x6b\x54\x41\
\x19\x49\x14\xb4\x6c\x0d\xb7\x16\xd4\x57\x65\xe9\x4a\xc1\xe1\xa4\
\xc8\x25\x49\x42\x13\xe9\x16\xde\xac\x44\xfe\x20\x60\xcb\x96\x44\
\xf5\x2f\xef\x44\xff\x00\xa8\x20\x75\x15\x24\x02\x2c\x45\xc5\x16\
\x95\x1f\x5a\x51\x7f\x67\x66\x04\xa0\xcc\x2d\xa8\x1b\xd1\xe0\xb1\
\x72\xe1\x9f\x2c\x77\x65\xb8\xcf\x13\x7c\x5e\xdd\x0d\x1c\x32\x34\
\x52\x07\x4d\xc5\x5c\x9e\x1c\x3e\x2e\x21\x29\x05\x5b\x9b\x2f\x2f\
\x7a\xa4\xfe\x2d\x51\xe2\x1c\x60\x3e\x58\x63\x8c\x04\x27\xef\x05\
\x8d\xea\xd7\x03\x8f\x36\x29\x67\x43\x78\xd0\x5e\xff\x00\xb5\x4a\
\x70\x9c\x37\xda\x04\xcd\x3b\x16\x03\x5f\x2e\xe7\xad\x5c\x8a\x35\
\x89\x6d\x0e\x97\xd4\x92\x3d\x5e\xe2\xb5\x25\xdd\xa2\xe7\xc5\x85\
\x24\x53\x01\xb8\xa4\x23\x66\x21\x6d\x95\xed\xe9\xeb\xdc\x75\xa3\
\x0d\x63\xa9\xb5\x74\x60\xca\xeb\xd7\x02\x0d\x56\x9b\x14\xd1\x4a\
\xc8\xf1\x03\x6d\x88\x3b\xd5\x6e\x25\x9b\xeb\x42\xe2\xde\x65\x17\
\xe6\x54\x73\xf6\xef\x41\x87\xc4\x47\x30\xb0\xf2\xb7\xe1\x3c\xe8\
\xf3\x6b\x56\xea\x29\x80\x6f\x4d\xc8\x3b\x11\x48\x11\xda\xf9\x97\
\xe7\x56\x25\xba\x13\x2c\x6a\x48\xf8\xd0\x7e\xa2\xa1\xca\x90\x19\
\x48\x65\x3b\x11\xce\x8b\x0c\x54\x91\x2d\xb5\x2d\x94\xf2\xb5\x5a\
\x75\x06\x94\xc9\x58\xb0\xab\x32\x91\xc8\x7c\x8d\x03\xe6\x03\xd2\
\x6a\xc3\x2d\xa8\x6d\x59\x4a\xb9\x81\xed\x42\xe5\x6d\xbd\x14\xb1\
\x95\x6b\x5b\x4a\xe0\x05\xb6\x15\x34\x43\xb0\x1d\x6a\x01\xb8\xb8\
\xd0\x51\xca\x97\x3a\x69\x4b\x54\x65\x3b\xdc\x56\x6a\x49\x02\xdb\
\x5f\xde\x86\x50\x81\xa2\x76\x00\x05\x72\x09\xe9\x71\x44\x76\xae\
\xd0\x82\xad\xaa\xb0\xb1\x15\x20\xc8\x5b\x35\x9c\x66\x1c\xc1\xd6\
\x80\x8f\x08\xf8\x90\x6a\xa7\xd7\x1f\xf1\x44\x97\xcf\xe0\xbe\xac\
\xa2\xea\xdf\x8c\x54\xae\x8c\x0e\x97\x15\x17\x2b\x24\xa9\x9e\x32\
\x48\xe6\x08\xda\x84\xad\x74\x51\x04\xc4\x4d\x97\x45\x75\x0c\xa0\
\x1e\xf5\x24\x74\x63\x42\x27\x15\x2b\x40\xea\x88\x80\x16\xb7\x9d\
\x85\xc5\x31\x62\x60\xe5\x54\x34\x92\x36\xef\xcc\xfb\x76\xa9\x0c\
\x1a\x3c\xb2\x00\x50\xee\x0d\x54\xe3\x12\xcd\x87\x85\x30\xb8\x7c\
\x59\x8d\x59\x73\x12\x45\xcd\x8d\x17\xae\xd2\xd4\x91\x88\x96\xf3\
\xba\xc6\xbd\xcd\xef\x55\x66\xe2\x49\x1d\xd7\x0a\xa4\x9b\x7a\xdf\
\x97\xb5\x50\x88\xb1\xf2\xac\xcc\xe3\xf0\xcc\x74\x6f\x63\xca\xad\
\xe1\x38\x78\x95\x81\x93\x34\x00\x9d\x15\xac\x4b\x7b\x56\x76\xdf\
\x0a\xb2\xe6\x91\xaf\x62\xce\x4e\xbc\xcd\x5e\xc3\x70\xc9\x99\x33\
\x4a\xcb\x08\x23\x4c\xda\x93\xf2\xab\xf8\x68\xe2\xc3\x8c\x98\x68\
\x80\x6e\x6e\xda\xb7\xf6\xae\x79\x61\x56\x39\xe5\x24\x8d\xca\xae\
\x6b\x7c\xe9\x9c\x27\xd1\xaa\x91\xf0\x60\xe4\x01\x8c\x4b\x9d\x00\
\xc8\x75\xaa\x33\x06\x8e\x46\x8d\xac\x72\x92\x2f\xd6\xb7\xf0\xcd\
\x1e\x51\x88\x0d\x78\xd2\xec\x49\x16\xda\xb1\x1c\x78\xd2\xb1\x03\
\xd6\xd7\x03\xde\x9e\x5c\x64\xf1\x4a\x77\x02\x84\xcb\x8d\x0c\x7d\
\x11\xf9\x9e\xb7\x20\x8c\x04\x90\xa9\xd5\xa4\xd7\xb5\xa9\x38\x5c\
\x32\xe1\x30\x8b\x11\xf5\xb6\xb2\x69\xf9\x55\xc4\x06\xf9\x94\x8b\
\x91\x62\x1b\x66\xe9\xec\x6b\x7c\x78\xe4\xc6\x6d\x42\x92\x54\x16\
\x3e\x66\x25\x9b\xb5\xff\x00\xd8\xa9\x06\x93\x01\xcf\x96\x55\x1e\
\x52\xa3\xf4\xb5\x36\xb4\x04\x0d\x74\xa8\x26\x8f\x21\xb6\x61\xe9\
\x6e\x86\x86\xf5\x20\xd2\x99\xec\x0a\xb1\x07\x71\x52\x4f\xfc\x4b\
\x8d\x8f\x82\xb6\xfa\xd5\xae\x20\x83\xc3\x38\x83\xb2\x0f\x38\xeb\
\x58\xf1\x4e\xff\x00\x6b\x38\x86\x42\x55\xae\x0e\x9a\x5a\xb1\x7a\
\x6a\x2e\x8a\x35\xa1\x4c\xae\x99\xa3\x60\xcb\xfa\x51\x8e\x94\xa6\
\x86\x0a\x6f\x16\x3c\xac\x46\x75\x1a\x77\x14\xda\xce\x81\xcc\x72\
\x87\x07\x63\x5a\x4c\x3c\xda\x6c\x75\x15\xb9\x59\xa9\xa8\x22\xa4\
\x6d\x5c\x69\x45\x95\x2a\x6e\xbf\x4f\xe2\xa6\xe5\xd3\x29\x73\x66\
\xd8\xf4\x3d\x68\x88\xa0\x65\x20\xdd\x34\x3d\x3a\xd4\x95\xe3\x49\
\x65\xc4\x2a\x4a\xbe\x55\x62\x49\xb6\xfa\xd4\x7f\x88\xd4\xb6\x06\
\x39\x47\xc2\xf6\x3d\xaa\xcc\x57\x08\x01\xb8\xae\xc6\x28\x7c\x04\
\x88\xcb\x71\xa1\xac\xe7\x49\x82\xf8\x49\x64\x8f\xed\x09\x18\x39\
\xc7\xde\xa0\xdd\xba\x30\xef\x56\xb0\x91\x78\x18\x35\x8c\x9b\xb3\
\x9c\xe7\xf6\xa6\x66\x21\x81\x5d\x2d\xa5\xab\xac\x02\x15\x1b\x29\
\xba\xff\x00\xa4\xff\x00\xb3\x58\x93\x1a\xd7\x05\x0e\x8c\x8c\x6c\
\x18\x5a\xfd\x2a\xa3\x62\x4c\x70\x18\x0a\xdc\xdb\x2a\x91\xef\x56\
\x74\xcc\x09\xda\xd5\x2e\xb0\xbc\xaa\xcd\x18\x62\x39\xd5\x40\x97\
\x3b\x46\xac\xc1\x41\x65\x04\xde\xa0\x82\x39\xaf\xbd\xa8\x9d\xff\
\x00\x2e\x54\x05\xa9\x4e\x1a\x0b\x0b\xf7\x35\x28\x03\x5d\x18\x68\
\xc2\xc6\x82\xfa\xd7\x4a\xe6\x38\x19\xc6\xe3\x41\xda\xf5\x27\x46\
\x2d\x87\x41\xcc\x5c\x7e\x74\x13\xab\xf8\xab\x88\x8c\xdd\x90\x79\
\xd7\xa8\xa6\x40\x19\xf0\xf1\x04\x17\x25\x2d\xa7\x5a\xb5\x86\xc2\
\x28\x23\xc6\x24\x96\xd3\x28\x36\xb5\xea\xcd\x4a\x6e\x41\x01\x94\
\xdd\x58\x5c\x50\xd5\xfc\x3a\xe1\x9e\x20\x82\x38\x83\x47\x71\x66\
\x3c\xaf\x4a\xc4\xc3\x87\x22\xf0\xc8\xa5\xbf\x00\xd6\x9c\x5a\xab\
\x4d\xc3\xca\x62\x7b\xee\xa7\x42\x3a\xd2\xb5\x04\x82\x2c\x6a\x77\
\x34\x16\x9c\x46\xf1\xa9\x06\xe0\xec\x68\xef\x54\xb8\x7b\xb3\x46\
\x50\x1f\x49\x24\x03\xd2\xad\x8a\xdc\xac\x98\x32\xba\xe5\x71\x71\
\xf9\x8e\xe2\x97\x2c\xb2\xc5\x20\x49\xc9\x92\x36\xd5\x5f\x98\xf7\
\xa2\x5a\x62\x3e\x53\x72\x74\x3a\x1a\x50\xa3\x7f\x28\x2b\xe6\x1e\
\xf5\xd8\x88\xd7\x11\x1f\xf5\x0d\x8f\xec\x6a\xbe\x2c\xf8\x0c\xb3\
\xc4\x72\x82\x6c\x40\xf4\x9a\x6e\x16\x5f\x19\x4b\x65\xca\xcb\xbd\
\xb9\x8a\xb7\xe5\x0a\x48\xae\xd2\x05\x4f\x55\xf4\xab\xe2\x56\x5b\
\x2e\x23\xca\xc7\x66\xf8\x5b\xe7\xd6\x95\x2c\x46\x2c\x40\xc4\x47\
\x72\xb7\xf3\x2d\xef\x6a\x6b\x32\x11\x63\x62\x0e\xd7\xe7\x54\x98\
\x85\x72\x06\x61\xb7\x51\x49\x90\xe4\x6c\xcb\xa2\xc8\x6c\x47\x2c\
\xdd\x6a\x56\x24\x56\xcc\x85\x93\xfd\x26\xd4\xbc\x44\x02\x4d\x4c\
\xd2\x5f\x95\xf5\x15\x5d\x43\x06\xf4\x2f\x42\x09\x03\x5f\x50\x16\
\x36\xd9\xbb\xd4\x92\x0a\xdf\xad\x04\x04\xde\x81\x85\x11\xde\x85\
\xa8\x41\x61\xa5\x25\xc6\xb4\xe6\x34\xa6\x35\x94\x4c\x82\x96\x69\
\xce\x39\x52\xd8\x6b\x53\x40\x61\x42\x45\x19\xa1\xac\xa2\xe7\x42\
\xf1\x66\x53\x69\x22\xd5\x4f\xed\x52\xb9\x65\x8d\x65\x5d\x2f\xbd\
\xb9\x1a\x35\xd1\xae\x39\x52\x9e\x19\x52\x63\x26\x1e\x40\x81\x86\
\xa0\x9d\x8d\x45\xd3\x12\x98\xac\x39\x20\x9c\xca\x41\x15\xd7\x63\
\x7b\x0f\x9d\x32\x30\x14\x16\xbe\x79\x1b\xd4\xe7\xf4\x1d\xa8\x58\
\x00\x2e\x36\xe9\x42\x2c\x29\x1b\xd5\x4f\xf1\x08\x6f\x16\x27\x03\
\xca\x63\x02\xfe\xd5\x7c\xed\x4a\xe2\x31\x78\xbc\x3d\xf6\xcd\x17\
\x98\x1e\xdc\xe8\xb3\xa5\xac\xfe\x0b\x1a\xcb\x8e\x51\x20\xb8\x00\
\x9b\x56\xb3\xbf\x8a\x32\xc9\xe9\xf8\x6d\xf0\xfb\x56\x67\x04\x36\
\xc7\x82\x7f\x09\xfd\x2a\xfa\x99\x5d\xc8\x81\x63\xc8\x86\xc5\x9f\
\x99\xa3\x87\x8a\x8d\x56\x76\x19\x67\x99\x5a\x3f\xe9\xf5\x37\x63\
\x4d\x57\xd3\x2e\x55\xc9\xf8\x6d\xa5\x0a\xa9\x63\x94\x80\xaf\xbe\
\x50\x6e\x1b\xb8\x35\xda\x83\x62\x2d\x5b\x05\x71\x29\x46\x1f\x02\
\xd8\x54\x56\x19\x9a\xe0\xf2\x2a\x75\xa5\xff\x00\x87\xe2\x0d\x88\
\x69\x98\x5d\x62\x17\x00\xf3\x3c\xa9\x9c\x63\xcc\x72\x58\xe9\x11\
\x00\xf2\xba\x9f\xe2\x8f\x81\x9f\xff\x00\x1d\x25\xbf\xf7\x47\xe9\
\x47\xff\x00\x47\xe2\xf0\x39\xef\x7e\x74\xd8\x90\x0f\x88\xe9\xa9\
\xa4\x47\x47\x88\x7c\x98\x29\x9b\x63\x96\xc0\xf7\x35\xb6\x04\xa5\
\x40\x01\x54\x01\xca\xc2\x8a\x92\x9a\x0f\x2e\x9d\xb9\x1a\x60\x61\
\x60\x7a\xd4\x85\x7a\x91\x40\x0d\xe8\x85\x49\xd3\x58\xe0\xe6\x56\
\x1a\x3a\x65\xff\x00\x7f\x4a\xa2\x80\xc5\x12\x46\x0e\xca\x2f\xde\
\xae\x62\xce\x5c\x04\xff\x00\x89\x92\xca\x3b\xd5\x18\x24\x12\xc4\
\xac\xcc\xaa\xca\x2c\xe0\x9b\x5a\xab\xe9\x89\xca\xab\x8b\x81\x96\
\xca\x5e\xf9\x80\xd8\xd3\x4e\xf4\x99\xd1\xe6\x64\x31\xb0\x44\x4d\
\x9c\xf3\xee\x05\x1f\x88\x43\x85\x94\x7a\x8d\x84\x8b\xb1\xf7\xa2\
\x13\x2b\x4f\x0c\xd9\xb0\xc8\xda\x6d\x63\x59\x7b\x1b\x1d\xc5\x5f\
\xe1\xcc\x0e\x19\x97\x9a\x9b\xd6\xb8\xfa\x29\xe4\xd7\x03\x51\x5d\
\x5b\x02\xae\x35\xc2\xba\xa4\x1a\x92\x33\x44\xeb\xd5\x48\xa9\x22\
\xba\x2f\x58\x1d\x74\xa9\x32\x69\x8b\xa8\xf7\x4d\xbd\x8f\xf7\xa8\
\x98\x65\x91\x97\xa1\x22\xa0\xb8\x46\x88\x9d\x03\x12\xa7\xe7\x5c\
\xcb\x98\xe9\x60\x37\xae\xf4\x8b\x57\x35\xd5\x85\xc5\xb7\xa1\x26\
\xf5\x24\x31\xae\xd0\x2b\x33\x1b\x2a\x8b\x9a\x93\x50\x50\x3c\x61\
\x0e\xcc\xd7\x3e\xc0\x5f\xf8\xa8\x82\x19\xe1\x95\xb2\xa1\x60\x79\
\x06\xe7\x57\x70\xf8\x4c\xe8\x7c\x6d\x10\xfc\x36\xd4\xd0\x43\x85\
\x18\x84\x0e\x55\x54\x21\xf2\x5b\x4f\x95\x5b\xc3\xc8\xef\x9d\x25\
\x5b\x3c\x7c\xfa\x8a\x64\xfe\x8b\x5d\x0c\x71\x61\x80\x48\x97\x2c\
\x6f\xb1\x26\xe5\x5b\xdf\xbd\x17\xa5\xae\x46\xa0\xd4\x92\x2c\x41\
\x17\x07\x71\xd6\xa2\xc4\x28\x55\x90\x10\x3f\x1a\xdf\x4e\x97\xde\
\xb6\x19\xb8\xf8\x72\x62\x2c\xb7\x21\xc5\xc5\x0e\x59\xb0\xd2\xab\
\xb2\x15\x23\x51\x71\x57\xd2\x39\x3c\x7f\x1a\x56\x4b\x80\x42\x2a\
\xeb\x6a\x31\x7b\x65\x36\x20\xee\x0e\xd5\x9c\x3a\xcb\xc5\xe2\x13\
\xc4\x0e\xf1\x39\x32\x0b\xf9\x0d\xea\x22\x78\xe5\x04\xc6\x4e\x61\
\xba\xb0\xd4\x53\xf8\xb8\x5c\x2c\xb0\xcf\x1a\xdd\x42\x90\xca\x0e\
\xab\x7a\xcd\x77\x93\x11\x8e\x0f\x87\x52\xad\x6e\x7f\xa9\xac\x5b\
\x94\xc5\xfc\x1b\x14\xc4\xa9\x02\xe0\x9b\x69\x57\xa3\xf1\x33\x9b\
\xdc\x0e\xf5\x47\x0e\x59\x5d\x5a\x50\x8d\x62\x09\x68\xf9\x7c\xab\
\x51\xc7\x98\x90\x41\x5b\xe8\x45\x6f\x88\xa1\x19\xba\x8f\xa5\x12\
\xb6\xe0\x8a\x8a\xe5\xf5\x8f\x7a\xd0\x06\x28\x2c\xa5\x70\xcb\xa1\
\xbd\xd8\x81\xb5\x33\x0f\x14\x31\xae\x58\xc3\x2b\x1d\xc9\x35\x53\
\x0d\x37\x87\x8d\x71\x29\x23\x35\xc1\x35\x70\xd8\xe8\x75\xf6\x34\
\x4f\xea\x10\x72\x0d\x8e\xff\x00\xad\x0e\x89\xa0\x07\xc3\xfa\xe4\
\xfe\xd4\x19\x6e\xc7\x33\x1d\x0e\x95\x20\x10\x74\x66\xa5\x08\xf9\
\x40\x20\xdd\x08\xd0\x8d\xa8\x58\xd0\xc8\x59\x15\x98\x05\x2a\x7d\
\x63\xf7\x15\xce\x6c\x05\x8d\xc1\xd8\xf5\xa9\x21\xbd\x54\x27\x7f\
\x7a\xe6\x3a\x7b\xd0\xb1\x36\xd3\x71\x59\x4e\x34\x2d\x44\x35\x17\
\x1b\x50\xbd\x15\x16\xc7\x7a\x07\xda\x88\xef\x40\xf4\x20\x9a\x17\
\xa2\xa1\x6a\x9a\x03\x0a\x13\x47\x42\x45\x15\x06\xd5\xc4\x03\xa5\
\x49\xd0\x57\x0c\xdd\x85\x08\x20\x11\xa0\xd4\x54\x10\x49\xd6\xd6\
\xa9\x62\x46\xeb\xf4\xa3\x58\x99\x97\x35\xac\xb6\xf5\x1d\x05\x48\
\xa5\x60\xd5\x18\x86\x58\xb0\x92\xbb\x91\xe6\x52\xaa\x0f\x33\x4a\
\xc4\x62\x20\xc3\x7c\x42\x57\xe4\x14\xe9\xf5\xaa\x38\xa9\xe7\xc5\
\x38\x0e\x6f\xaf\x95\x40\xd2\xb3\x79\x61\xc1\xf0\x78\xd9\xf1\x7a\
\x72\x46\xb9\xe9\xa5\x68\xc2\x99\x21\x64\xb5\x8a\xc9\xaf\x7b\xed\
\x43\x81\x80\xe1\x70\xc5\x5a\xde\x24\x9e\xa1\xd0\x53\x88\x62\x33\
\xa2\xe6\x60\x2c\xcb\xf8\x87\xf2\x29\xe3\x32\x2b\x43\xe5\x65\xc8\
\xe2\xeb\x7f\x98\xf6\xa9\x80\x3b\x19\x22\x63\x9a\x48\x9a\xda\x6e\
\x45\x28\xca\x89\xe6\x6c\xe0\x77\x4b\x52\x10\xc9\x89\xc7\xb1\x52\
\x63\xce\x6e\x48\x3b\x0a\xb5\x62\xdf\x12\x2a\x20\x45\x76\xca\xea\
\x49\x23\xaa\xda\xc6\xff\x00\x2d\x7e\x55\x5f\x81\x4e\x23\x99\xb0\
\xf2\x1b\x24\x9a\x7b\x1e\x55\x6e\x18\xa0\x89\x83\x14\xce\x79\xb3\
\x9d\x4d\x63\x71\xbc\x46\x27\x09\xc5\xa5\x82\x19\x59\x62\x43\x74\
\x17\xe5\xca\x8e\x5d\x76\xa7\xf1\xbc\x64\xcb\x70\x07\x9a\xfc\xc6\
\xd4\x67\x58\xd5\x5c\x96\x56\x7b\x9b\x74\x03\x5f\xda\xab\xae\x35\
\x25\x8e\x27\x92\x36\x66\x96\x30\xc0\x82\x2f\xd3\x5f\xa5\x58\x52\
\xe0\x16\x75\xc8\x58\x65\x55\xe6\xa3\xbd\x74\x96\x56\x52\xba\x0a\
\xeb\xdb\x6d\xb9\x8a\x59\x6b\x30\x1c\x88\xa9\x27\x43\x52\xc3\x97\
\xda\x8a\xe1\x53\x33\x9b\x2f\x7a\xab\x2c\xa2\x30\x19\xda\xec\x46\
\x8b\xfc\xd5\x77\x9a\x49\x48\x32\x1d\x46\x80\x0d\x80\xa3\x62\xc3\
\xa6\x99\xa5\x92\xe3\x45\x1b\x0e\x94\x24\x86\x3e\x75\x56\x3d\xc5\
\x2c\x1a\x2b\xd1\xa4\x45\xae\x75\xa0\xc6\x10\x30\x4f\x7e\xa2\xde\
\xf4\x57\x19\x80\xe6\x6a\x9f\x13\xc4\x23\x48\xb0\xa9\x39\x57\x9f\
\x22\x6a\xb7\xa5\x17\x60\x72\xf8\x68\xdd\x8d\xc9\x1a\x9a\xbb\xc2\
\x5b\xef\x99\x09\xf5\x2e\xdd\x4d\x64\xf0\xb7\xf2\x3c\x6c\x74\x1a\
\x8b\x9a\xb9\x0c\x85\x5c\x3a\x5e\xe3\x9e\xd5\xae\x35\x56\xbe\xbc\
\xaa\x6c\x29\x65\x54\xd9\x94\xe8\xc2\xe0\xde\x89\x43\x03\x7c\xd5\
\xd1\x91\x8a\x9a\x81\xa8\xb8\xa9\xa9\x22\xbb\xbd\x71\xae\xa9\x28\
\xf1\x24\xcb\x89\x2d\xc9\xf5\x15\x5d\xd1\x64\x88\xc6\xe6\xc0\xea\
\x0f\x43\x5a\x18\xf4\xcf\x86\xcc\x37\x8f\x5f\x95\x67\xd6\x39\x4e\
\xcc\x20\x26\x36\x26\xf2\xdd\xd5\x76\xd6\xe0\xd3\xc3\x23\xb9\x0a\
\x0a\xb8\xdd\x1b\x7a\xe3\xa8\xf9\xd0\x63\x43\x18\x44\xab\xeb\x8d\
\xb7\xea\x2b\x3e\x11\xda\xf4\xd8\x21\x69\x65\x65\x03\x45\xb2\xdf\
\xa7\x33\xfb\x7d\x2a\x93\x62\xe7\xd1\x44\x4a\x19\xb4\x0d\x6a\xd9\
\xe1\xd1\x78\x58\x04\x51\xab\x90\x4b\x9d\xf5\xa7\x8f\x74\x51\x80\
\xaa\xa1\x14\x59\x57\x6a\xed\x06\x20\x72\x2f\x1f\xd6\xc6\xa4\x82\
\x37\x04\x50\x49\xa6\x2b\x0c\x7b\x91\x5d\x00\x8e\xd4\x35\x26\xd6\
\x24\x9b\x00\x2e\x4f\x4a\xa3\x89\xe2\x45\x5e\xd0\x22\x65\x1f\x13\
\x0b\x93\x45\xb2\x25\xda\xe1\xa9\xb5\x56\xe1\xf8\xdf\xb4\x4a\x21\
\x95\x50\x33\x7a\x59\x45\xb5\xe9\x6a\xb2\x18\x28\x2e\x4d\x82\x82\
\x6f\x54\xb2\xa5\x0e\x34\x7c\x44\x9d\x41\x16\x4b\x58\xf5\x02\x93\
\x1c\x6b\x04\x62\x34\xdc\x80\x59\xb9\x9a\x94\x39\x99\xcb\x5a\xc5\
\x49\x6b\xed\x40\xaf\xe2\x43\x1c\x82\xc7\x32\xeb\x6e\xda\x56\x3e\
\xeb\x46\x43\xa4\xab\xee\x2b\x46\x42\xb0\xe2\x80\xd5\x63\x92\xe0\
\xf4\x06\xa8\x60\x54\x3e\x32\x35\x61\x70\x5a\xaf\xe2\x54\xc9\x87\
\x71\xf1\x0f\x30\xd2\xb5\x3c\x14\x6c\x08\x36\x3c\xab\x85\xb5\x63\
\xb2\x8b\x9a\xa7\x1e\x2e\x61\x95\x06\x56\x3b\x00\x56\xe6\x9b\x8a\
\x13\xb4\x2a\xc5\x95\x90\x1b\xba\xa0\xb5\xbd\xe9\xd0\x08\xf0\xc6\
\x60\x66\x95\xca\x97\x24\x81\x6a\xb6\x00\x3e\x5b\x82\xca\x35\xbe\
\xf5\xce\x41\x20\x8f\x49\x1e\x5f\x6a\x16\x8d\x1e\x65\x94\xb3\x06\
\x51\x6f\x2f\x3a\xb1\x05\x1a\xc4\x8b\x12\x29\x9b\xeb\x41\x89\xb7\
\xa8\x69\x73\xad\x44\x4c\x72\xdb\x29\x20\x73\x15\x21\x5e\xc6\x84\
\x00\xb9\xd1\x76\x04\x38\x16\xd8\x1a\x92\x45\xed\x7d\x68\x26\x57\
\x62\xad\x1b\x5a\x44\xd0\x03\xb3\x0e\x95\x54\x82\x68\x49\xa5\x7d\
\xad\x09\x2a\xf0\xb0\x6b\xec\xa6\x8f\xc4\x87\xf1\x38\xee\xc8\x40\
\x15\x9d\x43\x24\x8b\xb5\xc1\x52\x06\x9d\x08\xff\x00\xcd\x09\x52\
\x76\xd6\x85\xf1\x18\x74\xff\x00\x98\x58\x8e\x40\x68\x7e\x75\x4a\
\x58\xf1\x0e\xc6\x61\xa6\x6d\x40\x07\x51\x55\xa7\x16\xa4\xb2\x30\
\x56\x04\xb3\x6c\xa3\x7a\x16\x23\x36\x42\xa5\x09\xf4\xdc\xdc\x37\
\xce\xab\x0c\x54\x58\x58\x8e\x70\x5e\x56\xe5\x7f\xd6\xb9\x38\x84\
\x33\x44\xe9\x24\x45\x34\x26\xe0\xdc\x0a\xce\xac\x3c\x8b\x1b\x1a\
\x83\x41\x0c\xa1\x80\x49\x18\x6b\xe8\x93\x93\x0a\x36\x04\x0b\xee\
\x3b\x6b\x49\x06\x80\x16\x3b\x28\xb9\xaa\xd8\x7c\x53\xc9\x89\xc9\
\x27\xa1\xcd\x80\xfc\x35\x65\xf2\xb4\x4e\xa4\xd8\x32\x91\x7e\x95\
\x99\x2e\x1e\x78\x4e\x7f\x84\x1f\x50\x3a\x56\x79\x69\x8d\x09\x06\
\x51\xdc\x1a\xe1\x72\x2e\x45\xa9\x31\xe3\x22\x68\xc3\x4a\xcd\x9c\
\x0b\x1b\x0d\xe9\xb0\x49\x2c\xaa\x1d\x63\x8e\x38\xfa\xb6\xa5\xa9\
\xd0\x0c\x7c\xc7\x0d\x84\x56\x50\x0b\xbb\x58\x5f\x5b\x0a\xc9\x96\
\x59\x24\xbe\x77\x2d\x7d\xee\x6b\x5b\x89\x2b\xbe\x04\x93\x87\x8d\
\xb2\x38\xb5\xa4\xeb\xde\xb2\x64\x31\x89\x04\x72\x2b\x42\xcd\xe9\
\xb9\xba\x9f\x9d\x73\xe7\xe9\x85\xac\x2d\x2c\x81\x62\x42\x49\xe5\
\x5b\x38\x2c\x32\x61\x63\x1b\x3c\xbc\xd8\x8d\xbd\xa9\x7c\x16\x33\
\x16\x1e\x49\x34\xcc\x5b\x28\xed\x4f\x7b\x8d\x47\x3a\x78\xcc\xed\
\x54\x9d\xf5\x3a\x9a\x9f\x6a\xe5\x51\x6d\x45\xfb\xd7\x59\xbb\x56\
\xc0\x96\x47\x3e\x5d\x5a\xdc\x9b\x51\x55\xe7\x88\x41\x8f\x8d\x97\
\x28\x0f\x6b\xad\xce\x9d\x45\x3e\x30\x55\xee\x0d\xef\xbf\xf3\x54\
\x4a\x48\xb8\xc0\xa4\x33\x30\x6d\xba\xd1\x4c\x5f\x58\x4a\xcd\x96\
\xf7\x0a\x4d\xcf\x40\x2b\x17\x8d\x83\x8f\xe3\xea\xb1\x82\x04\x81\
\x7e\x55\xa3\xc5\x78\xb6\x0e\x0c\xf0\xa5\xe4\x91\xbf\xcc\x03\x61\
\xda\xaa\xe1\x63\x68\x10\xe2\x26\x00\x62\x27\xd4\x0b\xea\x8a\x6b\
\x3c\xb2\xf4\xa2\xfe\x04\xab\x63\xce\x50\x32\xa4\x64\x2e\x9d\x06\
\xf5\x64\x58\xef\x54\x78\x41\xff\x00\x89\x6f\xfe\x36\xfd\x2a\xe0\
\x35\xae\x3e\x0a\x00\x33\x35\xce\xc3\x6e\xf4\x5e\x55\x46\x76\x1e\
\x55\x17\xb7\x53\x45\x90\xa9\x0a\x75\x3f\xad\x56\xe2\x32\x02\xc2\
\x25\x20\x85\xde\xdc\xcd\x37\xa4\x44\xae\xd2\x39\x66\x37\x26\xb8\
\x54\x0a\x90\x2b\x0d\x0d\x4d\xa9\x80\xd2\x41\xb5\x4e\x6a\x74\x60\
\xb1\x12\x98\x85\x94\x5d\xdc\x59\x47\xef\x54\x1b\x09\x89\x92\x75\
\x41\x13\x0d\x37\x23\x41\x57\xdc\xf9\xe6\x6b\xf9\xae\x32\xf5\x0b\
\x6d\x28\x92\x46\x0b\x94\xb1\xef\xad\x16\x6a\xd0\x88\xca\x20\x45\
\x4d\xb7\x6e\xb4\xf0\x75\xa0\x0d\x44\x0d\x6e\x05\xac\x1e\x27\xc3\
\x19\x1c\x5d\x39\x5b\x71\x5a\x09\x62\xb9\x94\xdd\x7a\xd6\x3a\xd3\
\x60\x95\xe2\x37\x46\x23\xb5\x6a\x51\x8d\x45\xd3\xdb\xf4\xa2\x1b\
\x8a\x46\x1f\x15\x1c\x9a\x3f\x91\xbf\x23\x4e\x22\xde\xd5\xb8\x04\
\x45\xe8\x48\xa2\x5d\xaa\x6d\x7a\x40\x2d\x7d\x0e\xaa\x74\x22\xb3\
\x71\x31\x18\xa5\x2b\xcb\x91\xeb\x5a\x85\x74\xa0\x91\x12\x44\xcb\
\x20\xb8\xe5\xd4\x56\x6c\xd3\x2b\x2f\x99\xed\x52\x85\xaf\x65\xd4\
\x9a\xb8\xd8\x38\x80\xb8\x77\xbd\xf4\xd2\x9d\x1c\x51\x45\xe8\x5d\
\x7f\x11\xde\x8f\xcd\x3a\xa7\x88\x85\xd2\x14\x91\xf5\x60\x79\xfc\
\x34\xcc\x2c\x4e\x22\x25\xdd\x95\x5b\x65\x06\xd7\xa7\xcc\x03\x64\
\x8d\xbe\x23\x98\x8e\xc2\x86\x47\x24\xf7\xab\x02\x02\x5b\x44\x96\
\x45\xf9\xde\x97\x1c\x32\x7d\xa5\x64\x92\x5c\xc1\x75\xbf\x3a\x2c\
\xda\x5c\x9a\x25\x34\xa2\xf8\xab\x95\xc0\x1b\x7f\xcc\x6b\x56\x39\
\x17\xda\xb6\xb1\xe9\xe2\xe0\x1d\x46\xe9\xe7\x15\x8c\x6b\x1c\xfd\
\x31\x38\x42\xcb\x8a\x8d\x94\x90\x43\x8d\x6b\x6b\x88\x9c\xb0\xcd\
\x61\xb9\xcb\xf9\xd6\x18\x62\x08\x23\x71\x5b\x78\xe1\x9f\x08\xc4\
\x10\x6e\xaa\xd7\xbe\xfa\x6b\xf9\xd5\xc7\xca\xaf\xac\xb2\x85\xe0\
\x96\x30\x45\xd9\x0d\xbf\x5f\xda\x87\x0e\x99\x70\x71\x03\xcc\x16\
\x16\xe8\x69\xd1\x30\x52\x58\xe8\xa1\x4d\xc9\xe5\xa5\x09\x37\xca\
\xb6\xb1\x08\x05\xbe\x55\x13\x78\x71\xb6\x36\x22\xd6\xdf\x7a\xd2\
\xbe\x53\x73\xcb\x7a\xc9\x43\x96\x45\x27\x91\xb9\xad\x49\x08\x63\
\x9b\x93\x6b\x5a\xe2\x2a\xa3\xdb\x0d\x8f\x59\x00\x39\x0b\x66\x1e\
\xd5\x61\xb1\x10\xa4\x6c\x43\x86\x26\xf6\x51\xce\xf4\x18\xb4\xcd\
\x11\x1b\x91\xaa\xf5\x15\x45\x2d\x9d\x73\x6d\x7e\x74\x78\x9a\x58\
\x5c\xdf\x64\x8f\x36\xfa\xdb\xda\x8a\xe6\xf6\x16\xbf\x7e\x54\x33\
\x92\x24\xb2\xb5\x94\xed\xd8\x57\x0b\x28\xb0\xf9\xd6\x82\x58\x66\
\xf5\x12\x7d\xb4\xa9\x16\x0b\x61\x43\x9a\xa4\x1b\xd4\x9c\xe7\x63\
\xd0\xd0\xb0\x37\xbd\x49\x60\x7c\xa3\x73\xa5\x11\xb0\x1a\x9b\x7b\
\xd4\x80\x59\xaf\x7d\x01\xeb\x6d\x7e\xb5\x05\xd8\x0f\x33\x5c\x73\
\x07\x51\x52\x59\x3a\x8e\xe7\x61\x49\x9f\x11\x85\x45\x39\xde\x4d\
\xb4\xfb\xb2\x2f\xf3\xa1\x2a\x71\x18\xd5\x66\x53\x10\xb8\x93\x60\
\x39\x1a\x64\xb8\x94\x8a\x20\xcb\xe6\x90\x2d\x89\xbf\x95\x2c\x39\
\x9a\xab\x89\xc6\x99\x2f\x1a\xa6\x44\xe5\x66\xd4\xfc\xeb\x37\x1d\
\x02\x2a\xc7\xe1\x33\x2a\xc8\xd6\x2a\x5a\xff\x00\x3a\xe7\x79\x67\
\x8d\x48\x63\xa9\x99\xcb\xa4\x89\x2b\x1d\xf2\x9b\xd0\xba\xb4\x78\
\x69\x8b\xa9\x19\x93\x28\xb8\xb5\xc9\xa3\xc9\x0c\x0f\x91\x20\x43\
\x94\xd8\xb1\xbd\xcd\x41\x2a\xba\xac\x44\x13\xb1\x72\x5b\xe9\x7a\
\xcb\x48\xe1\xe2\x4c\x3c\x19\x27\x70\x50\x9b\x15\xdc\xa5\xf6\x35\
\x77\xc3\xc4\x61\xe3\xf1\x95\xac\x2f\x6b\x75\xaa\x31\x0f\x10\xbc\
\x6d\x7f\x3a\x9b\x9b\xfc\xe9\xff\x00\xe1\xe9\xc6\x30\x0c\x3c\xcd\
\xe6\x88\x5d\x6e\x7d\x42\xa9\xfc\x15\x78\xd9\x91\x58\x8b\x07\x50\
\x48\xe9\x42\x9e\x47\x22\xe3\x5e\xb4\xd9\x83\x03\x66\x16\x3d\x29\
\x6c\x9d\xf5\xae\x81\x5e\x7c\x28\x7c\x75\x94\x05\x52\xa1\x9b\xb5\
\x58\x36\xb0\x55\x16\x55\x16\x15\x28\xa0\x48\x0d\xcd\xda\x36\xbf\
\xc8\x8b\x7e\xf5\xce\xcb\x04\x46\x69\x6c\x00\xf4\x83\xf1\x1a\x27\
\x49\x5b\x8c\xb8\x8f\x06\x20\xbf\x9a\x43\x98\x8e\xd5\x9b\x2c\x26\
\x60\x98\x7b\xfa\x48\x77\x62\x3d\x3d\x05\x14\xaf\x26\x27\x11\x99\
\xdb\xcc\xc6\xde\xd5\xab\x81\x81\x53\x0e\x31\x0e\xbe\x77\x76\x65\
\x53\xf9\x13\xf2\xae\x7f\xed\x4f\x82\x0a\x23\x88\x26\x97\x3e\x66\
\xb5\x75\xaf\x44\x41\x2d\x73\xa9\x34\x4a\x2d\xef\x5d\x70\x01\x12\
\xc6\xf7\xd3\xa5\x11\x14\x56\xae\xa9\x04\x21\x24\x00\x09\x27\xa5\
\x54\xe3\x73\x5b\x0c\xeb\x86\x65\x33\xc6\x9f\x78\xc3\x9a\xf4\x1e\
\xd5\x67\x1f\x2b\x41\x81\x2c\x9a\x34\x8d\x92\xfd\x05\xab\x37\x05\
\x1f\x8d\x21\x42\x33\x07\xd1\x87\x51\xbf\xec\x07\xce\xb3\xca\xfc\
\x30\x8f\xf0\xde\x03\xc5\xc5\x7d\xa7\x14\xb6\x46\x6f\x28\x27\x57\
\x35\xb4\xf1\x45\x33\xf9\xb0\xeb\x72\x7e\x0d\x0d\x09\xc2\xb7\x88\
\xae\x31\x08\x85\x3d\x2a\xaa\x6c\xbd\xab\xa6\x4c\x63\xbe\x51\x2a\
\x88\xce\xec\xa2\xc2\x8e\x33\xf3\x33\x15\xba\xad\x12\x18\xf8\xa0\
\x8e\x27\x06\xcf\x60\x6a\xe3\x6a\xc7\x2e\xd7\xd2\x97\x1e\x12\x34\
\x90\x3c\x53\xb2\xba\x9b\xdd\xd7\x4f\xca\x98\xd2\xb4\x6c\x7c\x40\
\x8e\x83\x76\x45\xca\x57\xe5\x4c\xe8\x16\x71\x1e\x1e\x0d\x2d\x6f\
\x13\x2d\x87\x61\xfc\xd5\x46\x94\xa6\x2e\x38\x40\x41\x9c\x02\xcc\
\xc2\xfb\xd5\x63\xc4\x30\xb8\x62\x13\x19\x88\x46\x3b\xd9\x63\x24\
\x8d\x7a\xd0\xe3\xf1\xb8\x39\x22\x47\x11\x4a\x1d\x96\xe1\xae\x34\
\x1c\x85\x66\xf2\xff\x00\xad\x62\xfc\x8c\x06\x29\x60\x70\x2e\xe2\
\xea\x42\x80\x54\xf7\xa0\xbd\x67\xe0\xb1\xf8\x48\xdb\x34\x89\x2b\
\x39\xd0\x35\xc6\x95\x64\x62\xb8\x7b\x35\x97\x18\xc2\xfb\x66\x8c\
\x80\x3d\xcd\x13\x94\xa8\xeb\xd7\x5e\x83\x36\x1f\xff\x00\xe7\x61\
\xbf\xef\xfe\xd4\x48\x82\x40\x7c\x19\xe2\x97\x5d\x91\xbf\x9a\x50\
\xc3\x31\x65\xcb\x6c\xc0\x11\xae\xcc\x3a\x54\xab\x44\xf6\x2b\x2a\
\xad\xfe\x16\xdc\x52\xb5\x56\x01\xb4\x20\xd3\x16\x57\x1a\x83\xaf\
\x5b\x6b\xf5\xa8\x05\xf1\x29\x19\xb2\xc3\x23\x0e\x6c\x45\x85\x58\
\x56\x0c\xa1\xd0\x92\xad\xb1\xa5\x09\x5f\xf1\x93\xf3\xa9\x8e\xc0\
\xdd\x0e\x42\x4e\xb6\xd5\x4f\xb8\xa6\x2a\x78\x34\x6a\x69\x2a\x7c\
\xd9\x48\xca\xc7\x61\x7b\x86\xf6\x34\x52\xc8\x21\x8f\x3c\x8a\xc7\
\x5b\x58\x69\x5a\xd0\x70\x37\xab\x38\x4c\x51\x8c\x64\x71\x99\x7d\
\xf5\x15\x4e\x09\x23\x99\x43\x44\xc2\xfc\xd5\x8e\xa2\x9b\x91\xaf\
\xcb\x5e\x57\xa6\x54\xd4\x56\x05\x43\xa1\xba\x9a\x35\x70\x45\x67\
\x60\x24\x29\x88\x08\x76\x6d\x08\xab\xbb\x1b\x8a\xdc\xac\xd8\x63\
\x1d\x28\x76\xa8\x56\xbd\x71\x34\xa0\xb9\x6c\xe0\x1f\x4d\xea\x24\
\x9a\x14\x62\xae\xe7\x30\xdc\x01\x45\x7b\x29\xbf\x4a\xce\xc2\xc6\
\x65\x66\x67\xf3\x2a\xeb\x6e\x67\xde\x8b\x52\xea\x31\x75\x32\x95\
\xb1\x7b\x65\xec\x05\x2d\x8d\xc9\x00\xd4\xa1\x24\x90\x4f\xf6\xa0\
\xcc\x10\x1b\xee\x0d\x15\x25\x88\x28\x00\x3b\xd1\x03\xad\x21\x49\
\xce\x0f\x7a\x60\x27\xa5\x11\x2c\x42\xc2\xfa\xec\x74\x3e\xc6\xb1\
\x31\x89\xe1\x62\x5e\x3f\xc2\x48\xad\x68\xc9\x3e\xe4\xda\xb2\xf8\
\xc3\x2b\x71\x19\x8a\x9b\x8c\xd4\x72\xf0\xf1\xf5\x5e\xf5\x73\x07\
\xc4\x0c\x71\x78\x32\xa0\x74\xd8\x1e\x6a\x2a\x8d\xcd\x1e\x1f\x0f\
\x34\xe6\xd1\xa5\xc7\x53\xb0\xac\x4b\x67\x8d\x55\x9c\xd0\xb3\x08\
\xcb\xe4\xcb\x62\xca\xfa\x66\x27\x61\x7e\x95\x0e\xcc\x65\x21\xd4\
\x8b\xeb\x73\xce\x87\x89\x70\xb7\x91\x51\xc4\xd1\x99\x32\x65\x65\
\x23\x47\xb6\xc2\xfc\xa9\x38\x61\xc4\x70\xa9\x67\xc2\x46\xd1\x74\
\x12\xde\xfe\xd7\xa7\x6e\xf6\x16\x01\xab\xfc\x3e\x6c\xf1\xf8\x2d\
\x6b\xaf\xa3\xbf\x6a\xce\x8f\x11\x85\x90\xd9\xcb\xe1\xdf\xa3\xa9\
\xb5\x19\x93\x0c\x84\x13\x8c\x86\xfc\x80\x7b\x7e\xb6\xad\x4a\x9a\
\xc0\xd8\x82\x37\x06\x96\xf8\x6c\x33\x36\x6c\xac\xb7\x37\x21\x4e\
\x87\xf8\xa4\x47\xc4\xb0\xf2\x58\x11\x63\xd4\x87\xf3\x7b\x1b\x5a\
\x8f\xed\x98\x5b\x5e\xed\xed\x63\x7f\xa5\xab\x5b\x19\xca\x73\xaa\
\x93\xd3\x4b\x6f\x51\xcb\xb8\xa5\x2e\x26\x16\xbd\x96\x40\x06\xec\
\x08\x36\xa1\x97\x13\x02\x59\xbc\x4c\xf7\xdc\x2e\xf5\x6c\x47\x0a\
\x25\x22\x90\xb3\xc6\xe2\xe8\x49\x1c\xef\xb8\xa3\x92\x68\x61\x8b\
\xc4\x95\xec\x0e\xca\x06\xa6\xad\x46\xb1\x5f\x9f\x6a\x8b\x80\x6f\
\xcf\xa9\xac\xe9\xb8\xab\xe6\xfb\x98\xa3\x55\xfe\xa1\x72\x6b\x93\
\x8a\x3d\x8f\x89\x04\x6f\xd2\xde\x5f\xd2\x8f\xd4\x39\x57\xa5\x62\
\xaf\x98\x12\x3b\x8a\x57\x15\x7f\x17\x86\xc8\x49\xb9\x0e\xb9\x6f\
\xb8\xaa\xe7\x89\x86\x5b\x7d\x95\x07\xfd\x66\x93\x8a\x9e\x4c\x51\
\x16\x50\xaa\xa3\xd2\x0e\x9e\xf4\x5e\x53\xe2\x92\xaa\x85\x2c\x40\
\x1b\xdc\x55\x9c\x16\x08\xe2\xe4\x13\xb0\x0b\x1c\x77\x11\xe6\x1a\
\x7b\xf7\xa8\xc2\x44\xcf\x32\xf8\x56\x77\xbe\x83\x5b\x03\xdc\xed\
\x5a\xcf\x00\x8e\x34\x89\x1a\xe2\x35\x03\xdc\xf3\x35\x9e\x3c\x74\
\xda\x4a\x43\x87\x87\x58\xe2\x52\xc3\xe3\x71\x72\x6b\x9a\x42\x56\
\xed\x1a\xb2\x5b\x62\xa2\x89\xa3\x76\xdc\xfd\x68\x5a\x0d\x34\x6a\
\xdb\x2c\xfc\x7e\x13\xec\xee\x31\x10\x8c\xd1\x13\xa8\xfc\x3d\xab\
\x23\xec\xf3\x41\xc4\x55\xa1\x52\x54\xb5\xd5\x80\xe5\x5e\x9d\x15\
\xd4\xda\xde\x52\x3c\xc1\xb6\x34\xac\x46\x02\x27\xb9\x86\x66\x86\
\xff\x00\x0e\xb9\x6b\x1c\xb8\x6f\x8d\x4e\x4c\xd6\xc6\xe2\x61\x93\
\x2a\x38\x65\x04\x80\x18\x66\x00\x51\xe2\x78\xba\x43\x87\x0e\xf8\
\x54\x66\x7f\x48\x0c\x45\xfb\xd5\x96\xe1\x98\x60\xb6\x33\x4a\x48\
\xd6\xe1\x45\xa9\x27\x85\xc5\x36\x30\x93\x3a\xe5\x85\x00\x0a\xea\
\x4d\x8f\x5f\xad\x59\xc9\x6c\x26\x2e\x29\x24\x90\x1c\x40\x82\x38\
\xcb\x9c\xaa\x2e\x58\x58\x55\x77\x79\x31\x58\x90\x65\x62\xd9\x88\
\xf9\x55\xfc\x47\x0f\xf0\xd0\x49\xf6\x88\x88\x26\xcd\xf7\x66\xdd\
\xb4\xe4\x7b\xd4\xe0\x61\xc8\x59\xe0\x88\xc8\xdc\x98\x0d\x17\xeb\
\x46\x5f\x29\xd8\x8c\x06\x10\x78\x4c\xf2\xbe\x53\xb0\x55\xf8\x3f\
\xbd\x59\xb0\x0a\x15\x6f\x65\x16\xd6\x8e\x08\x7c\x28\x8a\x12\x0b\
\x33\x5d\x88\xda\x88\xad\x6a\x4c\x66\xd2\x80\xa9\x02\x8e\xd5\xda\
\x52\xb4\x36\xee\x2a\x00\xd6\x8c\x81\x5c\x05\x43\x55\xb8\xc2\x16\
\xe1\xaa\x40\xf4\x49\x73\xf3\x15\x4b\x84\x3a\xc7\x8c\x57\x7f\x40\
\x23\x31\xb6\xdc\xbf\x5b\x56\x9c\xf2\xc5\x10\x31\x3a\x97\x2e\x2c\
\xca\x0e\xd5\x56\x6e\x1b\x30\xb1\x8c\x2b\x20\x20\xf8\x69\xea\x3d\
\x2f\x59\xb3\xbd\x6a\x78\xb5\x20\x21\xf2\x8d\xaf\xa3\x6e\x0f\xb5\
\x09\x17\xde\xe6\xb3\x44\xb8\xcc\x2c\x8c\x4a\xba\xdc\xea\xac\x34\
\x26\xad\xc5\xc4\x70\xf2\x1f\xbd\x8d\xa3\x3d\x57\x51\xf4\xa6\x72\
\x8b\x0e\x39\x86\xda\xfb\xf2\xae\xd3\x0f\x19\x92\x43\x61\x63\xbf\
\xc5\xda\xa7\xc6\xc3\x88\x5a\x54\xc4\x26\xd6\x01\xae\x0d\xea\xa6\
\x27\xec\xeb\x08\x92\x4c\x6a\x34\xcc\x2e\x11\x8f\x3a\xad\x4a\x32\
\xf0\xfc\x1c\xb3\x45\x23\x46\xf2\x85\x16\x32\x33\xd8\xad\xba\x8e\
\x74\xc9\xdb\x0e\xcf\x98\x60\xa0\x37\x3a\x0c\xa6\xf6\xfa\xd0\x1b\
\xf2\xb1\xf9\xd0\xb6\xe7\xb5\x85\x73\x68\x77\xc3\x5b\x5c\x0e\x1c\
\x7f\xd1\xfd\xe9\x72\x61\xb8\x74\xb7\xfb\xb7\x89\x89\xff\x00\x96\
\xd7\xfc\x8d\x75\x45\x81\x7c\xdd\x3f\x3a\x2e\x22\xcf\x0b\x81\x89\
\x30\xe3\x2d\xa6\x8b\x22\x9b\xfd\x76\xaa\x78\xac\x3e\x27\x07\x20\
\x12\x29\x53\xf0\xb0\x3a\x1f\x63\x5a\x20\xda\x98\xcb\xf6\x9c\x1c\
\xb8\x63\xa9\xca\x5a\x3d\x79\x8f\xf7\xf9\x51\x78\x4f\x8b\x41\xc3\
\x71\x5f\x6b\xc3\xb4\x52\xff\x00\x9b\x1a\xdd\x5b\x9b\x0a\x76\x6a\
\xc4\xc1\x4e\x70\xf8\x94\x94\x7c\x27\x5e\xe2\xb7\x4a\xab\xc2\xb3\
\xc5\xac\x6d\xff\x00\xd7\xb1\xab\x8d\xd8\xab\x81\xa3\x53\x4b\x5d\
\xef\x44\x34\xad\xa3\x95\xc6\x5c\xac\x2e\x0e\xe2\x95\x89\x86\x49\
\x2d\xe1\xc8\xce\xa3\xe1\x66\xd4\x7b\x57\x5e\xa4\x1e\x86\xa1\x83\
\xc1\x26\x1e\x4c\x3e\x52\x80\xc8\xbe\xa1\xb3\x54\x4f\x86\x60\x33\
\xc2\xec\xd6\xd4\x83\xb8\xa2\x46\x25\xc3\x05\xbb\x01\x60\x46\xf5\
\x6f\x0b\x0b\xe6\x0f\x25\xd4\x74\xe7\x5a\x93\x55\xe8\x1c\x17\x14\
\xf9\xb3\x4f\xe6\x45\xd0\x13\xb8\xad\x65\x65\x65\xcc\xac\x18\x1e\
\x62\xb3\xe7\x84\x36\x28\x22\x79\x55\x85\xfb\x0a\x38\xe2\x9e\x07\
\xcd\x13\x09\x07\x30\x39\xfc\xab\x7c\x76\x33\x57\x74\xb5\x0b\xca\
\xaa\xf9\x0c\x8b\x7e\xe3\x6a\x1c\xd7\x50\xc0\x11\x7e\x47\x71\x5d\
\x98\x2c\xb7\x36\x02\x40\x06\x63\xc8\x8e\x47\xde\xb5\xa0\xac\x76\
\x20\x78\x5e\x1a\x4a\xad\x9b\x72\xbc\xa9\x1e\x22\xc4\xaa\x60\x91\
\xb3\x11\x67\xb8\xa9\xe2\x51\xa2\x15\x65\x00\x16\xbd\xc7\x2a\x3e\
\x1c\x88\x22\xf1\x48\x05\xb3\x5b\x5e\x55\x9f\xa5\x6a\x26\x49\x23\
\x0f\x1f\x31\xa8\xe9\x49\x9d\x6f\xa8\x1a\x93\x46\xde\x6f\x32\xd9\
\x5c\x6c\x6d\x6f\x91\xed\x49\xc6\x49\x29\x4f\x0d\x21\x65\x6f\x88\
\x8d\x40\x1d\xa9\xa1\x31\x80\x07\x23\x7a\x23\xf9\xf2\xa4\xe1\x6f\
\xe0\xa5\x81\xf2\x5c\x1f\xf7\xf3\xa6\xa1\x0c\xc4\xd1\x10\xd0\x88\
\xa3\x69\x0e\xbe\x1a\x96\xac\x29\x1b\x33\x96\x3b\x93\x7a\xd6\xe2\
\x72\x78\x7c\x39\xb6\xbc\x87\x27\xef\x55\xf8\x54\x28\xb0\xfd\xa1\
\xd4\x16\x63\x65\xbf\x2a\xcf\x2e\xee\x35\x3a\x80\xc1\x60\x4c\x8a\
\x24\x95\xb2\x2d\xf4\x5b\x6a\x6b\x44\x65\x54\x08\x8b\x95\x06\xc2\
\x96\xce\x49\xb9\x35\x19\xe9\x9d\x0b\x74\xc0\xd6\xda\xdf\x31\x43\
\x2a\x89\x2e\xca\xc5\x24\xfc\x43\x9f\xbd\x0e\x6d\x2a\x03\x54\x15\
\xe7\x52\x3c\x93\x46\x3b\x15\x39\x4f\xc8\x8a\x57\x85\x87\xfc\x33\
\x7f\xfe\xe6\xae\xbb\x23\xa6\x57\x19\x87\x6e\x55\x56\x48\x98\x5c\
\xa0\x2c\x3f\x31\x45\x87\x49\x38\x6c\x16\x6c\xc2\x19\x2f\xd7\xc5\
\x34\x66\x34\x90\x91\xe2\xba\x5f\x6b\xd8\x8f\x9d\xa8\x0b\xdb\x90\
\x34\x25\xbb\x0a\x3a\x21\x6f\xb4\x61\x58\x30\x24\x03\xb1\x1b\x1a\
\xbb\x82\xe2\x10\xc8\xbe\x1c\xa1\x22\x70\x3d\x45\x46\x53\x55\xc4\
\xc4\xc7\xe1\xbd\x99\x3a\x1a\x19\x70\x0e\xcb\x9f\x0f\xf7\x8b\xd3\
\x9d\x53\x67\x87\xff\x00\x56\xf1\x73\xc1\x14\x82\x68\xd9\x64\xcc\
\x9a\xaa\x6c\x75\xb5\x67\xcf\x3b\xca\xe5\xdc\xdd\x8d\x48\x56\x55\
\x55\x70\x54\x32\xb2\x6a\x39\xdc\x11\xfb\xd5\x76\x24\x12\x0e\x96\
\xa3\x95\xaa\x41\x86\xa2\x07\xa5\x29\x4e\xb4\x6b\x59\x26\x03\xf9\
\xd5\x84\x8c\xbc\x82\x30\xb9\xb2\x90\x15\x6d\xa3\x36\xe4\x9e\xb6\
\xa4\x44\x06\x60\x5b\xd2\x35\x3e\xd5\xab\xc2\xa3\x29\x84\x18\x89\
\x57\xef\x24\x62\xc9\x71\xa8\x07\x9d\x6f\x8c\xd1\x6a\xd4\x00\xe1\
\xe1\x11\x83\xe6\xb7\x9c\x8e\xbd\x2a\x43\x03\x4b\x1b\xd1\xa0\xd6\
\xba\x30\xe2\x0f\x4a\x13\xa1\xa6\x69\x7d\xea\x1b\x28\xd4\x91\xf2\
\xab\x11\x7a\x54\x15\xa6\x05\x0c\x2e\x2a\x4a\x91\x52\x21\x85\x01\
\x8b\xef\x3c\x48\xd8\x23\x9f\x50\x23\x46\xab\x05\x6f\x49\xc5\x32\
\xc7\x26\x42\xe7\x35\xb5\x55\x17\xb7\xbd\x18\x8a\x92\x39\xe5\x53\
\x13\xa2\xc4\x81\x86\x63\x9a\xf9\xbd\xa9\xb9\x54\x28\x44\x16\x45\
\xd8\x52\x92\x78\x90\xf9\xd2\x40\x4f\x33\x4d\x61\xa5\xc1\xb8\x3b\
\x1e\xb4\x17\x1b\x0a\x12\xdd\xab\xac\xc4\x5e\x84\x9a\x82\x49\xa8\
\x2d\x42\x4d\xe8\x6a\x38\x3c\xc2\x97\x8d\x96\x48\xa0\x46\x89\xca\
\xdd\x88\x36\xe7\x53\x7a\x5e\x30\xd8\x43\x21\x52\xd1\xa1\x39\x87\
\x7a\xaf\x8b\x15\x15\x99\x66\x56\x7b\x8d\x41\xd6\xb4\xa7\xbe\x72\
\x79\x31\xf2\x9e\xb7\xaa\x78\x89\x5b\x1b\x32\xaa\x29\x55\x1f\x11\
\xd6\xde\xf4\x79\x9f\x0a\xaa\x4b\x09\x62\x2d\xb1\x16\x3b\x72\xac\
\xc3\x56\x04\x8f\xcd\xb3\x0e\x8c\x2e\x3f\x3a\x07\xc3\x61\x5d\x73\
\x49\x87\x54\x1b\xdd\x5b\x2d\xff\x00\x9a\x4b\xe3\xa2\x0b\xf7\x71\
\x31\x6f\xeb\x3a\x55\x69\x25\x92\x46\x0e\xe4\x34\x92\x1b\x46\xa7\
\x6f\x7b\x74\xa6\xf2\x8b\x1d\x88\x8b\x0d\xe2\x59\x1e\x45\x54\x37\
\x16\x50\x48\xbe\xd7\xd7\x7a\xab\x26\x0b\x07\x23\x97\x79\xb1\x04\
\x9f\xe9\x5a\x6c\xac\x01\xc8\xa6\xea\x0e\xe7\xe2\x3c\xc9\xa5\xb3\
\xa8\x6c\xa4\xd8\xfe\x95\xce\xb4\xe8\xf0\xef\x31\xb4\x71\x13\xa6\
\xe2\x82\x58\x65\xc3\xf9\x64\x52\xa4\xeb\xad\x6b\xb8\x1a\xa2\x59\
\x63\x4f\xa0\xee\x6b\x3f\x1f\x30\x96\x50\x17\xd0\x82\xcb\x7f\xd6\
\xab\xc7\x04\xaa\xeb\x7d\xcd\x72\xe9\xa7\x4d\xaa\x6b\xbb\xf4\xa0\
\xba\x9d\x82\x36\xc5\xa0\x3b\x31\xca\x7e\x7a\x7e\xf4\xa0\x2f\xd7\
\xe9\x44\xa2\xcd\x71\x7d\x3a\x5b\xf9\xa6\x26\x3e\x21\x04\x78\x99\
\x10\x03\x65\x72\x07\xb5\x3b\x05\x8a\x9f\x0a\xf7\x8a\x4c\xb9\xbe\
\x13\xb1\xad\x2c\x46\x1f\x0b\x8b\x9c\xcd\x2f\x8b\x1b\x9f\x51\x40\
\x0e\x63\xd7\x5a\x98\xb0\xbc\x3e\x16\x0c\xb0\x34\xac\x07\xaa\x56\
\xd3\xe9\x58\x9c\x6c\xa7\x4a\x8b\x8b\x21\xb0\x9b\x08\x84\xfc\x45\
\x09\x1f\x96\xd4\xf4\xc6\xf0\xf7\x5b\xf8\xb2\xc6\x7a\x32\x66\xfd\
\x2a\x5a\x3c\x24\x82\xcf\x83\x8c\x5c\xef\x1d\xd4\xd4\x45\xc1\xe0\
\xc4\x4b\x9b\x0f\x23\x28\x06\xe5\x1f\x6f\x60\x6b\x73\xf4\x0f\x89\
\x23\x98\x06\x86\x65\x65\x3d\x74\x35\x6d\x30\xf8\x44\x8d\x5e\x59\
\x90\xdf\xab\x7f\x15\x9b\x8b\x86\x68\x9f\x2c\x91\x14\x03\x61\x6d\
\x00\xa4\xd3\xb9\xf0\x36\x86\x33\x05\x18\xca\xb3\x10\x07\x25\x8f\
\x7f\x9d\x70\xc7\xe0\xd5\x0c\x85\xa4\xb2\xf3\x22\xb2\xe0\xc3\x4b\
\x2a\x66\x58\xe4\x61\x7b\x79\x56\xac\x37\x09\x92\x58\x8c\x52\xc8\
\x89\x73\x71\xe6\xd4\x7b\x8a\xd6\xf2\xf9\x06\x45\xa8\x31\xf0\x29\
\x2d\x20\x92\xf2\x0b\xdf\xa0\xa7\x47\x8f\xc0\xb7\xfc\xe6\x07\xba\
\x55\x01\xc3\x8c\x91\xae\x7c\x42\x29\x5f\x21\xb0\x3a\xda\xa3\xff\
\x00\x4d\x60\x72\xa6\x22\x33\x61\xbe\xa2\xf5\x4b\xc9\x64\x6c\xa1\
\x59\x17\x34\x52\x2c\x83\xaa\x9b\xd7\x5f\x75\x22\xe0\xee\x0d\x62\
\x9c\x16\x36\x23\x9e\x3f\x31\xe6\x63\x6b\xfd\x69\x90\xf1\x4c\x44\
\x44\x47\x89\x84\x32\xed\x98\x8b\x1f\xad\x3f\xaf\xe8\xc6\x89\x86\
\x03\xae\x43\xa7\xf5\x69\x53\x9e\x35\x5c\xaa\x00\x1d\xa9\x11\x62\
\xb0\xf8\x96\xc8\xb2\x80\xc0\x5e\xcd\xa7\xe7\x4f\x58\xd9\x45\xd5\
\x2e\x3a\x8d\x69\x9f\xf1\x09\x1e\xe3\x45\x35\x21\x9f\x36\x6b\xdb\
\xe7\x4b\xcf\x6a\x8c\xf7\xab\x40\xda\x45\x8a\x47\x32\x12\x16\x42\
\x18\x10\x36\x36\xb5\xa8\x23\x99\x1a\x7b\x05\x74\xcd\xe9\xcd\xb3\
\x51\x26\x76\xf4\x82\x7e\x57\xa5\x62\xe4\x48\x94\xb4\xee\x3b\x28\
\x3a\xdf\x95\x45\x5f\xfc\x44\xf7\x58\xa0\xda\xc3\x31\xf7\x34\xdc\
\x3c\x80\x60\x60\x04\x58\xe4\xe5\xef\x59\x53\x62\xa6\x93\x16\x44\
\x90\xab\x48\x41\x2a\xf9\xb4\xb7\x5a\xd2\x0d\x96\x18\x45\xf3\x11\
\x18\xcd\xde\xb3\x2e\xdb\x4e\x74\x66\x75\xfc\x55\x19\xd6\xfa\x30\
\xa5\xdd\x4d\xed\x6a\x02\x7c\xd6\x14\xea\x58\xcd\x6a\xe7\x63\x94\
\xd8\xda\x96\x49\xa8\x6b\xe5\x34\xa3\x81\xb0\x16\xe9\x50\x5c\x82\
\x08\xde\x96\xad\x71\x53\x98\xdc\x5e\xa4\x99\x60\x8e\x7f\x31\x39\
\x1f\xa8\xd8\xfb\xd5\x3c\x42\x3c\x4d\x66\x5f\xa5\x5d\x06\xa5\xb2\
\xb2\xe5\x71\x99\x7a\x1a\x2c\xd4\xce\x07\x9d\x33\x0f\x88\x78\x5a\
\xe8\x74\xe6\x0e\xd5\x38\xac\x2b\x20\x2f\x11\x2c\xbc\xc7\x31\x55\
\x43\x9b\xd8\xd6\x3c\x6b\xd6\xbc\x58\xb5\x99\x4d\x88\x70\x47\x99\
\x1b\x5f\xa5\xe8\x13\x3d\xf2\xc3\xe1\x15\xfe\xa1\x62\x2b\x2d\x49\
\x49\x33\xa1\xb1\x15\x7b\x01\x8a\x59\x49\x0e\x32\xbf\x23\xc8\xd6\
\xa5\xd6\x71\x63\xec\xf1\x6a\x71\x11\xa3\x06\x3a\xc8\xba\x65\xfe\
\x28\x26\xe1\x8b\x7b\xc3\x25\xbf\xa5\xbf\x9a\xb1\x11\x21\xec\x47\
\xb8\xeb\x54\x9a\x79\x17\x34\x71\xb9\xc8\x09\xb7\xb5\x37\x14\xd0\
\x47\x10\x17\x59\x2c\x56\xfa\x80\x77\xfe\xd5\xae\x1b\x3c\x51\xb5\
\xad\x74\x1a\x74\xac\x85\x37\xad\x4c\x3b\xe7\xc2\xc6\xcb\xf0\x8c\
\xa6\x9e\x22\x9a\xa3\x5a\x25\xb9\x24\x03\x60\x37\x22\x97\x7b\x0a\
\x6c\x43\x2a\x81\x5a\x80\x59\x53\xf0\x0a\x90\x00\xf4\x80\x3d\xab\
\xaa\x6c\x6b\x49\x04\x03\xb8\x07\xdc\x54\x65\x53\xf0\x8f\xa5\x15\
\xab\x97\x46\x07\xa1\xa9\x05\x97\xc3\x06\x46\x19\x02\x8b\xde\xd6\
\xaa\xd8\x51\x6c\x32\xb1\xb8\x67\x62\x49\xe6\x69\x8b\x08\xf1\xe5\
\xf1\x49\x65\x46\xd0\x13\xa5\x11\x60\xcc\x06\x5b\x0e\x5c\xa8\x40\
\x7c\xb2\x0c\xb2\x8c\xc2\xd6\xb9\x1a\x8a\xa4\x9e\x3c\x38\xaf\xb3\
\x29\x04\x16\xb0\xb8\xd3\xde\xae\xb5\x81\xb7\x3e\x82\xab\xe3\xd5\
\xef\x0c\xab\xa1\x0d\x97\xde\xb3\x4c\x1b\x95\x0f\xa6\x5d\x09\x2b\
\x7e\x54\x82\xc4\x9b\x20\xbd\xb7\x35\x62\x45\x01\x99\x40\xd2\xfb\
\x52\x9b\xca\xa4\xa8\x1a\x50\x00\x7a\x8a\x8a\x30\xa0\x0d\x2e\x7b\
\xd0\xb0\x00\x5c\x9d\x2a\x3a\x12\x2b\x90\x90\x74\xf9\xd4\xfb\x6b\
\x50\xdb\x5b\xad\x45\x25\xcb\x02\xa2\xd6\xe7\x61\x6b\xd0\xe2\x23\
\x59\xa1\x11\xb3\xe5\xb3\x5c\x1b\x5e\xa4\xda\xdd\x85\x42\x92\x58\
\x00\x37\xa9\x13\x26\x0b\x0e\xb1\xb4\x9e\x24\x96\x51\xcc\x00\x0d\
\x50\x9c\x5b\x13\xf6\x8f\x13\x33\x0f\x4d\x86\x8b\xf5\xab\x5c\x52\
\x6c\xcf\xe0\xaf\xa5\x37\x23\x99\xaa\x65\xac\x6b\x9d\xcd\xe8\xc0\
\xe7\x62\x7d\x63\xfe\x91\x40\xcc\x11\xb4\xdc\x8d\x6f\x44\xc0\x36\
\xbb\x1e\x44\x0a\xe8\xa1\x69\xb1\x48\xa4\x8b\x31\x02\xb3\x49\xf3\
\x4f\x3c\xa9\x95\xa4\x25\x7f\x08\xa4\x53\x0d\x71\x53\xae\x97\xf9\
\xd3\xa8\xba\x90\x28\x97\x53\x63\xa1\xa2\x0a\x4e\x9c\xea\x40\xb5\
\x10\x5a\x24\x0d\x94\x92\x01\xb1\xd4\x73\xa3\xca\x4a\x82\x96\xf9\
\xd4\x8b\x17\xa9\xa6\x15\xe9\x50\x16\xac\x41\x15\xa9\x87\x4f\x0b\
\x0e\xa9\xf1\x37\x99\xbf\x6a\xa5\x84\x88\x49\x89\x44\xd8\x13\xad\
\x68\xb0\xbb\x93\xca\xf5\xbe\x30\x5a\xe5\x77\xcb\x96\xf7\x1d\x0e\
\xa3\xf3\xa8\x6c\x84\xdd\xa1\x88\x91\xcc\xad\x48\xf6\xae\x22\xf5\
\xa6\x50\x5d\xed\x60\xc4\x0e\x83\x4a\x53\x13\x7a\x32\x2d\x42\xd5\
\x17\x06\x3e\x21\x1f\x0c\x9a\x8e\xcd\xd2\x96\x58\xe7\x35\x33\x06\
\x31\xa2\x25\xb3\x3c\x82\xd7\xed\xad\x74\xa6\xf2\x16\x1d\x68\x42\
\x0c\x46\xa0\xda\x8f\xc5\x63\xeb\xb3\x8f\xea\x17\xa4\x13\x5d\x9a\
\xad\x4e\x97\x0b\x81\x94\xdc\xc1\x90\xf5\x8c\xd8\x0f\x95\x25\x70\
\x53\xc4\x6f\x85\xc7\x01\x6d\x81\x25\x69\xad\x73\xb3\x11\x50\xb9\
\xc1\xd4\x82\x28\xe9\x25\x5f\x8c\xaf\x34\x9b\xd8\x83\x4b\x9b\x1b\
\xc4\xd1\xb2\xb4\x45\x4f\x68\xe9\xd7\xa2\x59\x64\x51\x65\x91\x80\
\xe8\x1a\xac\xff\x00\xa9\x49\xa6\xe2\x73\xe8\x7c\x73\x7d\xbc\xa4\
\x52\x8e\x0f\x1c\xce\x41\x85\xef\xcc\x9a\xd1\x92\x57\x6f\x29\x76\
\x6f\x73\xb5\x43\x3d\x97\x56\x36\x14\x7e\x67\xf4\xea\xb4\x78\x27\
\x45\x47\x9d\xd0\x04\x24\x35\xbc\xc7\x29\xdf\x6d\xaa\xd6\x21\x46\
\x7f\x2e\xd6\xf2\xf7\x1c\xa9\x62\x51\x7d\x14\x91\xb1\xa8\x88\xe4\
\x2f\x16\x62\x46\x50\xe9\x7e\x5d\x45\x33\x27\x81\x39\x0d\xaf\x7b\
\x1a\x94\x52\x0d\xc9\xbd\x0e\x6a\x82\xd7\x39\x41\xf7\xa9\x19\x7a\
\xeb\xd2\x55\x9b\x35\x81\xd0\x75\xa3\x2c\xdf\x87\xe9\x56\xac\x1d\
\xec\x6f\xf5\xae\xbf\x2a\x58\x25\xb4\x0b\xda\x89\x4a\x33\x65\x59\
\x55\x98\x7c\x20\xeb\x56\xa1\x23\xdc\x5e\xf4\x79\xcd\x27\x55\x3b\
\x7b\x8a\x95\x6b\x8a\x75\x1a\x1a\xc6\xe3\x43\x49\xc5\x61\xd2\x62\
\x5d\x3c\xaf\xd3\x91\xa2\x26\xc2\xf5\x19\x88\x3b\x1a\x92\x83\x02\
\xad\x94\x82\x08\xe4\x6b\x81\xf3\x5c\x68\x6a\xf4\xea\xb3\x80\x18\
\x59\x86\xcd\x54\xa5\x56\x8e\x42\xae\xba\x8e\x87\x7a\xc5\x8d\x2e\
\xe0\xb1\x6c\x06\x49\x2e\xc3\x61\xd4\x55\xdc\x19\x5f\xb1\x21\x4c\
\xac\xba\x86\xd3\x9f\x7a\xc5\x8c\xd9\x89\xd7\x5a\xb9\x86\x9d\xe3\
\x7c\xc8\xd6\x63\xa1\xbe\xa1\xbb\x1f\xe6\x99\xc8\x58\xb9\x2e\x16\
\x27\x6c\xc8\xde\x19\x3c\xad\x71\x52\x15\xf0\x6a\x64\x57\x12\x27\
\xc4\xb6\xb5\x0f\xdb\x20\x24\x28\x56\x59\x0f\xfc\xb6\x3f\xa1\xe7\
\x51\x36\x22\x16\x56\x5b\x3d\xd9\x6d\x62\x2b\x5d\x33\xda\xe4\x6c\
\xae\xe0\xc7\x72\xac\x34\xa7\xa5\xf9\x8d\x6a\xa6\x15\x55\xb0\x28\
\x14\x58\xe5\x20\x1e\x60\xd3\xb8\x79\x90\xc0\xa5\xe5\x62\x0e\xc0\
\x80\x6b\x70\x2c\x2d\x10\x17\xa8\x19\xbf\x1c\x7f\xf6\x1f\xe6\x8c\
\x1b\x0f\x30\x00\x7e\x25\x3a\x7c\xfa\x56\x83\x80\xa9\x2b\x70\x45\
\x15\xba\xd4\xd8\xf4\xad\x05\x59\x62\x65\xfb\xc3\x72\xa0\x59\xc0\
\xdc\x8e\xbf\x2b\xd7\x10\xac\xb7\x52\x19\x7a\x8a\xb5\x1f\xac\x03\
\xb5\xf5\xaa\x78\x38\xb2\x89\x7f\xd6\x40\x17\xe5\x7a\xcd\x87\x5c\
\x14\x0d\x86\xf4\xa6\x2a\xd8\xd0\x83\xfe\x52\x93\xf3\xab\x25\x7b\
\x81\xef\xa5\x56\x96\x48\x53\x1b\xe2\x86\x04\x37\x95\xc8\x1a\x5c\
\xf3\xa2\x94\x30\xa0\x2b\x4d\x0f\x13\x1b\x2b\x8b\xf4\x3a\x57\x3a\
\xdb\x71\x59\xc4\x41\x45\x1b\x28\xa0\x74\x0c\x2d\x76\xfd\x69\xe4\
\x50\xb2\xd0\x88\xc8\x14\x69\x70\x6a\x08\xf3\x5c\x9b\xd3\x1a\x81\
\xad\x7a\x8c\x09\x1a\x5a\x82\x67\xf0\x30\xed\x27\xc4\x7c\xab\x4d\
\x51\x73\x6f\xf6\x2a\x87\x11\x97\xc5\x94\x85\xf4\x26\x8b\x47\x2b\
\x90\xab\x7b\xd0\x91\xda\x8a\xa2\xb9\x34\x1b\x0a\xe4\xcc\xac\x18\
\x1b\x11\xb1\xa2\xa9\xb5\x3a\x9c\x6c\x05\xe8\xe3\xb3\x0f\x28\x27\
\xf2\xa2\xcb\xe5\x3a\x5f\x4d\xa9\x9c\x3a\x13\x23\x14\xb5\x94\x6a\
\x5b\xa5\x32\x22\x63\xc2\xcf\x34\xb9\x51\x33\x13\xae\x9c\xaa\xf4\
\x3c\x3f\x28\x02\x59\x46\x9b\xe5\xd6\xaf\x22\x24\x71\x84\x41\x61\
\x6d\x7a\x9a\x12\x40\xd8\x5e\xb7\x38\xc6\x6f\x25\x77\xc0\x46\xc2\
\xf1\xcc\xca\xc3\xf1\x0d\xea\xab\xc0\xd0\x93\x1b\xea\x6f\xca\xb5\
\x14\xad\xaf\x7b\x54\xcb\x08\x9e\x1c\xbf\x16\xe8\x6d\xf9\x55\xf9\
\x83\x59\x16\xed\x5d\x96\xfc\xa9\xe5\x2c\x6c\x46\xb5\x19\x68\xc6\
\xb4\x7c\x2d\x7f\xe2\x4e\x9f\x01\xab\x65\x6d\xca\xf5\x5f\x87\x69\
\x8a\x02\xde\xa0\x54\x55\xb3\xbd\x33\xc6\x69\x2c\x2b\x81\xeb\x4d\
\x65\xa5\x95\xa4\x3a\xc2\xc4\xda\xf9\x45\xec\x39\xd5\x27\xc6\x81\
\x70\x60\x1f\xf7\x1a\xba\x2e\x0d\xc6\xf5\x9d\xc5\x54\x2e\x36\x50\
\x05\x80\x34\x72\xdc\x6a\x1b\x0b\xb6\x21\x84\xec\x02\xa4\x67\xc8\
\xa3\x99\xa9\x3b\xd4\xe1\xd1\x93\x09\x1a\xb0\x00\xea\x48\xf7\xa1\
\x7c\xdc\x80\xab\xe2\x76\xde\xd5\x07\xf2\xa1\x0c\xe4\x1b\x28\xa0\
\x05\x96\xf9\x86\x9d\xb9\x50\x70\xc9\x1e\x28\x80\x32\xbe\x5b\xec\
\x00\xb9\x35\x1e\x2c\x44\x5f\x39\x5e\xee\x84\x0a\x5c\xca\xb3\x2a\
\x82\xc1\x59\x4d\xd5\x88\xa9\x94\xcf\x24\x45\x24\x68\x54\x11\xa9\
\x04\xb7\xe5\x46\xa3\x58\xd8\x03\x7b\x83\xb1\x1b\x1a\x8b\xd2\x60\
\x41\x0a\x14\x59\x1d\xef\xca\xd6\x14\x46\xe0\xea\x08\xab\x56\x25\
\x9b\x29\xb8\x3b\xef\x52\x1e\xe3\x51\x4a\x66\xf3\x0b\xf2\xa6\x01\
\xa6\x62\x42\x8e\xa4\xd8\x55\xa9\x0c\xf4\xac\x54\xe6\x17\x8d\x86\
\xae\x01\x05\x4e\xd9\x7b\xd1\xea\x5f\x42\x51\x7f\x15\xac\xcd\xed\
\xd0\x54\xfd\xd2\x5e\xd1\xae\xbb\xe6\xd4\x9a\x12\x19\xae\x15\x97\
\x40\xe2\xe0\x57\x2f\x99\xb5\xa8\x6d\x40\xb0\xb2\x8d\x00\x14\x4a\
\x54\x0d\xef\x51\x12\xd9\x5b\x53\xbd\x49\x7f\xc3\xad\x05\xb3\x0b\
\x93\xf2\xa8\xd5\x45\xd7\x7e\x94\x83\x12\xeb\x20\x20\xea\x75\xa0\
\x91\x62\x86\xee\xca\x0c\x4e\x75\x36\xb9\x53\x5c\xa4\xde\xe7\x7f\
\xd2\x88\xe5\x74\x68\xdf\xd2\xfb\xf6\xa9\x07\x0d\x3f\x8d\x19\xd4\
\xe7\x8c\x6a\x4f\xc4\x3a\xd1\x0b\xdf\xcb\x54\x07\x8b\x83\xc4\x66\
\x2b\xdb\x5d\x88\xa6\xcf\x8d\x56\x8b\x2c\x28\xca\xcd\x6b\x92\x76\
\xf6\xac\xfe\xbf\xa9\x6a\xe6\xfa\xd1\x66\x14\xac\x3c\xab\x88\x4e\
\x62\x45\x1e\x60\x79\xf7\xa2\xb9\xad\x6a\x15\xea\x64\x55\x95\x32\
\x3f\xc8\xf4\xa0\xa9\x53\x6a\xb5\x62\xa4\x8a\xd1\x49\x95\xfe\x47\
\xad\x10\x6b\x55\xa9\x51\x66\x4c\x87\xd4\x3d\x26\xa8\x6a\xa4\x83\
\xc8\xd8\xd1\x61\x58\xba\xc9\x1e\x47\x06\xdc\x88\xdc\x51\x0f\x17\
\x25\x83\x24\xd6\xda\xfa\x30\xaa\xea\xda\xd3\x15\xaa\xd4\xb1\x0f\
\x15\x38\x7b\xc6\x20\x36\x3b\x86\x3b\x1a\xd2\xe1\xb8\xc8\x64\xc1\
\xc6\x58\x14\xb1\x23\xa8\x15\x98\x72\x3d\xbc\x6f\x0c\xdc\x69\x9b\
\x7f\x95\x16\x09\x44\x73\x4d\x02\x9b\xa6\x50\xc0\x74\x35\xa9\x6e\
\xb3\x63\xd0\xae\xa9\x98\x10\xc0\xf3\x14\x71\x9a\xc7\xc2\xce\xf0\
\xb5\xc3\x58\x74\x3b\x1a\xd3\xc2\xcc\x93\x26\x75\x20\x11\xea\x17\
\xda\xba\xca\xcd\x8b\x08\x0a\xd8\x25\xad\xf8\x4f\x2f\x63\xca\x8d\
\x5d\x49\xb1\xf2\xb0\xf8\x5b\x4a\x5a\x3a\xdf\x9f\xbd\x14\xae\xa6\
\xca\x40\x61\xdf\x5a\xdb\x26\x01\x67\x06\xd6\xd6\x90\xb1\xa1\x8d\
\x6c\x0e\xbd\x34\xbe\xb4\xc3\x1a\x15\x39\x4b\x2d\xc7\x23\xa7\xd2\
\x82\x72\x21\xc3\x31\x5d\x32\x2d\x85\x49\x9d\x8e\x25\xa5\x31\x86\
\xba\x2f\xe6\x6a\x9e\xd2\x18\xed\xe5\x7f\x2d\xaa\xc3\x1b\x9d\x69\
\x2f\xa1\xcd\xcc\x57\x1a\xdc\x00\x6c\xf1\xab\x5f\x5b\x59\xbd\xc6\
\x94\xc8\x71\x0f\x19\xb1\xf3\x2f\x43\x4b\x72\xa1\x09\x1b\x2b\xb5\
\xfe\x7a\x8f\xd6\x95\x09\x25\x0d\xce\xc6\x8d\x2d\x28\xa4\x49\x41\
\xc8\x6c\x7f\x09\xa2\x61\xae\xa2\xb3\x01\x21\xb4\x36\xa7\x47\x8a\
\x99\x16\xd7\x0c\x3f\xa8\x5e\x9d\x18\xb0\xe3\x5a\x0c\xa5\x8e\x9f\
\xf8\xa5\x49\x8e\xd3\xfc\x95\xbf\xb9\xaa\xd3\xe2\x65\x94\x59\x98\
\x01\xd1\x45\x85\x1b\x14\x94\xcc\x74\xea\x23\xf0\xa2\x37\x27\xd4\
\xc3\xf4\xaa\x4f\xb5\x1d\xa8\x5c\x69\x58\xb7\x5b\x2a\xa6\x8b\x2d\
\x48\x5a\xce\x20\x5b\x5a\x9a\x92\x2a\x42\xd3\x89\x66\x38\xc9\x60\
\x06\xf5\xaa\xb0\xac\x71\x08\x80\xbd\xbd\x47\xa9\xa4\xf0\xa8\xaf\
\x39\x73\xff\x00\x2c\x5e\xae\x85\xae\xdc\x63\x16\x91\x94\x07\x00\
\xb5\xc5\xa9\x80\x0e\xab\xf5\x14\x52\x45\x98\xdc\x1b\x1a\x01\x11\
\xce\x14\xd3\x80\x56\x1b\x58\x37\x60\x2a\x51\x32\xa8\x1f\xbd\x31\
\x50\x01\x60\x2a\x72\xd3\x89\x47\x89\xc3\xaf\x8e\xa3\xd5\xea\xec\
\x6a\x99\x15\xb5\x94\x10\x55\x85\xd5\x85\x8d\x66\xe2\xe1\x68\x64\
\xca\x57\x36\x6f\x4d\xb9\x8a\x2c\x31\x5d\x09\x57\x0c\xa7\x50\x6e\
\x2b\x47\x47\x41\x22\xec\xc2\xfe\xd5\x9e\x79\xf9\x0e\x9f\x95\x5a\
\xe1\xf2\x00\x3c\x26\x3e\x56\xd5\x49\x3b\x1a\x22\xa6\x91\x6a\x12\
\x2f\x4e\x28\x79\xd0\x32\xda\xac\x05\x65\x25\xac\x06\xa6\xab\x49\
\x0f\x8b\x8b\x92\x72\x2f\x18\x6b\x8b\xfc\x56\xab\x8c\x0e\x5d\x3d\
\x4f\x75\x5d\x3e\xa7\xe5\x42\xea\x2d\x94\x68\xa0\x58\x0e\xd4\x58\
\x62\xbc\x9a\xb1\x26\x94\xe2\xd4\xf9\x01\x51\xa8\xbd\xb9\x8e\x74\
\xa3\xe6\xd2\xd6\xa8\x93\x6d\x49\xa1\x61\x4d\x22\xd4\x2c\x2b\x28\
\x86\x16\x3d\x8d\x0d\xc5\xed\x4e\x65\x14\x21\x2d\xb5\x18\x74\x9c\
\x54\xb2\x46\xd1\xaa\x3f\x86\x8e\x35\x7b\x52\xb1\x38\x86\x81\xc0\
\x8f\x11\xe3\xa1\x1a\x83\xad\x5a\x9a\x21\x2c\x26\x36\x6c\xba\xdc\
\x35\xb6\xa4\xe0\xe0\xf0\x26\x3e\x2d\xb3\x7c\x04\xec\x68\xba\x81\
\x0c\x92\xcd\xac\x78\x5b\x13\xbb\x35\xec\x29\xc4\xfd\xe7\xa8\xb1\
\x03\x56\xfe\x3a\x53\x24\x67\x6d\x19\x8f\xb7\x2a\x5a\x7a\x6d\x51\
\x71\x37\x37\x22\xf5\xc2\xd6\xb5\x87\xd2\xa6\xd5\xd5\x27\x0e\xd4\
\x5a\xda\x86\xe0\x1d\x6a\x0b\x12\x6c\x34\xa9\x08\xd4\x57\x1a\xea\
\x93\xaa\x49\xbd\x09\x35\x17\xbd\x48\x77\x05\x72\xba\x86\x5e\x86\
\x96\x23\x8e\x07\xfb\x44\x6a\x4a\x8f\x52\x9d\x6c\x3a\x8a\x20\x6a\
\x64\x91\x61\x81\xa4\x61\x7c\xc3\x28\x5e\xb5\x22\x31\xb3\xc4\x65\
\x8a\x48\xdb\x33\x29\xf3\x10\x2d\xa5\x5a\x70\x58\xe7\x40\x4a\xb6\
\xa0\x8a\xcf\xc1\xe1\x9e\x6f\x31\x39\x50\x6e\xc6\x9f\x8a\x85\x70\
\xf0\xe7\x8e\x69\x03\x5f\x40\x4d\xaf\x44\xb7\xd4\xb1\x66\x02\xe5\
\x48\x1d\x6d\x51\x71\x55\x70\x12\x62\x1f\x14\xa3\x3b\x30\x3e\xab\
\x9b\x8b\x55\xa2\x2e\xf6\x51\x7d\x74\xa6\x76\x84\xa7\xa1\xaa\xdc\
\x4b\x49\xd5\xbf\x12\xeb\x56\x42\x30\x17\x20\x7d\x6a\xb7\x12\x20\
\xca\x88\x06\xa8\xba\x9a\xaf\x80\x9a\x34\x3a\x50\x81\x73\x47\x10\
\xf3\x00\x39\x6a\x68\x22\x96\x3f\x16\x48\x16\xf6\x24\x35\x58\xc0\
\x42\x60\x8d\xf3\x90\x5d\x8d\xbe\x54\x97\x76\x8e\x48\x1d\x23\x2e\
\xc0\x36\x80\x55\x9c\x39\x12\x47\x9d\x41\x52\x0d\x99\x4e\xe2\x99\
\xe8\xa6\xaf\xad\x47\x7a\xbb\xc3\x74\xc5\xa8\x03\x46\xd0\xd5\x35\
\xbe\x60\x40\x27\xda\xb4\xb8\x7c\x3e\x18\xf1\x18\xf9\x88\xb2\x8e\
\x95\xd7\x8f\xac\xd5\xc8\xb6\xa3\x74\x04\x5c\x0d\x4d\x25\xa4\xf0\
\xec\xc7\x61\xbd\x14\x18\x98\xa4\x75\x4d\x54\x93\xa5\xf9\xd7\x4d\
\x9e\x32\x72\x5c\x79\x4f\x2a\xaf\xc5\x0d\xa1\x09\xf8\x8d\xea\xd1\
\x03\x37\x7a\xa3\xc5\x9b\xef\x55\x7a\x2d\x5c\xba\x8a\x28\xb5\x2d\
\xbb\xd1\xb9\x16\xde\x96\xfb\x57\x26\xa1\x13\xa3\x11\x9e\x3d\x5c\
\x0d\x54\xec\xc2\xa3\x0f\x66\x80\xb0\x04\x10\xd6\x65\xe8\x68\xce\
\xfa\x53\x06\x1a\x56\x1f\x68\x44\xd6\xde\x60\x74\x12\x0f\xe6\x8c\
\x24\xf3\xa8\x6a\xb0\x70\x93\x11\x99\x56\xe0\xea\x35\xd6\x97\x26\
\x1e\x70\x0f\xdd\x9d\x2a\xca\x95\x9c\xd0\xd3\x1a\x19\x83\x10\x62\
\x6d\x3b\x50\x15\x60\x2e\x54\x81\xd4\x8a\xc3\x48\xae\x22\xbb\xf2\
\xfd\x68\x80\xf7\xff\x00\xb8\xd4\x83\x96\xb8\x8a\x62\x8f\xea\x3f\
\x3d\x6b\xb2\x83\xb9\x3f\x2d\x29\xc1\xa5\x65\xa9\x00\x02\x37\xb9\
\xda\x9b\x91\x4f\x23\xef\x7a\xe8\xe3\xc8\x73\x6e\x3f\x4a\xb1\x6b\
\x63\x86\xa6\x5c\x1e\x6b\x83\x98\xe8\x47\x41\x56\x00\xa4\x70\xf5\
\xb6\x15\x5a\x32\x10\x96\x3e\x5f\x84\xf6\x23\xf7\xa7\x86\x05\x73\
\x00\x45\x8d\x88\x3b\xa9\xe8\x6b\xb4\xf1\x87\x5a\xba\x50\x91\x47\
\xe2\xc8\xe1\x00\xd0\x5f\xe2\xed\x51\x34\xb1\xe1\xe2\xf1\x24\xd4\
\x9f\x4a\xf5\xac\xdc\x64\xef\x89\x6c\xd2\x01\x61\xb2\xf2\x15\x5b\
\x89\x6e\x4e\x25\x1a\xbd\xa2\x83\x30\x1c\xdc\xd0\x2f\x13\x7c\xb6\
\x78\x51\xbb\x8d\x3f\x4a\xa4\xaa\xc4\x90\x01\x6b\x7d\x68\x95\x24\
\xbf\xf9\x6f\x6e\xb9\x4d\xab\x3f\xaa\x72\x34\x62\xc6\xe1\xa4\xd1\
\xb3\x44\x7b\xea\x28\xe4\x48\xf1\x29\x95\x24\x46\xc9\xb1\x53\xad\
\xfa\x56\x77\x81\x27\xfe\xd3\xff\x00\xda\x68\x4c\x52\x03\x70\x8e\
\x08\xe6\x06\xd4\xea\xc3\x9a\x09\x6e\x54\xc6\xe6\xda\x6d\x40\x90\
\xc8\xad\x90\x8c\xa4\x6d\x98\x81\x7f\xad\x54\xc6\x89\xe4\x52\x22\
\x95\xc4\x8a\x2f\xe5\x3e\xa1\xfc\xd6\x6e\x20\x4a\x04\x71\xbb\x33\
\x3c\xc6\xc0\x9b\x9b\x0b\xf2\xae\x77\x96\x7c\x32\x3d\x5c\x0e\x1d\
\x0f\x88\xea\x19\x46\xa7\x91\xae\x70\x48\x3e\x12\x67\x3b\x66\x3a\
\x28\xaf\x3d\x04\x86\x2c\x6c\x62\x2b\x00\xac\x00\x36\xf5\x0b\xd7\
\xa2\xf1\xd6\x59\x99\x0e\x8e\x09\x00\x72\x35\xd3\x8f\x2d\x16\x61\
\x79\x4a\x82\x5d\x83\x3b\x68\x48\xd8\x0e\x82\x81\x85\x39\xc6\x96\
\xa4\xc8\x72\xa7\x7d\x85\x54\x16\xe5\x7a\x8a\x43\xaa\xf8\x83\x21\
\xf7\xa7\x20\x19\x4d\xc8\xd7\xf2\xa1\xb0\x58\xaf\xd7\x7a\xc9\x85\
\x3a\xeb\x4b\x6b\x03\x6e\x74\xf6\x04\x8e\x42\x96\xc8\x09\xd6\xf7\
\xeb\x51\x28\x8a\x82\xb4\xc6\xd3\x7f\xad\x0d\x18\x80\x45\x04\xca\
\x0e\x16\x55\x22\xf6\x52\x47\xbd\x3a\xb9\x00\x2c\x57\xf1\x02\x28\
\xc4\x54\xc3\xcc\x17\x9d\x85\xf5\xed\x50\x16\x89\x98\x30\x43\x6b\
\x12\xa2\xff\x00\x4a\x86\x20\x0b\x93\x41\x09\x1a\xd0\x9b\x57\x3c\
\x82\xfb\x50\x9b\x9d\x49\xb7\x61\x41\x71\x55\xbd\xcd\x4a\x82\x4d\
\x95\x6f\xec\x2a\x15\x03\x30\x5e\xa6\xab\x4f\x88\x91\xa4\xf0\x60\
\x2c\x17\x6d\x0e\xad\x45\xb8\x96\xe4\xc9\x18\xbc\xb2\x2a\x76\xbd\
\xcf\xd2\x85\xed\x94\x32\xe6\x21\x86\x84\xad\xa8\x70\xf0\x24\x3e\
\x67\x01\xe5\x3b\xdf\x50\xb4\x6e\x4b\x35\xc9\xb9\xa5\x16\x73\x54\
\x44\x18\x9b\x93\xa5\x18\x17\xf6\xfd\x68\xd5\x49\x36\x03\xd8\x54\
\x83\x6a\xaf\xc5\x3f\xcd\x45\xbe\xcb\xb7\x4a\xba\x88\xc4\x7d\xd2\
\xab\x9e\x6c\x7d\x2b\xfc\xd4\xa4\x10\xc6\xe5\xdb\xef\x64\x3b\xb3\
\x6d\xf2\x15\x66\x8d\x66\xc3\x2e\x22\x24\xb4\x6c\xca\xb4\xdc\x3e\
\x1e\x4c\x43\x78\xb3\x31\xc9\xd4\x9d\x4f\xb5\x68\x34\x84\xef\x6f\
\x6b\x50\xc8\xdb\x17\xb9\xcd\xa2\xa8\xe7\xfc\x55\xf9\x5a\x04\x50\
\x13\x24\x4a\x11\x79\xff\x00\x73\x48\x9a\x62\x5b\xc1\xc3\x0b\xb1\
\xd0\xb7\xf1\x56\x99\x54\xaf\xde\x6a\xa3\x52\xa3\x44\x1e\xfd\x69\
\x18\x63\x0c\x0c\xee\x54\xdd\xbd\x23\x98\x14\xd4\xe8\x50\x61\x22\
\x2e\xde\x67\x6f\xd7\xa5\x55\xb3\xbc\x85\xc9\xb9\x63\x73\x7a\x6c\
\xce\xf3\x49\x99\xb4\x1c\x80\xe5\x5c\xab\x61\xa5\x66\x90\xaa\x91\
\xfd\xaa\xce\x0f\x0c\xd2\xb5\x94\x58\x0d\xcf\x21\x4c\xc1\x60\xda\
\x4f\x3c\x9e\x54\xeb\xd6\xb4\x11\x00\x50\xaa\x2c\xa3\xf3\xad\x4e\
\x3f\x68\xb5\x38\x60\x21\x5c\xb1\x0b\x77\xb6\xa6\x8c\x24\x47\x16\
\x19\xa3\x5c\xd2\x21\xbf\x42\x46\xd5\x2a\xb6\xda\x83\x17\x13\xb2\
\x2c\x88\x7d\x1c\xbf\x7a\xe8\xc1\xe9\x1a\x81\x65\x40\xbe\xc2\x99\
\x15\x93\x36\x76\xb0\x1a\xde\x86\x09\x63\x68\x95\xd9\xc6\x6b\x6a\
\xa3\x7b\xfb\x54\x82\x64\x90\x31\x1a\x8f\x48\xe4\xbf\xde\xb4\x83\
\x8b\x49\x5c\x09\x32\xd9\x57\x40\xbc\xc7\xbd\x06\x10\x67\xc4\x22\
\xf5\x61\x57\xa3\xf2\x8b\x6f\x4b\x8a\x20\x98\xd2\xc0\x79\x42\xe6\
\x1d\xb9\x55\x78\xf6\x34\xf6\xca\x75\xb7\x3a\xa1\xc5\x54\x78\xe0\
\xda\xfe\x51\xbe\xb5\x74\x1a\xa5\xc5\x35\xc4\xfb\x28\xa7\x97\x8a\
\x2a\xb1\xd2\x90\x49\x00\x8e\x86\xd4\xd9\x0d\x28\x9b\xfc\xcd\xeb\
\x9b\x50\xec\x04\x41\x98\xc8\xc2\xea\xbb\x0e\xa6\x9f\x8a\x62\x30\
\xce\xc4\xea\xde\x51\x53\x87\x19\x30\x88\x3a\xdd\xab\xa4\xb1\x78\
\x94\x81\x60\x0b\xeb\xcc\xed\x5a\xf8\x91\x7c\xa0\x06\x16\xd0\x7e\
\x94\x12\x1e\x94\x52\x90\xd7\xcc\x6f\x7a\x51\x1d\xcd\x14\x27\x33\
\x6c\x09\xd3\xbd\x48\x91\x8e\xfb\x0e\x47\x5a\x0b\x80\x2c\x2b\x94\
\x92\x6c\x01\x26\x84\xe9\x62\x86\x4f\x52\x05\x27\xe2\x5a\xa7\x3c\
\x4d\x0b\xe5\x61\x70\x76\x23\x63\x57\xb4\x0c\x14\xba\x06\x3b\x02\
\xd4\x18\xb0\x87\x0c\x44\x92\x22\x95\xd5\x75\xbf\xe9\x45\x86\x29\
\x0b\x72\xa2\x50\x49\xb0\xd4\xd0\x23\xc1\xcf\x13\x17\xd4\xd0\x89\
\x96\x69\xd3\x0f\x09\x21\x5c\xf9\xda\xda\x9f\xed\x46\x93\x90\xa6\
\x6c\xab\x9a\x46\x1f\x0a\x0b\xfe\x74\xc1\x95\x45\xe4\x8a\x48\x87\
\xe2\x6b\x11\xf9\x54\x02\x02\xe5\x40\x15\x7a\x0a\x34\x62\xbb\x73\
\xe5\x4c\x4b\xdc\x39\xc3\x61\x32\xf3\x46\xbe\xfb\xde\x8f\x15\x30\
\x86\x45\x7b\x5c\xb2\x80\x41\x36\x04\xea\x45\xfe\x42\xa8\x60\x24\
\x11\x62\xb3\x92\x6c\xe3\x2b\x0e\xd4\x7f\xe2\x58\x25\x93\x04\x92\
\x46\x0b\x04\x7f\x35\xba\x01\x5a\xdf\xf1\x1f\x49\x96\x79\xb1\x38\
\x96\x58\xbc\xf2\xf3\x6b\xf9\x53\xda\xb4\x21\xc1\xb4\x08\x1b\x13\
\x31\x9d\x86\xad\x1d\xb6\xf9\xd5\x5f\xf0\x8a\x34\x65\xf3\xad\x89\
\xbb\x2d\xc6\xfa\x6f\x5a\xc4\x02\xc1\xb9\x8e\x74\xf1\x9b\x35\x5b\
\xf0\x11\x95\xb2\xac\x2a\xa8\x1b\x50\x40\xb1\xa6\x80\xe1\x6e\x25\
\x72\x07\x22\x69\x2a\x8f\x1b\x05\x6d\x81\x25\x1f\x71\x6e\x86\x9d\
\x1b\x07\x25\x43\xad\xf9\x81\xbd\x6e\x32\x3f\x12\x66\x5f\x29\x20\
\x1e\xad\x43\x92\x5b\x1b\xc8\x75\xdc\x5e\xf7\xa6\x0a\x25\x50\x6b\
\x58\x08\x58\x54\x91\xe2\x47\x1b\x01\xa8\xf2\xdb\xf4\xac\x2f\xf1\
\x06\x0d\x61\xe2\x8f\x2c\x60\xe5\xf0\x96\xc3\x90\x26\xbd\x13\x34\
\x48\x49\x79\x05\x97\x53\x6d\x4d\xab\x23\x12\xed\x89\x9d\x8b\xaf\
\xf9\xc7\xd3\xd0\x72\x15\xcf\x9c\x99\x8d\x4b\xdb\x1f\x83\x61\xdc\
\xce\x64\x91\x09\x8e\x3d\x4d\xfa\xf2\x15\xa4\xcd\x9c\x97\xd8\xdf\
\x5a\xb4\xf8\x6f\xb1\xc6\xb0\x0b\xdb\x72\x7a\x9a\x44\x82\xcf\xee\
\x2b\x19\x9d\x1d\xd1\xc1\x8a\x01\x42\x4d\xa8\x1b\x30\xde\x9a\xea\
\x08\xb8\xb3\x29\xd8\xf2\xaa\x4e\x35\xae\x8e\x57\x88\xdd\x0f\xb8\
\x3b\x1a\x75\x62\xd3\x28\xbd\xec\x3e\x94\x0f\x72\xc0\x5b\x63\x72\
\x68\xe3\x92\x39\x63\x2c\xba\x11\xea\x1d\x2a\x06\xe4\xf5\xab\x01\
\x4c\x2c\x68\x1c\x53\xa4\xda\x96\xda\x8a\x11\x46\x94\xc2\xcc\x7a\
\x1a\x75\x2e\x41\x53\x41\xa8\xcd\x91\x1d\xf9\xaa\x93\x5d\x6e\xe6\
\xa0\xd8\x5c\x1d\x98\x10\x6b\x28\xb4\x92\x03\x02\x31\x99\x14\x05\
\x02\xd7\xd4\x50\xab\xc3\x2d\xc4\x66\x59\x0f\x50\x9a\x55\x7f\xb1\
\x44\x8f\x98\xb9\x71\xf8\x6d\x6a\x79\x26\xc0\x1d\x00\xd9\x46\xc2\
\xb3\xb7\xe9\x10\x8a\xdf\x04\x97\xf6\x5f\xe6\xa3\x28\xd6\xe0\xff\
\x00\xd4\xc0\x7e\x97\xa1\x35\x16\xbd\x25\x33\x06\x28\x56\x29\x23\
\x42\x45\x8d\x81\xfd\x6a\xae\x12\x29\x60\xc7\x20\x65\x07\x36\xc4\
\x1d\x3d\xea\xd2\xad\xcd\x80\xd4\xd1\xaf\x99\x83\x28\xb8\x00\xaa\
\x77\xea\x68\xcd\xed\x00\xee\x40\xae\xb6\x94\x41\x2c\x2e\xcc\xa0\
\x75\x2c\x29\x89\x16\x72\x08\x05\x97\xae\x81\x4f\xcc\xef\x56\x22\
\xe3\x17\x51\xa5\x31\x14\x0d\x6c\x3e\x62\xa3\x10\x71\x10\xa0\x65\
\x48\x72\xed\xa1\xcd\x4c\xb1\x08\xb9\xc8\x2f\x6f\x35\x86\xd5\xa0\
\xe6\x62\xdb\x9d\xb6\xed\x42\x75\xa2\xae\xb7\x5d\x6a\x01\x02\xf4\
\x6b\x14\x9f\x6b\xf1\x33\x47\x95\x54\xaa\x0d\xcd\xbe\x55\x20\x00\
\xa5\x98\x85\x51\xb9\x35\x5f\x11\x89\xcd\x74\x8b\xca\x9c\xcf\x33\
\x55\xc8\x85\x8a\x91\x41\xca\xa7\x3b\x0e\xde\x51\xfe\xfe\x7e\xf5\
\x57\x29\x26\xe7\x73\x46\x05\xa9\x98\x78\x9e\x56\xb2\x0d\x39\x9e\
\x95\x9f\x5a\xf0\xb4\x42\x4e\x55\x04\x93\x57\xf0\xb8\x05\x5b\x34\
\xda\xb7\xe0\x1f\xbd\x3b\x0f\x1c\x70\x0b\x26\xad\xcd\xbf\x8a\x66\
\x6a\xdc\x98\xcd\xa9\xb5\xed\xca\xdc\x85\x12\x8a\x1b\x82\x2c\x6a\
\x56\xca\xb6\x17\xa5\x91\x8b\x5e\xd7\xa6\xa1\x51\xcc\x77\xaa\xac\
\x75\x00\x6e\x68\xe3\x00\x6e\x6f\x5a\x95\x1b\x68\xd4\xe5\x8d\x40\
\xbe\xe7\xad\x14\x5e\x49\x6d\x7b\xe9\x42\xac\x2d\x63\x4c\x52\x00\
\xd3\x4a\x50\xc3\x50\xcb\x23\x27\xde\x04\xcc\xa1\x6c\xc0\x1d\x46\
\xb5\x05\xa8\x59\xc8\x46\x23\x73\xe5\x1e\xe7\x4a\x75\x1d\x04\xa9\
\x2a\x07\x50\x40\x26\xda\xd6\x7e\x2e\x5f\x12\x77\x6e\xa6\xac\x4a\
\xe2\x0c\x29\x50\x76\x19\x57\xbf\x7a\xce\x26\xe3\x53\xf2\xac\xf2\
\xaa\x44\x3b\x5f\x40\x68\x4b\x54\x33\x5a\x82\x23\x9e\x74\x51\xcd\
\x85\x63\x5a\x6a\x31\xb2\xaa\xdf\x40\xa0\x7e\x54\x8c\x63\x95\x30\
\xba\x9d\x43\xe5\xa2\x99\xc6\x63\x6d\xaf\x49\x91\x81\x40\xcc\x2e\
\x23\x70\xde\xc3\x9d\x36\x88\x37\x6f\x39\xe9\x7a\x16\x7a\x17\xbe\
\x72\x07\xd6\x96\x59\x7f\x18\x62\x3e\x14\x37\x35\x23\x01\x19\x4b\
\x31\xca\xa3\x73\x55\x71\x18\xdb\xdd\x61\x05\x57\xaf\x33\x4d\x6f\
\x31\x0d\x20\x04\x8d\x97\x92\xff\x00\x26\x97\x26\x1e\x29\x1f\x35\
\xca\x75\x00\x69\x59\xbb\xf0\xac\x41\x1c\x22\x04\x26\x35\x72\xeb\
\x98\x96\xde\x8e\x3c\x3e\x1a\xe7\xc8\xc2\xea\x6f\xe6\xed\x42\x18\
\x58\x28\x16\x0a\x2c\x28\xe2\xdc\xff\x00\xa4\xfe\x95\xa4\xcc\x8b\
\x0f\x85\x51\xe8\x67\xff\x00\x51\xb5\xa9\xb0\xc5\x04\x73\x2c\xb1\
\x87\x05\x76\x17\xb8\xa0\x5a\x25\x22\xf5\x89\x84\xe5\x34\x57\xa5\
\xa9\xa2\x06\xb5\x00\x85\x68\x60\x1d\x71\x18\x76\xc3\x48\x75\xb7\
\x94\xed\x59\xe0\x12\x36\x34\x50\xb3\x23\x86\x06\xc4\x1a\xd4\xaa\
\x8e\x09\x64\xc3\x63\x41\x63\x66\x43\x95\x81\x35\xab\x81\x99\xb1\
\x31\x34\x99\x57\x2d\x81\x16\xf8\x4f\x30\x7d\xa9\x18\xec\x32\x63\
\xb0\xeb\x22\x0b\x4b\x6b\x82\x39\xf5\x15\x9d\x85\x69\x30\xf2\x96\
\x89\xca\xb6\xc7\xca\x35\xf7\xeb\x4e\xde\x35\x9f\x5b\xea\x48\xd8\
\x91\xed\x51\x1a\x83\x8b\x92\x4f\xc2\x32\xfb\x9b\x6a\x6a\x84\x3c\
\x4e\x41\x61\x2a\x44\xfa\xea\x40\x20\xd5\xdc\x06\x25\x31\x39\xc0\
\x39\x5f\x31\x60\x87\x72\x39\xfb\xd6\xe5\x94\x61\xf4\x9c\x74\xb2\
\x20\x54\x5f\x2a\xb6\xed\xd7\xb5\x3a\xab\xf1\x17\x55\x54\x56\xb1\
\x0a\x0b\x9d\x7f\x2a\x6f\x82\x3b\x8a\x4c\x90\x61\x7c\x08\xf7\x90\
\x0b\x91\xd2\x93\xc2\x21\xb1\xfb\x4b\xec\xa7\xc8\x2d\xb9\xeb\x54\
\x73\x34\x85\x09\xdd\xc5\xfe\x64\xff\x00\x7a\xda\x08\x22\x81\x63\
\x03\xd1\x61\xf3\xac\xce\xee\x9f\x11\x32\x78\xb1\x95\x36\xd7\x6b\
\xf2\x35\x97\x2a\x14\x25\x4f\xa8\x1d\x6b\x58\x9a\xa3\xc5\x96\xcc\
\xae\x3e\x21\xad\x5c\xe7\xd5\x14\x5f\x6a\x51\xfd\xe9\x8f\x4b\xae\
\x6d\x40\xe1\xdc\xa6\x2d\x6e\x6f\xae\xa3\xad\x5f\x7d\x18\xdb\xad\
\x67\x46\xb9\xa5\x04\xef\x9a\xb4\x64\xd6\x43\xef\x54\xf1\x52\xdf\
\x51\x4b\x90\xe9\x6a\x63\x02\x74\x1a\xd2\x64\x68\xc1\xb7\x88\x3e\
\x40\x90\x3e\x75\x00\xd0\xbe\xd4\x4b\x94\x93\x96\x45\x6b\x6e\x06\
\xe2\x84\x82\xc4\xdb\xff\x00\x15\x34\x59\xde\x85\xaa\x25\x95\x82\
\xe6\x8a\x02\xe8\x37\x73\xcf\xda\xa6\xe1\x91\x5c\x02\x03\x0b\xd8\
\xd6\x51\x4e\x35\xa8\x6a\x63\x2d\xec\x6f\x50\x10\x66\x1c\xfa\xd1\
\x87\x40\x14\x9a\x2c\xb6\xa6\x65\xa9\xcb\x56\x2d\x28\x28\x2a\xca\
\x4b\x2e\x61\x6b\x8d\xc5\x14\x71\xc6\xb1\xf8\x65\xe5\x75\xe8\x4e\
\x5b\x7d\x29\x81\x68\x82\xd3\x80\x0a\xa9\x18\xb4\x51\x2a\x9e\x46\
\xd7\x35\x2c\x5e\xd7\x73\x6e\x57\x34\xc8\x94\x5c\xb1\x04\xf6\x1c\
\xea\x70\xe2\x62\xec\xf3\x46\xca\x76\x4f\x29\x21\x7e\x94\xe2\x05\
\xac\xc1\x9d\x48\xb7\xa1\x5b\xf5\x3f\xc5\x47\x3a\x6b\x5b\x35\x83\
\x02\x77\xb6\xb4\x06\x94\x1d\x7a\x57\x3b\xc7\x12\xe7\x96\xe3\xf0\
\x8e\xb5\x2e\xc9\x14\x5e\x23\xeb\xf8\x47\x53\x54\x26\x91\xe5\x90\
\xbb\x1b\x9f\xd2\xb3\x6e\x24\xe2\x71\x0d\x33\x6f\xec\xa3\x61\x4b\
\x42\xc3\x73\xf2\xa8\xb6\xba\x1a\x76\x16\x23\x2b\x1b\x90\xaa\x37\
\x35\x8e\xeb\x46\xe1\x30\xe6\x5f\x3b\x9c\xa8\x39\xf5\xf6\xab\xc0\
\x85\x5c\x88\x32\xa8\xe5\x4a\x04\x9b\x6b\x60\x05\x80\xa2\x26\xba\
\x48\xc8\xf3\x1a\xec\xc6\x97\x9a\xba\xe6\x95\x86\x86\xa9\xce\x00\
\xb9\x34\x9c\xe3\xdf\xb0\xa8\x05\xcb\x5e\xd6\xa8\x62\xc4\x6d\xb9\
\xe6\x69\x81\xc5\x56\xbd\x10\x6e\xf5\x2c\x59\x06\x8c\x3d\x56\x57\
\xa6\x23\x16\x36\x02\xe6\x9d\x06\xe6\xbd\x0e\x22\x41\x14\x51\xb9\
\x22\xe6\x41\x61\x7d\x4d\x26\x6c\x54\x10\x86\xbb\x66\x2b\xea\xb6\
\xcb\x54\xa7\x9d\xe7\x91\x66\x94\xda\xc2\xf1\xa7\xe1\xee\x7b\xd1\
\x79\x19\x0e\xc5\x4e\xd3\x49\xad\x85\xb4\x03\xa0\xa4\xbb\xd2\xf3\
\x80\x34\x34\xb7\x92\xb3\x79\x35\x83\x76\xbd\x1e\x00\xdf\x12\x0f\
\xe1\x05\xaa\xa3\xc9\xad\x85\xc9\xab\x1c\x3a\xe2\x39\x1e\xfb\xd9\
\x45\x12\xf6\xbe\x2d\x96\xd2\xe7\x41\xd4\xd0\x19\x54\x5e\xec\x08\
\x22\xc6\xd4\xb6\x60\x35\xa0\x2d\x7d\x6b\x5a\x12\x4b\x15\xca\x5d\
\xdd\x47\x27\x3a\x7f\x7a\x92\xee\x45\x89\xb0\xe8\x28\x0b\x50\x93\
\x59\xd3\x86\x73\x05\x4d\xa8\x81\x6b\x5e\xff\x00\x51\x48\xbb\x5c\
\x5b\xe7\x46\x49\x23\x4f\x9d\x5a\xb0\xe5\x62\x45\xf2\xe9\x4f\x84\
\xf9\x88\xec\x7f\x4a\xac\x5c\x11\xe5\x3b\xd3\x62\x62\xa6\xd6\xb9\
\xca\x7f\x4a\xd4\xa1\x48\x1a\x90\x75\xa5\x67\xca\x2e\x41\x3e\xd4\
\x68\xc1\xb5\x17\xac\x34\x7c\x66\x8c\x52\xe3\xa6\x2d\x69\x94\x8b\
\x74\xbf\x7a\x24\x37\x72\x37\xb5\x56\x92\x6b\x68\x01\x04\xef\x7a\
\x98\x4b\x13\xa5\xea\xd3\x8d\x9e\x17\x29\xb1\x88\x9e\xeb\xef\x43\
\xc4\x70\x8c\xef\xe3\xc0\xb7\xbf\xad\x40\xd8\xf5\xaa\x51\x4b\x6b\
\x16\x24\x30\x3d\x2b\x57\x0f\x88\x59\x62\xf1\x43\x00\xc3\xd4\x01\
\xfc\xeb\xa4\xb2\xcc\x65\x93\xa8\x36\x3a\x1a\x38\x5d\xd2\x55\x78\
\xdb\x2b\xae\xa1\xbb\xd6\xb4\xc9\x16\x25\x2d\x2a\xef\xa8\x60\x2c\
\x45\x65\xe3\x30\xef\x87\x97\x2b\x6a\x0e\xaa\xc3\x63\x45\x98\xa5\
\x6c\xc5\x8c\x8a\x58\x83\x94\x71\x26\xcc\x80\x73\xa8\xc4\x44\xad\
\x85\x99\xe6\x5b\xb9\x5c\xc7\x5d\xba\x0a\xcb\xe1\xb8\xaf\xb3\x4c\
\x49\x17\x56\x16\x60\x37\xab\xd8\xbc\x76\x1d\xb0\xae\x91\xbb\x33\
\x38\xb5\x8a\xda\xd5\xb9\xcb\x67\x63\x19\xc8\xe4\x48\xad\xa6\x84\
\x56\xec\xad\xf7\x82\xfc\xc6\x6a\xc0\x17\x2c\x15\x45\xc9\xda\xb7\
\x4a\x02\x40\x3a\x9c\xa2\xe4\xfb\x51\xc1\x72\x43\x30\x2e\x14\x1d\
\x46\xa6\xaa\xf1\x66\x45\x85\x03\xba\xa9\xb9\xde\x89\x8b\x16\x26\
\x2b\xaa\xde\xc2\x43\xcf\xda\xa3\x1e\x8a\x30\xcc\xc8\x35\x0d\xb9\
\xd4\x9b\xf5\xa6\xdd\x81\x9d\x2d\x94\xf9\x9d\x47\x4d\x6e\x4f\xb5\
\x2d\x8a\x9f\x2a\x2c\x8c\xd6\xbf\xa6\xc0\x7c\xcd\x1e\x6c\xb7\x2a\
\x02\xf5\xb0\xb5\x2d\xdd\x8a\x9b\xb1\xd7\xbd\x73\x6c\x58\x50\xfe\
\x38\x26\x38\xd9\x13\x52\x15\xae\x6a\xdb\x32\x9b\x95\x0e\xc7\xba\
\xe5\xfc\xcd\x27\x00\xb6\x89\x9f\xf1\x1b\x0f\x95\x35\xc3\x91\xa2\
\xb1\xaa\x0a\x53\x9b\xe8\xd6\x61\xf8\x7e\x1f\xef\x40\xee\xd6\xd1\
\x88\xf6\xa9\x9d\xa2\x89\x6f\x34\xca\xbd\x86\xa6\x93\x16\x27\x0b\
\x34\xa2\x24\x66\x0c\x76\x2c\x34\x35\x6a\x82\x36\x76\xb9\x16\x61\
\xb3\x8d\xc5\x57\xc4\x3c\xc9\x2a\xa6\x22\x4b\xc4\xc7\x52\x39\x8a\
\xb6\x14\xe6\x20\xe9\x61\xaf\x6a\xad\x28\x49\xe5\x12\xbd\xcc\x6b\
\xa4\x6b\xf8\xbb\xd1\x48\x89\x12\x21\x0b\x70\x84\x58\xb5\xad\xa7\
\x45\x1f\xbd\x73\x9b\x9b\xec\x06\xc3\xa0\xae\x66\xb9\xeb\xd0\x74\
\xae\xb5\xf7\xa9\x04\x02\x7b\x0a\x35\x4b\x6c\x2a\x54\x51\x9d\x2a\
\xc4\x10\x95\xd9\x45\x10\xa9\x03\xa5\x28\x39\x6a\x72\xf7\xa3\x60\
\x91\xae\x69\xa4\x58\xc7\x7d\xea\xac\xfc\x4a\x24\x16\x82\x3c\xc7\
\xf1\x3e\xdf\x4a\x91\xc1\x09\x16\x5b\x9e\x82\xa5\xad\x10\x1e\x34\
\x89\x1f\x42\xc6\xb3\xe5\xe2\x38\xa7\x5b\x66\x54\xff\x00\x42\xda\
\xa8\xcd\x21\x24\xdc\x96\x27\x72\x6b\x17\x9c\x87\x1b\x6f\x8a\xc2\
\xb9\x03\xc6\x67\x2a\xe2\xc7\x2e\xda\xf2\xa2\x4c\x92\xdd\xa2\x91\
\x58\x73\xed\x59\x31\x00\x11\x46\x9f\x11\x07\xbd\x80\x1f\xa9\xaa\
\xd2\xe3\x0e\x1a\x4c\x98\x7f\x33\x83\xe6\x6f\xda\x8f\xde\x7a\xb1\
\xa3\x8f\x90\x4b\x3f\x94\xdd\x54\x58\x55\x72\x47\x33\xf2\x14\x42\
\x48\xf1\x10\x09\xe1\x16\xbe\x8e\xbf\x84\xd0\x5f\xf2\xa3\xd2\xe2\
\xdd\xad\xf3\xad\x28\x50\x47\x02\x27\xc5\x6b\x9a\xcf\xc3\x29\x93\
\x10\x8b\xd5\xab\x49\xc8\x2e\x48\xeb\x5a\xe3\xd0\xa8\x2e\x41\xf4\
\xde\x84\xca\x2f\xb5\xaa\x76\xa5\x4c\x09\x7e\x80\xd3\xa0\xdc\xeb\
\xd6\xa6\xf7\xdf\xe9\x48\x45\xb3\x5c\x9d\xa8\xcb\x81\xdc\xf4\x15\
\x6a\x30\x9e\xa6\xbb\x38\xef\xf4\xa5\xa9\x20\x6b\xa9\xa3\x8c\x33\
\x9b\x28\xf7\xed\x4e\xa1\x06\x1d\x0f\xd2\x89\x6e\x4e\x83\xf3\xa5\
\x3c\xd8\x74\x62\xa2\x68\xe4\x90\x7c\x0a\x6a\x9e\x23\x19\x2b\xdd\
\x41\xc8\xbf\x84\x51\xfa\x4b\xf2\xcc\x90\xfa\x9d\x58\xfe\x10\x75\
\xaa\xf3\xe3\x65\x71\x95\x3e\xed\x7a\x2f\xf3\x54\x73\x1a\x90\xd5\
\x9b\x6d\x38\xb0\x35\x55\x46\x17\x04\x97\x3d\xc0\xe5\xf5\x35\xd9\
\xd9\x98\x93\x49\x6c\x42\xa6\x3c\xc5\x21\xb2\xb2\x00\x1a\xf7\xb5\
\x1b\x5c\x3e\x5d\x49\xed\xce\x8d\x29\x73\x40\x4f\x7a\x62\xe1\xe7\
\x71\x70\x84\x03\xcd\xb4\x14\x47\x0a\x8b\xeb\x72\xe4\xf2\x5d\x05\
\x59\x52\xbc\x28\xd3\x4a\x72\xe8\x0f\x33\xc8\x55\xdb\x85\x40\x89\
\xb2\x8a\x92\x48\x50\xa0\x05\x1d\x05\x2c\x9a\x7c\x0e\x26\xb8\x9a\
\x12\x7b\x9f\x6a\x16\x63\x7d\xf4\xa8\xa5\x9a\xa0\x35\x05\x48\x1a\
\xd1\xa8\xc5\xa6\x21\x03\x52\x69\x4b\xbd\x1a\xd2\x8c\x50\x73\x66\
\x5b\x0e\x97\xa6\x40\xcc\x24\xcc\xc2\x96\xa7\x4a\x34\xad\x46\x55\
\x71\xe8\xd0\xb9\x16\xf2\xb6\xc7\xb5\x0e\x18\x31\xd4\x5b\x5d\x35\
\xad\x06\x41\x34\x46\x33\xbd\xbc\xa6\xa8\x94\x78\xd8\xad\xac\x6f\
\xb1\xe5\x45\x98\x62\x52\x56\x0d\x6b\x0d\xfa\x53\x73\xbd\xaf\x6b\
\x52\x61\x42\xad\x99\x88\xf6\xa3\xbd\xee\x3a\xd1\x0a\x74\x61\x63\
\xa8\xa7\x42\x86\xde\x45\x36\xa5\xa2\xd8\xd8\x00\x5b\x73\x7d\x97\
\xdf\xf8\xa2\x96\x35\x93\x43\x34\x99\xb9\x36\xc0\x7c\xa9\x82\x9a\
\x8a\x19\x8e\x6d\x02\xe9\x4e\x85\xbc\x39\x03\x28\xb6\xba\xf7\xa4\
\xa3\x1b\x28\x60\x3c\x45\x16\x65\xfc\x43\xa8\xa2\x57\xcc\x6c\x10\
\x83\xdf\x95\x6a\x06\xb6\x1d\x95\x96\xe8\x6e\xbb\xdb\x9a\xf6\xa6\
\x90\xb2\x46\x63\x91\x43\x29\xfa\x8f\x6a\xcf\xe1\xb2\x78\x73\x65\
\x63\xe5\x7d\x0d\x68\x01\x63\xde\xba\xca\xcd\x66\x63\x30\x92\x40\
\xd9\x87\x99\x0e\xcc\x29\x1a\x1d\x6f\xad\x6e\x03\xa5\xb7\x07\x71\
\xd6\x90\xd8\x3c\x2b\xbe\x6c\xac\xa4\xf2\x07\x4a\x2f\x1f\xe2\x95\
\x5b\x83\x42\x5a\x63\x3b\xdc\xaa\x0d\x3b\x9a\xd1\x94\x93\x0c\x86\
\xe7\xd2\x4d\x72\x85\x54\x08\x83\x2a\x8d\x85\x73\x6b\x1b\x8e\xaa\
\x7f\x4a\xd4\x99\x06\xf6\x06\x8f\x2b\x80\x1a\xea\xb4\x38\xd7\x8a\
\x3c\x3b\xac\xa7\x56\x5b\x85\x1b\xd3\x66\x91\x62\x8d\xa6\x6d\x94\
\x5c\x77\x35\x8a\x5a\x4c\x46\x2f\x31\xbb\x16\x3a\xfb\x51\xca\xe2\
\x9d\xa9\xcd\xc4\xe3\x5d\x23\xc3\x82\x35\x3e\x76\xaa\xf2\x71\x59\
\xb4\xcb\x14\x40\xff\x00\xa6\xf5\x64\xe0\x30\xe9\x33\x09\x5d\x9c\
\x8b\xf9\x46\x80\x51\xa2\x41\x1b\x7d\xd4\x08\xa7\xa9\x17\x35\xc3\
\x39\x37\xd1\x7f\x6d\xc5\xa4\x68\x04\xa5\x5b\x28\x2d\x6e\xa6\x91\
\x36\x27\x11\x26\x8f\x33\x90\x4d\xce\xb5\xbc\xd6\x51\x91\x51\x00\
\xb0\xd3\x28\xa4\xcb\x0e\x1d\xc1\xcd\x87\x8c\x9e\xa0\x58\xd6\xaf\
\x1b\xfd\x5a\xf3\x6e\xec\x58\xe9\xa9\xe7\x51\x77\x8c\x82\xa7\x5b\
\xd6\xe8\xc3\xe1\x51\x48\x18\x54\xee\x49\xbd\x56\xc5\x70\xe8\xe5\
\xf3\xe1\xce\x56\xff\x00\xdb\x3c\xfd\xab\x17\x85\x3a\x9c\x3e\x39\
\x71\x11\xa4\x78\x80\x10\x1d\x59\xaf\xea\x02\xdb\xfd\x45\x58\x90\
\x66\x5b\xa1\x42\x08\xf2\xd9\x86\xd5\x91\x30\x29\x9e\x33\xff\x00\
\x2d\x55\x2d\xdc\x9b\x9a\x47\x85\x26\x5c\xd9\x18\x0e\xb6\xab\xf5\
\x62\xc6\xc1\x78\x21\x07\xc6\x9d\x10\xf4\x1a\x9a\x4b\xf1\x28\x14\
\xda\x28\x19\xf5\xdd\xcd\xbf\x4a\xcc\xb5\x12\xa8\xe9\x47\xee\xfc\
\x58\xd2\x4e\x27\x11\xd2\x4c\x3e\x5e\x99\x5b\xf9\xa7\xae\x23\x06\
\xc3\x37\xda\x32\xf6\x23\x5a\xce\x87\x05\x3c\xab\x9a\x38\x0b\x03\
\xce\xd4\xd4\xe1\x78\x92\xd6\x78\x84\x63\xab\x5a\xb5\x2f\x25\xd2\
\xeb\x63\x30\x6a\x09\x12\xbb\x9e\x81\x6d\xf9\xd2\xbe\xdd\x88\x98\
\xe4\xc2\x41\x97\x4d\xed\x73\xf5\xae\xc2\xf0\xec\x34\x63\x34\x8e\
\xd2\x37\x41\xa2\x8a\xb6\x32\x85\xca\x8a\x15\x7a\x0a\xd7\x63\xa5\
\x13\x82\x9a\x43\x9f\x11\x38\x56\x3b\x86\xb9\x34\x63\x87\x41\x94\
\x03\x33\x5c\x6e\x72\xe8\x7d\xaa\xdd\x8d\x71\x01\x57\x33\xb0\x51\
\xd4\xd5\x91\x6a\xaf\xfe\x99\x87\x2c\x2d\x3c\x84\x7e\x1c\xbb\xfd\
\x28\xfc\x3e\x1d\x85\xda\x10\xcd\xd0\x9c\xc7\xfb\x52\xf1\x58\x92\
\xde\x58\xae\xab\xd7\x99\xaa\xc0\x58\x97\x63\xa2\xea\x4d\x66\xd9\
\x3c\x38\x6c\xd3\x34\xae\x5b\x2e\x51\x1a\x8b\x5b\x99\x27\xfb\x52\
\x25\xc3\xe1\xf1\x6b\x91\xd3\x24\x9f\x0b\x8d\x01\xf7\x15\xcc\xc5\
\x57\x33\x7a\xa4\x39\x98\x74\x1c\x85\x75\xae\x34\xac\xd2\xcd\xc1\
\xcb\x27\x0f\xc7\x94\x9c\x30\x53\xa3\xaf\x51\x5a\x33\x20\x52\x0a\
\x9c\xca\xc2\xe0\xf5\x15\x5f\x8f\xa6\x7c\x3c\x13\x5b\xcd\xaa\x13\
\x6d\xfa\x55\x8e\x03\x1c\xb3\xe1\x4c\x33\x02\xa1\x35\x46\x6d\xed\
\xcc\x51\x3d\xc5\xff\x00\x4e\xe1\x91\xfd\xe9\x97\x92\x0f\xce\xad\
\x11\x52\x02\xaa\x04\x41\x65\x1f\x9d\x41\xda\xba\x66\x46\x50\x45\
\x0b\x0b\xd4\x9a\x83\x52\x09\x5d\x2c\x49\xb5\x47\x6d\xbe\x54\x44\
\x8a\x8b\x83\x52\x71\xd0\x0d\x0b\x13\xb2\x8e\x75\x57\x17\x8d\x77\
\x06\x3d\x90\x1f\x42\xe8\x3e\x7a\xdc\xd3\xf1\x44\xae\x14\xb8\x36\
\x28\x41\x07\x6f\x7a\xcc\x7b\xe6\x37\xde\xf5\x9e\x57\x0c\x11\x48\
\xe5\xb6\x48\xd2\x39\x14\xdd\x4a\x8b\x03\xd8\xd3\x24\x1e\x21\x32\
\x2a\x90\x6f\xe7\x5e\x6a\x69\x17\x14\x78\x9c\x40\xc3\xe1\x52\x6b\
\xa8\x99\x89\x11\xb1\x1b\x0e\xf5\x82\x91\x7b\x5e\xc6\xdd\x6a\xcc\
\x18\x57\x70\x19\xce\x45\x3c\xce\xe6\xa8\x60\x31\xbc\x52\x66\x76\
\x9b\x13\x85\xc8\x8b\xba\x1e\x7c\xaf\x57\xb0\xdc\x4e\x53\x88\x48\
\x31\x30\xc4\xcb\x26\x89\x2a\x8b\x0b\xf7\xa7\x8d\x95\x76\xb4\x70\
\xf8\x36\xb6\x78\x33\x30\x16\xce\xc6\xe7\xe9\x4c\x8d\xb2\x05\x88\
\x15\x03\x64\x60\xb6\xbf\x63\xde\x80\x99\xd4\x5a\x48\x23\x24\x6e\
\x11\xac\xdf\x4a\x15\x9b\x0f\x22\xb2\x34\x99\x3a\xac\x9a\x11\x5d\
\x3a\x64\xc6\x24\x9d\x77\xef\x40\xc6\xe7\xd8\xeb\x4a\x93\x17\x1a\
\x49\x91\x88\x91\x46\xce\xa7\x5f\x9d\x13\x9d\x99\x4e\x65\x61\x71\
\xde\x8d\x38\x96\x6a\x02\xd5\x04\xdf\x9d\xbb\x50\xb9\xb7\x3b\x50\
\x5c\xcd\x50\x6a\x2e\x2f\xa0\xbf\xb5\x46\x60\x76\xbe\xb4\x24\xeb\
\x7a\x82\x58\x5b\x4b\x51\xc8\x22\x8d\x7e\xf1\x82\x9e\x97\xb9\xa8\
\xc3\x78\x53\x12\x15\xce\x6e\x4a\x74\xbd\x48\x71\x5b\x2e\x9b\xf3\
\xa3\x03\x5a\x0f\x0b\x5d\xed\xef\x46\xa2\xca\x05\xef\x4a\x1a\xd3\
\x12\x82\x31\x4c\x02\xd5\xa8\xc8\xd2\x98\xf1\xac\xcb\x95\xf4\x3c\
\x9b\xa5\x29\x69\xe9\xbd\x69\x33\xe6\x8d\xe2\x72\x8e\x35\x1f\x9d\
\x08\xad\x39\x23\x59\x93\x23\xef\xf0\xb7\x4a\xce\x91\x19\x24\x28\
\xc0\x82\x0d\x62\xcc\x32\xe9\xae\xb9\x4f\x87\xbd\x8e\xa4\x9d\xcf\
\x33\x5c\x05\x1c\xdf\xe7\x37\xb9\xb5\x0d\xe9\x02\x53\x70\x15\xaf\
\x61\xb7\x51\xdc\x53\x16\xec\x72\x9f\x58\x17\xb0\xd9\x87\x51\xfc\
\x52\x81\xa9\x92\xed\x87\x70\x2f\x75\x19\x92\xdb\x83\x4c\x47\x29\
\x37\x16\xde\xb5\x0c\xf1\x05\x52\xee\x01\x20\x68\x35\xaf\x3f\x06\
\x2b\x17\x31\x11\xa1\x05\x8f\xc5\x6d\x7e\xb5\xbd\x87\x89\x61\x85\
\x50\x85\x66\xb0\xcc\x6d\x7a\xd7\x1b\xa2\x9a\x45\x8e\x86\xa4\x5c\
\xd2\xd4\xaa\x7d\xd9\xd1\x6f\xe4\x27\x6f\x6a\x60\x16\xde\xb6\x04\
\x2a\x31\x04\x58\x44\x0d\x8b\xee\x7a\x0a\x0c\x54\xf1\xe1\x94\x67\
\xbb\x3b\x0b\xac\x60\x6a\x47\x5e\xd5\x9d\x36\x26\x69\x59\x89\x01\
\x2e\x2c\x15\x4d\xc9\xf7\xaa\xf2\x90\x48\x77\x10\x99\xb1\x58\x85\
\x82\x1b\x95\x1a\x0e\xe6\x9b\x0c\x71\xe1\x90\xc6\x08\xb9\x1e\x66\
\xeb\x4b\xc1\xc0\x62\x87\x3b\x2b\x09\x1f\x6e\xc2\x98\xca\xbb\x9d\
\x7d\xe8\xff\x00\xa5\x4b\x8a\x80\x31\x02\x40\x2c\x24\x17\xf9\xf3\
\xaa\x97\xd6\xb4\x31\xe9\x9f\x0d\xa0\xd6\x33\x7f\x95\x67\x1d\xeb\
\x1c\xbd\x31\xa6\x5b\x32\x2b\x8f\x89\x45\x2c\x92\xca\x6d\xa5\xff\
\x00\x3a\x5e\x0d\xf3\x41\x97\x9a\x7e\x94\x43\xa0\x36\xa8\x16\x49\
\x36\xb8\xd3\xad\x14\x7a\x4a\xa7\xa1\xa2\x2b\xe4\xca\x35\xcd\xce\
\xb8\x47\x76\x00\x54\x75\x8f\xc4\x6e\xb8\xa6\x70\x80\x17\x21\x9b\
\xb9\x06\xa8\xe2\x23\x9d\x65\xf1\xf0\xec\xc7\x5b\x94\xbd\xed\xf2\
\xad\x4e\x2d\x22\xc9\x8a\x7c\xa3\xc9\x1a\xe4\xbd\xb7\x6e\x75\x44\
\xe6\x04\x10\x6c\x6f\xbd\x72\xe5\x3b\x6e\x22\x71\x63\x72\x02\x92\
\xb7\x20\x72\x34\xce\x16\x10\xe3\x61\x32\x80\xc9\x9b\x51\x4b\x75\
\xcf\xb9\xf9\xd0\x46\x5a\x39\x01\x53\xa8\x34\x79\x53\xd1\x4a\x49\
\x94\x8e\x9a\x1d\x2d\x51\x63\x41\x81\xc4\x2e\x2e\x30\x09\x02\x65\
\x1a\x8b\x7a\xbb\xd3\xc2\x9b\x5f\x61\xde\xbb\x30\x55\xad\xca\x89\
\x41\x27\x4a\x87\x96\x04\xde\x4c\xc7\xa2\x8b\xd5\x79\xb1\x2e\xc3\
\x2c\x63\x20\xeb\xce\x8d\x5d\xac\xcb\x24\x71\x03\x9d\x81\x6f\xc2\
\x37\xaa\x38\xdc\x43\x34\x2d\x29\x4c\xd9\x48\x01\x6f\xa0\x14\x36\
\x24\xde\x89\x55\x4a\x95\x7f\x4b\x0b\x1a\x2d\xb4\xe4\x84\x47\x39\
\x91\x6e\x30\x8f\x6e\x45\x4d\xe9\x18\x99\x64\x98\x18\xa3\x85\x94\
\x73\x16\xb9\xad\x08\xd4\xa4\x69\x11\x36\x2a\x2c\xa4\x1d\x1b\xfb\
\xd4\xb1\x6b\xdc\x92\x1b\x6b\xf3\xac\xe6\xc3\xaa\xb6\x26\x24\x2e\
\x2c\xc5\x7c\xc2\xd5\xd1\xa3\x19\x02\xa0\x27\x31\xb5\xa9\xae\x09\
\x37\x26\x9f\x80\x0a\xaa\x64\xb7\x9a\xf6\x14\xc8\x8a\xe2\x31\x24\
\x18\x35\xcd\xe6\x7f\x13\x4b\x8d\x2f\x6a\xa9\x81\x91\xd7\x88\x46\
\xd9\x89\x25\xad\xa9\xab\xdc\x6e\xc7\x07\x10\x36\xf5\x1a\xad\xc1\
\xe2\x0f\x8a\xf1\x18\x79\x62\x19\xbe\x7c\xa8\xbf\xec\xa7\x8b\xd2\
\x8c\xb2\x30\x1b\x03\x42\x68\x98\xdd\x89\x3c\xe8\x6b\xa0\x41\xa1\
\x22\x8c\xd4\x11\x59\x40\xcb\xda\xba\xdd\xa8\xab\x8e\xd5\x24\x04\
\x8e\x55\xf0\xa4\x24\x03\xcc\x72\xd2\xb3\x71\x11\x15\x95\xa3\x60\
\x04\xa9\xb8\xe4\xc3\xa8\xad\x2a\xab\xc4\xcf\xdf\xea\xc4\x1d\x19\
\x1b\xa7\x6f\x6a\xcf\x29\xd1\x8c\xf2\x29\x7c\x62\x31\x2f\x08\x12\
\x58\xe6\x81\xec\x35\xe4\x6a\xd4\xaa\xa4\x16\x5d\x34\xb9\x43\xb8\
\xfe\x47\x71\x4a\xc5\xdb\xff\x00\x47\xc4\xdc\x1b\x1c\xb6\xb8\xef\
\x5c\xec\xe8\xb3\xb8\x3b\x5e\x39\xe3\xfe\x90\xdf\x4a\xb2\x8c\xa5\
\x32\x48\x09\x5b\xdc\x58\xd8\x83\xd4\x56\x7e\x12\x53\x06\x21\x64\
\x1b\x0d\xfb\x8a\xd0\x9d\x02\xbf\x94\xf9\x48\xba\x9e\xd5\x89\x7a\
\x6a\xb4\xe3\xc7\x4c\x70\xba\xb2\xbb\x47\x6b\x31\x1e\xa5\xeb\xd8\
\xd5\x59\x1c\xc9\x31\x77\xe6\x6e\x6d\x4e\xe0\x20\x3a\x4e\x8e\x33\
\x28\x50\x6d\x56\xc4\x38\x61\xa8\x83\x5e\xec\x48\xae\xb3\x6c\xd6\
\x4a\x9a\x2c\x33\xe1\xcb\x40\x09\x60\x34\x02\xe4\xfc\xc5\x27\x0d\
\x8a\x68\x87\x87\x22\x66\x5e\x9b\x11\x57\x73\x90\x00\x5f\x28\x1b\
\x05\xd0\x0a\x09\x95\x66\x89\xfc\x45\x04\xaa\x92\x18\x0d\x69\xb3\
\xf8\x80\xb8\x8c\x33\x9d\x59\x90\xf5\x23\xf8\xae\x02\x37\x92\xd1\
\xb7\x88\x6d\x7d\x36\x1e\xe4\xd5\x48\xb0\xd3\x49\x1e\x74\x8c\x95\
\xeb\x57\x30\x51\x34\x38\x76\x0e\xb6\x77\x23\x9e\xb6\xa2\x5b\x7d\
\x54\x72\x00\xab\x72\x83\x2f\x36\x46\xbd\xbd\xc5\xab\x92\x10\x40\
\x28\x33\x03\xb1\x06\xf7\xa3\x8c\x95\x37\x06\xb9\xa2\x85\xbe\x12\
\x87\xf1\x21\xb5\x6f\x02\xb6\x32\x0f\x1b\x1a\xa9\x18\xf3\x10\x33\
\xdb\x61\x56\x06\x1a\x0b\x04\x55\x0a\x57\xd3\x27\x3b\xd3\x22\x48\
\xe2\x8f\x24\x40\xd8\xfa\x98\xee\xd4\x56\xaa\x45\xa8\x61\x71\x98\
\x80\x1a\xf9\x5c\x03\xb1\xa5\xa6\x6c\xf6\xb0\xbf\x7a\xb1\xb8\xcc\
\x6e\x54\x8b\x35\xb5\xb5\xb6\x3f\x4b\x83\x5c\x22\xb9\x00\x26\x6b\
\xec\x46\xb7\xfa\x53\x80\x2a\xa7\xf1\x0f\xa5\x15\xb5\xb5\x19\x8e\
\x45\x5b\xb2\xb0\x1f\xd4\xbf\xbd\x42\x03\x7b\x90\x05\xb6\xad\x27\
\x20\xa6\xa0\xa8\x51\x46\x2a\x16\x8d\x57\x4a\x4f\x10\x8e\xea\xb3\
\x00\x09\x0a\x46\xbc\xcf\x2f\xd6\x9e\x9b\x0a\xec\x55\x86\x02\x52\
\x76\x16\xf9\x6b\x50\x8c\xcc\x44\x8d\x98\xe4\x5b\xd8\x74\xd8\x54\
\x07\xba\x86\x1c\xe8\x1c\xb2\x5c\xdb\x51\xb8\x35\x03\x45\x03\xe7\
\x58\xd6\xf0\xd5\x6a\x34\x62\x0d\xc1\xd6\x90\x0d\x1a\xb6\xb5\x6a\
\xc5\xcc\x10\xf1\x27\x55\xb0\x00\x9b\xb5\x85\x69\x06\xb9\xbd\x53\
\xc0\xc7\xe1\x45\x99\xbd\x6e\x3e\x82\xac\x29\xae\x93\xa8\xcd\x3d\
\x4e\x96\x3a\x83\xc8\xd2\xf1\x52\xb6\x16\x0c\xf1\xc8\x00\x26\xca\
\xac\x2f\x63\x52\xa6\xa9\xf1\x32\xd3\x71\x05\x81\x6e\x42\xd9\x40\
\xef\xce\xb5\x6f\x41\xd8\x3c\x3c\x98\x96\x32\x3b\xb6\x52\x7c\xce\
\xdb\xb1\xab\xf0\xc7\x0c\x2b\x68\xd0\x7f\xa8\xee\x6b\x88\x54\x02\
\x34\x16\x55\xd0\x5a\xa2\xe6\xa9\x31\x22\x46\xcb\xa1\xbd\xb9\x1a\
\x02\x74\xa9\x90\xe9\x40\x08\xd8\xde\xfb\x55\x52\x01\xf3\x6a\x2e\
\x0e\xe2\xb3\x71\x91\x18\xa6\x2a\x76\xdc\x1e\xa2\xb4\x89\xd7\x63\
\x49\xc5\xc4\x26\x87\x2f\xc6\xba\xaf\x7e\xd5\x9b\x34\xc6\x7c\x32\
\x98\xa5\x0e\x35\x1b\x11\xd6\xae\xdd\x4a\x86\x53\x75\x3b\x56\x71\
\xd0\xdb\xa5\x1e\x12\x73\x11\xca\xd7\x28\x77\x1d\x2b\x12\x9b\x17\
\xc6\xa2\xa5\xe5\xf0\x20\x79\x88\xcd\x94\x69\xf3\xa1\x5b\x10\x1d\
\x4d\xd4\xd2\xe7\x99\xfc\x76\xc2\xb2\xab\x2b\xec\x6d\xaf\x6a\xd0\
\x8c\xb9\x41\x22\xda\x79\x18\x83\xae\xf7\xd4\x1f\xce\x94\xa3\x3b\
\x65\x22\xc0\x9d\x7b\x55\x8c\x4c\x18\x88\x03\x09\xf5\x77\x6c\xd7\
\x1b\x6d\xa5\x20\xdc\x6a\x2d\x5c\xab\x6d\x75\xe1\xd8\x45\xb4\x6d\
\x03\x35\xc7\xac\x36\xf5\x9d\xc5\xf0\x70\xe0\x98\x66\x72\x43\x6a\
\xa2\xda\xfc\xe8\xb0\xf8\x9c\x42\x59\x44\xec\x14\x6b\x60\x6b\x37\
\x13\x3c\xb3\xc9\x9a\x59\x19\xcd\xf9\xd3\xcb\x94\xcf\x04\x94\xc8\
\x31\xd2\xc2\xe1\xa1\x0a\xb6\xfe\x9b\x9a\xd4\x79\x8e\x22\x24\x9e\
\xe4\x86\x1a\x8b\xec\x79\xd6\x22\x29\x76\x0a\xa2\xe4\xec\x05\x6d\
\x61\x22\x68\x70\x29\x13\xfa\xae\x49\x1d\x2f\xca\x8e\x16\xd3\x62\
\x10\x86\x17\x15\x36\x14\x4a\x8a\xa6\xe0\x57\x11\x5b\x64\x26\xf5\
\xd5\x35\xd6\xa9\x39\x4e\x99\x48\x0c\xa7\x91\x1b\xd1\x7c\x36\xb9\
\x60\x36\x3f\x10\xf7\xeb\xef\x43\x6a\xed\x8d\x48\x2e\xba\x66\x04\
\x10\x76\x20\xdc\x1a\xb1\x82\x17\xc3\x9e\xaa\xd4\xab\x82\x6e\x49\
\x04\xee\x47\x3f\x71\xb1\xa6\xe0\x0d\xa7\x31\xb6\x50\x59\x74\xe8\
\xdd\xc5\x53\xd5\x7c\x27\x8e\x1f\xf8\x58\x56\xda\x96\x26\xbb\x85\
\x47\x93\x04\xd2\x5f\x59\x1a\xdf\x4a\x57\x18\x94\x4d\x89\x58\xa3\
\x20\xaa\x0b\x0f\x7a\xbc\x23\xf0\xa1\x48\x41\x07\x22\xd8\xfb\xd5\
\x3f\xdb\x57\xc2\xce\xf5\x07\x6a\x93\x73\x7d\x28\x4e\xf4\xd4\xeb\
\x9a\xed\xeb\xaa\x39\x50\x9c\x4d\x09\x35\x26\x84\xd4\x90\x5b\xa5\
\x56\xe2\xba\xf8\x6d\xd5\x48\xab\x06\x91\xc5\x01\xf0\x22\x16\x3a\
\xdc\xd6\x6f\x85\x55\x1b\x41\x1b\x6c\x0d\xc1\xe6\x3b\x8a\x2c\x52\
\x99\x70\x18\x88\x55\x7e\xf1\x6d\x70\x36\x24\x6b\xa7\xb8\xa4\xb0\
\x24\xe5\xda\xd4\xd4\x6b\x62\x1a\x3b\xe5\x91\x95\x5d\x0f\xca\xd6\
\xac\x42\xc0\x65\x60\xcd\x71\x6d\x76\x3b\xd6\x86\x05\xbc\x5c\x0d\
\x89\xf3\x42\x6d\xff\x00\x49\xab\xf3\x47\x06\x20\x9f\xb4\xc1\x67\
\xe6\xe9\xa1\xfa\x50\x61\xb8\x77\x85\x24\x86\x09\x44\xa8\xe8\x46\
\x5d\x9a\xfc\xb4\xac\xce\x16\x53\xa7\x70\x01\xac\xe7\x96\x4f\xde\
\xae\x1d\xa9\x5c\x26\x16\x8b\x0b\x23\x3a\x95\x32\x10\xa0\x11\x6d\
\xa9\xc4\x76\xae\xb3\xc0\x00\x09\x36\x02\xf4\x38\xb7\x10\xe1\xdf\
\x31\xb3\xb8\xb0\x1c\xed\x5d\x88\x38\x87\xc5\x88\x22\x93\x22\xb2\
\x06\xf6\x16\xa1\x9f\x06\x83\x0e\xef\x79\x0b\x28\xbe\x76\xd8\xf6\
\xab\xff\x00\x01\x18\x2c\x48\x48\xc4\x72\x29\x28\x35\x04\x6e\x2a\
\xca\xcb\x86\x94\x12\x98\x80\x00\x3f\x10\xb5\xeb\x39\x04\x92\x9f\
\x06\x25\x2c\x58\xed\x6a\xb6\x9c\x39\x96\xe0\xcc\x99\xbf\x09\x16\
\xfc\xeb\x32\xdc\x2b\x48\x84\x0c\xc0\x82\xa7\x40\x41\xbd\xe8\xd4\
\x69\x4b\xc2\x44\xd0\x21\x8d\xca\x92\xc7\x37\x94\xdf\x91\xa6\xd6\
\xe0\xa9\x14\x4c\x52\x38\xcc\xb2\xb6\x54\x1c\xed\x72\x7d\xaa\x10\
\x28\x53\x24\x87\x2a\x2f\xa8\xfe\xd5\x99\x8b\x9e\x4c\x5e\x23\x41\
\xa0\x36\x45\x1c\x85\x56\xe0\x5b\x6e\x26\x43\xe5\xc2\xc0\x2f\xc9\
\x9b\xcc\x4f\xca\x8a\x57\xc7\xaa\x91\x24\xb9\x21\x91\x6f\x90\xb8\
\x56\x53\xda\x9f\x84\xc3\x47\x83\x50\x02\x86\x9b\x76\x72\x3d\x3d\
\x85\x76\x35\x4c\xd8\x62\xbe\xa6\x53\x98\x5c\xdf\xde\x9c\xb9\xda\
\x26\x0c\x37\x14\x8c\xf8\x89\x88\x1a\x8f\x4f\x8b\x76\xb7\xb5\x1a\
\x63\xe6\x89\xc4\x78\xc8\x73\x10\x6c\x58\xe8\xc3\xf9\xa6\xc3\x89\
\x56\x44\x56\x8e\x42\xcb\x60\xc5\x75\xfc\xaa\xc3\x2c\x73\xc6\x52\
\x4b\x4a\x9f\x46\x5a\x64\xfe\x55\xbf\xd4\xc2\x63\x9d\x73\x41\x20\
\x71\xd0\xe8\x47\xca\xa4\x82\xa6\xc4\x5b\xde\xa9\x62\x78\x6c\x89\
\xf7\x98\x66\x2e\xa0\x7b\x30\xae\x83\x88\x4b\x1d\x93\x10\x9e\x22\
\x8e\xba\x30\xf9\xd5\xbf\xd1\x9f\xc5\xe5\x26\x97\xc4\xcf\xfc\x1a\
\xc2\x0f\x9a\x76\x20\xff\x00\xa7\x99\xa6\x46\xd1\xcd\x1e\x78\x5b\
\x30\x03\x51\xcd\x6a\xb7\x13\x90\x7d\xa5\x23\x3b\x04\x28\x18\xf2\
\x6b\xde\xd4\xdf\x14\xf5\x51\x81\x92\x14\x63\x60\x59\x75\xb7\x22\
\x34\xa5\xc4\xc4\xa5\xb2\x93\x97\x4d\x28\xf1\x09\x96\x49\x63\x24\
\x8b\x36\x70\x0e\xe0\x1f\xef\x53\x83\x86\x59\x17\xd2\x04\x77\xf5\
\x1a\xe7\xf5\xa4\x00\x4f\x23\xfa\xd5\xdc\x16\x16\xd6\x92\x51\x71\
\xb8\x5e\xbe\xf4\xdc\x2c\x70\x44\x40\x52\xa2\x53\x7b\x16\x3a\xff\
\x00\x6a\x64\x88\x54\x16\x98\x65\x03\x76\x23\x5a\xdc\xe2\x2d\x33\
\x52\x46\xd7\x3b\x0a\xec\xc0\x10\x0d\x66\xcf\x36\x69\xfc\x41\x7c\
\xab\x6c\xa7\xb7\x7a\xd1\x23\xc4\x54\x90\x69\x9d\x73\x53\x28\xc3\
\x90\xf4\x35\x4e\x56\xf0\xf8\xe1\x3c\x84\xb5\x66\x25\xd4\x29\x27\
\x53\xb5\x66\x62\x8e\x6e\x2e\xcd\x60\x4f\x8a\x36\xe7\xad\x36\xf4\
\xa3\x66\x4d\x24\x60\x39\x1a\x8b\xe9\x43\x35\x8c\xcf\xa0\xdc\xf2\
\xa8\x0c\x47\x7f\x9d\x6b\x42\x0b\x5c\x91\x44\xbc\xe8\x52\xc4\x74\
\x23\xad\x70\x37\xed\x6d\xe8\x48\x7e\x7d\x28\x2f\xa5\xea\x64\x36\
\x5d\x4d\x46\xf4\x54\xa9\xc4\x60\xce\xa6\x64\x1a\x8f\x58\x1f\xad\
\x51\xb0\xad\x70\x1b\x3d\xd4\x12\x7b\x55\x7c\x56\x0f\x31\xbc\x6b\
\x91\xb9\xa9\xe7\xed\x59\xb3\xeb\x52\xaa\x61\xe5\x78\x49\xca\x6e\
\x0e\xe0\xec\x6a\xdc\x13\x47\x26\x24\x16\x21\x18\xc7\xe5\xbe\xdb\
\xf5\xaa\x45\x48\x36\x3b\xd4\xb0\x6f\x23\x80\x4e\x4b\x86\x03\x7b\
\x1a\x25\xa6\xb4\x58\x39\x1e\x19\x4c\xea\x77\x04\x5c\x52\xfe\xc3\
\x85\x63\x7f\xb2\xb0\x3c\xf2\x35\x87\xd2\xa8\xea\x23\x0f\x1c\xbe\
\x5e\xa0\xfe\xa2\xa0\x4f\x23\xe8\x5d\xaf\xef\x57\xea\x7d\x18\x6e\
\x22\x0c\x34\x2e\x55\x73\x3d\xc5\x8f\x9b\x6a\x54\x71\xe1\xd1\xc1\
\x8f\x0a\xa0\xff\x00\x56\xb5\xc0\x51\xa8\xd2\x82\x2c\xc4\x9d\x95\
\x40\xfc\x20\x54\x85\x5b\xfa\x45\x08\xa9\xbd\x20\x56\xf7\xfa\xd4\
\x1f\x7a\x8b\xd7\x6a\x6a\x4e\x3b\xee\x3e\x95\xd6\x3d\x45\x4d\xab\
\xaa\x48\x1b\xdb\x63\x5c\x74\xa9\xde\xba\xc7\xb1\xa9\x20\x94\x48\
\x8c\xb2\xb6\x54\x1c\xfa\xd5\x29\xf8\x89\x91\xbc\x28\x07\x86\x3e\
\x16\x27\x5b\xf7\xae\xe3\xce\xde\x2c\x71\x0d\x15\x50\x1b\x77\x34\
\xee\x19\x86\x8e\x2c\x32\xe2\x24\x50\x59\x86\x60\x4f\xc0\xbd\x7d\
\xeb\x3b\x6d\xc8\x73\xad\x3b\x04\xb0\x9c\x42\xe2\x66\x5f\x0f\x38\
\xcc\xaa\xdb\x5e\xad\xcb\x7c\xc6\xfb\xd2\xf0\x91\xc6\xf0\x09\x64\
\x19\x9f\x31\x39\x49\xd1\x6f\xa8\xa6\x4a\xda\x92\x6b\x70\x7d\x2f\
\xe1\x14\x24\x54\xee\x2f\xb0\xae\x53\x7d\x41\x20\x76\xa5\x04\x8e\
\xd4\x07\xc4\xbe\xc2\xdd\xa8\xc9\x39\xf2\xef\x50\xc6\xc4\x0e\xb4\
\x20\x9a\x13\x46\x68\x1b\x40\x4f\x4a\x10\x4e\xa6\xf5\x53\x8a\x13\
\x26\x24\x22\x7c\x20\x00\x07\x33\x57\xa1\x1e\x23\x6a\x3c\xab\xa9\
\x17\xde\xa8\x46\xc1\xb1\x99\xdb\x7b\x97\xfa\x6b\x59\xe4\xd4\x2a\
\x00\x7c\x67\x48\x99\x57\xc2\xb6\x79\x48\xbe\xbd\x05\x0e\x23\x06\
\x26\x97\xc4\x8f\x16\xc5\xfa\xca\x2c\x4f\xcc\x69\x4f\x2b\xe1\x40\
\xa9\x6b\x34\x87\xc4\x93\xdc\xf2\xa8\x55\xbe\xf5\x9c\xeb\xb4\x98\
\xe3\x9f\xc1\x3f\x68\x42\x19\x3d\x2e\x35\x0c\x2a\xc7\x0b\x4f\xf3\
\x24\x3f\x08\xb0\x3d\xea\xae\xe0\xe5\x1b\x75\x35\xa3\xc3\x93\xfe\
\x04\x58\x80\x59\x89\x60\xcc\x05\xad\xa5\x6a\x0a\x8c\x6e\x21\x20\
\x11\x19\x95\x9f\xc4\x04\xe6\xcd\xa8\xd6\xd4\x50\xaa\x4e\xb9\xb0\
\xce\x24\xed\xb3\x0f\x95\x27\x8b\x64\x64\x80\x85\x0f\x95\x4f\x99\
\x9b\x2a\x03\x7f\xce\xa9\x78\xe1\x08\xfb\xc7\x62\x0e\x9e\x19\xc8\
\xa3\xdb\xad\x57\x96\x5e\xd6\x74\x6f\x13\x90\x8c\x68\x31\xbd\x8a\
\x28\x17\x1d\x45\x36\x18\x04\x98\x70\xf8\x89\x24\xcd\x26\xa3\x5d\
\x87\xb5\x57\x82\x07\xc6\x67\x95\x0a\xa9\x07\xcc\x09\xb7\xce\xb4\
\x25\x3e\x7d\x35\xb0\x00\x77\xa2\x77\xda\x56\xc0\xe1\xcc\x12\x34\
\x99\xd7\x2e\x56\x02\xc7\x52\x4d\x34\x0f\x95\x48\x50\xa2\xc3\xf3\
\xa1\x93\x53\x6e\x94\xe6\x21\x8b\x16\xd3\x97\x3a\x62\x85\x08\x5e\
\x46\xca\x8b\xbb\x54\x44\xaa\x22\xcf\x21\xca\x8a\x35\x35\x43\x15\
\x34\xb8\xe9\x84\x50\xa1\xc8\x0d\x95\x47\xea\x69\xb7\x10\x71\xd8\
\x97\xc5\x4a\x23\x8d\x48\x8c\x1b\x22\x0e\x7d\xeb\x43\x87\xe1\x57\
\x06\xa1\x9e\xc6\x73\xff\x00\xd3\xfb\xd1\x61\x30\xf1\xe0\xd0\x65\
\x0a\xf3\x73\x7f\xc3\xd8\x53\x54\x5c\xdc\xd3\x38\xfd\xa3\x5c\x54\
\xb0\xef\xde\xa7\x0c\x72\xb9\x16\xbe\x61\x6a\x34\x5a\x60\x5c\xdf\
\xcd\x68\x6b\xa2\x51\x1d\xfc\x24\x09\x7d\xed\xbd\x0e\x25\x0a\xa3\
\x4e\xac\x55\x97\x9f\x5e\xd4\xe8\xc1\x2a\x6f\xb8\x36\x35\x18\x98\
\xcc\xb6\x86\xf6\x55\x37\x76\xfd\xaa\xc4\x4e\x0f\x16\x24\x60\xae\
\xb6\x63\xa0\x75\xda\x9b\x8a\x82\x2c\x52\xda\x51\x67\xd6\xce\x34\
\xd7\xbd\x28\x60\xa3\x59\x96\x45\x9a\xca\x1a\xe4\x15\xd4\xd5\x96\
\x39\x98\x9b\x6e\x6f\x54\xdc\xed\x7f\xe3\x28\x24\xfc\x36\x76\x95\
\x80\x20\x21\x3d\x43\x0a\x87\x03\x11\x11\x64\x24\xa4\xea\x4a\xf6\
\x3d\x3d\xeb\x47\x89\x46\xb3\x70\xf3\x03\xb1\x1e\x23\x59\x48\xe5\
\xfd\xab\x0b\x86\xca\xd8\x4c\x63\x61\xa5\xd1\x59\xac\x6f\xf0\x91\
\xb1\xac\x5e\xae\x19\xda\xc7\x09\x55\x9e\x73\x1c\xce\x49\x78\xd4\
\xa9\x36\xb1\xb0\x1e\x5b\xf5\xb8\x3f\xec\x56\x8a\x07\x13\x08\x94\
\x05\x7c\xb7\xb7\xe0\x15\x93\x82\x85\x5e\x18\x62\x11\x96\x77\xb3\
\x02\xbb\x8d\x6b\x68\x44\x90\xca\xcc\xac\x59\x88\xb1\xd0\x00\x4f\
\x33\x4f\x1f\x15\x23\x11\x83\x52\xb7\x80\x5c\x8d\xc1\x1a\x9e\xe2\
\xaa\x9f\x14\xb0\x46\x2e\xdc\xac\x4e\xbf\x4a\xd1\x04\x8a\x89\x50\
\x4c\xca\xf9\x8a\x48\x9b\x35\xaf\x7a\x6c\x12\xa9\x34\x6d\x1d\xbc\
\x45\x2b\x7f\xc5\xa5\x68\x70\xe9\x56\x4c\x3a\x44\x74\x74\x04\x2d\
\xf9\x8a\x5e\x26\x19\xe6\x20\x2f\x85\x65\xd7\x43\x6b\x9e\xba\xd2\
\xf0\x0a\xc9\x8f\x8d\x58\x10\x43\x73\xa6\x75\x53\x46\x1f\xf3\x54\
\x9d\x81\xbf\xd2\xb1\x61\x19\xf1\xe0\xa9\xd1\xa5\xba\xdf\x90\xbd\
\x6c\xc2\x7e\xf5\x7d\xed\x58\xa2\xe9\x8e\x00\x5b\xcb\x26\x97\x36\
\xb6\xb5\x72\xf8\xa3\x66\x7d\x66\x6f\xf5\x1a\x85\x15\x33\x5f\xc5\
\x7d\x47\xa8\xf2\xef\x43\xaf\x61\xf9\xd6\x80\xd4\x29\x2c\x58\x79\
\x46\xa7\xe4\x2b\x29\xb8\x96\x20\x4c\xe2\x30\x42\x83\xa0\xab\x9c\
\x4d\xcc\x7c\x38\xda\xf7\x76\x00\xd6\x41\x3a\xdf\xeb\x58\xe5\x73\
\xc6\xa4\x5f\x87\x88\x2c\x8d\x7c\x4c\x36\xe4\x0a\xff\x00\x15\x2f\
\xc4\xa0\x53\x95\x61\x76\x3f\xd4\x6c\x2b\x38\x11\xf8\x87\xd6\xb8\
\xe6\x3b\x58\x0e\x57\xa3\xf5\x4e\x43\xf1\x38\xcc\x44\xd7\x19\xb2\
\x2f\x45\xd2\x82\x1c\x6e\x2a\x11\x65\x94\x95\xfc\x2d\xa8\xa0\xe5\
\xb8\xbf\xb5\x5f\xc0\xe1\xa2\x38\x41\x23\xc6\x24\x67\x26\xdd\xa8\
\x9b\x6a\xb9\x0b\x18\xcc\x36\x20\x85\x99\x7c\x37\xfc\x43\x6a\x39\
\x30\x6e\x14\x34\x6c\x24\x07\xf0\x9d\x68\xce\x03\x0d\x20\xb9\x56\
\x8c\xff\x00\x4e\xb4\xb5\xc0\xe2\xf0\xe7\x3e\x12\x51\x27\x60\x6c\
\x7e\x9c\xeb\x5d\xfd\x1d\x2b\x4f\x01\x2d\x9d\x33\x2c\xab\xea\x1b\
\x67\x14\xa5\x4c\xc7\x32\x03\x6b\xfd\x3d\xea\xf2\xe3\xca\x9c\x98\
\xbc\x31\xb8\x3b\x81\x63\x53\x2b\x60\xe6\x91\x1e\x19\x32\x3b\x1c\
\xad\x71\x6c\xda\x6d\x46\x45\xb5\x5d\x52\xdc\xc9\xf7\x34\x40\x5a\
\x9c\xf8\x59\x46\xa9\x67\x07\x9a\x9a\x53\xab\x21\xb3\x29\x1e\xf5\
\x62\x45\x75\x45\xeb\xaa\x4e\x22\xe6\xa4\x57\x0a\x9a\x92\x40\xae\
\xae\xae\xa5\x3a\xdd\xab\xad\xad\x4d\x75\xaa\xc4\xa7\xc7\x63\xce\
\xb1\x4e\xa2\xc4\xae\x46\x37\xe6\x36\xd3\xfd\xef\x4d\xc1\xb1\x97\
\x0e\x80\x90\x52\x15\xd1\x6d\xcc\x31\x02\xff\x00\x91\xa7\x49\x1f\
\x8f\x87\x93\x0f\xcd\x85\xd7\xdc\x55\x6e\x1e\xcc\x85\xe0\x73\x6c\
\xe8\x96\x1c\xb3\x6a\x7e\x5a\x00\x2b\x39\xda\xf8\xbd\xc3\xc9\x2b\
\x2a\x12\x2f\xa3\x0b\xf3\xa2\x90\x5c\x58\xfe\x54\x8e\x1c\xc0\x63\
\x2c\x7e\x20\x54\x55\x87\xb1\xad\x4f\x05\xf5\x5d\x94\x96\xcb\x5c\
\x17\x21\x1a\xe8\x69\x84\x73\x1b\xfe\xb5\x04\xa8\x4c\xee\xe1\x54\
\x73\x35\x12\xda\xec\xd7\x5e\x5b\x9a\x9c\xba\xdc\xb5\xcd\x4c\x6f\
\x0c\xa4\x84\x97\x6e\x45\x6d\x45\x94\x13\x65\x74\x24\x6e\x2f\x6f\
\xd6\xa4\x59\x15\xd9\x46\x52\x58\x85\x51\xb9\x3b\x53\x0c\x4d\xb8\
\x1a\x75\xbe\x9f\x5a\x8b\x2b\x1b\xe8\xc2\x2d\x06\xb7\x05\x8d\x58\
\x95\xe4\x56\x4c\x24\x8c\x8c\xb6\x60\x00\x21\xc0\x1a\xf3\xfc\xaa\
\x9c\x30\x9c\x99\x94\x11\x9b\x43\x2b\x68\x00\xe7\x6e\xb5\x7f\x1f\
\xe4\x81\x5d\x15\x43\x33\x58\xf9\x46\xba\x55\x2b\x97\x62\x59\x8b\
\x1e\x75\x8e\x53\xb3\x13\x29\x0f\x29\x61\xb6\xc2\x84\x81\x6d\x6a\
\x76\xae\x35\x92\x18\x90\x34\x81\x54\x1b\x13\xa8\xbe\xf4\xec\x04\
\x0a\xd2\x33\x38\xba\xc5\xa1\x1d\x5b\xff\x00\x34\xdc\x36\x1c\xa6\
\x11\xe7\x26\xcd\x94\x95\x16\xe5\x56\x32\x2c\x68\xb1\xa0\xd2\xc0\
\x93\xd4\x91\x5b\x9c\x46\x93\xc4\xd0\xe2\x38\x79\x03\x78\x9b\x30\
\x00\x72\xac\x60\x3a\x0a\xf4\x11\xb1\x57\xb8\x17\xb6\xe3\xf6\x35\
\x9b\xc5\xb0\x9e\x0c\xbe\x2a\x29\x31\xbe\xa0\x9e\x47\xa5\x1c\xf8\
\xfd\x52\x87\x82\x39\x18\xf5\x8c\x1b\x2c\xbe\x53\xde\xae\x85\x02\
\x52\x3a\x0e\x75\x9b\xc3\xe3\x69\x31\xf1\x22\x9b\x12\xe3\x5a\xd8\
\x97\x2b\x48\xc5\x45\x81\x26\xd5\x70\x9d\x2a\x55\xaa\x5a\x35\xf0\
\xcc\x8d\xe5\x55\xdc\xd1\x48\xd1\x41\x18\x92\x72\x40\x3e\x95\x1b\
\xb5\x51\x69\x26\xe2\x38\x85\x88\x2d\x90\x1d\x15\x74\x0a\x29\xb4\
\x47\x4c\xd3\xf1\x0c\x42\xc3\x12\x15\x8c\x6c\x39\x7b\x9a\xd0\x82\
\x38\xf0\xb0\xf8\x50\x9d\x4f\xad\xc6\xed\xfd\xaa\x62\x54\x81\x0c\
\x30\x8b\x20\xe7\xcd\xbb\x9a\x9b\x53\x26\x76\x82\x06\xb4\xd8\xd7\
\x95\x42\x8b\x53\x10\x56\x85\x12\x8d\x6d\x4c\x41\x50\x83\x4a\x35\
\x14\xc0\x9b\x84\x90\x30\x17\xd2\xe4\x75\xa3\x28\x42\x05\x3a\x93\
\xe6\x63\xd4\x9d\x6b\x94\x03\xa3\x6c\x41\x14\x69\x73\x18\xcf\xeb\
\x5f\x2b\x58\x74\xd8\xfd\x2b\x51\x14\x56\xc0\x9b\x6c\x2a\x02\xe5\
\x50\x29\xd7\x5b\xdb\xf2\xb5\x00\x07\x2e\xa3\x51\x46\x25\x3e\x28\
\x40\x78\xd3\xa2\xdc\xfc\xeb\x23\x8f\xc7\x71\x16\x25\x45\x8b\x79\
\x58\xf7\x15\xaf\xc5\x74\xc4\x8f\xf4\x0f\xd2\xb3\xb8\xbe\x5f\xfd\
\x28\xe6\xdf\xc4\x19\x3d\xf9\xfe\x55\xcf\x9c\xf5\xae\x2d\x3c\x1c\
\x0b\x86\xc3\xa1\x0b\x69\x1d\x05\xcf\xe1\x16\xda\xb8\xdc\x9a\x7e\
\x28\x7d\xe6\x51\xb0\x00\x7d\x00\x14\x83\xa5\x69\x97\x0a\x25\x34\
\x35\x2b\x52\x18\xda\x89\x81\x91\x40\x06\xce\xba\xa3\x5f\x63\x42\
\xb4\x40\xeb\x4c\x43\x0d\x9d\x44\x83\x42\x77\xec\x79\xd6\x6f\x1b\
\x8d\x93\x18\x66\x1b\x4b\xe6\x06\xb4\x09\xca\xcc\xe1\x4b\x2b\x6a\
\xe0\x6e\x0f\x5a\x99\x16\x39\x22\xc9\x20\x12\x46\x76\x20\xed\xed\
\x55\x9b\x0c\xea\xab\x61\xb1\xf0\xc8\x80\x62\x2e\x8e\x05\xb3\x01\
\x7b\xd3\xda\x6c\x28\x5c\xc7\x13\x19\x03\xa6\xff\x00\x4a\xac\xfc\
\x32\x26\x6b\xc7\x88\xca\x3a\x32\x9b\xfe\x54\xa9\xb8\x64\xf1\xdc\
\x96\x8f\x2f\x22\x5a\xd7\xa3\x79\x45\xd0\x78\xa6\x29\x67\xcb\x1c\
\x57\xc8\x97\xd4\xf3\xaa\x95\x63\xec\x73\x88\x4c\x9e\x04\xae\xa3\
\xf0\x2e\x9f\x5a\x42\x14\x75\x25\x43\x02\xbe\xa5\x61\xa8\xac\x5d\
\xfa\xd4\xc7\x0b\xd7\x58\xd4\x8a\x90\x3c\xb7\xa0\x86\xc6\xb5\x78\
\x6d\x9f\x87\x2d\xbe\x06\x20\xfc\xeb\x2f\xe5\x57\xf8\x1c\x83\xc4\
\x78\x0f\xfc\xc0\x32\xfb\xd6\xb8\xfa\x2f\x8b\x56\xae\xb5\x19\x17\
\x14\x24\x6b\x5d\x18\x43\xda\x45\xc9\x2a\x89\x01\xd2\xcc\x2f\x55\
\x8e\x0b\x0e\xf2\xfd\xde\x68\xc2\x12\x49\xbd\xc5\xc8\xb5\xaa\xd1\
\x16\x89\x8e\x6c\xbb\x00\x7d\xcd\xaa\x5d\x42\x9c\x8a\x3c\xab\xa0\
\xa3\x16\xb3\x1f\x0b\x8b\xc1\xfd\xec\x12\xe6\x41\xbd\x8e\xde\xe2\
\xad\x60\x31\x23\x15\xf7\x4e\xa0\x48\x06\x9a\x68\xd5\x65\x09\x56\
\xb8\xff\x00\xcd\x51\xe2\x78\x51\x1f\xfc\x4c\x07\x28\xbf\x99\x7f\
\x09\xa3\x33\xc3\xba\xb0\xf8\x58\xe4\xbe\x51\xe1\xb7\x3e\x9f\xda\
\xa9\xcc\x8b\x13\x65\x92\x44\x53\xca\xe7\x7a\xb5\x83\x91\xf1\x8e\
\xc9\x2c\x8c\x72\x0c\xc5\x41\xb6\x70\x76\x3f\xcf\xb5\x49\xc2\xc6\
\xd8\x93\x11\x04\xc2\x10\x31\x07\x52\x2f\xd2\xac\xdf\x12\x93\x29\
\x53\x63\xf9\x57\x0a\xb1\x8d\x84\xc5\x2f\x55\x61\xe5\xb7\x4a\x4f\
\xb5\x18\x5c\x05\x75\xaa\x46\x82\xa0\x1b\xd2\x93\x50\x6a\x6b\x8e\
\xa2\xa4\x10\x4a\xb0\x61\xb8\x3a\x55\x3e\x34\x1a\x2c\x44\x8e\x9e\
\x51\x94\x49\x19\x07\xa6\x9f\xb8\xab\x6d\x53\x2c\x51\x62\x03\x44\
\xe3\xcc\x51\x42\xb5\xbd\x37\xfe\xe0\x56\x6c\xde\x90\x38\x64\x8b\
\x89\x9a\x29\xd2\xd9\x81\xb4\x8a\x39\x1e\xb5\x64\xd7\x9e\xc1\xb4\
\xf8\x4e\x26\xa2\x13\x66\x12\x65\x3a\xdb\x4a\xf4\x12\x32\xb4\xbe\
\x22\xb7\x92\x41\x99\x7d\xaa\xe1\x76\x1a\x9b\xd2\x38\x92\x5f\x07\
\x98\x35\xad\x25\xed\xd7\x4a\x2b\xf9\xf3\x0d\x40\xa6\xe4\xf1\x3c\
\x10\xc3\xca\x25\x24\xdc\x5e\xfa\x53\xe8\x27\x0c\x7c\x18\x11\x14\
\x10\xc4\x5d\x88\xef\x4c\x2c\x58\x11\x20\x0e\x3f\xa8\x5e\x80\x7f\
\x9b\xf3\xe7\x4c\x37\xbd\x51\x06\xd1\x65\xcb\xe0\x47\x6e\x9a\xff\
\x00\x35\x20\x8b\x05\x00\x28\x1b\x28\xda\xa6\xc6\xa4\x0b\xd2\x89\
\xe2\x5a\x60\xd0\x85\xbd\xa4\x3f\xa5\x66\xc6\x72\xb5\xcf\x3d\xeb\
\x57\x1e\x3f\xe1\x93\xff\x00\x90\xfe\x95\x9c\xa8\x19\xcf\x6a\xc7\
\x2f\x4c\x41\xd7\x50\x34\xef\x45\x86\x88\xcb\x88\x58\xee\x06\x63\
\xc8\x5e\x8a\xd5\x6b\x85\xc6\x73\x3b\x80\x6e\x05\x87\xce\x89\x36\
\x9a\x9e\x20\xfe\x1e\x1f\x22\x9d\x24\xd0\x0b\x6b\x94\x54\x60\x09\
\x93\x0c\x55\x8d\xcc\x67\x4b\xf4\xa8\xe2\x65\x4f\x86\x80\xdd\x94\
\x1b\xd8\xde\xd5\x30\xc5\x2e\x12\xd2\xba\x82\x8e\x2c\x40\x35\xaf\
\xac\xfc\x37\x2d\x2f\x89\x18\xd3\x07\xe0\xc8\xda\xca\xea\x6d\xd0\
\x75\xab\x12\xbc\x50\xe1\x8e\x20\xd9\xd7\xe1\x1d\x4d\x63\x62\xa5\
\x79\xe6\x32\x48\x75\x3f\x95\x36\xa8\x4c\x38\xf9\x30\x9c\x5c\x09\
\x11\x04\x71\xb7\xa4\x0d\xbb\xd6\x8c\xdc\x45\x50\x0c\xb8\x54\x37\
\x17\x0d\x98\x91\xee\x2b\x33\x8a\xa7\x89\x83\x49\x88\xbb\x23\x65\
\x27\xb7\x2a\x77\x0e\xe1\xdc\x56\xc1\x51\x13\xc3\xb5\xfe\xf0\xf9\
\x47\xf7\xae\x72\xf2\x97\x23\x5d\x02\x77\x9b\x15\x88\xcc\xe7\x33\
\xb6\x80\x01\xfa\x56\xbe\x12\x0f\xb2\x61\x7c\x2f\xf9\x8f\xac\x9d\
\xbb\x51\xe0\xe0\x4c\x39\x20\x46\x04\xfb\xdc\xdf\x6f\xe9\xa2\xb5\
\xeb\x7c\x78\xe7\x75\x9b\x41\x6b\x58\xd1\x01\x53\x96\xed\x6f\x99\
\xa3\x55\xad\x0d\x42\xad\x35\x56\xb9\x16\x98\x05\x41\xca\x28\xd4\
\x6b\x50\xbb\xda\x88\x9b\x68\xba\xb7\xb6\xd5\xa4\x24\x5a\x6a\x8f\
\x30\xe8\xfe\x53\xd8\x8d\x8f\xed\x49\x89\xb3\x1f\x31\xdb\x96\xd4\
\xd8\x8f\xde\x2a\x83\xe5\xcc\x48\xee\x40\xdb\xf3\xad\x44\xe3\x7b\
\x6e\x68\x5a\x9a\x46\x9a\xd0\x30\xa9\x33\x78\xb7\xff\x00\xaa\x1f\
\xe8\x5f\xd2\xb2\xb8\xf3\x11\x85\x82\x3b\x68\xc4\xb9\xf7\xda\xb6\
\x78\xba\x7d\xea\x3d\xfd\x4b\xfa\x69\x59\x3c\x69\x03\x3e\x0d\x0e\
\xcd\x70\x7f\xee\xae\x5c\xe7\x55\xae\x2d\xc9\xbc\xc1\x5f\x6c\xea\
\x1b\xda\x90\xc0\xde\x8b\x87\x16\x9f\x85\xc0\x43\x67\x65\x05\x0d\
\xbb\x7f\xe6\x89\x81\x07\x51\xaf\xb5\x6a\xff\x00\x59\x2c\x2f\x5a\
\x21\x5d\x51\x7e\xd4\x21\x0f\x7a\x9a\x10\x6a\x41\x35\x21\x02\x41\
\x04\x6e\x29\x44\x4d\x16\x2b\x2c\x00\x15\x94\x66\x00\x8b\x81\x4c\
\x15\x32\x5c\x46\xd2\xa5\xc3\xc6\xbe\xe0\x8e\x96\xa5\x03\x10\xf8\
\xc8\xe2\x2d\x9a\x35\x00\xea\x63\xb5\x1e\x19\x32\xc2\xae\x7c\xce\
\xe2\xe4\xb6\xb6\xaa\x32\x4b\x88\x9d\x6e\x73\xba\x83\xf0\x8d\x05\
\x5d\x49\x84\xa1\x72\x02\x0a\xd8\x15\x3b\xe9\x54\xa4\xd0\xd2\x66\
\xcd\x98\xdf\xad\xea\xbc\xb0\x26\x23\x89\x4a\x19\x72\xda\xf7\x75\
\x1a\xfc\xea\xc6\x66\x12\x00\xaa\x73\x7c\x2b\xd7\xdf\xb5\x74\x71\
\xac\x20\x85\x39\x8b\x1f\x33\x1e\x67\xb7\x6a\x7d\x0c\x9c\x66\x19\
\xf0\xf2\xe5\x3a\xa9\xd5\x58\x6c\x69\x56\x35\xbb\x3c\x5e\x3e\x19\
\xa2\xe7\xba\x7b\xd6\x31\x5b\x12\x08\x37\x15\x8e\x5c\x71\xa9\x41\
\x6d\x2d\x56\x38\x40\xff\x00\xf2\x50\xff\x00\xaa\x92\x6c\x37\x20\
\x7b\xd5\x8e\x12\x3f\xfc\x8c\x27\xfa\xaa\x9e\xa6\x89\x02\xda\x52\
\xcd\x31\x98\x6c\x01\x27\xda\x81\xae\x01\xd0\x56\xd9\x71\x00\xca\
\xa9\x73\x64\x5f\x10\x81\xcc\xec\x2a\x05\xed\x6d\xed\xce\xa1\x81\
\x2a\xac\x1a\xef\x6b\x65\x3b\x30\xe9\xd8\xd3\x22\x0a\x45\x95\x80\
\x23\x70\xda\x11\x52\x0d\x8d\x12\x73\xd0\x32\x9d\x18\x72\x34\xc5\
\x8c\xf2\x19\xbd\x8d\xe9\x25\x6c\xe5\x46\x52\x06\xec\x4e\x9e\xda\
\x6e\x6a\x4c\xfe\x2a\x92\x60\x18\x62\xa0\x3e\x54\x6b\x2e\xbb\x83\
\xc8\xfc\xe9\xdc\x1b\x1d\x1e\x36\x49\x73\x30\x49\x99\x75\x52\x74\
\x36\x3c\x8d\x59\x9a\x05\xc4\xc2\xd8\x76\x94\x80\xe2\xd6\x0a\x00\
\xbf\x5a\xf3\x85\x25\xc0\x63\xd9\x1d\x7c\xcb\x70\xc3\x91\x15\x8b\
\x6f\x1b\xbf\x0c\xed\xe9\xa7\x89\x9e\x06\x46\x1c\xae\xa6\xb3\x2d\
\x47\x82\xc5\xbc\x79\x64\x8d\xd8\xc4\xc7\x55\x34\xdc\x5a\x05\x9c\
\xd8\x68\xda\x8a\xd6\xea\x22\xc6\xbb\x2d\x34\x2d\x71\x5a\x16\xaa\
\xe3\xe6\x92\x08\xd0\x44\x72\x97\xb9\x27\xb7\x4a\x94\xc5\x61\xde\
\x30\xce\xf9\x5a\xde\x60\x16\xfa\xd3\xa7\x8e\x39\x23\xfb\xc4\x24\
\xa0\xd0\x83\x6a\x4b\xe0\x62\x71\xe4\x63\x1b\x7f\x56\xa2\x8e\xf7\
\xa3\xd3\x96\x48\x24\x6c\xb1\xcb\x73\xfd\x42\xd4\x4a\xd9\xe5\x31\
\x23\x03\x95\x2c\x4f\x72\x74\xb5\x56\x7e\x1f\x38\x04\xfd\xd3\x76\
\x56\xd7\xe9\x4e\xc0\x14\x83\x06\xce\x48\x56\x8e\xec\xc0\xef\xdb\
\x4a\x3b\xde\xd3\x3b\x10\x56\x4f\xf1\x31\xc8\x43\x5e\x4f\xae\x95\
\xb2\x61\x11\xc3\x1c\x6d\xea\x44\x00\xf6\x35\x99\xfe\x1e\x84\xcb\
\xc6\x17\x12\xea\x08\x4d\x4d\xf9\x9a\xd6\x93\x31\xb9\x2a\x6e\x68\
\xe1\xe6\xaa\x51\xb2\xad\x42\xc8\x4a\x94\x07\x29\x24\x10\x7a\x11\
\xb5\x11\x40\x39\x6b\xd6\x81\xd4\x12\x2c\x35\xad\x23\x34\x20\x48\
\xab\x6c\xdb\x8e\x87\x98\xa1\x66\xb1\xb5\xa8\x92\xd7\x24\x79\x49\
\xdf\x4b\x86\xf7\x1f\xb8\xa9\xb5\xcd\xad\x66\x1a\xe5\xbe\xfd\xc7\
\x5a\x52\x28\xd4\x50\xae\xf4\x6b\x50\xa0\xc7\x2e\x6c\x18\x23\xe1\
\x7d\x7e\x75\x9c\x10\x2e\xa0\xed\xbd\x69\xe2\x41\x6c\x1c\x80\x6e\
\x2c\x4f\xb5\x51\x50\x2d\x59\xbe\x99\x42\x12\xe6\xdb\x73\x27\xa5\
\x5c\xc0\x25\xf0\xcc\xc4\xb2\x29\x60\x02\x8d\x0b\x0b\x1d\x4f\x4a\
\x42\x80\x14\x2b\x5a\xce\x72\xfc\x86\xa7\xf2\x1f\x9d\x5c\xc0\xdd\
\xa0\x91\x8e\xe6\x4b\xfe\x55\x48\xa8\xac\xa5\x72\x18\xd3\x2f\xe1\
\xcb\x50\xd8\x44\x98\x04\xf1\x64\x50\x3d\x2a\x75\x02\x99\x61\x4c\
\x87\xd6\x47\x55\x20\x7d\x0d\x69\x96\x0e\x35\xc3\x48\x51\x49\xca\
\xa4\xdb\xa0\x1b\x0f\xd2\xff\x00\x3a\xb1\xc3\xb8\x78\x78\x84\xd8\
\x82\xc1\x0f\xa5\x46\xed\x54\x9c\x10\xc5\x48\xd4\x1b\x1a\xdf\x6d\
\x42\x64\xb6\x4c\x82\xc4\x7b\x56\x38\xf7\x75\xaa\x52\x41\x86\x8d\
\x72\x24\x00\x5c\xdf\x31\x39\x88\x34\xb4\x79\x12\x5f\xb3\xce\xd7\
\x0d\xe8\x6e\x5f\xf8\xa7\xb0\x23\x98\x3f\x95\x2d\xf2\x99\x21\x72\
\x40\xc9\x25\x89\x27\x60\x7f\xbd\x6c\x39\x94\x98\xd8\x0d\x59\x7c\
\xcb\xae\xc4\x57\x40\xc9\x32\xe7\x42\x01\xf8\x94\x9d\xa8\xe2\x17\
\x97\x2f\x33\x70\x79\xf6\xac\xf8\x96\x21\x23\xac\x8c\xde\x51\xe5\
\xcb\x45\xaa\x34\x02\x1b\x92\x74\x27\xb7\x2a\x25\x5b\x54\x61\x03\
\x8c\x22\x67\x37\x27\x51\x7e\x42\x9a\x10\x9f\xee\x6d\x5a\xc0\x85\
\x14\x5c\xab\x88\x23\x71\xa7\x23\xd6\xa4\x52\x9c\xbe\xaa\x38\xf4\
\x5b\x03\xcc\xdf\xeb\x41\x6d\x6b\xae\x43\x79\x4d\x89\x3a\x8a\x90\
\xa6\xb6\x9d\x4d\x1c\x79\x64\x87\x2b\x72\xe6\x39\x1e\xb5\x20\x8e\
\x82\xb8\x1c\xa4\xe5\x1a\x1d\xc5\x69\x25\x24\x61\x27\x85\x3d\xae\
\x7d\x2f\xc9\xa8\xd9\x4d\xed\x6f\x95\x2d\x99\x5d\x32\xb4\x64\xaf\
\x7d\xc5\x04\x73\x98\xdf\xc2\x98\x92\xa7\xd0\xc7\x97\x6a\x90\x78\
\x9a\x5f\x0c\xad\x6f\x4b\x6b\x58\xfc\x60\x5b\x09\x0c\xd9\x6f\xe1\
\x4b\xa9\xe8\x37\xfd\x6b\x73\x1a\xc8\x20\x78\xdd\xd4\x31\x17\x00\
\x9e\x95\x95\x8c\x48\xdf\x01\x34\x66\x45\x37\x00\x8b\x1e\x63\x6f\
\xd6\xb1\xce\x18\xff\xd9\
"
qt_resource_name = b"\
\x00\x06\
\x07\x03\x7d\xc3\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\
\x00\x0b\
\x08\x52\xaa\xc7\
\x00\x66\
\x00\x69\x00\x67\x00\x75\x00\x72\x00\x65\x00\x38\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0a\
\x0b\x53\x47\xc7\
\x00\x72\
\x00\x61\x00\x6e\x00\x64\x00\x6f\x00\x6d\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0b\
\x0a\x12\x5e\xc7\
\x00\x6b\
\x00\x69\x00\x6e\x00\x65\x00\x74\x00\x69\x00\x63\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x08\
\x00\x28\x58\x27\
\x00\x74\
\x00\x69\x00\x6c\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0b\
\x07\x50\x31\x47\
\x00\x65\
\x00\x6c\x00\x6c\x00\x69\x00\x70\x00\x73\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0c\
\x05\x8f\xe2\xc7\
\x00\x63\
\x00\x65\x00\x6e\x00\x74\x00\x65\x00\x72\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x14\
\x00\x22\x00\x47\
\x00\x54\
\x00\x69\x00\x6d\x00\x65\x00\x2d\x00\x46\x00\x6f\x00\x72\x00\x2d\x00\x4c\x00\x75\x00\x6e\x00\x63\x00\x68\x00\x2d\x00\x32\x00\x2e\
\x00\x6a\x00\x70\x00\x67\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x07\x00\x00\x00\x02\
\x00\x00\x00\xb4\x00\x00\x00\x00\x00\x01\x00\x00\xf9\x47\
\x00\x00\x00\x64\x00\x00\x00\x00\x00\x01\x00\x00\x8b\xdf\
\x00\x00\x00\x96\x00\x00\x00\x00\x00\x01\x00\x00\xf5\xc7\
\x00\x00\x00\x7a\x00\x00\x00\x00\x00\x01\x00\x00\xcb\xb4\
\x00\x00\x00\x12\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x48\x00\x00\x00\x00\x00\x01\x00\x00\x71\x63\
\x00\x00\x00\x2e\x00\x00\x00\x00\x00\x01\x00\x00\x36\xe6\
"
def qInitResources():
    # Register the embedded resource blobs (defined above) with Qt's
    # resource system under format version 0x01.
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    # Undo qInitResources(); must be passed the same three blobs.
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
# Resources are registered as a side effect of importing this module.
qInitResources()
|
baoboa/pyqt5
|
examples/animation/animatedtiles/animatedtiles_rc.py
|
Python
|
gpl-3.0
| 399,281
|
# Authors:
# Rob Crittenden <rcritten@redhat.com>
#
# Copyright (C) 2010 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Test group nesting and indirect members
"""
from ipatests.test_xmlrpc.xmlrpc_test import XMLRPC_test
from ipatests.test_xmlrpc.tracker.user_plugin import UserTracker
from ipatests.test_xmlrpc.tracker.group_plugin import GroupTracker
from ipatests.test_xmlrpc.tracker.host_plugin import HostTracker
from ipatests.test_xmlrpc.tracker.hostgroup_plugin import HostGroupTracker
import pytest
@pytest.fixture(scope='class')
def user1(request, xmlrpc_setup):
    """Class-scoped tracker fixture for test user 'tuser1'."""
    return UserTracker(
        name=u'tuser1', givenname=u'Test1', sn=u'User1'
    ).make_fixture(request)
@pytest.fixture(scope='class')
def user2(request, xmlrpc_setup):
    """Class-scoped tracker fixture for test user 'tuser2'."""
    return UserTracker(
        name=u'tuser2', givenname=u'Test2', sn=u'User2'
    ).make_fixture(request)
@pytest.fixture(scope='class')
def user3(request, xmlrpc_setup):
    """Class-scoped tracker fixture for test user 'tuser3'."""
    return UserTracker(
        name=u'tuser3', givenname=u'Test3', sn=u'User3'
    ).make_fixture(request)
@pytest.fixture(scope='class')
def user4(request, xmlrpc_setup):
    """Class-scoped tracker fixture for test user 'tuser4'."""
    return UserTracker(
        name=u'tuser4', givenname=u'Test4', sn=u'User4'
    ).make_fixture(request)
@pytest.fixture(scope='class')
def group1(request, xmlrpc_setup):
    """Class-scoped tracker fixture for 'testgroup1'."""
    return GroupTracker(
        name=u'testgroup1', description=u'Test desc1'
    ).make_fixture(request)
@pytest.fixture(scope='class')
def group2(request, xmlrpc_setup):
    """Class-scoped tracker fixture for 'testgroup2'."""
    return GroupTracker(
        name=u'testgroup2', description=u'Test desc2'
    ).make_fixture(request)
@pytest.fixture(scope='class')
def group3(request, xmlrpc_setup):
    """Class-scoped tracker fixture for 'testgroup3'."""
    return GroupTracker(
        name=u'testgroup3', description=u'Test desc3'
    ).make_fixture(request)
@pytest.fixture(scope='class')
def group4(request, xmlrpc_setup):
    """Class-scoped tracker fixture for 'testgroup4'."""
    return GroupTracker(
        name=u'testgroup4', description=u'Test desc4'
    ).make_fixture(request)
@pytest.fixture(scope='class')
def host1(request, xmlrpc_setup):
    """Class-scoped tracker fixture for test host 'host1'."""
    return HostTracker(name=u'host1').make_fixture(request)
@pytest.fixture(scope='class')
def hostgroup1(request, xmlrpc_setup):
    """Class-scoped tracker fixture for 'hostgroup1'."""
    return HostGroupTracker(name=u'hostgroup1').make_fixture(request)
@pytest.fixture(scope='class')
def hostgroup2(request, xmlrpc_setup):
    """Class-scoped tracker fixture for 'hostgroup2'."""
    return HostGroupTracker(name=u'hostgroup2').make_fixture(request)
@pytest.mark.tier1
class TestNestingUserGroups(XMLRPC_test):
    """Verify direct and indirect (nested) membership of user groups.

    The tests build this membership graph:

        g1 -- member groups: g2, g3
        g2 -- member users: user1, user2
        g3 -- member users: user3;  member groups: g4
        g4 -- member users: user1, user4

    so g1 indirectly contains g4 and all four users (user1 is reachable
    via both g2 and g4 but must appear only once), and g3 indirectly
    contains tuser1 and tuser4.
    """

    def test_create_groups_and_users(self, group1, group2, group3, group4,
                                     user1, user2, user3, user4):
        """Create the four test groups and four test users."""
        for tracker in (group1, group2, group3, group4,
                        user1, user2, user3, user4):
            tracker.ensure_exists()

    def test_add_group_members_to_groups(self, group1, group2, group3):
        """Make group2 and group3 direct members of group1."""
        for child in (group2, group3):
            group1.add_member(dict(group=child.cn))
            child.attrs.update(memberof_group=[group1.cn])

    def test_add_user_members_to_groups(self, user1, user2, user3, user4,
                                        group1, group2, group3, group4):
        """Populate group2 (user1, user2), group3 (user3, group4) and
        group4 (user1, user4), recording the expected direct and
        indirect membership on the trackers."""
        for member in (user1, user2):
            group2.add_member(dict(user=member.uid))
        group3.add_member(dict(user=user3.uid))
        group3.add_member(dict(group=group4.cn))
        group4.attrs.update(
            memberof_group=[group3.cn],
            memberofindirect_group=[group1.cn]
        )
        for member in (user1, user4):
            group4.add_member(dict(user=member.uid))
        group1.attrs.update(
            memberindirect_user=[user1.uid, user2.uid, user3.uid, user4.uid],
            memberindirect_group=[group4.cn]
        )
        group3.attrs.update(
            memberindirect_user=[u'tuser4', u'tuser1']
        )

    def test_retrieve_group_group(self, group1, group2, group3, group4):
        """Read back all four groups, validating tracked attributes."""
        for tracker in (group1, group2, group3, group4):
            tracker.retrieve()
@pytest.mark.tier1
class TestNestingHostGroups(XMLRPC_test):
    """Verify indirect host membership through nested hostgroups."""

    def test_create_hostgroups(self, host1, hostgroup1, hostgroup2):
        """Create one host and two hostgroups."""
        for tracker in (host1, hostgroup1, hostgroup2):
            tracker.ensure_exists()

    def test_nest_hostgroups(self, host1, hostgroup1, hostgroup2):
        """Add host1 to hostgroup2, then nest hostgroup2 in hostgroup1."""
        hostgroup2.add_member(dict(host=host1.fqdn))
        add_nested = hostgroup1.make_add_member_command(
            dict(hostgroup=hostgroup2.cn)
        )
        # Record the membership the nesting command is expected to create.
        hostgroup1.attrs.update(
            memberindirect_host=hostgroup2.attrs[u'member_host'],
            member_hostgroup=[hostgroup2.cn]
        )
        outcome = add_nested()
        hostgroup1.check_add_member(outcome)
        host1.attrs.update(
            memberof_hostgroup=[hostgroup2.cn],
            memberofindirect_hostgroup=[hostgroup1.cn]
        )

    def test_retrieve_host_hostgroup(self, host1, hostgroup1):
        """Read back hostgroup1 and host1."""
        hostgroup1.retrieve()
        host1.retrieve()
|
encukou/freeipa
|
ipatests/test_xmlrpc/test_nesting.py
|
Python
|
gpl-3.0
| 7,396
|
from rezgui.qt import QtCore, QtGui
from rezgui.dialogs.ProcessDialog import ProcessDialog
from rezgui.objects.App import app
from rezgui.util import get_icon_widget, update_font, add_menu_action
from rez.utils.formatting import readable_time_duration
from functools import partial
import subprocess
import time
class ToolWidget(QtGui.QWidget):
    """Row widget representing one context tool.

    Shows a spanner icon, the tool name, and (when a process tracker is
    supplied) a count of running instances.  Releasing the mouse over
    the widget pops up a run/monitor menu.
    """
    # Emitted after the popup menu has been dismissed.
    clicked = QtCore.Signal()

    def __init__(self, context, tool_name, process_tracker=None, parent=None):
        """Build the row for *tool_name* in *context*.

        process_tracker, when given, is queried for running instances of
        this tool (assumed to expose running_instances()/add_instance()
        -- defined elsewhere in the project).
        """
        super(ToolWidget, self).__init__(parent)
        self.context = context
        self.tool_name = tool_name
        self.process_tracker = process_tracker
        tool_icon = get_icon_widget("spanner")
        self.label = QtGui.QLabel(tool_name)
        # Secondary, italic, greyed-out label for the instance count.
        self.instances_label = QtGui.QLabel("")
        self.instances_label.setEnabled(False)
        update_font(self.instances_label, italic=True)
        if self.context:
            # A context makes the widget actionable, so show a hand cursor.
            self.setCursor(QtCore.Qt.PointingHandCursor)
        if self.process_tracker:
            entries = self.get_processes()
            self.set_instance_count(len(entries))
        layout = QtGui.QHBoxLayout()
        layout.setSpacing(2)
        layout.setContentsMargins(2, 2, 2, 2)
        layout.addWidget(tool_icon)
        # Stretch factor 1: the name label absorbs the spare width.
        layout.addWidget(self.label, 1)
        layout.addWidget(self.instances_label)
        self.setLayout(layout)

    def get_processes(self):
        """Return the tracker's running-instance entries for this tool."""
        return self.process_tracker.running_instances(self.context, self.tool_name)

    def mouseReleaseEvent(self, event):
        """Pop up the Run / Run In Terminal / Run And Moniter menu."""
        super(ToolWidget, self).mouseReleaseEvent(event)
        if not self.context:
            # Without a context the tool cannot be launched; ignore clicks.
            return
        menu = QtGui.QMenu(self)
        add_menu_action(menu, "Run", self._launch_tool)
        fn = partial(self._launch_tool, terminal=True)
        add_menu_action(menu, "Run In Terminal", fn)
        # NOTE(review): "moniter" is a typo for "monitor"; it is both a
        # keyword argument and user-facing text, so it is left untouched here.
        fn = partial(self._launch_tool, moniter=True)
        add_menu_action(menu, "Run And Moniter", fn)
        entries = self.get_processes()
        if entries:
            menu.addSeparator()
            add_menu_action(menu, "Running Processes...", self._list_processes)
        menu.addSeparator()
        add_menu_action(menu, "Cancel")
        # Modal popup at the click position; blocks until dismissed.
        menu.exec_(self.mapToGlobal(event.pos()))
        self.clicked.emit()

    def _launch_tool(self, terminal=False, moniter=False):
        """Launch the tool; capture stdout/stderr only when monitoring."""
        buf = subprocess.PIPE if moniter else None
        proc = app.execute_shell(context=self.context,
                                 command=self.tool_name,
                                 terminal=terminal,
                                 stdout=buf,
                                 stderr=buf)
        if self.process_tracker:
            self.process_tracker.add_instance(self.context, self.tool_name, proc)
        if moniter:
            # Modal dialog streaming the captured process output.
            dlg = ProcessDialog(proc, self.tool_name)
            dlg.exec_()

    def _list_processes(self):
        """Show a message box listing running instances, oldest last."""
        entries = self.get_processes()
        now = int(time.time())
        items = []
        for proc, start_time in entries:
            age = now - start_time
            items.append((age, proc.pid))
        if items:
            # Sort by (age, pid) so output order is deterministic.
            items = sorted(items)
            lines = []
            for age, pid in items:
                t_str = readable_time_duration(age)
                line = "Process #%d has been running for %s" % (pid, t_str)
                lines.append(line)
            txt = "\n".join(lines)
        else:
            txt = "There are no running processes."
        QtGui.QMessageBox.information(self, "Processes", txt)

    def set_instance_count(self, nprocs):
        """Update the italic side label; blank when nothing is running."""
        if nprocs:
            txt = "%d instances running..." % nprocs
        else:
            txt = ""
        self.instances_label.setText(txt)
|
saddingtonbaynes/rez
|
src/rezgui/widgets/ToolWidget.py
|
Python
|
gpl-3.0
| 3,684
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import portfolio
# NOTE: when using iterated search included, we must include the option
# "plan_counter=PLANCOUNTER"
# Each entry is a (time_slice, argument_list) pair: the portfolio driver
# runs the configuration for roughly `time_slice` units of the overall
# budget (presumably seconds -- TODO confirm against portfolio.run) with
# the given planner arguments.  H_COST_TYPE, S_COST_TYPE and BOUND are
# placeholder tokens substituted by the driver at run time.
CONFIGS = [
    # alt_lazy_ff_cg
    (49, ["--heuristic", "hff=ff(cost_type=H_COST_TYPE)",
          "--heuristic", "hcg=cg(cost_type=H_COST_TYPE)", "--search",
          "lazy_greedy(hff,hcg,preferred=[hff,hcg],cost_type=S_COST_TYPE,bound=BOUND)"]),
    # lazy_greedy_ff_1
    (171, ["--heuristic", "h=ff(cost_type=H_COST_TYPE)",
           "--search",
           "lazy_greedy(h,preferred=h,cost_type=S_COST_TYPE,bound=BOUND)"]),
    # alt_lazy_cea_cg
    (27, ["--heuristic", "hcea=cea(cost_type=H_COST_TYPE)",
          "--heuristic", "hcg=cg(cost_type=H_COST_TYPE)", "--search",
          "lazy_greedy(hcea,hcg,preferred=[hcea,hcg],cost_type=S_COST_TYPE,bound=BOUND)"]),
    # lazy_wa3_ff_1
    (340, ["--heuristic", "h=ff(cost_type=H_COST_TYPE)",
           "--search",
           "lazy_wastar(h,w=3,preferred=h,cost_type=S_COST_TYPE,bound=BOUND)"]),
    # alt_eager_ff_cg
    (76, ["--heuristic", "hff=ff(cost_type=H_COST_TYPE)",
          "--heuristic", "hcg=cg(cost_type=H_COST_TYPE)", "--search",
          "eager_greedy(hff,hcg,preferred=[hff,hcg],cost_type=S_COST_TYPE,bound=BOUND)"]),
    # eager_greedy_ff_1
    (88, ["--heuristic", "h=ff(cost_type=H_COST_TYPE)",
          "--search",
          "eager_greedy(h,preferred=h,cost_type=S_COST_TYPE,bound=BOUND)"]),
    # alt_eager_ff_add
    (90, ["--heuristic", "hff=ff(cost_type=H_COST_TYPE)",
          "--heuristic", "hadd=add(cost_type=H_COST_TYPE)", "--search",
          "eager_greedy(hff,hadd,preferred=[hff,hadd],cost_type=S_COST_TYPE,bound=BOUND)"]),
    # lazy_greedy_cea_1
    (56, ["--heuristic", "h=cea(cost_type=H_COST_TYPE)",
          "--search",
          "lazy_greedy(h,preferred=h,cost_type=S_COST_TYPE,bound=BOUND)"]),
    # alt_eager_ff_cea_cg
    (73, ["--heuristic", "hff=ff(cost_type=H_COST_TYPE)",
          "--heuristic", "hcea=cea(cost_type=H_COST_TYPE)",
          "--heuristic", "hcg=cg(cost_type=H_COST_TYPE)",
          "--search",
          "eager_greedy([hff,hcea,hcg],preferred=[hff,hcea,hcg],cost_type=S_COST_TYPE,bound=BOUND)"]),
    # lazy_wa3_add_1
    (50, ["--heuristic", "h=add(cost_type=H_COST_TYPE)",
          "--search",
          "lazy_wastar(h,w=3,preferred=h,cost_type=S_COST_TYPE,bound=BOUND)"]),
    # eager_greedy_cea_1
    (84, ["--heuristic", "h=cea(cost_type=H_COST_TYPE)",
          "--search",
          "eager_greedy(h,preferred=h,cost_type=S_COST_TYPE,bound=BOUND)"]),
    # eager_wa3_add_1
    (166, ["--heuristic", "h=add(cost_type=H_COST_TYPE)",
           "--search",
           "eager(single(sum([g(),weight(h,3)])),preferred=h,cost_type=S_COST_TYPE,bound=BOUND)"]),
    # eager_wa3_ff_1
    (87, ["--heuristic", "h=ff(cost_type=H_COST_TYPE)",
          "--search",
          "eager(single(sum([g(),weight(h,3)])),preferred=h,cost_type=S_COST_TYPE,bound=BOUND)"]),
    # lazy_wa3_cg_1
    (73, ["--heuristic", "h=cg(cost_type=H_COST_TYPE)",
          "--search",
          "lazy_wastar(h,w=3,preferred=h,cost_type=S_COST_TYPE,bound=BOUND)"]),
    # eager_wa3_cg_1
    (89, ["--heuristic", "h=cg(cost_type=H_COST_TYPE)",
          "--search",
          "eager(single(sum([g(),weight(h,3)])),preferred=h,cost_type=S_COST_TYPE,bound=BOUND)"]),
]
# Anytime configuration run after the portfolio: iterated WA* (w=3) with
# the FF heuristic.  plan_counter=PLANCOUNTER is required when iterated
# search is used (see the NOTE near the top of this file).
FINAL_CONFIG = [
    "--heuristic", "h=ff(cost_type=H_COST_TYPE)",
    "--search",
    "iterated(eager(single(sum([g(),weight(h,3)])),preferred=h,cost_type=S_COST_TYPE,bound=BOUND),bound=BOUND,repeat_last=true,plan_counter=PLANCOUNTER)"]
# Satisficing (non-optimal) portfolio entry point.
portfolio.run(configs=CONFIGS, optimal=False, final_config=FINAL_CONFIG)
|
rock-planning/planning-fd_uniform
|
src/search/downward-seq-sat-fdss-1.py
|
Python
|
gpl-3.0
| 3,644
|
from __future__ import print_function, absolute_import
import czmq
from ._malamute_ctypes import MlmClient
try:
    # Python 2: rebind `range` to the lazy `xrange`.
    range = xrange
except NameError:
    # Python 3: `xrange` is gone and the builtin `range` is already lazy.
    pass
class MalamuteError(Exception):
    """Raised when a Malamute client operation reports failure."""
    pass
def _list_to_zmsg(parts):
    """Build a czmq Zmsg with one string frame per element of *parts*."""
    assert isinstance(parts, (list, tuple))
    msg = czmq.Zmsg()
    for part in parts:
        msg.addstr(part)
    return msg
def _zmsg_to_list(zmsg):
    """Drain *zmsg*, returning its frames as a list of strings."""
    frames = []
    for _ in range(zmsg.size()):
        frames.append(zmsg.pop().strdup())
    return frames
class MalamuteClient(object):
    """A pythonic wrapper for the generated MlmClient.

    Failures raise MalamuteError (with the broker-reported reason
    appended) instead of returning C-style integer status codes.
    """

    def __init__(self):
        # Low-level generated client; every method delegates to it.
        self.c = MlmClient()

    def _check_error(self, return_value, fmt, *args, **kw):
        """Raise MalamuteError when *return_value* is non-zero.

        The message is *fmt* formatted with ``*args``/``**kw``, with the
        client's last error reason (UTF-8 decoded when present) appended
        after a colon.
        """
        if return_value != 0:
            reason = self.c.reason()
            if reason:
                reason = reason.decode('utf8')
            raise MalamuteError(
                fmt.format(*args, **kw) + ': ' + str(reason)
            )

    def connected(self):
        """Return the underlying client's connected state."""
        return self.c.connected()

    def connect(self, endpoint, timeout, address):
        """Connect to the Malamute broker at *endpoint*.

        Raises MalamuteError if the connection cannot be established.
        """
        result = self.c.connect(endpoint, timeout, address)
        self._check_error(
            result,
            "Could not connect to malamute server at {!r}", endpoint,
        )

    def set_producer(self, stream):
        """Publish subsequent send() calls to *stream*."""
        result = self.c.set_producer(stream)
        self._check_error(
            result,
            "Could not set producer",
        )

    def set_worker(self, address, pattern):
        """Offer a service at *address* for subjects matching *pattern*."""
        result = self.c.set_worker(address, pattern)
        self._check_error(
            result,
            "Could not set worker",
        )

    def set_consumer(self, stream, pattern):
        """Consume messages from *stream* whose subject matches *pattern*."""
        result = self.c.set_consumer(stream, pattern)
        self._check_error(
            result,
            "Could not set consumer",
        )

    def send(self, subject, content):
        """Publish *content* (a list/tuple of strings) on the current stream."""
        result = self.c.send(subject, _list_to_zmsg(content))
        self._check_error(
            result,
            "(send) Could not send stream message",
        )

    def sendto(self, address, subject, tracker, timeout, content):
        """Send a direct (mailbox) message to *address*."""
        result = self.c.sendto(
            address, subject, tracker, timeout, _list_to_zmsg(content)
        )
        self._check_error(
            result,
            "(sendto) Could not send direct message",
        )

    def sendfor(self, address, subject, tracker=None, timeout=0, content=()):
        """Send a service request to *address*.

        Bug fix: the default *content* used to be ``''`` (a str), which
        ``_list_to_zmsg`` rejects with an AssertionError, so calling
        ``sendfor(address, subject)`` with the defaults always crashed.
        An empty tuple keeps the "no content" default while satisfying
        the list/tuple contract; callers passing content explicitly are
        unaffected.
        """
        result = self.c.sendfor(
            address, subject, tracker, timeout, _list_to_zmsg(content)
        )
        self._check_error(
            result,
            "(sendfor) Could not send service message",
        )

    def recv(self):
        """Receive the next message.

        Returns a (command, sender, subject, content) tuple; *content*
        is a list of frame strings, or None when no message body was
        received.
        """
        m = self.c.recv()
        return (
            self.c.command(), self.c.sender(),
            self.c.subject(), _zmsg_to_list(m) if m else None
        )
|
lnls-dig/malamute
|
bindings/python/malamute/__init__.py
|
Python
|
mpl-2.0
| 2,772
|
import sys
from os.path import join, dirname
import mock
import pytest
# Make the wptrunner package (three directories up) importable.
sys.path.insert(0, join(dirname(__file__), "..", "..", ".."))
# Skip this whole test module when the sauce backend cannot be imported.
sauce = pytest.importorskip("wptrunner.browsers.sauce")
def test_sauceconnect_success():
    """Startup succeeds when the tunnel process stays alive and the
    ready-file exists; entering/exiting the context must not raise."""
    with mock.patch.object(sauce.SauceConnect, "upload_prerun_exec"),\
            mock.patch.object(sauce.subprocess, "Popen") as popen_mock,\
            mock.patch.object(sauce.os.path, "exists") as exists_mock:
        # Simulate a tunnel process that keeps running...
        popen_mock.return_value.poll.return_value = None
        popen_mock.return_value.returncode = None
        # ...and a ready-file that has been created.
        exists_mock.return_value = True
        tunnel = sauce.SauceConnect(
            sauce_user="aaa",
            sauce_key="bbb",
            sauce_tunnel_id="ccc",
            sauce_connect_binary="ddd")
        tunnel(None, {"domains": {"": "example.net"}})
        with tunnel:
            pass
@pytest.mark.parametrize("readyfile,returncode",
                         [(ready, code)
                          for ready in (True, False)
                          for code in (0, 1, 2)])
def test_sauceconnect_failure_exit(readyfile, returncode):
    """SauceException must be raised whenever the tunnel process has
    exited, regardless of exit code or ready-file presence."""
    with mock.patch.object(sauce.SauceConnect, "upload_prerun_exec"),\
            mock.patch.object(sauce.subprocess, "Popen") as popen_mock,\
            mock.patch.object(sauce.os.path, "exists") as exists_mock,\
            mock.patch.object(sauce.time, "sleep") as sleep_mock:
        # The mocked tunnel process has already exited with `returncode`.
        popen_mock.return_value.poll.return_value = returncode
        popen_mock.return_value.returncode = returncode
        exists_mock.return_value = readyfile
        tunnel = sauce.SauceConnect(
            sauce_user="aaa",
            sauce_key="bbb",
            sauce_tunnel_id="ccc",
            sauce_connect_binary="ddd")
        tunnel(None, {"domains": {"": "example.net"}})
        with pytest.raises(sauce.SauceException):
            with tunnel:
                pass
        # Given we appear to exit immediately with these mocks, sleep shouldn't be called
        sleep_mock.assert_not_called()
def test_sauceconnect_failure_never_ready():
    """If the ready-file never appears, startup should poll (sleep),
    raise SauceException, then terminate and finally kill the process."""
    with mock.patch.object(sauce.SauceConnect, "upload_prerun_exec"),\
            mock.patch.object(sauce.subprocess, "Popen") as popen_mock,\
            mock.patch.object(sauce.os.path, "exists") as exists_mock,\
            mock.patch.object(sauce.time, "sleep") as sleep_mock:
        # The process keeps running...
        popen_mock.return_value.poll.return_value = None
        popen_mock.return_value.returncode = None
        # ...but the ready-file is never created.
        exists_mock.return_value = False
        tunnel = sauce.SauceConnect(
            sauce_user="aaa",
            sauce_key="bbb",
            sauce_tunnel_id="ccc",
            sauce_connect_binary="ddd")
        tunnel(None, {"domains": {"": "example.net"}})
        with pytest.raises(sauce.SauceException):
            with tunnel:
                pass
        # We should sleep while waiting for it to create the readyfile
        sleep_mock.assert_called()
        # Check we actually kill it after termination fails
        popen_mock.return_value.terminate.assert_called()
        popen_mock.return_value.kill.assert_called()
def test_sauceconnect_tunnel_domains():
    """Every configured domain is passed to sc through one comma-separated
    --tunnel-domains argument."""
    with mock.patch.object(sauce.SauceConnect, "upload_prerun_exec"),\
            mock.patch.object(sauce.subprocess, "Popen") as popen,\
            mock.patch.object(sauce.os.path, "exists") as exists:
        popen.return_value.poll.return_value = None
        popen.return_value.returncode = None
        exists.return_value = True
        connect = sauce.SauceConnect(
            sauce_user="aaa",
            sauce_key="bbb",
            sauce_tunnel_id="ccc",
            sauce_connect_binary="ddd")
        connect(None, {"domains": {"foo": "foo.bar.example.com", "": "example.net"}})
        with connect:
            popen.assert_called_once()
            cmd = popen.call_args[0][0]
            assert "--tunnel-domains" in cmd
            tail = cmd[cmd.index("--tunnel-domains") + 1:]
            assert len(tail) >= 1
            if len(tail) > 1:
                # A second non-flag token would mean the domains were passed
                # space-separated, which sc does not accept.
                assert tail[1].startswith("-"), "--tunnel-domains takes a comma separated list (not a space separated list)"
            assert set(tail[0].split(",")) == {"foo.bar.example.com", "example.net"}
|
anthgur/servo
|
tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/tests/browsers/test_sauce.py
|
Python
|
mpl-2.0
| 4,470
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import re
import os
import constants
from perf_tests_helper import PrintPerfResult
from pylib import pexpect
from test_result import BaseTestResult, TestResults
# TODO(bulach): TestPackage, TestPackageExecutable and
# TestPackageApk are a work in progress related to making the native tests
# run as a NDK-app from an APK rather than a stand-alone executable.
class TestPackage(object):
  """A helper base class for both APK and stand-alone executables.

  Args:
    adb: ADB interface the tests are using.
    device: Device to run the tests.
    test_suite: A specific test suite to run, empty to run all.
    timeout: Timeout for each test.
    rebaseline: Whether or not to run tests in isolation and update the filter.
    performance_test: Whether or not performance test(s).
    cleanup_test_files: Whether or not to cleanup test files on device.
    tool: Name of the Valgrind tool.
    dump_debug_info: A debug_info object.
  """

  def __init__(self, adb, device, test_suite, timeout, rebaseline,
               performance_test, cleanup_test_files, tool, dump_debug_info):
    self.adb = adb
    self.device = device
    self.test_suite_full = test_suite
    # Path without the file extension.
    self.test_suite = os.path.splitext(test_suite)[0]
    # NOTE(review): _GetTestSuiteBaseName() is not defined in this class;
    # subclasses (APK / executable packages) are expected to provide it.
    self.test_suite_basename = self._GetTestSuiteBaseName()
    self.test_suite_dirname = os.path.dirname(
        self.test_suite.split(self.test_suite_basename)[0])
    self.rebaseline = rebaseline
    self.performance_test = performance_test
    self.cleanup_test_files = cleanup_test_files
    self.tool = tool
    # A zero timeout means "use the default" (60 seconds).
    if timeout == 0:
      timeout = 60
    # On a VM (e.g. chromium buildbots), this timeout is way too small.
    if os.environ.get('BUILDBOT_SLAVENAME'):
      timeout = timeout * 2
    # Valgrind-style tools slow execution down; scale the timeout accordingly.
    self.timeout = timeout * self.tool.GetTimeoutScale()
    self.dump_debug_info = dump_debug_info

  def _BeginGetIOStats(self):
    """Gets I/O statistics before running test.

    Return:
      I/O stats object.The I/O stats object may be None if the test is not
      performance test.
    """
    initial_io_stats = None
    # Try to get the disk I/O statistics for all performance tests.
    if self.performance_test and not self.rebaseline:
      initial_io_stats = self.adb.GetIoStats()
    return initial_io_stats

  def _EndGetIOStats(self, initial_io_stats):
    """Gets I/O statistics after running test and calcuate the I/O delta.

    Args:
      initial_io_stats: I/O stats object got from _BeginGetIOStats.

    Return:
      String for formated diso I/O statistics.
    """
    disk_io = ''
    if self.performance_test and initial_io_stats:
      final_io_stats = self.adb.GetIoStats()
      # Report the delta of each counter between test start and end.
      for stat in final_io_stats:
        disk_io += '\n' + PrintPerfResult(stat, stat,
                                          [final_io_stats[stat] -
                                           initial_io_stats[stat]],
                                          stat.split('_')[1],
                                          print_to_stdout=False)
      logging.info(disk_io)
    return disk_io

  def GetDisabledPrefixes(self):
    # Test-name prefixes that gtest treats as disabled/flaky/failing.
    return ['DISABLED_', 'FLAKY_', 'FAILS_']

  def _ParseGTestListTests(self, all_tests):
    """Parses the raw output of --gtest_list_tests into full test names.

    gtest prints a suite line ending with '.' followed by indented case
    names; this recombines them into 'Suite.Case' strings, skipping
    disabled tests.
    """
    ret = []
    current = ''
    disabled_prefixes = self.GetDisabledPrefixes()
    for test in all_tests:
      if not test:
        continue
      if test[0] != ' ' and not test.endswith('.'):
        # Ignore any lines with unexpected format.
        continue
      if test[0] != ' ' and test.endswith('.'):
        # A new test suite header, e.g. 'MySuite.'.
        current = test
        continue
      if 'YOU HAVE' in test:
        # Summary line about disabled tests; nothing follows that we need.
        break
      test_name = test[2:]
      if not any([test_name.startswith(x) for x in disabled_prefixes]):
        ret += [current + test_name]
    return ret

  def PushDataAndPakFiles(self):
    """Pushes the .pak resources and test data a suite needs onto the device."""
    external_storage = self.adb.GetExternalStorage()
    if (self.test_suite_basename == 'ui_unittests' or
        self.test_suite_basename == 'unit_tests'):
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/chrome.pak',
          external_storage + '/paks/chrome.pak')
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/locales/en-US.pak',
          external_storage + '/paks/en-US.pak')
    if self.test_suite_basename == 'unit_tests':
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/resources.pak',
          external_storage + '/paks/resources.pak')
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/chrome_100_percent.pak',
          external_storage + '/paks/chrome_100_percent.pak')
      self.adb.PushIfNeeded(self.test_suite_dirname + '/test_data',
                            external_storage + '/test_data')
    if self.test_suite_basename == 'content_unittests':
      self.adb.PushIfNeeded(
          self.test_suite_dirname + '/content_resources.pak',
          external_storage + '/paks/content_resources.pak')

  def _WatchTestOutput(self, p):
    """Watches the test output.

    Args:
      p: the process generating output as created by pexpect.spawn.
    """
    ok_tests = []
    failed_tests = []
    crashed_tests = []
    timed_out = False
    overall_fail = False
    # Markers emitted by the gtest runner; matched one test at a time.
    re_run = re.compile('\[ RUN \] ?(.*)\r\n')
    # APK tests rely on the PASSED tag.
    re_passed = re.compile('\[ PASSED \] ?(.*)\r\n')
    # Signal handlers are installed before starting tests
    # to output the CRASHED marker when a crash happens.
    re_crash = re.compile('\[ CRASHED \](.*)\r\n')
    re_fail = re.compile('\[ FAILED \] ?(.*)\r\n')
    re_runner_fail = re.compile('\[ RUNNER_FAILED \] ?(.*)\r\n')
    re_ok = re.compile('\[ OK \] ?(.*?) .*\r\n')
    io_stats_before = self._BeginGetIOStats()
    try:
      while True:
        # Wait for the next test to start (or the whole run to finish/fail).
        found = p.expect([re_run, re_passed, re_runner_fail],
                         timeout=self.timeout)
        if found == 1:  # matched PASSED.
          break
        if found == 2:  # RUNNER_FAILED
          logging.error('RUNNER_FAILED')
          overall_fail = True
          break
        if self.dump_debug_info:
          self.dump_debug_info.TakeScreenshot('_Test_Start_Run_')
        full_test_name = p.match.group(1).replace('\r', '')
        # Wait for this test's outcome marker.
        found = p.expect([re_ok, re_fail, re_crash], timeout=self.timeout)
        if found == 0:  # re_ok
          # Only count it as OK if the OK line names the same test we saw RUN.
          if full_test_name == p.match.group(1).replace('\r', ''):
            ok_tests += [BaseTestResult(full_test_name, p.before)]
          continue
        if found == 2:  # re_crash
          crashed_tests += [BaseTestResult(full_test_name, p.before)]
          overall_fail = True
          break
        # The test failed.
        failed_tests += [BaseTestResult(full_test_name, p.before)]
    except pexpect.EOF:
      logging.error('Test terminated - EOF')
    except pexpect.TIMEOUT:
      logging.error('Test terminated after %d second timeout.',
                    self.timeout)
      timed_out = True
    finally:
      p.close()
    if not self.rebaseline:
      # NOTE(review): _EndGetIOStats() returns a string, so "ok_tests += str"
      # extends the list with individual characters (no-op only when the
      # string is empty). Looks unintentional — confirm before relying on
      # ok_tests contents for performance runs.
      ok_tests += self._EndGetIOStats(io_stats_before)
      # NOTE(review): _GetGTestReturnCode() is expected from a subclass.
      ret_code = self._GetGTestReturnCode()
      if ret_code:
        failed_tests += [BaseTestResult('gtest exit code: %d' % ret_code,
                                        'pexpect.before: %s'
                                        '\npexpect.after: %s'
                                        % (p.before,
                                           p.after))]
    # Create TestResults and return
    return TestResults.FromRun(ok=ok_tests, failed=failed_tests,
                               crashed=crashed_tests, timed_out=timed_out,
                               overall_fail=overall_fail)
|
Yukarumya/Yukarum-Redfoxes
|
media/webrtc/trunk/build/android/pylib/test_package.py
|
Python
|
mpl-2.0
| 7,723
|
## MediaInfoDLL - All info about media files
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
#
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
#
# Python (Windows) example
#
# To make this example working, you must put MediaInfo.Dll and test.avi
# in the same folder
#
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
#
# Should be "import MediaInfoDLL" but does not work, why?
# How to import MediaInfoDLL.py correctly?
# Example following
#
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
from MediaInfoDLL3 import *

import sys

MI = MediaInfo()

# Verify that the loaded DLL speaks a compatible API version before using it.
Version = MI.Option_Static("Info_Version", "0.7.7.0;MediaInfoDLL_Example_Python;0.7.7.0")
if Version == "":
    print("MediaInfo.Dll: this version of the DLL is not compatible")
    # BUG FIX: this was a bare `exit` (a no-op expression statement), so the
    # script carried on running against an incompatible DLL instead of
    # stopping here.
    sys.exit(1)

# Information about MediaInfo
print("Info_Parameters")
# BUG FIX: the result was computed but never printed, unlike the
# Info_Capacities / Info_Codecs sections below.
print(MI.Option_Static("Info_Parameters"))
print("")

print("Info_Capacities")
print(MI.Option_Static("Info_Capacities"))
print("")

print("Info_Codecs")
print(MI.Option_Static("Info_Codecs"))

# An example of how to use the library
print("")
print("Open")
MI.Open("Example.ogg")

print("")
print("Inform with Complete=false")
MI.Option_Static("Complete")
print(MI.Inform())

print("")
print("Inform with Complete=true")
MI.Option_Static("Complete", "1")
print(MI.Inform())

print("")
print("Custom Inform")
MI.Option_Static("Inform", "General;Example : FileSize=%FileSize%")
print(MI.Inform())

print("")
print("Get with Stream=General and Parameter='FileSize'")
print(MI.Get(Stream.General, 0, "FileSize"))

print("")
print("GetI with Stream=General and Parameter=46")
print(MI.GetI(Stream.General, 0, 46))

print("")
print("Count_Get with StreamKind=Stream_Audio")
print(MI.Count_Get(Stream.Audio))

print("")
print("Get with Stream=General and Parameter='AudioCount'")
print(MI.Get(Stream.General, 0, "AudioCount"))

print("")
print("Get with Stream=Audio and Parameter='StreamCount'")
print(MI.Get(Stream.Audio, 0, "StreamCount"))

print("")
print("Close")
MI.Close()
|
michaelforfxhelp/master
|
third_party/MediaInfoLib/Source/Example/HowToUse_Dll3.py
|
Python
|
mpl-2.0
| 3,236
|
# -*- coding: utf-8 -*-
import datetime
from dateutil.relativedelta import relativedelta
from odoo import fields, tools
from odoo.addons.event.tests.common import TestEventCommon
from odoo.tools import mute_logger
class TestMailSchedule(TestEventCommon):

    @mute_logger('odoo.addons.base.models.ir_model', 'odoo.models')
    def test_00_event_mail_schedule(self):
        """ Test mail scheduling for events """
        now = fields.datetime.now()
        event_date_begin = now + relativedelta(days=1)
        event_date_end = now + relativedelta(days=3)
        # Event with two mail schedulers: one firing at subscription time,
        # one firing one day before the event starts.
        test_event = self.Event.sudo(self.user_eventmanager).create({
            'name': 'TestEventMail',
            'auto_confirm': True,
            'date_begin': event_date_begin,
            'date_end': event_date_end,
            'seats_max': 10,
            'event_mail_ids': [
                (0, 0, {  # right at subscription
                    'interval_unit': 'now',
                    'interval_type': 'after_sub',
                    'template_id': self.env['ir.model.data'].xmlid_to_res_id('event.event_subscription')}),
                (0, 0, {  # 1 days before event
                    'interval_nbr': 1,
                    'interval_unit': 'days',
                    'interval_type': 'before_event',
                    'template_id': self.env['ir.model.data'].xmlid_to_res_id('event.event_reminder')}),
            ]
        })
        # create some registrations
        self.Registration.sudo(self.user_eventuser).create({
            'event_id': test_event.id,
            'name': 'Reg0',
            'email': 'reg0@example.com',
        })
        self.Registration.sudo(self.user_eventuser).create({
            'event_id': test_event.id,
            'name': 'Reg1',
            'email': 'reg1@example.com',
        })
        # check subscription scheduler
        schedulers = self.EventMail.search([('event_id', '=', test_event.id), ('interval_type', '=', 'after_sub')])
        self.assertEqual(len(schedulers), 1, 'event: wrong scheduler creation')
        self.assertEqual(schedulers[0].scheduled_date, test_event.create_date, 'event: incorrect scheduled date for checking controller')
        # verify that subscription scheduler was auto-executed after each registration
        self.assertEqual(len(schedulers[0].mail_registration_ids), 2, 'event: incorrect number of mail scheduled date')
        # Only mails created during this test run are counted (date >= now).
        mails = self.env['mail.mail'].search([('subject', 'ilike', 'registration'), ('date', '>=', datetime.datetime.strftime(now, tools.DEFAULT_SERVER_DATETIME_FORMAT))], order='date DESC', limit=3)
        self.assertEqual(len(mails), 2, 'event: wrong number of registration mail sent')
        for registration in schedulers[0].mail_registration_ids:
            self.assertTrue(registration.mail_sent, 'event: wrongly confirmed mailing on registration')
        # check before event scheduler
        schedulers = self.EventMail.search([('event_id', '=', test_event.id), ('interval_type', '=', 'before_event')])
        self.assertEqual(len(schedulers), 1, 'event: wrong scheduler creation')
        # Scheduled one day before the event start, as configured above.
        self.assertEqual(schedulers[0].scheduled_date, datetime.datetime.strftime(event_date_begin + relativedelta(days=-1), tools.DEFAULT_SERVER_DATETIME_FORMAT), 'event: incorrect scheduled date')
        # execute event reminder scheduler explicitly
        schedulers[0].execute()
        self.assertTrue(schedulers[0].mail_sent, 'event: reminder scheduler should have sent an email')
        self.assertTrue(schedulers[0].done, 'event: reminder scheduler should be done')
        # 2 registration mails + 1 reminder mail = 3 outgoing messages.
        mails = self.env['mail.mail'].search([('subject', 'ilike', 'TestEventMail'), ('date', '>=', datetime.datetime.strftime(now, tools.DEFAULT_SERVER_DATETIME_FORMAT))], order='date DESC', limit=3)
        self.assertEqual(len(mails), 3, 'event: wrong number of reminders in outgoing mail queue')
|
maxive/erp
|
addons/event/tests/test_mail_schedule.py
|
Python
|
agpl-3.0
| 3,876
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi, Guewen Baconnier
# Copyright 2012-2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
class CreditControlPolicy(models.Model):
    """ Define a policy of reminder """

    _name = "credit.control.policy"
    _description = """Define a reminder policy"""

    name = fields.Char('Name', required=True)
    # Ordered escalation levels belonging to this policy.
    level_ids = fields.One2many('credit.control.policy.level',
                                'policy_id',
                                string='Policy Levels')
    do_nothing = fields.Boolean('Do nothing',
                                help='For policies which should not '
                                'generate lines or are obsolete')
    company_id = fields.Many2one('res.company', string='Company')
    account_ids = fields.Many2many(
        'account.account',
        string='Accounts',
        required=True,
        domain="[('type', '=', 'receivable')]",
        help="This policy will be active only"
             " for the selected accounts",
    )
    active = fields.Boolean('Active', default=True)

    @api.multi
    def _move_lines_domain(self, controlling_date):
        """ Build the default domain for searching move lines """
        self.ensure_one()
        # Unreconciled receivable lines, due on or before the control date,
        # attached to a partner.
        return [('account_id', 'in', self.account_ids.ids),
                ('date_maturity', '<=', controlling_date),
                ('reconcile_id', '=', False),
                ('partner_id', '!=', False)]

    @api.multi
    @api.returns('account.move.line')
    def _due_move_lines(self, controlling_date):
        """ Get the due move lines for the policy of the company.

        The set of ids will be reduced and extended according
        to the specific policies defined on partners and invoices.

        Do not use direct SQL in order to respect security rules.

        Assume that only the receivable lines have a maturity date and that
        accounts used in the policy are reconcilable.
        """
        self.ensure_one()
        move_l_obj = self.env['account.move.line']
        user = self.env.user
        # Only the company's configured credit policy yields lines;
        # any other policy produces an empty recordset.
        if user.company_id.credit_policy_id.id != self.id:
            return move_l_obj.browse()
        domain_line = self._move_lines_domain(controlling_date)
        return move_l_obj.search(domain_line)

    @api.multi
    @api.returns('account.move.line')
    def _move_lines_subset(self, controlling_date, model, move_relation_field):
        """ Get the move lines related to one model for a policy.

        Do not use direct SQL in order to respect security rules.

        Assume that only the receivable lines have a maturity date and that
        accounts used in the policy are reconcilable.

        The policy relation field must be named credit_policy_id.

        :param str controlling_date: date of credit control
        :param str model: name of the model where is defined a credit_policy_id
        :param str move_relation_field: name of the field in account.move.line
            which is a many2one to `model`
        :return: recordset to add in the process, recordset to remove from
            the process
        """
        self.ensure_one()
        # MARK possible place for a good optimisation
        my_obj = self.env[model]
        move_l_obj = self.env['account.move.line']
        default_domain = self._move_lines_domain(controlling_date)
        to_add = move_l_obj.browse()
        to_remove = move_l_obj.browse()
        # The lines which are linked to this policy have to be included in the
        # run for this policy.
        # If another object override the credit_policy_id (ie. invoice after
        add_objs = my_obj.search([('credit_policy_id', '=', self.id)])
        if add_objs:
            domain = list(default_domain)
            domain.append((move_relation_field, 'in', add_objs.ids))
            to_add = move_l_obj.search(domain)
        # The lines which are linked to another policy do not have to be
        # included in the run for this policy.
        neg_objs = my_obj.search([('credit_policy_id', '!=', self.id),
                                  ('credit_policy_id', '!=', False)])
        if neg_objs:
            domain = list(default_domain)
            domain.append((move_relation_field, 'in', neg_objs.ids))
            to_remove = move_l_obj.search(domain)
        return to_add, to_remove

    @api.multi
    @api.returns('account.move.line')
    def _get_partner_related_lines(self, controlling_date):
        """ Get the move lines for a policy related to a partner.

        :param str controlling_date: date of credit control
        :return: recordset to add in the process, recordset to remove from
            the process
        """
        return self._move_lines_subset(controlling_date, 'res.partner',
                                       'partner_id')

    @api.multi
    @api.returns('account.move.line')
    def _get_invoice_related_lines(self, controlling_date):
        """ Get the move lines for a policy related to an invoice.

        :param str controlling_date: date of credit control
        :return: recordset to add in the process, recordset to remove from
            the process
        """
        return self._move_lines_subset(controlling_date, 'account.invoice',
                                       'invoice')

    @api.multi
    @api.returns('account.move.line')
    def _get_move_lines_to_process(self, controlling_date):
        """ Build a list of move lines ids to include in a run
        for a policy at a given date.

        :param str controlling_date: date of credit control
        :return: recordset to include in the run
        """
        self.ensure_one()
        # there is a priority between the lines, depicted by the calls below
        # (invoice-level overrides beat partner-level overrides).
        lines = self._due_move_lines(controlling_date)
        to_add, to_remove = self._get_partner_related_lines(controlling_date)
        lines = (lines | to_add) - to_remove
        to_add, to_remove = self._get_invoice_related_lines(controlling_date)
        lines = (lines | to_add) - to_remove
        return lines

    @api.multi
    @api.returns('account.move.line')
    def _lines_different_policy(self, lines):
        """ Return a set of move lines ids for which there is an
        existing credit line but with a different policy.
        """
        self.ensure_one()
        move_line_obj = self.env['account.move.line']
        different_lines = move_line_obj.browse()
        if not lines:
            return different_lines
        # Raw SQL: manually overridden credit lines are deliberately excluded.
        cr = self.env.cr
        cr.execute("SELECT move_line_id FROM credit_control_line"
                   " WHERE policy_id != %s and move_line_id in %s"
                   " AND manually_overridden IS false",
                   (self.id, tuple(lines.ids)))
        res = cr.fetchall()
        if res:
            return move_line_obj.browse([row[0] for row in res])
        return different_lines

    @api.multi
    def check_policy_against_account(self, account):
        """ Ensure that the policy corresponds to account relation """
        policies = self.search([])
        allowed = [x for x in policies
                   if account in x.account_ids or x.do_nothing]
        if self not in allowed:
            allowed_names = u"\n".join(x.name for x in allowed)
            # NOTE(review): `api.Warning` does not look like a public member
            # of the api module — openerp.exceptions.Warning is the usual
            # class for user-facing errors; confirm this raise works at
            # runtime.
            raise api.Warning(
                _('You can only use a policy set on '
                  'account %s.\n'
                  'Please choose one of the following '
                  'policies:\n %s') % (account.name, allowed_names)
            )
        return True
class CreditControlPolicyLevel(models.Model):
    """Define a policy level. A level allows to determine if
    a move line is due and the level of overdue of the line"""

    _name = "credit.control.policy.level"
    _order = 'level'
    _description = """A credit control policy level"""

    name = fields.Char(string='Name', required=True, translate=True)
    policy_id = fields.Many2one('credit.control.policy',
                                string='Related Policy',
                                required=True)
    # Escalation position within the policy; levels run in ascending order.
    level = fields.Integer(string='Level', required=True)
    computation_mode = fields.Selection(
        [('net_days', 'Due Date'),
         ('end_of_month', 'Due Date, End Of Month'),
         ('previous_date', 'Previous Reminder')],
        string='Compute Mode',
        required=True
    )
    delay_days = fields.Integer(string='Delay (in days)', required=True)
    email_template_id = fields.Many2one('email.template',
                                        string='Email Template',
                                        required=True)
    channel = fields.Selection([('letter', 'Letter'),
                                ('email', 'Email')],
                               string='Channel',
                               required=True)
    custom_text = fields.Text(string='Custom Message',
                              required=True,
                              translate=True)
    custom_mail_text = fields.Html(string='Custom Mail Message',
                                   required=True, translate=True)
    custom_text_after_details = fields.Text(
        string='Custom Message after details', translate=True)

    # BUG FIX: the attribute the ORM reads is `_sql_constraints` (plural).
    # The original `_sql_constraint` was silently ignored, so the unique
    # (policy_id, level) constraint was never created in the database.
    _sql_constraints = [('unique level',
                         'UNIQUE (policy_id, level)',
                         'Level must be unique per policy')]

    @api.one
    @api.constrains('level', 'computation_mode')
    def _check_level_mode(self):
        """ The smallest level of a policy cannot be computed on the
        "previous_date".
        """
        smallest_level = self.search([('policy_id', '=', self.policy_id.id)],
                                     order='level asc', limit=1)
        if smallest_level.computation_mode == 'previous_date':
            # BUG FIX: the exception was `return`ed instead of raised, so the
            # constraint never actually blocked the write.
            # NOTE(review): confirm `api.ValidationError` resolves in this
            # Odoo version (openerp.exceptions.ValidationError is canonical).
            raise api.ValidationError(_('The smallest level can not be of '
                                        'type Previous Reminder'))

    @api.multi
    def _previous_level(self):
        """ For one policy level, returns the id of the previous level

        If there is no previous level, it returns None, it means that's the
        first policy level

        :return: previous level or None if there is no previous level
        """
        self.ensure_one()
        previous_levels = self.search([('policy_id', '=', self.policy_id.id),
                                       ('level', '<', self.level)],
                                      order='level desc',
                                      limit=1)
        if not previous_levels:
            return None
        return previous_levels

    # ----- sql time related methods ---------
    # Each *_get_boundary staticmethod returns a SQL fragment selecting
    # lines whose (base date + delay) falls on or before the control date.

    @staticmethod
    def _net_days_get_boundary():
        return (" (mv_line.date_maturity + %(delay)s)::date <= "
                "date(%(controlling_date)s)")

    @staticmethod
    def _end_of_month_get_boundary():
        return ("(date_trunc('MONTH', (mv_line.date_maturity + %(delay)s))+"
                "INTERVAL '1 MONTH - 1 day')::date"
                "<= date(%(controlling_date)s)")

    @staticmethod
    def _previous_date_get_boundary():
        return "(cr_line.date + %(delay)s)::date <= date(%(controlling_date)s)"

    @api.multi
    def _get_sql_date_boundary_for_computation_mode(self, controlling_date):
        """ Return a where clauses statement for the given controlling
        date and computation mode of the level
        """
        self.ensure_one()
        # Dispatch on computation_mode to the matching staticmethod above.
        fname = "_%s_get_boundary" % (self.computation_mode, )
        if hasattr(self, fname):
            fnc = getattr(self, fname)
            return fnc()
        else:
            raise NotImplementedError(
                _('Can not get function for computation mode: '
                  '%s is not implemented') % (fname, )
            )

    # -----------------------------------------

    @api.multi
    @api.returns('account.move.line')
    def _get_first_level_move_lines(self, controlling_date, lines):
        """ Retrieve all the move lines that are linked to a first level.

        We use Raw SQL for performance. Security rule where applied in
        policy object when the first set of lines were retrieved
        """
        self.ensure_one()
        move_line_obj = self.env['account.move.line']
        if not lines:
            return move_line_obj.browse()
        cr = self.env.cr
        sql = ("SELECT DISTINCT mv_line.id\n"
               " FROM account_move_line mv_line\n"
               " WHERE mv_line.id in %(line_ids)s\n"
               " AND NOT EXISTS (SELECT id\n"
               "                 FROM credit_control_line\n"
               "                 WHERE move_line_id = mv_line.id\n"
               # lines from a previous level with a draft or ignored state
               # or manually overridden
               # have to be generated again for the previous level
               "                 AND NOT manually_overridden\n"
               "                 AND state NOT IN ('draft', 'ignored'))"
               " AND (mv_line.debit IS NOT NULL AND mv_line.debit != 0.0)\n")
        sql += " AND"
        _get_sql_date_part = self._get_sql_date_boundary_for_computation_mode
        sql += _get_sql_date_part(controlling_date)
        data_dict = {'controlling_date': controlling_date,
                     'line_ids': tuple(lines.ids),
                     'delay': self.delay_days}
        cr.execute(sql, data_dict)
        res = cr.fetchall()
        if res:
            return move_line_obj.browse([row[0] for row in res])
        return move_line_obj.browse()

    @api.multi
    @api.returns('account.move.line')
    def _get_other_level_move_lines(self, controlling_date, lines):
        """ Retrieve the move lines for other levels than first level.
        """
        self.ensure_one()
        move_line_obj = self.env['account.move.line']
        if not lines:
            return move_line_obj.browse()
        cr = self.env.cr
        # Select lines whose highest non-ignored credit line sits exactly at
        # the previous level, i.e. candidates for escalation to this level.
        sql = ("SELECT mv_line.id\n"
               " FROM account_move_line mv_line\n"
               " JOIN credit_control_line cr_line\n"
               " ON (mv_line.id = cr_line.move_line_id)\n"
               " WHERE cr_line.id = (SELECT credit_control_line.id "
               "                     FROM credit_control_line\n"
               "                     WHERE credit_control_line.move_line_id = mv_line.id\n"
               "                     AND state != 'ignored'"
               "                     AND NOT manually_overridden"
               "                     ORDER BY credit_control_line.level desc limit 1)\n"
               " AND cr_line.level = %(previous_level)s\n"
               " AND (mv_line.debit IS NOT NULL AND mv_line.debit != 0.0)\n"
               # lines from a previous level with a draft or ignored state
               # or manually overridden
               # have to be generated again for the previous level
               " AND NOT manually_overridden\n"
               " AND cr_line.state NOT IN ('draft', 'ignored')\n"
               " AND mv_line.id in %(line_ids)s\n")
        sql += " AND "
        _get_sql_date_part = self._get_sql_date_boundary_for_computation_mode
        sql += _get_sql_date_part(controlling_date)
        previous_level = self._previous_level()
        data_dict = {'controlling_date': controlling_date,
                     'line_ids': tuple(lines.ids),
                     'delay': self.delay_days,
                     'previous_level': previous_level.level}
        cr.execute(sql, data_dict)
        res = cr.fetchall()
        if res:
            return move_line_obj.browse([row[0] for row in res])
        return move_line_obj.browse()

    @api.multi
    @api.returns('account.move.line')
    def get_level_lines(self, controlling_date, lines):
        """ get all move lines in entry lines that match the current level """
        self.ensure_one()
        move_line_obj = self.env['account.move.line']
        matching_lines = move_line_obj.browse()
        # First level has no previous level; other levels escalate from it.
        if self._previous_level() is None:
            method = self._get_first_level_move_lines
        else:
            method = self._get_other_level_move_lines
        matching_lines |= method(controlling_date, lines)
        return matching_lines
|
abstract-open-solutions/account-financial-tools
|
account_credit_control/policy.py
|
Python
|
agpl-3.0
| 17,427
|
from __future__ import unicode_literals
import json
from django.core.management.base import BaseCommand
from django.core.exceptions import ObjectDoesNotExist
from candidates.models import PartySet, OrganizationExtra
from popolo import models as popolo_models
from popolo.importers.popit import PopItImporter
from compat import input
class Command(BaseCommand):
    help = 'Create or update parties from a Popolo JSON file'

    def add_arguments(self, parser):
        parser.add_argument('JSON-FILENAME')
        parser.add_argument('--party-set', default='default')

    def add_related_objects(self, org, attr, related_data_list, get_kwargs):
        """Get-or-create each related object (links, sources, ...) on *org*.

        :param org: the Organization owning the related manager
        :param attr: name of the related manager attribute on *org*
        :param related_data_list: raw Popolo dicts for the related objects
        :param get_kwargs: callable turning a raw dict into ORM kwargs
        """
        related_manager = getattr(org, attr)
        for related_data in related_data_list:
            kwargs = get_kwargs(related_data)
            try:
                related_manager.get(**kwargs)
            except ObjectDoesNotExist:
                related_manager.create(**kwargs)

    def update_party(self, party_data, party_set):
        """Create or update one party Organization from Popolo data and
        make sure it belongs to *party_set*."""
        # Only copy non-empty scalar fields from the source data.
        kwargs = {}
        for k in (
                'name', 'summary', 'description', 'classification',
                'founding_date', 'dissolution_date',
        ):
            v = party_data.get(k)
            if v:
                kwargs[k] = v
        try:
            org_extra = OrganizationExtra.objects.get(
                slug=party_data['id']
            )
            org = org_extra.base
            for k, v in kwargs.items():
                setattr(org, k, v)
            if not org.party_sets.filter(slug=party_set.slug):
                org.party_sets.add(party_set)
        except OrganizationExtra.DoesNotExist:
            org = popolo_models.Organization.objects.create(**kwargs)
            org_extra = OrganizationExtra.objects.create(
                base=org, slug=party_data['id']
            )
            org.party_sets.add(party_set)
        # Now make sure any related objects exist:
        for k, get_kwargs in [
                ('contact_details', self.importer.make_contact_detail_dict),
                ('identifiers', self.importer.make_identifier_dict),
                ('links', self.importer.make_link_dict),
                ('sources', self.importer.make_source_dict),
                ('other_names', self.importer.make_other_name_dict),
        ]:
            if len(party_data.get(k, [])) > 0:
                self.add_related_objects(
                    org,
                    k,
                    party_data[k],
                    get_kwargs
                )

    def get_party_set(self, requested_party_set_slug):
        """Return the requested PartySet, interactively offering to create it.

        Returns None when the party set is missing and the user declines to
        create it.
        """
        try:
            return PartySet.objects.get(slug=requested_party_set_slug)
        except PartySet.DoesNotExist:
            self.stdout.write("Couldn't find the party set '{0}'".format(
                requested_party_set_slug
            ))
            all_party_sets = PartySet.objects.values_list('slug', flat=True)
            if PartySet.objects.exists():
                self.stdout.write("You might have meant one of these:")
                for other_party_set_slug in all_party_sets:
                    self.stdout.write("  " + other_party_set_slug)
            self.stdout.write(
                "Create the party set '{0}'? (y/n) ".format(
                    requested_party_set_slug
                ),
                ending=''
            )
            response = input()
            if response.strip().lower() != 'y':
                self.stderr.write("Exiting.")
                return
            self.stdout.write('What is the full name of the party set? ')
            response = input()
            return PartySet.objects.create(
                slug=requested_party_set_slug, name=response
            )

    def handle(self, **options):
        self.importer = PopItImporter()
        party_set = self.get_party_set(options['party_set'])
        # BUG FIX: get_party_set() returns None when the user declines to
        # create a missing party set; previously we carried on and crashed
        # in update_party() with an AttributeError on party_set.slug.
        if party_set is None:
            return
        with open(options['JSON-FILENAME']) as f:
            for party_data in json.load(f):
                self.update_party(party_data, party_set)
|
mysociety/yournextmp-popit
|
candidates/management/commands/candidates_create_parties_from_json.py
|
Python
|
agpl-3.0
| 3,982
|
# -*- coding: utf-8 -*-
# (c) 2015 Alex Comba - Agile Business Group
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, fields
class PurchaseConfigSettings(models.TransientModel):
    """Purchase settings: checkbox that grants the group letting PO lines
    show only the product's purchase description."""
    _inherit = 'purchase.config.settings'

    # Ticking this box adds users to the implied security group defined in
    # the purchase_order_line_description module.
    group_use_product_description_per_po_line = fields.Boolean(
        "Allow using only the product purchase description on the purchase "
        "order lines", implied_group="purchase_order_line_description."
        "group_use_product_description_per_po_line",
        help="Allows you to use only product purchase description on the "
        "purchase order lines."
    )
|
acsone/purchase-workflow
|
purchase_order_line_description/models/purchase_config_settings.py
|
Python
|
agpl-3.0
| 647
|
# -*- coding: utf-8 -*-
# Copyright 2015 Nicola Malcontenti - Agile Business Group
# Copyright 2016 Andrea Cometa - Apulia Software
# Copyright 2016 Lorenzo Battistini - Agile Business Group
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
from . import account_invoice, partner
|
linkitspa/l10n-italy
|
account_invoice_report_ddt_group/models/__init__.py
|
Python
|
agpl-3.0
| 300
|
""" API v1 models. """
from itertools import groupby
import logging
from django.db import transaction
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from course_modes.models import CourseMode
log = logging.getLogger(__name__)
class Course(object):
    """ Pseudo-course model used to group CourseMode objects.

    Not a Django model itself: it wraps the CourseMode rows sharing one
    course id so the API can treat them as a single resource.
    """
    # Class-level placeholders; instance values are set in __init__.
    id = None  # pylint: disable=invalid-name
    modes = None
    _deleted_modes = None

    def __init__(self, id, modes):  # pylint: disable=invalid-name,redefined-builtin
        self.id = CourseKey.from_string(unicode(id))  # pylint: disable=invalid-name
        self.modes = list(modes)
        # Modes removed by update(); actually deleted from the DB on save().
        self._deleted_modes = []

    def get_mode_display_name(self, mode):
        """ Returns display name for the given mode. """
        slug = mode.mode_slug.strip().lower()

        if slug == 'credit':
            return 'Credit'
        if 'professional' in slug:
            return 'Professional Education'
        elif slug == 'verified':
            return 'Verified Certificate'
        elif slug == 'honor':
            return 'Honor Certificate'
        elif slug == 'audit':
            return 'Audit'

        # Unrecognized slugs fall back to the raw slug value.
        return mode.mode_slug

    @transaction.commit_on_success
    def save(self, *args, **kwargs):  # pylint: disable=unused-argument
        """ Save the CourseMode objects to the database.

        Runs inside a single transaction: all mode saves plus the pending
        deletions either commit together or roll back together.
        """
        for mode in self.modes:
            mode.course_id = self.id
            mode.mode_display_name = self.get_mode_display_name(mode)
            mode.save()

        # Delete the modes removed by a prior update(), then clear the queue.
        deleted_mode_ids = [mode.id for mode in self._deleted_modes]
        CourseMode.objects.filter(id__in=deleted_mode_ids).delete()
        self._deleted_modes = []

    def update(self, attrs):
        """ Update the model with external data (usually passed via API call).

        Merges the posted modes into the existing ones by mode_slug; any
        existing mode not present in the posted data is queued for deletion
        (the DB is only touched by save()).
        """
        existing_modes = {mode.mode_slug: mode for mode in self.modes}
        merged_modes = set()
        merged_mode_keys = set()
        for posted_mode in attrs.get('modes', []):
            # Reuse the existing CourseMode for this slug, or start a new one.
            merged_mode = existing_modes.get(posted_mode.mode_slug, CourseMode())
            merged_mode.course_id = self.id
            merged_mode.mode_slug = posted_mode.mode_slug
            merged_mode.mode_display_name = posted_mode.mode_slug
            merged_mode.min_price = posted_mode.min_price
            merged_mode.currency = posted_mode.currency
            merged_mode.sku = posted_mode.sku
            merged_mode.expiration_datetime = posted_mode.expiration_datetime
            merged_modes.add(merged_mode)
            merged_mode_keys.add(merged_mode.mode_slug)

        # Queue modes absent from the posted data for deletion at save() time.
        deleted_modes = set(existing_modes.keys()) - merged_mode_keys
        self._deleted_modes = [existing_modes[mode] for mode in deleted_modes]
        self.modes = list(merged_modes)

    @classmethod
    def get(cls, course_id):
        """ Retrieve a single course.

        Raises ValueError for an unparsable course id; returns None when the
        course has no modes.
        """
        try:
            course_id = CourseKey.from_string(unicode(course_id))
        except InvalidKeyError:
            log.debug('[%s] is not a valid course key.', course_id)
            raise ValueError
        course_modes = CourseMode.objects.filter(course_id=course_id)
        if course_modes:
            return cls(unicode(course_id), list(course_modes))
        return None

    @classmethod
    def iterator(cls):
        """ Generator that yields all courses. """
        # groupby only groups adjacent rows, so the queryset must be ordered
        # by the grouping key.
        course_modes = CourseMode.objects.order_by('course_id')
        for course_id, modes in groupby(course_modes, lambda o: o.course_id):
            yield cls(course_id, list(modes))
|
benpatterson/edx-platform
|
lms/djangoapps/commerce/api/v1/models.py
|
Python
|
agpl-3.0
| 3,535
|
import sys
from PySide2.QtCore import *
from PySide2.QtWidgets import *
class ListModel(QAbstractListModel):
    """Minimal stub model that always reports zero rows."""

    def rowCount(self, parent = QModelIndex()):
        # Always empty, regardless of the parent index.
        return 0
# Regression scenario: attach an empty model to a list view, spin the event
# loop once, and exit cleanly.
app = QApplication([])
model = ListModel()
v = QListView()
v.setModel(model)
# Schedule the view to close as soon as the event loop starts, so that
# app.exec_() returns instead of blocking.
QTimer.singleShot(0, v.close)
app.exec_()
|
gbaty/pyside2
|
tests/QtWidgets/bug_430.py
|
Python
|
lgpl-2.1
| 295
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Bash(AutotoolsPackage):
    """The GNU Project's Bourne Again SHell."""

    homepage = "https://www.gnu.org/software/bash/"
    url = "https://ftp.gnu.org/gnu/bash/bash-4.4.tar.gz"

    version('4.4', '148888a7c95ac23705559b6f477dfe25')
    version('4.3', '81348932d5da294953e15d4814c74dd1')

    depends_on('ncurses')
    depends_on('readline@5.0:')

    def configure_args(self):
        # Link against wide-character ncurses and point the build at the
        # Spack-provided readline instead of the bundled copy.
        readline_prefix = self.spec['readline'].prefix
        args = [
            'LIBS=-lncursesw',
            '--with-curses',
            '--enable-readline',
        ]
        args.append('--with-installed-readline={0}'.format(readline_prefix))
        return args

    def check(self):
        # Run bash's own test suite.
        make('tests')
|
TheTimmy/spack
|
var/spack/repos/builtin/packages/bash/package.py
|
Python
|
lgpl-2.1
| 1,917
|
from warnings import warn
from six import string_types, iteritems
from ..manipulation import delete_model_genes, undelete_model_genes
from ..manipulation.delete import find_gene_knockout_reactions
from ..solvers import solver_dict, get_solver_name
try:
import scipy
except ImportError:
moma = None
else:
from . import moma
def single_deletion(cobra_model, element_list=None,
                    element_type='gene', **kwargs):
    """Wrapper for single_gene_deletion and single_reaction_deletion

    .. deprecated :: 0.4
        Use single_reaction_deletion and single_gene_deletion
    """
    warn("deprecated - use single_reaction_deletion and single_gene_deletion")
    # Dispatch table instead of an if/elif chain.
    dispatch = {"reaction": single_reaction_deletion,
                "gene": single_gene_deletion}
    try:
        delete_function = dispatch[element_type]
    except KeyError:
        raise Exception("unknown element type")
    return delete_function(cobra_model, element_list, **kwargs)
def single_reaction_deletion(cobra_model, reaction_list=None, solver=None,
                             method="fba", **solver_args):
    """Sequentially knock out each reaction in a model.

    reaction_list: list of reaction ids or cobra.Reaction objects
    method: "fba" or "moma"

    Returns ({reaction_id: growth_rate}, {reaction_id: status}).
    """
    # Normalize the input to a list of Reaction objects.
    if reaction_list is None:
        reactions = cobra_model.reactions
    else:
        reactions = []
        for item in reaction_list:
            if isinstance(item, string_types):
                item = cobra_model.reactions.get_by_id(item)
            reactions.append(item)
    if method == "fba":
        return single_reaction_deletion_fba(cobra_model, reactions,
                                            solver=solver, **solver_args)
    if method == "moma":
        return single_reaction_deletion_moma(cobra_model, reactions,
                                             solver=solver, **solver_args)
    raise ValueError("Unknown deletion method '%s'" % method)
def single_reaction_deletion_fba(cobra_model, reaction_list, solver=None,
                                 **solver_args):
    """Sequentially knock out each reaction in a model using FBA.

    reaction_list: list of cobra.Reaction objects

    Returns ({reaction_id: growth_rate}, {reaction_id: status}).
    """
    solver_name = get_solver_name() if solver is None else solver
    solver = solver_dict[solver_name]
    lp = solver.create_problem(cobra_model)

    growth_rates = {}
    statuses = {}
    for reaction in reaction_list:
        saved_bounds = (reaction.lower_bound, reaction.upper_bound)
        index = cobra_model.reactions.index(reaction)
        # Knock the reaction out by pinning its flux to zero, then re-solve.
        solver.change_variable_bounds(lp, index, 0., 0.)
        solver.solve_problem(lp, **solver_args)
        status = solver.get_status(lp)
        statuses[reaction.id] = status
        if status == "optimal":
            growth_rates[reaction.id] = solver.get_objective_value(lp)
        else:
            growth_rates[reaction.id] = 0.
        # Restore the original bounds so the same LP can be reused.
        solver.change_variable_bounds(lp, index, saved_bounds[0],
                                      saved_bounds[1])
    return (growth_rates, statuses)
def single_reaction_deletion_moma(cobra_model, reaction_list, solver=None,
                                  **solver_args):
    """sequentially knocks out each reaction in a model using MOMA

    reaction_list: list of cobra.Reaction objects

    returns ({reaction_id: growth_rate}, {reaction_id: status})"""
    # The same function can not be used because MOMA can not re-use the
    # same LP object. Problem re-use leads to incorrect solutions.
    if moma is None:
        raise RuntimeError("scipy required for moma")
    # MOMA needs a QP-capable solver, hence qp=True when auto-selecting.
    solver = solver_dict[solver if solver else get_solver_name(qp=True)]
    moma_model, moma_objective = moma.create_euclidian_moma_model(cobra_model)
    growth_rate_dict = {}
    status_dict = {}
    for reaction in reaction_list:
        index = cobra_model.reactions.index(reaction)
        # Each knockout solves a fresh problem built from the MOMA model
        # (see the note above about why the LP cannot be reused).
        solution = moma.moma_knockout(moma_model, moma_objective, (index,),
                                      solver=solver, **solver_args)
        status_dict[reaction.id] = solution.status
        growth_rate_dict[reaction.id] = solution.f
    return (growth_rate_dict, status_dict)
def single_gene_deletion(cobra_model, gene_list=None, solver=None,
                         method="fba", **solver_args):
    """Sequentially knock out each gene in a model.

    gene_list: list of gene ids or cobra.Gene objects
    method: "fba" or "moma"

    Returns ({gene_id: growth_rate}, {gene_id: status}).
    """
    # Normalize the input to a list of Gene objects.
    if gene_list is None:
        genes = cobra_model.genes
    else:
        genes = []
        for item in gene_list:
            if isinstance(item, string_types):
                item = cobra_model.genes.get_by_id(item)
            genes.append(item)
    if method == "fba":
        return single_gene_deletion_fba(cobra_model, genes,
                                        solver=solver, **solver_args)
    if method == "moma":
        return single_gene_deletion_moma(cobra_model, genes,
                                         solver=solver, **solver_args)
    raise ValueError("Unknown deletion method '%s'" % method)
def single_gene_deletion_fba(cobra_model, gene_list, solver=None,
                             **solver_args):
    """Sequentially knock out each gene in a model using FBA.

    gene_list: list of cobra.Gene objects

    Returns ({gene_id: growth_rate}, {gene_id: status}).
    """
    solver_name = get_solver_name() if solver is None else solver
    solver = solver_dict[solver_name]
    lp = solver.create_problem(cobra_model)

    growth_rates = {}
    statuses = {}
    for gene in gene_list:
        saved_bounds = {}
        # A gene knockout may disable several reactions; zero them all.
        for reaction in find_gene_knockout_reactions(cobra_model, [gene]):
            index = cobra_model.reactions.index(reaction)
            saved_bounds[index] = (reaction.lower_bound,
                                   reaction.upper_bound)
            solver.change_variable_bounds(lp, index, 0., 0.)
        solver.solve_problem(lp, **solver_args)
        status = solver.get_status(lp)
        statuses[gene.id] = status
        if status == "optimal":
            growth_rates[gene.id] = solver.get_objective_value(lp)
        else:
            growth_rates[gene.id] = 0.
        # Restore every bound we changed so the LP can be reused.
        for index, bounds in iteritems(saved_bounds):
            solver.change_variable_bounds(lp, index, bounds[0], bounds[1])
    return (growth_rates, statuses)
def single_gene_deletion_moma(cobra_model, gene_list, solver=None,
                              **solver_args):
    """sequentially knocks out each gene in a model using MOMA

    gene_list: list of cobra.Gene objects

    returns ({gene_id: growth_rate}, {gene_id: status})"""
    if moma is None:
        raise RuntimeError("scipy required for moma")
    # NOTE(review): unlike single_reaction_deletion_moma, the solver name is
    # passed through without a solver_dict lookup — confirm that
    # moma.solve_moma_model accepts a solver name rather than a solver module.
    solver = solver if solver else get_solver_name(qp=True)
    moma_model, moma_objective = moma.create_euclidian_moma_model(cobra_model)
    growth_rate_dict = {}
    status_dict = {}
    for gene in gene_list:
        # Knock the gene out in the MOMA model, solve, then undo the
        # knockout before the next iteration.
        delete_model_genes(moma_model, [gene.id])
        solution = moma.solve_moma_model(moma_model, moma_objective,
                                         solver=solver, **solver_args)
        status_dict[gene.id] = solution.status
        growth_rate_dict[gene.id] = solution.f
        undelete_model_genes(moma_model)
    return (growth_rate_dict, status_dict)
|
aebrahim/cobrapy
|
cobra/flux_analysis/single_deletion.py
|
Python
|
lgpl-2.1
| 7,065
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 Mika Mäenpää <mika.j.maenpaa@tut.fi>,
# Tampere University of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
try:
import unittest
except ImportError:
import unittest2 as unittest
from httmock import HTTMock # noqa
from httmock import response # noqa
from httmock import urlmatch # noqa
from gitlab import * # noqa
class TestGitLabRawMethods(unittest.TestCase):
    """Exercise the low-level Gitlab._raw_* HTTP helpers.

    All HTTP traffic is intercepted by httmock, so no real server is
    needed: the mocked /api/v3/known_path endpoints answer 200 with the
    body "response"; /api/v3/unknown_path answers 404.
    """

    def setUp(self):
        self.gl = Gitlab("http://localhost", private_token="private_token",
                         email="testuser@test.com", password="testpassword",
                         ssl_verify=True)

    # Shared mock for the "known path" GET endpoint, reused by several tests.
    @urlmatch(scheme="http", netloc="localhost", path="/api/v3/known_path",
              method="get")
    def resp_get(self, url, request):
        headers = {'content-type': 'application/json'}
        content = 'response'.encode("utf-8")
        return response(200, content, headers, None, 5, request)

    def test_raw_get_unknown_path(self):
        @urlmatch(scheme="http", netloc="localhost",
                  path="/api/v3/unknown_path",
                  method="get")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(404, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            resp = self.gl._raw_get("/unknown_path")
            self.assertEqual(resp.status_code, 404)

    def test_raw_get_without_kwargs(self):
        with HTTMock(self.resp_get):
            resp = self.gl._raw_get("/known_path")
            self.assertEqual(resp.content, b'response')
            self.assertEqual(resp.status_code, 200)

    def test_raw_get_with_kwargs(self):
        # Extra keyword arguments (e.g. sudo) must not break the request.
        with HTTMock(self.resp_get):
            resp = self.gl._raw_get("/known_path", sudo="testing")
            self.assertEqual(resp.content, b'response')
            self.assertEqual(resp.status_code, 200)

    def test_raw_post(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/known_path",
                  method="post")
        def resp_post(url, request):
            headers = {'content-type': 'application/json'}
            content = 'response'.encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_post):
            resp = self.gl._raw_post("/known_path")
            self.assertEqual(resp.content, b'response')
            self.assertEqual(resp.status_code, 200)

    def test_raw_post_unknown_path(self):
        @urlmatch(scheme="http", netloc="localhost",
                  path="/api/v3/unknown_path",
                  method="post")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(404, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            resp = self.gl._raw_post("/unknown_path")
            self.assertEqual(resp.status_code, 404)

    def test_raw_put(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/known_path",
                  method="put")
        def resp_put(url, request):
            headers = {'content-type': 'application/json'}
            content = 'response'.encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_put):
            resp = self.gl._raw_put("/known_path")
            self.assertEqual(resp.content, b'response')
            self.assertEqual(resp.status_code, 200)

    def test_raw_put_unknown_path(self):
        @urlmatch(scheme="http", netloc="localhost",
                  path="/api/v3/unknown_path",
                  method="put")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(404, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            resp = self.gl._raw_put("/unknown_path")
            self.assertEqual(resp.status_code, 404)

    def test_raw_delete(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/known_path",
                  method="delete")
        def resp_delete(url, request):
            headers = {'content-type': 'application/json'}
            content = 'response'.encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_delete):
            resp = self.gl._raw_delete("/known_path")
            self.assertEqual(resp.content, b'response')
            self.assertEqual(resp.status_code, 200)

    def test_raw_delete_unknown_path(self):
        @urlmatch(scheme="http", netloc="localhost",
                  path="/api/v3/unknown_path",
                  method="delete")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(404, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            resp = self.gl._raw_delete("/unknown_path")
            self.assertEqual(resp.status_code, 404)
class TestGitLabMethods(unittest.TestCase):
    """Exercise the generic CRUD entry points: list/get/create/update/delete.

    Each test mocks exactly one API endpoint with httmock and checks either
    the returned data or the exception type raised for the error status.
    """

    def setUp(self):
        self.gl = Gitlab("http://localhost", private_token="private_token",
                         email="testuser@test.com", password="testpassword",
                         ssl_verify=True)

    def test_list(self):
        @urlmatch(scheme="http", netloc="localhost",
                  path="/api/v3/projects/1/repository/branches", method="get")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = ('[{"branch_name": "testbranch", '
                       '"project_id": 1, "ref": "a"}]').encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            data = self.gl.list(ProjectBranch, project_id=1, page=1,
                                per_page=20)
            self.assertEqual(len(data), 1)
            data = data[0]
            self.assertEqual(data.branch_name, "testbranch")
            self.assertEqual(data.project_id, 1)
            self.assertEqual(data.ref, "a")

    def test_list_401(self):
        # 401 is mapped to GitlabAuthenticationError.
        @urlmatch(scheme="http", netloc="localhost",
                  path="/api/v3/projects/1/repository/branches", method="get")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message":"message"}'.encode("utf-8")
            return response(401, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabAuthenticationError, self.gl.list,
                              ProjectBranch, project_id=1)

    def test_list_unknown_error(self):
        # Any other unexpected status (405 here) raises GitlabListError.
        @urlmatch(scheme="http", netloc="localhost",
                  path="/api/v3/projects/1/repository/branches", method="get")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message":"message"}'.encode("utf-8")
            return response(405, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabListError, self.gl.list,
                              ProjectBranch, project_id=1)

    def test_list_kw_missing(self):
        # Listing branches without the required project_id keyword fails.
        self.assertRaises(GitlabListError, self.gl.list, ProjectBranch)

    def test_list_no_connection(self):
        # Port 66000 is out of range, so the connection attempt must fail.
        self.gl.set_url('http://localhost:66000')
        self.assertRaises(GitlabConnectionError, self.gl.list, ProjectBranch,
                          project_id=1)

    def test_get(self):
        @urlmatch(scheme="http", netloc="localhost",
                  path="/api/v3/projects/1", method="get")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"name": "testproject"}'.encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            # get() returns the decoded JSON payload, not a wrapped object.
            data = self.gl.get(Project, id=1)
            expected = {"name": "testproject"}
            self.assertEqual(expected, data)

    def test_get_unknown_path(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/groups/1",
                  method="get")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(404, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabGetError, self.gl.get, Group, 1)

    def test_get_missing_kw(self):
        self.assertRaises(GitlabGetError, self.gl.get, ProjectBranch)

    def test_get_401(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/projects/1",
                  method="get")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(401, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabAuthenticationError, self.gl.get,
                              Project, 1)

    def test_get_404(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/projects/1",
                  method="get")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(404, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabGetError, self.gl.get,
                              Project, 1)

    def test_get_unknown_error(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/projects/1",
                  method="get")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(405, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabGetError, self.gl.get,
                              Project, 1)

    def test_delete(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/groups/1",
                  method="delete")
        def resp_delete_group(url, request):
            headers = {'content-type': 'application/json'}
            content = ''.encode("utf-8")
            return response(200, content, headers, None, 5, request)

        obj = Group(self.gl, data={"name": "testname", "id": 1})
        with HTTMock(resp_delete_group):
            data = self.gl.delete(obj)
            # delete() returns the boolean True (identity-checked).
            self.assertIs(data, True)

    def test_delete_unknown_path(self):
        obj = Project(self.gl, data={"name": "testname", "id": 1})
        obj._created = True

        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/projects/1",
                  method="delete")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(404, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabDeleteError, self.gl.delete, obj)

    def test_delete_401(self):
        obj = Project(self.gl, data={"name": "testname", "id": 1})

        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/projects/1",
                  method="delete")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(401, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabAuthenticationError, self.gl.delete, obj)

    def test_delete_unknown_error(self):
        obj = Project(self.gl, data={"name": "testname", "id": 1})

        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/projects/1",
                  method="delete")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(405, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabDeleteError, self.gl.delete, obj)

    def test_create(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/projects",
                  method="post")
        def resp_create_project(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"name": "testname", "id": 1}'.encode("utf-8")
            return response(201, content, headers, None, 5, request)

        obj = Project(self.gl, data={"name": "testname"})
        with HTTMock(resp_create_project):
            data = self.gl.create(obj)
            expected = {u"name": u"testname", u"id": 1}
            self.assertEqual(expected, data)

    def test_create_kw_missing(self):
        # Group creation without all required attributes raises before any
        # HTTP request is made.
        obj = Group(self.gl, data={"name": "testgroup"})
        self.assertRaises(GitlabCreateError, self.gl.create, obj)

    def test_create_unknown_path(self):
        obj = User(self.gl, data={"email": "email", "password": "password",
                                  "username": "username", "name": "name",
                                  "can_create_group": True})
        obj._created = True

        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/projects/1",
                  method="delete")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(404, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabCreateError, self.gl.create, obj)

    def test_create_401(self):
        obj = Group(self.gl, data={"name": "testgroup", "path": "testpath"})

        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/groups",
                  method="post")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(401, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabAuthenticationError, self.gl.create, obj)

    def test_create_unknown_error(self):
        obj = Group(self.gl, data={"name": "testgroup", "path": "testpath"})

        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/groups",
                  method="post")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(405, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabCreateError, self.gl.create, obj)

    def test_update(self):
        obj = User(self.gl, data={"email": "testuser@testmail.com",
                                  "password": "testpassword",
                                  "name": u"testuser",
                                  "username": "testusername",
                                  "can_create_group": True,
                                  "id": 1})

        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/users/1",
                  method="put")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"first": "return1"}'.encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            # update() returns the server's decoded JSON response.
            data = self.gl.update(obj)
            expected = {"first": "return1"}
            self.assertEqual(expected, data)

    def test_update_kw_missing(self):
        obj = Group(self.gl, data={"name": "testgroup"})
        self.assertRaises(GitlabUpdateError, self.gl.update, obj)

    def test_update_401(self):
        obj = Group(self.gl, data={"name": "testgroup", "path": "testpath",
                                   "id": 1})

        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/groups/1",
                  method="put")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(401, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabAuthenticationError, self.gl.update, obj)

    def test_update_unknown_error(self):
        obj = Group(self.gl, data={"name": "testgroup", "path": "testpath",
                                   "id": 1})

        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/groups/1",
                  method="put")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(405, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabUpdateError, self.gl.update, obj)

    def test_update_unknown_path(self):
        obj = Group(self.gl, data={"name": "testgroup", "path": "testpath",
                                   "id": 1})

        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/groups/1",
                  method="put")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(404, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabUpdateError, self.gl.update, obj)
class TestGitLab(unittest.TestCase):
    """Exercise connection setup, authentication, and the object shortcuts
    (gl.Project, gl.Group, ...) exposed on the Gitlab client."""

    def setUp(self):
        self.gl = Gitlab("http://localhost", private_token="private_token",
                         email="testuser@test.com", password="testpassword",
                         ssl_verify=True)

    def test_set_url(self):
        # The API version suffix is appended to the base URL automatically.
        self.gl.set_url("http://new_url")
        self.assertEqual(self.gl._url, "http://new_url/api/v3")

    def test_set_token(self):
        # Setting the token must also update the PRIVATE-TOKEN header.
        token = "newtoken"
        expected = {"PRIVATE-TOKEN": token}
        self.gl.set_token(token)
        self.assertEqual(self.gl.private_token, token)
        self.assertDictContainsSubset(expected, self.gl.headers)

    def test_set_credentials(self):
        email = "credentialuser@test.com"
        password = "credentialpassword"
        self.gl.set_credentials(email=email, password=password)
        self.assertEqual(self.gl.email, email)
        self.assertEqual(self.gl.password, password)

    def test_credentials_auth_nopassword(self):
        # Credentials auth with no email/password must fail up front.
        self.gl.set_credentials(email=None, password=None)
        self.assertRaises(GitlabAuthenticationError, self.gl.credentials_auth)

    def test_credentials_auth_notok(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/session",
                  method="post")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"message": "message"}'.encode("utf-8")
            return response(404, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            self.assertRaises(GitlabAuthenticationError,
                              self.gl.credentials_auth)

    def test_auth_with_credentials(self):
        # With no token set, auth() must behave like credentials_auth();
        # reuse that test's assertions with auth() as the callback.
        self.gl.set_token(None)
        self.test_credentials_auth(callback=self.gl.auth)

    def test_auth_with_token(self):
        # With a token set, auth() must behave like token_auth().
        self.test_token_auth(callback=self.gl.auth)

    def test_credentials_auth(self, callback=None):
        if callback is None:
            callback = self.gl.credentials_auth
        token = "credauthtoken"
        id_ = 1
        expected = {"PRIVATE-TOKEN": token}

        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/session",
                  method="post")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{{"id": {0:d}, "private_token": "{1:s}"}}'.format(
                id_, token).encode("utf-8")
            return response(201, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            callback()
        # A successful session login stores the returned private token and
        # the authenticated user.
        self.assertEqual(self.gl.private_token, token)
        self.assertDictContainsSubset(expected, self.gl.headers)
        self.assertEqual(self.gl.user.id, id_)

    def test_token_auth(self, callback=None):
        if callback is None:
            callback = self.gl.token_auth
        name = "username"
        id_ = 1

        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/user",
                  method="get")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{{"id": {0:d}, "username": "{1:s}"}}'.format(
                id_, name).encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            callback()
        self.assertEqual(self.gl.user.username, name)
        self.assertEqual(self.gl.user.id, id_)
        self.assertEqual(type(self.gl.user), CurrentUser)

    def test_get_list_or_object_without_id(self):
        # Without an id the helper lists all objects.
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/projects",
                  method="get")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '[{"name": "testproject", "id": 1}]'.encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            projs = Project._get_list_or_object(self.gl, None)
            self.assertEqual(len(projs), 1)
            proj = projs[0]
            self.assertEqual(proj.id, 1)
            self.assertEqual(proj.name, "testproject")

    def test_get_list_or_object_with_id(self):
        # With an id the helper fetches a single object.
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/projects/1",
                  method="get")
        def resp_cont(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"name": "testproject", "id": 1}'.encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_cont):
            proj = Project._get_list_or_object(self.gl, 1)
            self.assertEqual(proj.id, 1)
            self.assertEqual(proj.name, "testproject")

    def test_Hook(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/hooks/1",
                  method="get")
        def resp_get_hook(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"url": "testurl", "id": 1}'.encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_get_hook):
            data = self.gl.Hook(id=1)
            self.assertEqual(type(data), Hook)
            self.assertEqual(data.url, "testurl")
            self.assertEqual(data.id, 1)

    def test_Project(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/projects/1",
                  method="get")
        def resp_get_project(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"name": "name", "id": 1}'.encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_get_project):
            data = self.gl.Project(id=1)
            self.assertEqual(type(data), Project)
            self.assertEqual(data.name, "name")
            self.assertEqual(data.id, 1)

    def test_UserProject(self):
        @urlmatch(scheme="http", netloc="localhost",
                  path="/api/v3/projects/user/2", method="get")
        def resp_get_userproject(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"name": "name", "id": 1, "user_id": 2}'.encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_get_userproject):
            # Direct retrieval of a UserProject raises NotImplementedError.
            self.assertRaises(NotImplementedError, self.gl.UserProject, id=1,
                              user_id=2)

    def test_Group(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/groups/1",
                  method="get")
        def resp_get_group(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"name": "name", "id": 1, "path": "path"}'
            content = content.encode('utf-8')
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_get_group):
            data = self.gl.Group(id=1)
            self.assertEqual(type(data), Group)
            self.assertEqual(data.name, "name")
            self.assertEqual(data.path, "path")
            self.assertEqual(data.id, 1)

    def test_Issue(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/issues/1",
                  method="get")
        def resp_get_issue(url, request):
            headers = {'content-type': 'application/json'}
            content = '{"name": "name", "id": 1}'.encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_get_issue):
            # Fetching a single Issue by id raises NotImplementedError.
            self.assertRaises(NotImplementedError, self.gl.Issue, id=1)

    def test_User(self):
        @urlmatch(scheme="http", netloc="localhost", path="/api/v3/users/1",
                  method="get")
        def resp_get_user(url, request):
            headers = {'content-type': 'application/json'}
            content = ('{"name": "name", "id": 1, "password": "password", '
                       '"username": "username", "email": "email"}')
            content = content.encode("utf-8")
            return response(200, content, headers, None, 5, request)

        with HTTMock(resp_get_user):
            user = self.gl.User(id=1)
            self.assertEqual(type(user), User)
            self.assertEqual(user.name, "name")
            self.assertEqual(user.id, 1)
|
stefanklug/python-gitlab
|
gitlab/tests/test_gitlab.py
|
Python
|
lgpl-3.0
| 27,164
|
"""
=========================================================
Comparing different clustering algorithms on toy datasets
=========================================================
This example aims at showing characteristics of different
clustering algorithms on datasets that are "interesting"
but still in 2D. The last dataset is an example of a 'null'
situation for clustering: the data is homogeneous, and
there is no good clustering.
While these examples give some intuition about the algorithms,
this intuition might not apply to very high dimensional data.
The results could be improved by tweaking the parameters for
each clustering strategy, for instance setting the number of
clusters for the methods that needs this parameter
specified. Note that affinity propagation has a tendency to
create many clusters. Thus in this example its two parameters
(damping and per-point preference) were set to to mitigate this
behavior.
"""
print __doc__
import time
import numpy as np
import pylab as pl
from sklearn import cluster, datasets
from sklearn.metrics import euclidean_distances
from sklearn.neighbors import kneighbors_graph
from sklearn.preprocessing import StandardScaler
np.random.seed(0)
# Generate datasets. We choose the size big enough to see the scalability
# of the algorithms, but not too big to avoid too long running times
n_samples = 1500
noisy_circles = datasets.make_circles(n_samples=n_samples, factor=.5,
noise=.05)
noisy_moons = datasets.make_moons(n_samples=n_samples, noise=.05)
blobs = datasets.make_blobs(n_samples=n_samples, random_state=8)
no_structure = np.random.rand(n_samples, 2), None
colors = np.array([x for x in 'bgrcmykbgrcmykbgrcmykbgrcmyk'])
colors = np.hstack([colors] * 20)
pl.figure(figsize=(14, 9.5))
pl.subplots_adjust(left=.001, right=.999, bottom=.001, top=.96, wspace=.05,
hspace=.01)
plot_num = 1
for i_dataset, dataset in enumerate([noisy_circles, noisy_moons, blobs,
no_structure]):
X, y = dataset
# normalize dataset for easier parameter selection
X = StandardScaler().fit_transform(X)
# estimate bandwidth for mean shift
bandwidth = cluster.estimate_bandwidth(X, quantile=0.3)
# connectivity matrix for structured Ward
connectivity = kneighbors_graph(X, n_neighbors=10)
# make connectivity symmetric
connectivity = 0.5 * (connectivity + connectivity.T)
# Compute distances
#distances = np.exp(-euclidean_distances(X))
distances = euclidean_distances(X)
# create clustering estimators
ms = cluster.MeanShift(bandwidth=bandwidth, bin_seeding=True)
two_means = cluster.MiniBatchKMeans(n_clusters=2)
ward_five = cluster.Ward(n_clusters=2, connectivity=connectivity)
spectral = cluster.SpectralClustering(n_clusters=2,
eigen_solver='arpack',
affinity="nearest_neighbors")
dbscan = cluster.DBSCAN(eps=.2)
affinity_propagation = cluster.AffinityPropagation(damping=.9,
preference=-200)
for algorithm in [two_means, affinity_propagation, ms, spectral,
ward_five, dbscan]:
# predict cluster memberships
t0 = time.time()
algorithm.fit(X)
t1 = time.time()
if hasattr(algorithm, 'labels_'):
y_pred = algorithm.labels_.astype(np.int)
else:
y_pred = algorithm.predict(X)
# plot
pl.subplot(4, 6, plot_num)
if i_dataset == 0:
pl.title(str(algorithm).split('(')[0], size=18)
pl.scatter(X[:, 0], X[:, 1], color=colors[y_pred].tolist(), s=10)
if hasattr(algorithm, 'cluster_centers_'):
centers = algorithm.cluster_centers_
center_colors = colors[:len(centers)]
pl.scatter(centers[:, 0], centers[:, 1], s=100, c=center_colors)
pl.xlim(-2, 2)
pl.ylim(-2, 2)
pl.xticks(())
pl.yticks(())
pl.text(.99, .01, ('%.2fs' % (t1 - t0)).lstrip('0'),
transform=pl.gca().transAxes, size=15,
horizontalalignment='right')
plot_num += 1
pl.show()
|
seckcoder/lang-learn
|
python/sklearn/examples/cluster/plot_cluster_comparison.py
|
Python
|
unlicense
| 4,259
|
#!/usr/bin/env python
"""This is a single binary demo program."""
import threading
# pylint: disable=unused-import,g-bad-import-order
from grr.lib import server_plugins
from grr.gui import admin_ui
# pylint: enable=unused-import,g-bad-import-order
from grr.client import client
from grr.gui import runtests
from grr.lib import config_lib
from grr.lib import flags
from grr.lib import startup
from grr.tools import http_server
from grr.worker import enroller
from grr.worker import worker
BASE_DIR = "grr/"
def _start_daemon_thread(target, argv, name):
  """Run *target(argv)* on a daemonized background thread named *name*.

  Factored out of main(): the worker, enroller, HTTP server and client
  were all started with identical boilerplate.
  """
  thread = threading.Thread(target=target, args=[argv], name=name)
  thread.daemon = True
  thread.start()
  return thread


def main(argv):
  """Sets up all the components in their own threads."""
  # For testing we use the test config file.
  flags.FLAGS.config = config_lib.CONFIG["Test.config"]
  config_lib.CONFIG.AddContext(
      "Demo Context",
      "The demo runs all functions in a single process using the "
      "in memory data store.")
  startup.TestInit()
  # pylint: disable=unused-import,unused-variable,g-import-not-at-top
  from grr.gui import gui_plugins
  # pylint: enable=unused-import,unused-variable,g-import-not-at-top
  # Background components; each serves until the process exits.
  _start_daemon_thread(worker.main, argv, "Worker")
  _start_daemon_thread(enroller.main, argv, "Enroller")
  # This is the http server Frontend that clients communicate with.
  _start_daemon_thread(http_server.main, argv, "HTTP Server")
  _start_daemon_thread(client.main, argv, "Client")
  # The UI is running in the main thread.
  runtests.main(argv)


if __name__ == "__main__":
  flags.StartMain(main)
|
MiniSEC/GRR_clone
|
tools/demo.py
|
Python
|
apache-2.0
| 1,990
|
from django.utils.timezone import now as timezone_now
from zerver.lib.actions import do_change_stream_invite_only, get_client
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import Message, UserMessage, get_realm, get_stream
class TopicHistoryTest(ZulipTestCase):
    """Exercises GET /json/users/me/<stream_id>/topics."""
    def test_topics_history_zephyr_mirror(self) -> None:
        """A newly subscribed Zephyr-mirror user gets an empty topic history."""
        user_profile = self.mit_user('sipbtest')
        stream_name = 'new_stream'
        # Send a message to this new stream from another user
        self.subscribe(self.mit_user("starnine"), stream_name)
        stream = get_stream(stream_name, user_profile.realm)
        self.send_stream_message(self.mit_user("starnine"), stream_name,
                                 topic_name="secret topic")
        # Now subscribe this MIT user to the new stream and verify
        # that the new topic is not accessible
        self.login_user(user_profile)
        self.subscribe(user_profile, stream_name)
        endpoint = f'/json/users/me/{stream.id}/topics'
        result = self.client_get(endpoint, {}, subdomain="zephyr")
        self.assert_json_success(result)
        history = result.json()['topics']
        self.assertEqual(history, [])
    def test_topics_history(self) -> None:
        """Topic history lists the newest topics with per-topic max message ids."""
        # verified: int(UserMessage.flags.read) == 1
        user_profile = self.example_user('iago')
        self.login_user(user_profile)
        stream_name = 'Verona'
        stream = get_stream(stream_name, user_profile.realm)
        recipient = stream.recipient
        def create_test_message(topic: str) -> int:
            # TODO: Clean this up to send messages the normal way.
            hamlet = self.example_user('hamlet')
            message = Message(
                sender=hamlet,
                recipient=recipient,
                content='whatever',
                date_sent=timezone_now(),
                sending_client=get_client('whatever'),
            )
            message.set_topic_name(topic)
            message.save()
            UserMessage.objects.create(
                user_profile=user_profile,
                message=message,
                flags=0,
            )
            return message.id
        # our most recent topics are topic0, topic1, topic2
        # Create old messages with strange spellings.
        create_test_message('topic2')
        create_test_message('toPIc1')
        create_test_message('toPIc0')
        create_test_message('topic2')
        create_test_message('topic2')
        create_test_message('Topic2')
        # Create new messages
        topic2_msg_id = create_test_message('topic2')
        create_test_message('topic1')
        create_test_message('topic1')
        topic1_msg_id = create_test_message('topic1')
        topic0_msg_id = create_test_message('topic0')
        endpoint = f'/json/users/me/{stream.id}/topics'
        result = self.client_get(endpoint, {})
        self.assert_json_success(result)
        history = result.json()['topics']
        # We only look at the most recent three topics, because
        # the prior fixture data may be unreliable.
        history = history[:3]
        self.assertEqual([topic['name'] for topic in history], [
            'topic0',
            'topic1',
            'topic2',
        ])
        self.assertEqual([topic['max_id'] for topic in history], [
            topic0_msg_id,
            topic1_msg_id,
            topic2_msg_id,
        ])
        # Now try as cordelia, who we imagine as a totally new user in
        # that she doesn't have UserMessage rows. We should see the
        # same results for a public stream.
        self.login('cordelia')
        result = self.client_get(endpoint, {})
        self.assert_json_success(result)
        history = result.json()['topics']
        # We only look at the most recent three topics, because
        # the prior fixture data may be unreliable.
        history = history[:3]
        self.assertEqual([topic['name'] for topic in history], [
            'topic0',
            'topic1',
            'topic2',
        ])
        # NOTE(review): redundant — topic0's presence is already pinned by
        # the assertEqual just above.
        self.assertIn('topic0', [topic['name'] for topic in history])
        self.assertEqual([topic['max_id'] for topic in history], [
            topic0_msg_id,
            topic1_msg_id,
            topic2_msg_id,
        ])
        # Now make stream private, but subscribe cordelia
        do_change_stream_invite_only(stream, True)
        self.subscribe(self.example_user("cordelia"), stream.name)
        result = self.client_get(endpoint, {})
        self.assert_json_success(result)
        history = result.json()['topics']
        history = history[:3]
        # Cordelia was not subscribed when these topics' messages were
        # sent, so now that the stream is private they must be absent
        # from her results.
        self.assertNotIn('topic0', [topic['name'] for topic in history])
        self.assertNotIn('topic1', [topic['name'] for topic in history])
        self.assertNotIn('topic2', [topic['name'] for topic in history])
    def test_bad_stream_id(self) -> None:
        """Nonexistent, foreign-realm and unsubscribed-private streams all 400."""
        self.login('iago')
        # non-sensible stream id
        endpoint = '/json/users/me/9999999999/topics'
        result = self.client_get(endpoint, {})
        self.assert_json_error(result, 'Invalid stream id')
        # out of realm
        bad_stream = self.make_stream(
            'mit_stream',
            realm=get_realm('zephyr'),
        )
        endpoint = f'/json/users/me/{bad_stream.id}/topics'
        result = self.client_get(endpoint, {})
        self.assert_json_error(result, 'Invalid stream id')
        # private stream to which I am not subscribed
        private_stream = self.make_stream(
            'private_stream',
            invite_only=True,
        )
        endpoint = f'/json/users/me/{private_stream.id}/topics'
        result = self.client_get(endpoint, {})
        self.assert_json_error(result, 'Invalid stream id')
    def test_get_topics_web_public_stream_web_public_request(self) -> None:
        """With no login, topics of a web-public stream are still listed."""
        # (Note: 'steram' is a typo in the fixture name; harmless, local only.)
        stream = self.make_stream('web-public-steram', is_web_public=True)
        for i in range(3):
            self.send_stream_message(self.example_user('iago'),
                                     stream.name, topic_name='topic' + str(i))
        endpoint = f'/json/users/me/{stream.id}/topics'
        result = self.client_get(endpoint)
        self.assert_json_success(result)
        history = result.json()['topics']
        self.assertEqual([topic['name'] for topic in history], [
            'topic2',
            'topic1',
            'topic0',
        ])
    def test_get_topics_non_web_public_stream_web_public_request(self) -> None:
        """With no login, a non-web-public stream's topics are rejected."""
        stream = get_stream('Verona', self.example_user('iago').realm)
        endpoint = f'/json/users/me/{stream.id}/topics'
        result = self.client_get(endpoint)
        self.assert_json_error(result, 'Invalid stream id', 400)
    def test_get_topics_non_existant_stream_web_public_request(self) -> None:
        """With no login, a nonexistent stream id is rejected."""
        non_existant_stream_id = 10000000000000000000000
        endpoint = f'/json/users/me/{non_existant_stream_id}/topics'
        result = self.client_get(endpoint)
        self.assert_json_error(result, 'Invalid stream id', 400)
class TopicDeleteTest(ZulipTestCase):
    """Exercises POST /json/streams/<id>/delete_topic."""
    def test_topic_delete(self) -> None:
        """Only admins may delete a topic, and only the messages they can access."""
        initial_last_msg_id = self.get_last_message().id
        stream_name = 'new_stream'
        topic_name = 'new topic 2'
        # NON-ADMIN USER
        user_profile = self.example_user('hamlet')
        self.subscribe(user_profile, stream_name)
        # Send message
        stream = get_stream(stream_name, user_profile.realm)
        self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
        last_msg_id = self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
        # Deleting the topic
        self.login_user(user_profile)
        endpoint = '/json/streams/' + str(stream.id) + '/delete_topic'
        result = self.client_post(endpoint, {
            "topic_name": topic_name,
        })
        self.assert_json_error(result, "Must be an organization administrator")
        self.assertEqual(self.get_last_message().id, last_msg_id)
        # Make stream private with limited history
        do_change_stream_invite_only(stream, invite_only=True,
                                     history_public_to_subscribers=False)
        # ADMIN USER subscribed now
        user_profile = self.example_user('iago')
        self.subscribe(user_profile, stream_name)
        self.login_user(user_profile)
        new_last_msg_id = self.send_stream_message(user_profile, stream_name, topic_name=topic_name)
        # Now admin deletes all messages in topic -- which should only
        # delete new_last_msg_id, i.e. the one sent since they joined.
        self.assertEqual(self.get_last_message().id, new_last_msg_id)
        result = self.client_post(endpoint, {
            "topic_name": topic_name,
        })
        self.assert_json_success(result)
        self.assertEqual(self.get_last_message().id, last_msg_id)
        # Try to delete all messages in the topic again. There are no messages accessible
        # to the administrator, so this should do nothing.
        result = self.client_post(endpoint, {
            "topic_name": topic_name,
        })
        self.assert_json_success(result)
        self.assertEqual(self.get_last_message().id, last_msg_id)
        # Make the stream's history public to subscribers
        do_change_stream_invite_only(stream, invite_only=True,
                                     history_public_to_subscribers=True)
        # Delete the topic should now remove all messages
        result = self.client_post(endpoint, {
            "topic_name": topic_name,
        })
        self.assert_json_success(result)
        self.assertEqual(self.get_last_message().id, initial_last_msg_id)
        # Delete again, to test the edge case of deleting an empty topic.
        result = self.client_post(endpoint, {
            "topic_name": topic_name,
        })
        self.assert_json_success(result)
        self.assertEqual(self.get_last_message().id, initial_last_msg_id)
|
showell/zulip
|
zerver/tests/test_message_topics.py
|
Python
|
apache-2.0
| 10,110
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for creating target pools."""
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.calliope import exceptions as calliope_exceptions
from googlecloudsdk.command_lib.compute import flags as compute_flags
from googlecloudsdk.command_lib.compute.backend_services import (
flags as backend_services_flags)
from googlecloudsdk.command_lib.compute.http_health_checks import (
flags as http_health_check_flags)
from googlecloudsdk.command_lib.compute.target_pools import flags
from googlecloudsdk.core import log
class Create(base_classes.BaseAsyncCreator):
  """Define a load-balanced pool of virtual machine instances.
  *{command}* is used to create a target pool. A target pool resource
  defines a group of instances that can receive incoming traffic
  from forwarding rules. When a forwarding rule directs traffic to a
  target pool, Google Compute Engine picks an instance from the
  target pool based on a hash of the source and
  destination IP addresses and ports. For more
  information on load balancing, see
  [](https://cloud.google.com/compute/docs/load-balancing-and-autoscaling/)
  To add instances to a target pool, use 'gcloud compute
  target-pools add-instances'.
  """
  # Resource-argument holders; populated by Args() before requests are built.
  BACKUP_POOL_ARG = None
  HTTP_HEALTH_CHECK_ARG = None
  TARGET_POOL_ARG = None
  @classmethod
  def Args(cls, parser):
    """Registers the command's flags and resource arguments on *parser*."""
    cls.BACKUP_POOL_ARG = flags.BackupPoolArgument(required=False)
    cls.HTTP_HEALTH_CHECK_ARG = (
        http_health_check_flags.HttpHealthCheckArgumentForTargetPoolCreate(
            required=False))
    cls.HTTP_HEALTH_CHECK_ARG.AddArgument(parser)
    cls.TARGET_POOL_ARG = flags.TargetPoolArgument()
    cls.TARGET_POOL_ARG.AddArgument(parser, operation_type='create')
    backup_pool = parser.add_argument(
        '--backup-pool',
        help='Defines the fallback pool for the target pool.')
    backup_pool.detailed_help = """\
        Together with ``--failover-ratio'', this flag defines the fallback
        behavior of the target pool (primary pool) to be created by this
        command. If the ratio of the healthy instances in the primary pool
        is at or below the specified ``--failover-ratio value'', then traffic
        arriving at the load-balanced IP address will be directed to the
        backup pool. If this flag is provided, then ``--failover-ratio'' is
        required.
        """
    parser.add_argument(
        '--description',
        help='An optional description of this target pool.')
    failover_ratio = parser.add_argument(
        '--failover-ratio',
        type=float,
        help=('The ratio of healthy instances below which the backup pool '
              'will be used.'))
    failover_ratio.detailed_help = """\
        Together with ``--backup-pool'', defines the fallback behavior of the
        target pool (primary pool) to be created by this command. If the
        ratio of the healthy instances in the primary pool is at or below this
        number, traffic arriving at the load-balanced IP address will be
        directed to the backup pool. For example, if 0.4 is chosen as the
        failover ratio, then traffic will fail over to the backup pool if
        more than 40% of the instances become unhealthy.
        If not set, the traffic will be directed the
        instances in this pool in the ``force'' mode, where traffic will be
        spread to the healthy instances with the best effort, or to all
        instances when no instance is healthy.
        If this flag is provided, then ``--backup-pool'' is required.
        """
    health_check = parser.add_argument(
        '--health-check',
        help=('Specifies HttpHealthCheck to determine the health of instances '
              'in the pool.'),
        metavar='HEALTH_CHECK')
    health_check.detailed_help = """\
        DEPRECATED, use --http-health-check.
        Specifies an HTTP health check resource to use to determine the health
        of instances in this pool. If no health check is specified, traffic will
        be sent to all instances in this target pool as if the instances
        were healthy, but the health status of this pool will appear as
        unhealthy as a warning that this target pool does not have a health
        check.
        """
    backend_services_flags.AddSessionAffinity(parser, target_pools=True)
  @property
  def service(self):
    """API service used for requests issued by this command."""
    return self.compute.targetPools
  @property
  def method(self):
    """API method name invoked on the service."""
    return 'Insert'
  @property
  def resource_type(self):
    """Resource collection this command operates on."""
    return 'targetPools'
  def CreateRequests(self, args):
    """Returns a list of requests necessary for adding a target pool.

    Raises:
      calliope_exceptions.ToolException: if --backup-pool and
        --failover-ratio are not given together, or the ratio is
        outside [0, 1].
    """
    # --backup-pool and --failover-ratio only make sense as a pair.
    if ((args.backup_pool and not args.failover_ratio) or
        (args.failover_ratio and not args.backup_pool)):
      raise calliope_exceptions.ToolException(
          'Either both or neither of [--failover-ratio] and [--backup-pool] '
          'must be provided.')
    if args.failover_ratio is not None:
      if args.failover_ratio < 0 or args.failover_ratio > 1:
        raise calliope_exceptions.ToolException(
            '[--failover-ratio] must be a number between 0 and 1, inclusive.')
    # Legacy --health-check is accepted but rewritten to --http-health-check.
    if args.health_check:
      args.http_health_check = args.health_check
      log.warn('The --health-check flag is deprecated. Use equivalent '
               '--http-health-check=%s flag.', args.health_check)
    if args.http_health_check:
      http_health_check = [self.HTTP_HEALTH_CHECK_ARG.ResolveAsResource(
          args, self.resources).SelfLink()]
    else:
      http_health_check = []
    target_pool_ref = self.TARGET_POOL_ARG.ResolveAsResource(
        args,
        self.resources,
        scope_lister=compute_flags.GetDefaultScopeLister(self.compute_client,
                                                         self.project))
    # The backup pool is resolved in the primary pool's region.
    if args.backup_pool:
      args.backup_pool_region = target_pool_ref.region
      backup_pool_uri = self.BACKUP_POOL_ARG.ResolveAsResource(
          args, self.resources).SelfLink()
    else:
      backup_pool_uri = None
    request = self.messages.ComputeTargetPoolsInsertRequest(
        targetPool=self.messages.TargetPool(
            backupPool=backup_pool_uri,
            description=args.description,
            failoverRatio=args.failover_ratio,
            healthChecks=http_health_check,
            name=target_pool_ref.Name(),
            sessionAffinity=(
                self.messages.TargetPool.SessionAffinityValueValuesEnum(
                    args.session_affinity))),
        region=target_pool_ref.region,
        project=self.project)
    return [request]
|
KaranToor/MA450
|
google-cloud-sdk/lib/surface/compute/target_pools/create.py
|
Python
|
apache-2.0
| 7,175
|
import base64
import datetime
import json
import hashlib
import hmac
from urllib.parse import quote as urlencode
import jinja2
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.humanize.templatetags.humanize import intcomma
from django.contrib.staticfiles.storage import staticfiles_storage
from django.utils.translation import ugettext, ugettext_lazy, ungettext
from jinja2 import Environment
import accounts.payment_plans as payment_plans
import pinecast.constants as constants
from . import helpers
from accounts.models import UserSettings
from payments import urls_tips as tips_urls
from podcasts.models import CATEGORIES
from pinecast.helpers import markdownify
def environment(**options):
    """Build the Jinja2 Environment, wiring in Pinecast globals and filters."""
    # Autoescaping is forced on regardless of caller-supplied options.
    options['autoescape'] = True
    # Enable localization
    options.setdefault('extensions', [])
    if 'jinja2.ext.i18n' not in options['extensions']:
        options['extensions'].append('jinja2.ext.i18n')
    env = Environment(**options)
    env.cache = {}
    env.globals.update({
        # Python builtins exposed to templates.
        'bool': bool,
        'dir': dir,
        'enumerate': enumerate,
        'float': float,
        'getattr': getattr,
        'int': int,
        'isinstance': isinstance,
        'len': len,
        'list': list,
        'min': min,
        'max': max,
        'sorted': sorted,
        'str': str,
        # Pinecast helpers and i18n callables.
        'get_user_settings': UserSettings.get_from_user,
        'gravatar': helpers.gravatar,
        'intercom_token': intercom_token,
        'is_paid_plan': lambda p: p != payment_plans.PLAN_DEMO and p != payment_plans.PLAN_COMMUNITY,
        'minimum_plan': minimum_plan,
        'now': lambda hours=0: datetime.datetime.now() + datetime.timedelta(hours=hours),
        'pop_until_lt': pop_until_lt,
        'timedelta': datetime.timedelta,
        'url': helpers.reverse,
        '_': ugettext,
        'gettext': ugettext,
        'ngettext': ungettext,
        'static': staticfiles_storage.url,
        # Constants and settings surfaced to templates.
        'LOCALES': constants.locales,
        'PLAN_IDS': payment_plans.PLAN_IDS_MAP,
        'PLAN_NAMES': payment_plans.PLANS_MAP,
        'PLANS': payment_plans.PLANS_RAW,
        'PODCAST_CATEGORIES': CATEGORIES,
        'RECAPTCHA_KEY': settings.RECAPTCHA_KEY,
        'STRIPE_PUBLISHABLE_KEY': settings.STRIPE_PUBLISHABLE_KEY,
        'SUPPORT_URL': settings.SUPPORT_URL,
        'SITE_BUILDER': settings.SITE_BUILDER,
        'TS_OMNIBUS': settings.TS_OMNIBUS,
        '__tips_urls': tips_urls,
        # UTC offsets (hours) offered in timezone pickers.
        'timezones': [
            -12.0,
            -11.0,
            -10.0,
            -9.0,
            -8.0,
            -7.0,
            -6.0,
            # NOTE(review): -6.0 appears twice — confirm whether the duplicate
            # entry is intentional before cleaning it up.
            -6.0,
            -5.0,
            -4.0,
            -3.0,
            -2.0,
            -1.0,
            0,
            1.0,
            2.0,
            3.0,
            4.0,
            5.0,
            6.0,
            7.0,
            8.0,
            9.0,
            10.0,
            11.0,
            12.0,
            13.0,
            14.0,
        ],
    })
    env.filters['data_uri'] = lambda t, type: 'data:%s;base64,%s' % (type, base64.b64encode(t.encode('utf-8')).decode('utf-8'))
    env.filters['format_tz'] = format_tz
    env.filters['https'] = lambda s: ('https:%s' % s[5:]) if s.startswith('http:') else s
    env.filters['int'] = safe_int
    env.filters['intcomma'] = int_comma
    env.filters['json'] = json.dumps
    env.filters['json_parse'] = json.loads
    env.filters['markdown'] = markdownify
    env.filters['parse_date'] = lambda d: datetime.datetime.strptime(d, '%Y-%m-%d')
    env.filters['pretty_date'] = helpers.pretty_date
    # NOTE(review): this yields the bound str.replace method, not a replaced
    # string — confirm templates really expect this behavior.
    env.filters['replace'] = lambda s: s.replace
    env.filters['safe_json'] = safe_json
    env.filters['sanitize'] = helpers.sanitize
    env.filters['sanitize_hard'] = helpers.sanitize_hard
    env.filters['sparkline'] = sparkline
    env.filters['thumbnail'] = thumbnail
    return env
# North American shorthand names for a few whole-hour UTC offsets; used by
# format_tz() to append e.g. " (EST)" to the rendered offset.
TZ_SHORTHAND = {
    -8.0: ugettext_lazy('PST'),
    -7.0: ugettext_lazy('MST'),
    -6.0: ugettext_lazy('CST'),
    -5.0: ugettext_lazy('EST'),
}
def safe_int(i, def_=0):
    """Coerce *i* to int, returning *def_* when conversion is impossible.

    Also catches TypeError so non-string/non-numeric inputs (e.g. None,
    lists) fall back to the default instead of raising — the original
    only caught ValueError, which made safe_int(None) crash.
    """
    try:
        return int(i)
    except (ValueError, TypeError):
        return def_
def int_comma(value):
    """Add thousands separators to *value*, humanize-style.

    Falls back to ``intcomma(value, False)`` (localization disabled) when
    the localized pass returns the value unchanged.
    """
    localized = intcomma(value)
    if localized != str(value):
        return localized
    return intcomma(value, False)
def format_tz(tz):
    """Render a numeric UTC offset, e.g. ``UTC-5:00 (EST)`` or ``UTC``."""
    if tz == 0:
        return 'UTC'
    sign = '+' if tz > 0 else '-'
    shorthand = ' (%s)' % TZ_SHORTHAND[tz] if tz in TZ_SHORTHAND else ''
    return 'UTC%s%d:%0.2d%s' % (sign, abs(int(tz)), tz % 1 * 60, shorthand)
def intercom_token(user):
    """Return the HMAC-SHA256 hex digest identifying *user* to Intercom."""
    secret = settings.INTERCOM_SECRET.encode('utf-8')
    user_id = str(user.id).encode('utf-8')
    return hmac.new(secret, user_id, digestmod=hashlib.sha256).hexdigest()
def minimum_plan(user_settings, plan):
    """True when the user's plan is at least *plan* (accepts User or settings)."""
    resolved = user_settings
    if isinstance(resolved, User):
        resolved = UserSettings.get_from_user(resolved)
    return payment_plans.minimum(resolved.plan, plan)
def safe_json(data):
    """Serialize *data* to JSON markup safe for direct template embedding."""
    if data is None:
        return 'null'
    if isinstance(data, bool):
        # bool must be tested before int/float: bool subclasses int.
        return 'true' if data else 'false'
    if isinstance(data, (int, float)):
        return str(data)
    if isinstance(data, (tuple, list, set, frozenset)):
        items = ','.join(safe_json(item) for item in data)
        return jinja2.Markup('[%s]' % items)
    if isinstance(data, dict):
        pairs = ','.join('%s:%s' % (safe_json(key), safe_json(val))
                         for key, val in data.items())
        return jinja2.Markup('{%s}' % pairs)
    # Everything else: HTML-escape first, then JSON-quote the result.
    return jinja2.Markup(json.dumps(str(jinja2.escape(data))))
def sparkline(data, spacing=1, height=20):
    """Return SVG-polyline point pairs ("x,y x,y ...") for *data*.

    Values are normalized into [0, 1] and flipped so larger values sit
    higher (smaller y). An empty input renders a flat 31-point line.
    """
    points = list(data)
    if not points:
        points = [0] * 31
    lo = min(points)
    hi = max(points)
    span = (hi - lo) or 1
    normalized = ((value - lo) / span for value in points)
    return ' '.join(
        '%d,%d' % (index * spacing, (1 - frac) * height)
        for index, frac in enumerate(normalized))
def thumbnail(url, width=100, height=100, format='jpeg'):
    """Rewrite a known storage URL into a resize-service URL; else pass through."""
    # Try the S3 bucket host first, then the CDN host, as the original did.
    for host in ('%s.s3.amazonaws.com/' % settings.S3_BUCKET,
                 'storage.pinecast.net/'):
        if host in url:
            key = url.split(host)[1]
            return ('https://thumb.service.pinecast.com/resize'
                    '?h=%d&w=%d&key=%s&format=%s' %
                    (height, width, urlencode(key), format))
    return url
def pop_until_lt(arr, field, comp, max=-1):
    """Pop-and-yield leading items of *arr* whose *field* attribute is >= comp.

    Stops at the first item below *comp*. When *max* > 0, at most *max*
    items are yielded; any further qualifying items are popped silently and
    their count is yielded as a final integer. Mutates *arr* in place.
    """
    yielded = 0
    while arr:
        if getattr(arr[0], field) < comp:
            return
        yield arr.pop(0)
        if max > 0:
            yielded += 1
            if yielded == max:
                break
    if not arr or max <= 0:
        return
    # Drain (without yielding) the remaining qualifying items, then report
    # how many were skipped.
    skipped = 0
    while arr and getattr(arr[0], field) >= comp:
        arr.pop(0)
        skipped += 1
    if skipped:
        yield skipped
|
Pinecast/pinecast
|
pinecast/jinja2_helper.py
|
Python
|
apache-2.0
| 6,927
|
# -*- coding: utf-8 -*-
import datetime as dt
import itertools
import logging
import re
import urlparse
import bson
import pytz
import itsdangerous
from modularodm import fields, Q
from modularodm.exceptions import NoResultsFound
from modularodm.exceptions import ValidationError, ValidationValueError
from modularodm.validators import URLValidator
import framework
from framework.addons import AddonModelMixin
from framework import analytics
from framework.auth import signals, utils
from framework.auth.exceptions import (ChangePasswordError, ExpiredTokenError, InvalidTokenError,
MergeConfirmedRequiredError, MergeConflictError)
from framework.bcrypt import generate_password_hash, check_password_hash
from framework.exceptions import PermissionsError
from framework.guid.model import GuidStoredObject
from framework.mongo.validators import string_required
from framework.sentry import log_exception
from framework.sessions import session
from framework.sessions.model import Session
from framework.sessions.utils import remove_sessions_for_user
from website import mails, settings, filters, security
# Maps a display style to a callable rendering a user's name in that style.
name_formatters = {
    'long': lambda user: user.fullname,
    'surname': lambda user: user.family_name if user.family_name else user.fullname,
    'initials': lambda user: u'{surname}, {initial}.'.format(
        surname=user.family_name,
        initial=user.given_name_initial,
    ),
}
logger = logging.getLogger(__name__)
# Hide implementation of token generation
def generate_confirm_token():
    """Return a random 30-character confirmation token."""
    return security.random_string(30)
def generate_claim_token():
    """Return a random 30-character claim token."""
    return security.random_string(30)
def validate_history_item(item):
    """Validate one job/school history entry dict.

    Requires an 'institution' value and, when both years are present,
    that the end date is not earlier than the start date.
    """
    string_required(item.get('institution'))
    start_month = item.get('startMonth')
    start_year = item.get('startYear')
    end_month = item.get('endMonth')
    end_year = item.get('endYear')
    validate_year(start_year)
    validate_year(end_year)
    if not (start_year and end_year):
        return
    if end_year < start_year:
        raise ValidationValueError('End date must be later than start date.')
    if end_year == start_year:
        if end_month and start_month and end_month < start_month:
            raise ValidationValueError('End date must be later than start date.')
def validate_year(item):
    """Reject values that are not 4-character, integer-parseable years.

    Falsy values (None, '') are accepted unchanged, matching callers that
    treat the year as optional.
    """
    if not item:
        return
    try:
        int(item)
    except ValueError:
        raise ValidationValueError('Please enter a valid year.')
    if len(item) != 4:
        raise ValidationValueError('Please enter a valid year.')
validate_url = URLValidator()


def validate_personal_site(value):
    """Validate an optional personal-site URL, re-raising with a clearer message."""
    if not value:
        return
    try:
        validate_url(value)
    except ValidationError:
        # Reraise with a better message
        raise ValidationError('Invalid personal URL.')
def validate_social(value):
    """Validate a social-links dict; currently only checks the 'personal' URL."""
    validate_personal_site(value.get('personal'))
# TODO - rename to _get_current_user_from_session /HRYBACKI
def _get_current_user():
    """Return the User for the active session's auth_user_id.

    NOTE(review): when no session object exists, the ``and`` short-circuit
    passes a falsy value to User.load — presumably yielding None; confirm.
    """
    uid = session._get_current_object() and session.data.get('auth_user_id')
    return User.load(uid)
# TODO: This should be a class method of User?
def get_user(email=None, password=None, verification_key=None):
    """Get an instance of User matching the provided params.

    NOTE: when *password* is supplied but does not match the located
    user's hash, this returns ``False`` rather than ``None`` — truthiness
    checks are unaffected, but ``is None`` comparisons will differ.

    :return: The instance of User requested
    :rtype: User or None
    """
    # tag: database
    if password and not email:
        raise AssertionError("If a password is provided, an email must also "
                             "be provided.")
    query_list = []
    if email:
        email = email.strip().lower()
        # Match the address against either the emails list or the username.
        query_list.append(Q('emails', 'eq', email) | Q('username', 'eq', email))
    if password:
        password = password.strip()
        try:
            # AND together all collected query parts.
            query = query_list[0]
            for query_part in query_list[1:]:
                query = query & query_part
            user = User.find_one(query)
        except Exception as err:
            # Any lookup failure is logged and treated as "no such user".
            logger.error(err)
            user = None
        if user and not user.check_password(password):
            return False
        return user
    if verification_key:
        query_list.append(Q('verification_key', 'eq', verification_key))
        try:
            query = query_list[0]
            for query_part in query_list[1:]:
                query = query & query_part
            user = User.find_one(query)
            return user
        except Exception as err:
            logger.error(err)
            return None
class Auth(object):
    """Lightweight container for the acting user and request credentials."""

    def __init__(self, user=None, api_node=None,
                 private_key=None):
        self.user = user
        self.api_node = api_node
        self.private_key = private_key

    def __repr__(self):
        template = ('<Auth(user="{self.user}", '
                    'private_key={self.private_key})>')
        return template.format(self=self)

    @property
    def logged_in(self):
        """True when an authenticated user is attached."""
        return self.user is not None

    @classmethod
    def from_kwargs(cls, request_args, kwargs):
        """Build an Auth from request args, falling back to the session user."""
        user = (request_args.get('user')
                or kwargs.get('user')
                or _get_current_user())
        return cls(
            user=user,
            private_key=request_args.get('view_only'),
        )
class User(GuidStoredObject, AddonModelMixin):
    """An OSF user account.

    Persists identity (GUID, username, confirmed emails), credentials
    (password hash, email/verification tokens), profile data (names,
    jobs, schools, social links), and the user's relationships to
    nodes, sessions, and external (OAuth) accounts.
    """

    # Node fields that trigger an update to the search engine on save
    SEARCH_UPDATE_FIELDS = {
        'fullname',
        'given_name',
        'middle_names',
        'family_name',
        'suffix',
        'merged_by',
        'date_disabled',
        'date_confirmed',
        'jobs',
        'schools',
        'social',
    }
    # TODO: Add SEARCH_UPDATE_NODE_FIELDS, for fields that should trigger a
    # search update for all nodes to which the user is a contributor.
    # URL templates for rendering profile links; '{}' receives the
    # stored handle/identifier verbatim.
    SOCIAL_FIELDS = {
        'orcid': u'http://orcid.com/{}',
        'github': u'http://github.com/{}',
        'scholar': u'http://scholar.google.com/citation?user={}',
        'twitter': u'http://twitter.com/{}',
        'personal': u'{}',
        'linkedIn': u'https://www.linkedin.com/profile/view?id={}',
        'impactStory': u'https://impactstory.org/{}',
        'researcherId': u'http://researcherid.com/rid/{}',
    }
    # This is a GuidStoredObject, so this will be a GUID.
    _id = fields.StringField(primary=True)
    # The primary email address for the account.
    # This value is unique, but multiple "None" records exist for:
    # * unregistered contributors where an email address was not provided.
    # TODO: Update mailchimp subscription on username change in user.save()
    username = fields.StringField(required=False, unique=True, index=True)
    # Hashed. Use `User.set_password` and `User.check_password`
    password = fields.StringField()
    fullname = fields.StringField(required=True, validate=string_required)
    # user has taken action to register the account
    is_registered = fields.BooleanField(index=True)
    # user has claimed the account
    # TODO: This should be retired - it always reflects is_registered.
    # While a few entries exist where this is not the case, they appear to be
    # the result of a bug, as they were all created over a small time span.
    is_claimed = fields.BooleanField(default=False, index=True)
    # a list of strings - for internal use
    system_tags = fields.StringField(list=True)
    # security emails that have been sent
    # TODO: This should be removed and/or merged with system_tags
    security_messages = fields.DictionaryField()
    # Format: {
    #   <message label>: <datetime>
    #   ...
    # }
    # user was invited (as opposed to registered unprompted)
    is_invited = fields.BooleanField(default=False, index=True)
    # Per-project unclaimed user data:
    # TODO: add validation
    unclaimed_records = fields.DictionaryField(required=False)
    # Format: {
    #   <project_id>: {
    #       'name': <name that referrer provided>,
    #       'referrer_id': <user ID of referrer>,
    #       'token': <token used for verification urls>,
    #       'email': <email the referrer provided or None>,
    #       'claimer_email': <email the claimer entered or None>,
    #       'last_sent': <timestamp of last email sent to referrer or None>
    #   }
    #   ...
    # }
    # Time of last sent notification email to newly added contributors
    # Format : {
    #   <project_id>: {
    #       'last_sent': time.time()
    #   }
    #   ...
    # }
    contributor_added_email_records = fields.DictionaryField(default=dict)
    # The user into which this account was merged
    merged_by = fields.ForeignField('user',
                                    default=None,
                                    backref='merged',
                                    index=True)
    # verification key used for resetting password
    verification_key = fields.StringField()
    # confirmed emails
    #   emails should be stripped of whitespace and lower-cased before appending
    # TODO: Add validator to ensure an email address only exists once across
    # all User's email lists
    emails = fields.StringField(list=True)
    # email verification tokens
    #   see also ``unconfirmed_emails``
    email_verifications = fields.DictionaryField(default=dict)
    # Format: {
    #   <token> : {'email': <email address>,
    #              'expiration': <datetime>}
    # }
    # email lists to which the user has chosen a subscription setting
    mailing_lists = fields.DictionaryField()
    # Format: {
    #   'list1': True,
    #   'list2: False,
    #    ...
    # }
    # the date this user was registered
    # TODO: consider removal - this can be derived from date_registered
    date_registered = fields.DateTimeField(auto_now_add=dt.datetime.utcnow,
                                           index=True)
    # watched nodes are stored via a list of WatchConfigs
    watched = fields.ForeignField("WatchConfig", list=True, backref="watched")
    # list of users recently added to nodes as a contributor
    recently_added = fields.ForeignField("user", list=True, backref="recently_added")
    # Attached external accounts (OAuth)
    external_accounts = fields.ForeignField("externalaccount",
                                            list=True,
                                            backref="connected")
    # CSL names
    given_name = fields.StringField()
    middle_names = fields.StringField()
    family_name = fields.StringField()
    suffix = fields.StringField()
    # Employment history
    jobs = fields.DictionaryField(list=True, validate=validate_history_item)
    # Format: {
    #     'title': <position or job title>,
    #     'institution': <institution or organization>,
    #     'department': <department>,
    #     'location': <location>,
    #     'startMonth': <start month>,
    #     'startYear': <start year>,
    #     'endMonth': <end month>,
    #     'endYear': <end year>,
    #     'ongoing: <boolean>
    # }
    # Educational history
    schools = fields.DictionaryField(list=True, validate=validate_history_item)
    # Format: {
    #     'degree': <position or job title>,
    #     'institution': <institution or organization>,
    #     'department': <department>,
    #     'location': <location>,
    #     'startMonth': <start month>,
    #     'startYear': <start year>,
    #     'endMonth': <end month>,
    #     'endYear': <end year>,
    #     'ongoing: <boolean>
    # }
    # Social links
    social = fields.DictionaryField(validate=validate_social)
    # Format: {
    #     'personal': <personal site>,
    #     'twitter': <twitter id>,
    # }
    # hashed password used to authenticate to Piwik
    piwik_token = fields.StringField()
    # date the user last logged in via the web interface
    date_last_login = fields.DateTimeField()
    # date the user first successfully confirmed an email address
    date_confirmed = fields.DateTimeField(index=True)
    # When the user was disabled.
    date_disabled = fields.DateTimeField(index=True)
    # when comments for a node were last viewed
    comments_viewed_timestamp = fields.DictionaryField()
    # Format: {
    #   'node_id': 'timestamp'
    # }
    # timezone for user's locale (e.g. 'America/New_York')
    timezone = fields.StringField(default='Etc/UTC')
    # user language and locale data (e.g. 'en_US')
    locale = fields.StringField(default='en_US')
    _meta = {'optimistic': True}
    def __repr__(self):
        return '<User({0!r}) with id {1!r}>'.format(self.username, self._id)
    def __str__(self):
        # ascii-fold the full name; non-ASCII characters become '?'
        return self.fullname.encode('ascii', 'replace')
    __unicode__ = __str__
    # For compatibility with Django auth
    @property
    def pk(self):
        """Primary key alias for Django compatibility."""
        return self._id
    @property
    def email(self):
        """Primary email; aliases ``username``."""
        return self.username
    def is_authenticated(self):  # Needed for django compat
        return True
    def is_anonymous(self):
        return False
    @property
    def absolute_api_v2_url(self):
        """Absolute URL of this user's API v2 detail endpoint."""
        from api.base.utils import absolute_reverse  # Avoid circular dependency
        return absolute_reverse('users:user-detail', kwargs={'user_id': self.pk})
    # used by django and DRF
    def get_absolute_url(self):
        return self.absolute_api_v2_url
    @classmethod
    def create_unregistered(cls, fullname, email=None):
        """Create a new unregistered user.
        """
        user = cls(
            username=email,
            fullname=fullname,
            is_invited=True,
            is_registered=False,
        )
        user.update_guessed_names()
        return user
    @classmethod
    def create(cls, username, password, fullname):
        """Create a user with a hashed password and guessed CSL names.

        Note: the returned user is not saved.
        """
        user = cls(
            username=username,
            fullname=fullname,
        )
        user.update_guessed_names()
        user.set_password(password)
        return user
    @classmethod
    def create_unconfirmed(cls, username, password, fullname, do_confirm=True):
        """Create a new user who has begun registration but needs to verify
        their primary email address (username).
        """
        # NOTE(review): ``do_confirm`` is accepted but never used here —
        # confirm whether callers rely on it before removing.
        user = cls.create(username, password, fullname)
        user.add_unconfirmed_email(username)
        user.is_registered = False
        return user
    @classmethod
    def create_confirmed(cls, username, password, fullname):
        """Create a user whose primary email is treated as confirmed."""
        user = cls.create(username, password, fullname)
        user.is_registered = True
        user.is_claimed = True
        user.date_confirmed = user.date_registered
        return user
    @classmethod
    def from_cookie(cls, cookie, secret=None):
        """Attempt to load a user from their signed cookie
        :returns: None if a user cannot be loaded else User
        """
        if not cookie:
            return None
        secret = secret or settings.SECRET_KEY
        try:
            token = itsdangerous.Signer(secret).unsign(cookie)
        except itsdangerous.BadSignature:
            return None
        user_session = Session.load(token)
        if user_session is None:
            return None
        return cls.load(user_session.data.get('auth_user_id'))
    def get_or_create_cookie(self, secret=None):
        """Find the cookie for the given user
        Create a new session if no cookie is found
        :param str secret: The key to sign the cookie with
        :returns: The signed cookie
        """
        secret = secret or settings.SECRET_KEY
        # Reuse the most recently modified session for this user, if any.
        sessions = Session.find(
            Q('data.auth_user_id', 'eq', self._id)
        ).sort(
            '-date_modified'
        ).limit(1)
        if sessions.count() > 0:
            user_session = sessions[0]
        else:
            user_session = Session(data={
                'auth_user_id': self._id,
                'auth_user_username': self.username,
                'auth_user_fullname': self.fullname,
            })
            user_session.save()
        signer = itsdangerous.Signer(secret)
        return signer.sign(user_session._id)
    def update_guessed_names(self):
        """Updates the CSL name fields inferred from the the full name.
        """
        parsed = utils.impute_names(self.fullname)
        self.given_name = parsed['given']
        self.middle_names = parsed['middle']
        self.family_name = parsed['family']
        self.suffix = parsed['suffix']
    def register(self, username, password=None):
        """Registers the user.
        """
        self.username = username
        if password:
            self.set_password(password)
        if username not in self.emails:
            self.emails.append(username)
        self.is_registered = True
        self.is_claimed = True
        self.date_confirmed = dt.datetime.utcnow()
        self.update_search()
        self.update_search_nodes()
        # Emit signal that a user has confirmed
        signals.user_confirmed.send(self)
        return self
    def add_unclaimed_record(self, node, referrer, given_name, email=None):
        """Add a new project entry in the unclaimed records dictionary.
        :param Node node: Node this unclaimed user was added to.
        :param User referrer: User who referred this user.
        :param str given_name: The full name that the referrer gave for this user.
        :param str email: The given email address.
        :returns: The added record
        :raises: PermissionsError if the referrer cannot edit ``node``.
        """
        if not node.can_edit(user=referrer):
            raise PermissionsError('Referrer does not have permission to add a contributor '
                'to project {0}'.format(node._primary_key))
        project_id = node._primary_key
        referrer_id = referrer._primary_key
        if email:
            clean_email = email.lower().strip()
        else:
            clean_email = None
        record = {
            'name': given_name,
            'referrer_id': referrer_id,
            'token': generate_confirm_token(),
            'email': clean_email
        }
        self.unclaimed_records[project_id] = record
        return record
    def display_full_name(self, node=None):
        """Return the full name , as it would display in a contributor list for a
        given node.
        NOTE: Unclaimed users may have a different name for different nodes.
        """
        if node:
            unclaimed_data = self.unclaimed_records.get(node._primary_key, None)
            if unclaimed_data:
                return unclaimed_data['name']
        return self.fullname
    @property
    def is_active(self):
        """Returns True if the user is active. The user must have activated
        their account, must not be deleted, suspended, etc.
        :return: bool
        """
        return (self.is_registered and
                self.password is not None and
                not self.is_merged and
                not self.is_disabled and
                self.is_confirmed)
    def get_unclaimed_record(self, project_id):
        """Get an unclaimed record for a given project_id.
        :raises: ValueError if there is no record for the given project.
        """
        try:
            return self.unclaimed_records[project_id]
        except KeyError:  # reraise as ValueError
            raise ValueError('No unclaimed record for user {self._id} on node {project_id}'
                             .format(**locals()))
    def get_claim_url(self, project_id, external=False):
        """Return the URL that an unclaimed user should use to claim their
        account. Return ``None`` if there is no unclaimed_record for the given
        project ID.
        :param project_id: The project ID for the unclaimed record
        :raises: ValueError if a record doesn't exist for the given project ID
        :rtype: dict
        :returns: The unclaimed record for the project
        """
        uid = self._primary_key
        base_url = settings.DOMAIN if external else '/'
        unclaimed_record = self.get_unclaimed_record(project_id)
        token = unclaimed_record['token']
        return '{base_url}user/{uid}/{project_id}/claim/?token={token}'\
            .format(**locals())
    def set_password(self, raw_password):
        """Set the password for this user to the hash of ``raw_password``."""
        self.password = generate_password_hash(raw_password)
    def check_password(self, raw_password):
        """Return a boolean of whether ``raw_password`` was correct."""
        if not self.password or not raw_password:
            return False
        return check_password_hash(self.password, raw_password)
    @property
    def csl_given_name(self):
        """Given name plus the initial of each middle name (CSL style)."""
        parts = [self.given_name]
        if self.middle_names:
            parts.extend(each[0] for each in re.split(r'\s+', self.middle_names))
        return ' '.join(parts)
    @property
    def csl_name(self):
        """CSL-formatted name dict with 'family' and 'given' keys."""
        return {
            'family': self.family_name,
            'given': self.csl_given_name,
        }
    # TODO: This should not be on the User object.
    def change_password(self, raw_old_password, raw_new_password, raw_confirm_password):
        """Change the password for this user to the hash of ``raw_new_password``.

        :raises: ChangePasswordError listing every validation issue found.
        """
        raw_old_password = (raw_old_password or '').strip()
        raw_new_password = (raw_new_password or '').strip()
        raw_confirm_password = (raw_confirm_password or '').strip()
        issues = []
        if not self.check_password(raw_old_password):
            issues.append('Old password is invalid')
        elif raw_old_password == raw_new_password:
            issues.append('Password cannot be the same')
        if not raw_old_password or not raw_new_password or not raw_confirm_password:
            issues.append('Passwords cannot be blank')
        elif len(raw_new_password) < 6:
            issues.append('Password should be at least six characters')
        elif len(raw_new_password) > 256:
            issues.append('Password should not be longer than 256 characters')
        if raw_new_password != raw_confirm_password:
            issues.append('Password does not match the confirmation')
        if issues:
            raise ChangePasswordError(issues)
        self.set_password(raw_new_password)
    def _set_email_token_expiration(self, token, expiration=None):
        """Set the expiration date for given email token.
        :param str token: The email token to set the expiration for.
        :param datetime expiration: Datetime at which to expire the token. If ``None``, the
            token will expire after ``settings.EMAIL_TOKEN_EXPIRATION`` hours. This is only
            used for testing purposes.
        """
        expiration = expiration or (dt.datetime.utcnow() + dt.timedelta(hours=settings.EMAIL_TOKEN_EXPIRATION))
        self.email_verifications[token]['expiration'] = expiration
        return expiration
    def add_unconfirmed_email(self, email, expiration=None):
        """Add an email verification token for a given email."""
        # TODO: This is technically not compliant with RFC 822, which requires
        #       that case be preserved in the "local-part" of an address. From
        #       a practical standpoint, the vast majority of email servers do
        #       not preserve case.
        #       ref: https://tools.ietf.org/html/rfc822#section-6
        email = email.lower().strip()
        if email in self.emails:
            raise ValueError("Email already confirmed to this user.")
        utils.validate_email(email)
        # If the unconfirmed email is already present, refresh the token
        if email in self.unconfirmed_emails:
            self.remove_unconfirmed_email(email)
        token = generate_confirm_token()
        # handle when email_verifications is None
        if not self.email_verifications:
            self.email_verifications = {}
        self.email_verifications[token] = {'email': email}
        self._set_email_token_expiration(token, expiration=expiration)
        return token
    def remove_unconfirmed_email(self, email):
        """Remove an unconfirmed email addresses and their tokens."""
        # NOTE(review): deletes during iteration, which is safe here only
        # because the function returns immediately after the first match —
        # at most one token per email is removed.
        for token, value in self.email_verifications.iteritems():
            if value.get('email') == email:
                del self.email_verifications[token]
                return True
        return False
    def remove_email(self, email):
        """Remove a confirmed email"""
        if email == self.username:
            raise PermissionsError("Can't remove primary email")
        if email in self.emails:
            self.emails.remove(email)
            signals.user_email_removed.send(self, email=email)
    @signals.user_email_removed.connect
    def _send_email_removal_confirmations(self, email):
        """Notify both the primary and the removed address of the removal."""
        mails.send_mail(to_addr=self.username,
                        mail=mails.REMOVED_EMAIL,
                        user=self,
                        removed_email=email,
                        security_addr='alternate email address ({})'.format(email))
        mails.send_mail(to_addr=email,
                        mail=mails.REMOVED_EMAIL,
                        user=self,
                        removed_email=email,
                        security_addr='primary email address ({})'.format(self.username))
    def get_confirmation_token(self, email, force=False):
        """Return the confirmation token for a given email.
        :param str email: Email to get the token for.
        :param bool force: If an expired token exists for the given email, generate a new
            token and return that token.
        :raises: ExpiredTokenError if trying to access a token that is expired and force=False.
        :raises: KeyError if there no token for the email.
        """
        # TODO: Refactor "force" flag into User.get_or_add_confirmation_token
        for token, info in self.email_verifications.items():
            if info['email'].lower() == email.lower():
                # Old records will not have an expiration key. If it's missing,
                # assume the token is expired
                expiration = info.get('expiration')
                if not expiration or (expiration and expiration < dt.datetime.utcnow()):
                    if not force:
                        raise ExpiredTokenError('Token for email "{0}" is expired'.format(email))
                    else:
                        new_token = self.add_unconfirmed_email(email)
                        self.save()
                        return new_token
                return token
        raise KeyError('No confirmation token for email "{0}"'.format(email))
    def get_confirmation_url(self, email, external=True, force=False):
        """Return the confirmation url for a given email.
        :raises: ExpiredTokenError if trying to access a token that is expired.
        :raises: KeyError if there is no token for the email.
        """
        base = settings.DOMAIN if external else '/'
        token = self.get_confirmation_token(email, force=force)
        return "{0}confirm/{1}/{2}/".format(base, self._primary_key, token)
    def _get_unconfirmed_email_for_token(self, token):
        """Return the unconfirmed email address associated with ``token``.
        :rtype: str
        :raises: InvalidTokenError if the token is unknown.
        :raises: ExpiredTokenError if the token has an expiration in the past.
        """
        if token not in self.email_verifications:
            raise InvalidTokenError
        verification = self.email_verifications[token]
        # Not all tokens are guaranteed to have expiration dates
        if (
            'expiration' in verification and
            verification['expiration'] < dt.datetime.utcnow()
        ):
            raise ExpiredTokenError
        return verification['email']
    def verify_claim_token(self, token, project_id):
        """Return whether or not a claim token is valid for this user for
        a given node which they were added as a unregistered contributor for.
        """
        try:
            record = self.get_unclaimed_record(project_id)
        except ValueError:  # No unclaimed record for given pid
            return False
        return record['token'] == token
    def confirm_email(self, token, merge=False):
        """Confirm the email address associated with the token"""
        email = self._get_unconfirmed_email_for_token(token)
        # If this email is confirmed on another account, abort
        try:
            user_to_merge = User.find_one(Q('emails', 'iexact', email))
        except NoResultsFound:
            user_to_merge = None
        if user_to_merge and merge:
            self.merge_user(user_to_merge)
        elif user_to_merge:
            raise MergeConfirmedRequiredError(
                'Merge requires confirmation',
                user=self,
                user_to_merge=user_to_merge,
            )
        # If another user has this email as its username, get it
        try:
            unregistered_user = User.find_one(Q('username', 'eq', email) &
                                              Q('_id', 'ne', self._id))
        except NoResultsFound:
            unregistered_user = None
        if unregistered_user:
            self.merge_user(unregistered_user)
            self.save()
            # NOTE(review): this assignment is not followed by a save of
            # ``unregistered_user`` here; merge_user already nulled and
            # saved its username — confirm this line is still needed.
            unregistered_user.username = None
        if email not in self.emails:
            self.emails.append(email)
        # Complete registration if primary email
        if email.lower() == self.username.lower():
            self.register(self.username)
            self.date_confirmed = dt.datetime.utcnow()
        # Revoke token
        del self.email_verifications[token]
        # TODO: We can't assume that all unclaimed records are now claimed.
        # Clear unclaimed records, so user's name shows up correctly on
        # all projects
        self.unclaimed_records = {}
        self.save()
        self.update_search_nodes()
        return True
    @property
    def unconfirmed_emails(self):
        """List of email addresses with outstanding verification tokens."""
        # Handle when email_verifications field is None
        email_verifications = self.email_verifications or {}
        return [
            each['email']
            for each
            in email_verifications.values()
        ]
    def update_search_nodes(self):
        """Call `update_search` on all nodes on which the user is a
        contributor. Needed to add self to contributor lists in search upon
        registration or claiming.
        """
        for node in self.node__contributed:
            node.update_search()
    def update_search_nodes_contributors(self):
        """
        Bulk update contributor name on all nodes on which the user is
        a contributor.
        :return:
        """
        from website.search import search
        search.update_contributors(self.visible_contributor_to)
    @property
    def is_confirmed(self):
        """True once the user has confirmed an email address."""
        return bool(self.date_confirmed)
    @property
    def social_links(self):
        """Dict of social service name -> rendered profile URL, for every
        populated entry that has a template in SOCIAL_FIELDS."""
        return {
            key: self.SOCIAL_FIELDS[key].format(val)
            for key, val in self.social.items()
            if val and
            self.SOCIAL_FIELDS.get(key)
        }
    @property
    def biblio_name(self):
        """Bibliographic name: 'Family, G. M.' (initials of given names)."""
        given_names = self.given_name + ' ' + self.middle_names
        surname = self.family_name
        if surname != given_names:
            initials = [
                name[0].upper() + '.'
                for name in given_names.split(' ')
                if name and re.search(r'\w', name[0], re.I)
            ]
            return u'{0}, {1}'.format(surname, ' '.join(initials))
        return surname
    @property
    def given_name_initial(self):
        """
        The user's preferred initialization of their given name.
        Some users with common names may choose to distinguish themselves from
        their colleagues in this way. For instance, there could be two
        well-known researchers in a single field named "Robert Walker".
        "Walker, R" could then refer to either of them. "Walker, R.H." could
        provide easy disambiguation.
        NOTE: The internal representation for this should never end with a
            period. "R" and "R.H" would be correct in the prior case, but
            "R.H." would not.
        """
        return self.given_name[0]
    @property
    def url(self):
        """Relative profile URL, e.g. '/abc12/'."""
        return '/{}/'.format(self._primary_key)
    @property
    def api_url(self):
        """Relative API v1 profile URL."""
        return '/api/v1/profile/{0}/'.format(self._primary_key)
    @property
    def absolute_url(self):
        """Absolute profile URL on the configured domain."""
        return urlparse.urljoin(settings.DOMAIN, self.url)
    @property
    def display_absolute_url(self):
        """Absolute URL with scheme and trailing slashes stripped, for display."""
        url = self.absolute_url
        if url is not None:
            return re.sub(r'https?:', '', url).strip('/')
    @property
    def deep_url(self):
        """Relative legacy profile URL."""
        return '/profile/{}/'.format(self._primary_key)
    def profile_image_url(self, size=None):
        """A generalized method for getting a user's profile picture urls.
        We may choose to use some service other than gravatar in the future,
        and should not commit ourselves to using a specific service (mostly
        an API concern).
        As long as we use gravatar, this is just a proxy to User.gravatar_url
        """
        return self._gravatar_url(size)
    def _gravatar_url(self, size):
        return filters.gravatar(
            self,
            use_ssl=True,
            size=size
        )
    def get_activity_points(self, db=None):
        """Total activity count for this user from the analytics store."""
        db = db or framework.mongo.database
        return analytics.get_total_activity_count(self._primary_key, db=db)
    @property
    def is_disabled(self):
        """Whether or not this account has been disabled.
        Abstracts ``User.date_disabled``.
        :return: bool
        """
        return self.date_disabled is not None
    @is_disabled.setter
    def is_disabled(self, val):
        """Set whether or not this account has been disabled."""
        if val:
            self.date_disabled = dt.datetime.utcnow()
        else:
            self.date_disabled = None
    @property
    def is_merged(self):
        '''Whether or not this account has been merged into another account.
        '''
        return self.merged_by is not None
    @property
    def profile_url(self):
        """Relative profile URL based on the GUID."""
        return '/{}/'.format(self._id)
    @property
    def contributor_to(self):
        """Generator of non-deleted, non-dashboard nodes the user contributes to."""
        return (
            node for node in self.node__contributed
            if not (
                node.is_deleted
                or node.is_dashboard
            )
        )
    @property
    def visible_contributor_to(self):
        """Generator of contributed nodes where the user is bibliographic."""
        return (
            node for node in self.contributor_to
            if self._id in node.visible_contributor_ids
        )
    def get_summary(self, formatter='long'):
        """Small dict summary of the user for template rendering."""
        return {
            'user_fullname': self.fullname,
            'user_profile_url': self.profile_url,
            'user_display_name': name_formatters[formatter](self),
            'user_is_claimed': self.is_claimed
        }
    def save(self, *args, **kwargs):
        """Normalize the username, save, and propagate side effects
        (search index update, Piwik user creation) as needed."""
        # TODO: Update mailchimp subscription on username change
        # Avoid circular import
        from framework.analytics import tasks as piwik_tasks
        self.username = self.username.lower().strip() if self.username else None
        ret = super(User, self).save(*args, **kwargs)
        # ``ret`` holds the saved field names; only touch search when a
        # search-relevant field changed.
        if self.SEARCH_UPDATE_FIELDS.intersection(ret) and self.is_confirmed:
            self.update_search()
            self.update_search_nodes_contributors()
        if settings.PIWIK_HOST and not self.piwik_token:
            piwik_tasks.update_user(self._id)
        return ret
    def update_search(self):
        """Push this user to the search index, logging search outages."""
        from website import search
        try:
            search.search.update_user(self)
        except search.exceptions.SearchUnavailableError as e:
            logger.exception(e)
            log_exception()
    @classmethod
    def find_by_email(cls, email):
        """Return a single-element list of the user with ``email``, else []."""
        # NOTE(review): bare ``except`` swallows every error (including
        # multiple-results errors) — consider narrowing.
        try:
            user = cls.find_one(
                Q('emails', 'eq', email)
            )
            return [user]
        except:
            return []
    def serialize(self, anonymous=False):
        """Serialize the user to a dict, masking fields when ``anonymous``."""
        return {
            'id': utils.privacy_info_handle(self._primary_key, anonymous),
            'fullname': utils.privacy_info_handle(self.fullname, anonymous, name=True),
            'registered': self.is_registered,
            'url': utils.privacy_info_handle(self.url, anonymous),
            'api_url': utils.privacy_info_handle(self.api_url, anonymous),
        }
    ###### OSF-Specific methods ######
    def watch(self, watch_config):
        """Watch a node by adding its WatchConfig to this user's ``watched``
        list. Raises ``ValueError`` if the node is already watched.
        :param watch_config: The WatchConfig to add.
        :param save: Whether to save the user.
        """
        watched_nodes = [each.node for each in self.watched]
        if watch_config.node in watched_nodes:
            raise ValueError('Node is already being watched.')
        watch_config.save()
        self.watched.append(watch_config)
        return None
    def unwatch(self, watch_config):
        """Unwatch a node by removing its WatchConfig from this user's ``watched``
        list. Raises ``ValueError`` if the node is not already being watched.
        :param watch_config: The WatchConfig to remove.
        :param save: Whether to save the user.
        """
        for each in self.watched:
            if watch_config.node._id == each.node._id:
                each.__class__.remove_one(each)
                return None
        raise ValueError('Node not being watched.')
    def is_watching(self, node):
        '''Return whether a not a user is watching a Node.'''
        watched_node_ids = set([config.node._id for config in self.watched])
        return node._id in watched_node_ids
    def get_recent_log_ids(self, since=None):
        '''Return a generator of recent logs' ids.
        :param since: A datetime specifying the oldest time to retrieve logs
        from. If ``None``, defaults to 60 days before today. Must be a tz-aware
        datetime because PyMongo's generation times are tz-aware.
        :rtype: generator of log ids (strings)
        '''
        log_ids = []
        # Default since to 60 days before today if since is None
        # timezone aware utcnow
        utcnow = dt.datetime.utcnow().replace(tzinfo=pytz.utc)
        since_date = since or (utcnow - dt.timedelta(days=60))
        for config in self.watched:
            # Extract the timestamps for each log from the log_id (fast!)
            # The first 4 bytes of Mongo's ObjectId encodes time
            # This prevents having to load each Log Object and access their
            # date fields
            node_log_ids = [log_id for log_id in config.node.logs._to_primary_keys()
                            if bson.ObjectId(log_id).generation_time > since_date and
                            log_id not in log_ids]
            # Log ids in reverse chronological order
            log_ids = _merge_into_reversed(log_ids, node_log_ids)
        return (l_id for l_id in log_ids)
    def get_daily_digest_log_ids(self):
        '''Return a generator of log ids generated in the past day
        (starting at UTC 00:00).
        '''
        utcnow = dt.datetime.utcnow()
        midnight = dt.datetime(
            utcnow.year, utcnow.month, utcnow.day,
            0, 0, 0, tzinfo=pytz.utc
        )
        return self.get_recent_log_ids(since=midnight)
    @property
    def can_be_merged(self):
        """The ability of the `merge_user` method to fully merge the user"""
        return all((addon.can_be_merged for addon in self.get_addons()))
    def merge_user(self, user):
        """Merge a registered user into this account. This user will be
        a contributor on any project. if the registered user and this account
        are both contributors of the same project. Then it will remove the
        registered user and set this account to the highest permission of the two
        and set this account to be visible if either of the two are visible on
        the project.
        :param user: A User object to be merged.
        """
        # Fail if the other user has conflicts.
        if not user.can_be_merged:
            raise MergeConflictError("Users cannot be merged")
        # Move over the other user's attributes
        # TODO: confirm
        for system_tag in user.system_tags:
            if system_tag not in self.system_tags:
                self.system_tags.append(system_tag)
        self.is_claimed = self.is_claimed or user.is_claimed
        self.is_invited = self.is_invited or user.is_invited
        # copy over profile only if this user has no profile info
        if user.jobs and not self.jobs:
            self.jobs = user.jobs
        if user.schools and not self.schools:
            self.schools = user.schools
        if user.social and not self.social:
            self.social = user.social
        # this account's records win on key collisions
        unclaimed = user.unclaimed_records.copy()
        unclaimed.update(self.unclaimed_records)
        self.unclaimed_records = unclaimed
        # - unclaimed records should be connected to only one user
        user.unclaimed_records = {}
        security_messages = user.security_messages.copy()
        security_messages.update(self.security_messages)
        self.security_messages = security_messages
        for key, value in user.mailing_lists.iteritems():
            # subscribe to each list if either user was subscribed
            subscription = value or self.mailing_lists.get(key)
            signals.user_merged.send(self, list_name=key, subscription=subscription)
            # clear subscriptions for merged user
            signals.user_merged.send(user, list_name=key, subscription=False)
        for node_id, timestamp in user.comments_viewed_timestamp.iteritems():
            if not self.comments_viewed_timestamp.get(node_id):
                self.comments_viewed_timestamp[node_id] = timestamp
            elif timestamp > self.comments_viewed_timestamp[node_id]:
                self.comments_viewed_timestamp[node_id] = timestamp
        self.emails.extend(user.emails)
        user.emails = []
        for k, v in user.email_verifications.iteritems():
            email_to_confirm = v['email']
            if k not in self.email_verifications and email_to_confirm != user.username:
                self.email_verifications[k] = v
        user.email_verifications = {}
        # FOREIGN FIELDS
        for watched in user.watched:
            if watched not in self.watched:
                self.watched.append(watched)
        user.watched = []
        for account in user.external_accounts:
            if account not in self.external_accounts:
                self.external_accounts.append(account)
        user.external_accounts = []
        # - addons
        # Note: This must occur before the merged user is removed as a
        # contributor on the nodes, as an event hook is otherwise fired
        # which removes the credentials.
        for addon in user.get_addons():
            user_settings = self.get_or_add_addon(addon.config.short_name)
            user_settings.merge(addon)
            user_settings.save()
        # - projects where the user was a contributor
        for node in user.node__contributed:
            # Skip dashboard node
            if node.is_dashboard:
                continue
            # if both accounts are contributor of the same project
            if node.is_contributor(self) and node.is_contributor(user):
                if node.permissions[user._id] > node.permissions[self._id]:
                    permissions = node.permissions[user._id]
                else:
                    permissions = node.permissions[self._id]
                node.set_permissions(user=self, permissions=permissions)
                visible1 = self._id in node.visible_contributor_ids
                visible2 = user._id in node.visible_contributor_ids
                if visible1 != visible2:
                    node.set_visible(user=self, visible=True, log=True, auth=Auth(user=self))
            else:
                node.add_contributor(
                    contributor=self,
                    permissions=node.get_permissions(user),
                    visible=node.get_visible(user),
                    log=False,
                )
            try:
                node.remove_contributor(
                    contributor=user,
                    auth=Auth(user=self),
                    log=False,
                )
            except ValueError:
                logger.error('Contributor {0} not in list on node {1}'.format(
                    user._id, node._id
                ))
            node.save()
        # - projects where the user was the creator
        for node in user.node__created:
            node.creator = self
            node.save()
        # finalize the merge
        remove_sessions_for_user(user)
        # - username is set to None so the resultant user can set it primary
        #   in the future.
        user.username = None
        user.password = None
        user.verification_key = None
        user.merged_by = self
        user.save()
    def get_projects_in_common(self, other_user, primary_keys=True):
        """Returns either a collection of "shared projects" (projects that both users are contributors for)
        or just their primary keys
        """
        if primary_keys:
            projects_contributed_to = set(self.node__contributed._to_primary_keys())
            return projects_contributed_to.intersection(other_user.node__contributed._to_primary_keys())
        else:
            projects_contributed_to = set(self.node__contributed)
            return projects_contributed_to.intersection(other_user.node__contributed)
    def n_projects_in_common(self, other_user):
        """Returns number of "shared projects" (projects that both users are contributors for)"""
        return len(self.get_projects_in_common(other_user, primary_keys=True))
def _merge_into_reversed(*iterables):
'''Merge multiple sorted inputs into a single output in reverse order.
'''
return sorted(itertools.chain(*iterables), reverse=True)
|
arpitar/osf.io
|
framework/auth/core.py
|
Python
|
apache-2.0
| 46,002
|
#!/usr/bin/env python
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import sys
import gflags
from cyber_py import cyber
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from modules.control.proto import control_cmd_pb2
BRAKE_LINE_DATA = []
TROTTLE_LINE_DATA = []
STEERING_LINE_DATA = []
FLAGS = gflags.FLAGS
gflags.DEFINE_integer("data_length", 500, "Control plot data length")
def callback(control_cmd_pb):
    """Record the newest control-command sample in the plot buffers.

    Appends steering_target, brake and throttle from `control_cmd_pb` to the
    module-level series, trimming each buffer in place so at most
    FLAGS.data_length samples are kept.
    """
    limit = FLAGS.data_length
    STEERING_LINE_DATA.append(control_cmd_pb.steering_target)
    if len(STEERING_LINE_DATA) > limit:
        # Trim in place: drop everything but the newest `limit` samples.
        del STEERING_LINE_DATA[:-limit]
    BRAKE_LINE_DATA.append(control_cmd_pb.brake)
    if len(BRAKE_LINE_DATA) > limit:
        del BRAKE_LINE_DATA[:-limit]
    TROTTLE_LINE_DATA.append(control_cmd_pb.throttle)
    if len(TROTTLE_LINE_DATA) > limit:
        del TROTTLE_LINE_DATA[:-limit]
def listener():
    """Start a Cyber RT node that feeds /apollo/control messages to `callback`.

    Initializes the Cyber runtime, subscribes to the control command channel,
    then spins; `cyber.shutdown()` runs once spin returns.
    """
    # NOTE(review): spin() appears to block until the Cyber runtime shuts
    # down -- confirm; if so, code placed after a listener() call only runs
    # after shutdown.
    cyber.init()
    test_node = cyber.Node("control_listener")
    test_node.create_reader("/apollo/control",
                            control_cmd_pb2.ControlCommand, callback)
    test_node.spin()
    cyber.shutdown()
def compensate(data_list, length=None):
    """Left-pad `data_list` with zeros and return its trailing window.

    Guarantees a fixed-size series for plotting even while fewer than
    `length` samples have been received.

    Args:
        data_list: Sequence of numeric samples, newest last.
        length: Size of the returned window. Defaults to FLAGS.data_length,
            preserving the original behavior for existing callers.

    Returns:
        A list of exactly `length` values, zero-padded on the left when
        `data_list` is shorter than `length`.
    """
    if length is None:
        length = FLAGS.data_length
    # Padding with `length` zeros up front makes the trailing slice always
    # well-defined, whether or not data_list is shorter than the window.
    padded = [0] * length + list(data_list)
    return padded[-length:]
def update(frame_number):
    """Animation callback: refresh all three plot lines and their readouts."""
    series = (
        (BRAKE_LINE_DATA, brake_line, brake_text, 'brake'),
        (TROTTLE_LINE_DATA, throttle_line, throttle_text, 'throttle'),
        (STEERING_LINE_DATA, steering_line, steering_text, 'steering'),
    )
    for raw, line, text, label in series:
        padded = compensate(raw)
        line.set_ydata(padded)
        # Show the most recent value next to each curve.
        text.set_text('%s = %.1f' % (label, padded[-1]))
if __name__ == '__main__':
    argv = FLAGS(sys.argv)
    # NOTE(review): listener() ends in test_node.spin(), which looks like it
    # blocks until Cyber shuts down; if so, the plotting setup below only
    # runs after shutdown -- confirm whether listener should run in a thread.
    listener()
    fig, ax = plt.subplots()
    # X axis: the newest FLAGS.data_length samples at non-positive offsets,
    # i.e. [-(data_length - 1), ..., 0].
    Xs = list(range(1 - FLAGS.data_length, 1))
    steering_line, = ax.plot(
        Xs, [0] * FLAGS.data_length, 'b', lw=3, alpha=0.5, label='steering')
    throttle_line, = ax.plot(
        Xs, [0] * FLAGS.data_length, 'g', lw=3, alpha=0.5, label='throttle')
    brake_line, = ax.plot(
        Xs, [0] * FLAGS.data_length, 'r', lw=3, alpha=0.5, label='brake')
    # Numeric readouts in the top-right corner (axes coordinates).
    brake_text = ax.text(0.75, 0.85, '', transform=ax.transAxes)
    throttle_text = ax.text(0.75, 0.90, '', transform=ax.transAxes)
    steering_text = ax.text(0.75, 0.95, '', transform=ax.transAxes)
    ani = animation.FuncAnimation(fig, update, interval=100)
    ax.set_ylim(-100, 120)
    ax.set_xlim(-1 * FLAGS.data_length, 10)
    ax.legend(loc="upper left")
    plt.show()
|
msbeta/apollo
|
modules/tools/plot_control/plot_control.py
|
Python
|
apache-2.0
| 3,563
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""## Loss operations for use in neural networks.
All of the loss functions take a pair of predictions and ground truth labels,
from which the loss is computed. It is assumed that the shape of both these
tensors is of the form [batch_size, d1, ... dN] where `batch_size` is the number
of samples in the batch and `d1` ... `dN` are the remaining dimensions.
It is common, when training with multiple loss functions, to adjust the relative
strengths of individual losses. This is performed by rescaling the losses via
a `weight` parameter passed to the loss functions. For example, if we were
training with both log_loss and sum_of_squares_loss, and we wished that the
log_loss penalty be twice as severe as the sum_of_squares_loss, we would
implement this as:
# Explicitly set the weight.
tf.contrib.losses.log(predictions, targets, weight=2.0)
# Uses default weight of 1.0
tf.contrib.losses.sum_of_squares(predictions, targets)
While specifying a scalar loss rescales the loss over the entire batch,
we sometimes want to rescale the loss per batch sample. For example, if we have
certain examples that matter more to us to get correctly, we might want to have
a higher loss than other samples whose mistakes matter less. In this case, we
can provide a weight vector of length `batch_size` which results in the loss
for each sample in the batch being scaled by the corresponding weight element.
For example, consider the case of a classification problem where we want to
maximize our accuracy but we are especially interested in obtaining high accuracy
for a specific class:
inputs, labels = LoadData(batch_size=3)
logits = MyModelPredictions(inputs)
# Ensures that the loss for examples whose ground truth class is `3` is 5x
# higher than the loss for all other examples.
weight = tf.mul(4, tf.cast(tf.equal(labels, 3), tf.float32)) + 1
onehot_labels = tf.one_hot(labels, num_classes=5)
tf.contrib.losses.softmax_cross_entropy(logits, onehot_labels, weight=weight)
Finally, in certain cases, we may want to specify a different loss for every
single measurable value. For example, if we are performing per-pixel depth
prediction, or per-pixel denoising, a single batch sample has P values where P
is the number of pixels in the image. For many losses, the number of measurable
values matches the number of elements in the predictions and targets tensors.
For others, such as softmax_cross_entropy and cosine_distance, the
loss functions reduce the dimensions of the inputs to produce a tensor of
losses for each measurable value. For example, softmax_cross_entropy takes as
input predictions and labels of dimension [batch_size, num_classes] but the
number of measurable values is [batch_size]. Consequently, when passing a weight
tensor to specify a different loss for every measurable value, the dimension of
the tensor will depend on the loss being used.
For a concrete example, consider the case of per-pixel depth prediction where
certain ground truth depth values are missing (due to sensor noise in the
capture process). In this case, we want to assign zero weight to losses for
these predictions.
# 'depths' that are missing have a value of 0:
images, depths = LoadData(...)
predictions = MyModelPredictions(images)
weight = tf.cast(tf.greater(depths, 0), tf.float32)
tf.contrib.losses.sum_of_squares(predictions, depths, weight)
Note that when using weights for the losses, the final average is computed
by rescaling the losses by the weights and then dividing by the total number of
non-zero samples. For an arbitrary set of weights, this may not necessarily
produce a weighted average. Instead, it simply and transparently rescales the
per-element losses before averaging over the number of observations. For example
if the losses computed by the loss function is an array [4, 1, 2, 3] and the
weights are an array [1, 0.5, 3, 9], then the average loss is:
(4*1 + 1*0.5 + 2*3 + 3*9) / 4
However, with a single loss function and an arbitrary set of weights, one can
still easily create a loss function such that the resulting loss is a
weighted average over the individual prediction errors:
images, labels = LoadData(...)
predictions = MyModelPredictions(images)
weight = MyComplicatedWeightingFunction(labels)
weight = tf.div(weight, tf.size(weight))
tf.contrib.losses.sum_of_squares(predictions, depths, weight)
@@absolute_difference
@@cosine_distance
@@log
@@sigmoid_cross_entropy
@@softmax_cross_entropy
@@sum_of_pairwise_squares
@@sum_of_squares
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
__all__ = [
"absolute_difference",
"cosine_distance",
"log",
"sigmoid_cross_entropy",
"softmax_cross_entropy",
"sum_of_pairwise_squares",
"sum_of_squares",
]
def _scale_losses(losses, weight):
  """Reduces `losses` to the rank of `weight`, scales, and sums.

  Args:
    losses: A `Tensor` of size [batch_size, d1, ... dN].
    weight: A `Tensor` of size [1], [batch_size] or [batch_size, d1, ... dN].

  Returns:
    A scalar tf.float32 `Tensor` whose value represents the sum of the scaled
    `losses`.
  """
  # Sum away every dimension of `losses` beyond the rank of `weight` so the
  # two tensors align element-wise; this is equivalent to broadcasting
  # `weight` up to the shape of `losses` and summing the products.
  weight_rank = max(0, weight.get_shape().ndims)
  trailing_axes = list(range(weight_rank, losses.get_shape().ndims))
  summed = math_ops.reduce_sum(losses, reduction_indices=trailing_axes)
  weighted = math_ops.mul(summed, weight)
  return math_ops.reduce_sum(weighted)
def _safe_mean(losses, num_present):
  """Computes sum(losses) / num_present, or zero when nothing is present.

  Args:
    losses: A tensor whose elements contain individual loss measurements.
    num_present: The number of measurable losses in the tensor.

  Returns:
    A scalar representing the mean of the losses, or zero if `num_present`
    is zero.
  """
  total = math_ops.reduce_sum(losses)
  # Substitute 1.0 for a zero divisor so the division node never divides by
  # zero; the outer select() discards that branch's result anyway.
  safe_divisor = math_ops.select(
      math_ops.equal(num_present, 0), 1.0, num_present)
  mean = math_ops.div(total, safe_divisor)
  return math_ops.select(
      math_ops.greater(num_present, 0),
      mean,
      array_ops.zeros_like(total),
      name="value")
def _compute_weighted_loss(losses, weight):
  """Computes the weighted mean of `losses`.

  Args:
    losses: A tensor of size [batch_size, d1, ... dN].
    weight: A tensor of size [1] or [batch_size, d1, ... dK] where K < N.

  Returns:
    A scalar `Tensor` that returns the weighted loss.

  Raises:
    ValueError: If the rank (ndims) of either `losses` or `weight` is
      statically unknown.
  """
  losses = math_ops.to_float(losses)
  weight = math_ops.to_float(ops.convert_to_tensor(weight))
  # Both ranks must be known statically for the reductions below.
  for tensor, label in ((losses, "losses"), (weight, "weight")):
    if tensor.get_shape().ndims is None:
      raise ValueError("%s.get_shape().ndims cannot be None" % label)
  return _safe_mean(_scale_losses(losses, weight),
                    _num_present(losses, weight))
def _num_present(losses, weight, per_batch=False):
  """Computes the number of elements in the loss function induced by `weight`.

  A given weight tensor induces different numbers of usable elements in the
  `losses` tensor. The `weight` tensor is broadcast across `losses` for all
  possible dimensions. For example, if `losses` is a tensor of dimension
  [4, 5, 6, 3] and weight is a tensor of size [4, 5], then weight is, in effect,
  tiled to match the size of `losses`. Following this effective tile, the total
  number of present elements is the number of non-zero weights.

  Args:
    losses: A tensor of size [batch_size, d1, ... dN].
    weight: A tensor of size [1] or [batch_size, d1, ... dK] where K < N.
    per_batch: Whether to return the number of elements per batch or as a sum
      total.

  Returns:
    The number of present (non-zero) elements in the losses tensor. If
    `per_batch` is True, the value is returned as a tensor of size
    [batch_size]. Otherwise, a single scalar tensor is returned.
  """
  # To ensure that dims of [2, 1] gets mapped to [2,]
  weight = array_ops.squeeze(weight)
  # If the weight is a scalar, its easy to compute:
  # every element of `losses` is present unless the scalar weight is zero,
  # so the per-batch count is simply size(losses) / batch_size.
  if weight.get_shape().ndims == 0:
    batch_size = array_ops.reshape(array_ops.slice(array_ops.shape(losses),
                                                   [0], [1]), [])
    num_per_batch = math_ops.div(math_ops.to_float(array_ops.size(losses)),
                                 math_ops.to_float(batch_size))
    # A zero scalar weight zeroes out every element.
    num_per_batch = math_ops.select(math_ops.equal(weight, 0),
                                    0.0, num_per_batch)
    # Tile the scalar count into a [batch_size] vector.
    num_per_batch = math_ops.mul(array_ops.ones(
        array_ops.reshape(batch_size, [1])), num_per_batch)
    return num_per_batch if per_batch else math_ops.reduce_sum(num_per_batch)
  # First, count the number of nonzero weights:
  if weight.get_shape().ndims >= 1:
    reduction_indices = list(range(1, weight.get_shape().ndims))
    num_nonzero_per_batch = math_ops.reduce_sum(
        math_ops.to_float(math_ops.not_equal(weight, 0)),
        reduction_indices=reduction_indices)
  # Next, determine the number of elements that weight would broadcast to:
  # the product of the trailing dims of `losses` not covered by `weight`.
  broadcast_dims = array_ops.slice(array_ops.shape(losses),
                                   [weight.get_shape().ndims], [-1])
  num_to_broadcast = math_ops.to_float(math_ops.reduce_prod(broadcast_dims))
  # Each nonzero weight contributes `num_to_broadcast` present elements.
  num_per_batch = math_ops.mul(num_nonzero_per_batch, num_to_broadcast)
  return num_per_batch if per_batch else math_ops.reduce_sum(num_per_batch)
def absolute_difference(predictions, targets, weight=1.0, scope=None):
  """Adds an Absolute Difference loss to the training procedure.

  `weight` acts as a coefficient for the loss. If a scalar is provided, then the
  loss is simply scaled by the given value. If `weight` is a tensor of size
  [batch_size], then the total loss for each sample of the batch is rescaled
  by the corresponding element in the `weight` vector. If the shape of
  `weight` matches the shape of `predictions`, then the loss of each
  measurable element of `predictions` is scaled by the corresponding value of
  `weight`.

  Args:
    predictions: The predicted outputs.
    targets: The ground truth output tensor, same dimensions as 'predictions'.
    weight: Coefficients for the loss a scalar, a tensor of shape
      [batch_size] or a tensor whose shape matches `predictions`.
    scope: The scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the loss value.

  Raises:
    ValueError: If the shape of `predictions` doesn't match that of `targets` or
      if the shape of `weight` is invalid.
  """
  # Fix: the default op scope was copy-pasted as "sum_of_squares_loss";
  # name it after this loss, matching the file's "<name>_loss" convention.
  with ops.op_scope([predictions, targets],
                    scope, "absolute_difference_loss") as scope:
    predictions.get_shape().assert_is_compatible_with(targets.get_shape())
    if weight is None:
      raise ValueError("`weight` cannot be None")
    predictions = math_ops.to_float(predictions)
    targets = math_ops.to_float(targets)
    losses = math_ops.abs(math_ops.sub(predictions, targets))
    return _compute_weighted_loss(losses, weight)
def sigmoid_cross_entropy(logits, multi_class_labels, weight=1.0,
                          label_smoothing=0, scope=None):
  """Creates a cross-entropy loss using tf.nn.sigmoid_cross_entropy_with_logits.

  Args:
    logits: [batch_size, num_classes] logits outputs of the network .
    multi_class_labels: [batch_size, num_classes] target labels in (0, 1).
    weight: Coefficients for the loss. The tensor must be a scalar, a tensor of
      shape [batch_size] or shape [batch_size, num_classes].
    label_smoothing: If greater than 0 then smooth the labels.
    scope: The scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the loss value.
  """
  # NOTE(review): smoothing is delegated to _cross_entropy, which divides
  # label_smoothing by num_classes (a softmax-style formula); confirm that
  # is the intended smoothing for independent sigmoid targets.
  with ops.op_scope([logits, multi_class_labels],
                    scope, "sigmoid_cross_entropy_loss"):
    return _cross_entropy(logits, multi_class_labels, weight,
                          label_smoothing,
                          activation_fn=nn.sigmoid_cross_entropy_with_logits)
def softmax_cross_entropy(logits, onehot_labels, weight=1.0,
                          label_smoothing=0, scope=None):
  """Creates a cross-entropy loss using tf.nn.softmax_cross_entropy_with_logits.

  It can scale the loss by weight factor, and smooth the labels.

  Args:
    logits: [batch_size, num_classes] logits outputs of the network .
    onehot_labels: [batch_size, num_classes] target one_hot_encoded labels.
    weight: Coefficients for the loss. The tensor must be a scalar or a tensor
      of shape [batch_size].
    label_smoothing: If greater than 0 then smooth the labels.
    scope: the scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the loss value.
  """
  # Thin wrapper: scoping here, shared validation/smoothing in _cross_entropy.
  with ops.op_scope([logits, onehot_labels],
                    scope, "softmax_cross_entropy_loss"):
    return _cross_entropy(logits, onehot_labels, weight,
                          label_smoothing,
                          activation_fn=nn.softmax_cross_entropy_with_logits)
def _cross_entropy(logits, onehot_labels, weight, label_smoothing,
                   activation_fn):
  """Adds a CrossEntropyLoss to the losses collection.

  `weight` acts as a coefficient for the loss. If a scalar is provided,
  then the loss is simply scaled by the given value. If `weight` is a
  tensor of size [`batch_size`], then the loss weights apply to each
  corresponding sample.

  Args:
    logits: [batch_size, num_classes] logits outputs of the network .
    onehot_labels: [batch_size, num_classes] target one_hot_encoded labels.
    weight: Coefficients for the loss. If the activation is SIGMOID, then the
      weight shape must be one of [1], [batch_size] or logits.shape().
      Otherwise, the weight shape must be either [1] or [batch_size].
    label_smoothing: If greater than 0 then smooth the labels.
    activation_fn: The activation function to use. The method must take three
      arguments, the logits, the labels, and an operation name.

  Returns:
    A scalar `Tensor` representing the loss value.

  Raises:
    ValueError: If the shape of `predictions` doesn't match that of `targets` or
      if the shape of `weight` is invalid or if `weight` is None.
  """
  logits.get_shape().assert_is_compatible_with(onehot_labels.get_shape())
  if weight is None:
    raise ValueError("`weight` cannot be None")
  # Labels must share the logits dtype before the smoothing arithmetic below.
  onehot_labels = math_ops.cast(onehot_labels, logits.dtype)
  if label_smoothing > 0:
    # Classic label smoothing: 1 -> 1 - smoothing + smoothing/num_classes,
    # 0 -> smoothing/num_classes, keeping each row summing to 1.
    num_classes = onehot_labels.get_shape()[1].value
    smooth_positives = 1.0 - label_smoothing
    smooth_negatives = label_smoothing / num_classes
    onehot_labels = onehot_labels * smooth_positives + smooth_negatives
  losses = activation_fn(logits, onehot_labels, name="xentropy")
  return _compute_weighted_loss(losses, weight)
def log(predictions, targets, weight=1.0, epsilon=1e-7, scope=None):
  """Adds a Log (binary cross-entropy) Loss term to the training procedure.

  `weight` acts as a coefficient for the loss: a scalar rescales the whole
  batch, a [batch_size] tensor rescales each sample, and a tensor shaped
  like `predictions` rescales each measurable element.

  Args:
    predictions: The predicted outputs.
    targets: The ground truth output tensor, same dimensions as 'predictions'.
    weight: Coefficients for the loss a scalar, a tensor of shape
      [batch_size] or a tensor whose shape matches `predictions`.
    epsilon: A small increment to add to avoid taking a log of zero.
    scope: The scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the loss value.

  Raises:
    ValueError: If the shape of `predictions` doesn't match that of `targets` or
      if the shape of `weight` is invalid.
  """
  with ops.op_scope([predictions, targets],
                    scope, "log_loss") as scope:
    predictions.get_shape().assert_is_compatible_with(targets.get_shape())
    if weight is None:
      raise ValueError("`weight` cannot be None")
    predictions = math_ops.to_float(predictions)
    targets = math_ops.to_float(targets)
    # -[t * log(p) + (1 - t) * log(1 - p)], with epsilon guarding log(0).
    positive_term = math_ops.mul(
        targets, math_ops.log(predictions + epsilon))
    negative_term = math_ops.mul(
        (1 - targets), math_ops.log(1 - predictions + epsilon))
    losses = -positive_term - negative_term
    return _compute_weighted_loss(losses, weight)
def sum_of_squares(predictions, targets, weight=1.0, scope=None):
  """Adds a Sum-of-Squares loss to the training procedure.

  `weight` acts as a coefficient for the loss: a scalar rescales the whole
  batch, a [batch_size] tensor rescales each sample, and a tensor shaped
  like `predictions` rescales each measurable element.

  Args:
    predictions: The predicted outputs.
    targets: The ground truth output tensor, same dimensions as 'predictions'.
    weight: Coefficients for the loss a scalar, a tensor of shape
      [batch_size] or a tensor whose shape matches `predictions`.
    scope: The scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the loss value.

  Raises:
    ValueError: If the shape of `predictions` doesn't match that of `targets` or
      if the shape of `weight` is invalid.
  """
  with ops.op_scope([predictions, targets],
                    scope, "sum_of_squares_loss") as scope:
    predictions.get_shape().assert_is_compatible_with(targets.get_shape())
    if weight is None:
      raise ValueError("`weight` cannot be None")
    predictions = math_ops.to_float(predictions)
    targets = math_ops.to_float(targets)
    # Element-wise squared error, then weighted mean.
    errors = math_ops.sub(predictions, targets)
    losses = math_ops.square(errors)
    return _compute_weighted_loss(losses, weight)
def sum_of_pairwise_squares(predictions, targets, weight=1.0, scope=None):
  """Adds a pairwise-errors-squared loss to the training procedure.

  Unlike the sum_of_squares loss, which is a measure of the differences between
  corresponding elements of `predictions` and `targets`, sum_of_pairwise_squares
  is a measure of the differences between pairs of corresponding elements of
  `predictions` and `targets`.

  For example, if `targets`=[a, b, c] and `predictions`=[x, y, z], there are
  three pairs of differences that are summed to compute the loss:
    loss = [ ((a-b) - (x-y)).^2 + ((a-c) - (x-z)).^2 + ((b-c) - (y-z)).^2 ] / 3

  Note that since the inputs are of size [batch_size, d0, ... dN], the
  corresponding pairs are computed within each batch sample but not across
  samples within a batch. For example, if `predictions` represents a batch of
  16 grayscale images of dimension [batch_size, 100, 200], then the set of pairs
  is drawn from each image, but not across images.

  `weight` acts as a coefficient for the loss. If a scalar is provided, then the
  loss is simply scaled by the given value. If `weight` is a tensor of size
  [batch_size], then the total loss for each sample of the batch is rescaled
  by the corresponding element in the `weight` vector.

  Args:
    predictions: The predicted outputs, a tensor of size [batch_size, d0, .. dN]
      where N+1 is the total number of dimensions in `predictions`.
    targets: The ground truth output tensor, whose shape must match the shape of
      the `predictions` tensor.
    weight: Coefficients for the loss a scalar, a tensor of shape [batch_size]
      or a tensor whose shape matches `predictions`.
    scope: The scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the loss value.

  Raises:
    ValueError: If the shape of `predictions` doesn't match that of `targets` or
      if the shape of `weight` is invalid.
  """
  with ops.op_scope([predictions, targets],
                    scope, "sum_of_pairwise_squares_loss") as scope:
    predictions.get_shape().assert_is_compatible_with(targets.get_shape())
    if weight is None:
      raise ValueError("`weight` cannot be None")
    predictions = math_ops.to_float(predictions)
    targets = math_ops.to_float(targets)
    weight = math_ops.to_float(ops.convert_to_tensor(weight))
    diffs = math_ops.sub(predictions, targets)
    # Need to verify here since the function doesn't use _compute_weighted_loss
    if diffs.get_shape().ndims is None:
      raise ValueError("diffs.get_shape().ndims cannot be None")
    if weight.get_shape().ndims is None:
      raise ValueError("weight.get_shape().ndims cannot be None")
    # The pairwise sum over all (i, j) pairs expands algebraically to
    # 2*sum(d^2)/n - 2*(sum(d))^2/n^2, computed per batch sample below,
    # avoiding the O(n^2) explicit pairing.
    reduction_indices = list(range(1, diffs.get_shape().ndims))
    sum_squares_diff_per_batch = math_ops.reduce_sum(
        math_ops.square(diffs),
        reduction_indices=reduction_indices)
    num_present_per_batch = _num_present(diffs, weight, per_batch=True)
    term1 = 2.0 * math_ops.div(sum_squares_diff_per_batch,
                               num_present_per_batch)
    sum_diff = math_ops.reduce_sum(diffs, reduction_indices=reduction_indices)
    term2 = 2.0 * math_ops.div(math_ops.square(sum_diff),
                               math_ops.square(num_present_per_batch))
    # NOTE(review): term1/term2 divide by the per-batch present count; when a
    # batch sample has zero present elements this divides by zero -- the
    # final select only guards the total, not per-sample counts. Confirm.
    loss = _scale_losses(term1 - term2, weight)
    # Return zero (rather than NaN) when no elements are present at all.
    return math_ops.select(math_ops.reduce_sum(num_present_per_batch) > 0,
                           loss,
                           array_ops.zeros_like(loss),
                           name="value")
def cosine_distance(predictions, targets, dim, weight=1.0, scope=None):
  """Adds a cosine-distance loss to the training procedure.

  Note that the function assumes that the predictions and targets are already
  unit-normalized, so the dot product along `dim` is the cosine similarity.

  Args:
    predictions: An arbitrary matrix.
    targets: A `Tensor` whose shape matches 'predictions'
    dim: The dimension along which the cosine distance is computed.
    weight: Coefficients for the loss a scalar, a tensor of shape
      [batch_size] or a tensor whose shape matches `predictions`.
    scope: The scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the loss value.

  Raises:
    ValueError: If predictions.shape doesn't match targets.shape, if the ignore
                mask is provided and its shape doesn't match targets.shape or if
                the ignore mask is not boolean valued.
  """
  with ops.op_scope([predictions, targets],
                    scope, "cosine_distance_loss") as scope:
    predictions.get_shape().assert_is_compatible_with(targets.get_shape())
    if weight is None:
      raise ValueError("`weight` cannot be None")
    predictions = math_ops.to_float(predictions)
    targets = math_ops.to_float(targets)
    # distance = 1 - cos_similarity = 1 - sum(p * t) along `dim`.
    similarity = math_ops.reduce_sum(
        math_ops.mul(predictions, targets), reduction_indices=[dim])
    losses = 1 - similarity
    return _compute_weighted_loss(losses, weight)
|
peterbraden/tensorflow
|
tensorflow/contrib/losses/python/losses/loss_ops.py
|
Python
|
apache-2.0
| 24,598
|
"""Support for the Environment Canada weather service."""
from datetime import datetime, timedelta
import logging
import re
from env_canada import ECData # pylint: disable=import-error
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LOCATION,
CONF_LATITUDE,
CONF_LONGITUDE,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=10)
ATTR_UPDATED = "updated"
ATTR_STATION = "station"
ATTR_TIME = "alert time"
CONF_ATTRIBUTION = "Data provided by Environment Canada"
CONF_STATION = "station"
CONF_LANGUAGE = "language"
def validate_station(station):
    """Validate an Environment Canada station ID (form "XX/s0000###")."""
    if station is None:
        return None
    pattern = re.compile(r"[A-Z]{2}/s0000\d{3}")
    if pattern.fullmatch(station):
        return station
    raise vol.error.Invalid('Station ID must be of the form "XX/s0000###"')
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_LANGUAGE, default="english"): vol.In(["english", "french"]),
vol.Optional(CONF_STATION): validate_station,
vol.Inclusive(CONF_LATITUDE, "latlon"): cv.latitude,
vol.Inclusive(CONF_LONGITUDE, "latlon"): cv.longitude,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Environment Canada sensor."""
    language = config.get(CONF_LANGUAGE)
    station = config.get(CONF_STATION)
    if station:
        ec_data = ECData(station_id=station, language=language)
    else:
        # Fall back to the Home Assistant instance location when no explicit
        # coordinates are configured.
        coordinates = (
            config.get(CONF_LATITUDE, hass.config.latitude),
            config.get(CONF_LONGITUDE, hass.config.longitude),
        )
        ec_data = ECData(coordinates=coordinates, language=language)
    # One entity per current-condition field plus one per active alert type.
    sensor_types = list(ec_data.conditions) + list(ec_data.alerts)
    add_entities([ECSensor(stype, ec_data) for stype in sensor_types], True)
class ECSensor(Entity):
    """Implementation of an Environment Canada sensor.

    Each instance exposes one field of the Environment Canada data
    (a current-condition value or an alert) as a Home Assistant sensor.
    """

    def __init__(self, sensor_type, ec_data):
        """Initialize the sensor.

        :param sensor_type: Key into ec_data.conditions / ec_data.alerts
            selecting which field this entity reports.
        :param ec_data: Shared ECData client instance.
        """
        self.sensor_type = sensor_type
        self.ec_data = ec_data
        # All of the following are populated on the first update() call.
        self._unique_id = None
        self._name = None
        self._state = None
        self._attr = None
        self._unit = None
    @property
    def unique_id(self) -> str:
        """Return the unique ID of the sensor."""
        return self._unique_id
    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name
    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state
    @property
    def device_state_attributes(self):
        """Return the state attributes of the device."""
        return self._attr
    @property
    def unit_of_measurement(self):
        """Return the units of measurement."""
        return self._unit
    def update(self):
        """Update current conditions.

        Refreshes the shared ECData client, then extracts this entity's
        field, truncating the state to Home Assistant's 255-character limit.
        """
        self.ec_data.update()
        # Fold alerts into the conditions dict so a single lookup works for
        # both kinds of sensor_type.
        self.ec_data.conditions.update(self.ec_data.alerts)
        conditions = self.ec_data.conditions
        metadata = self.ec_data.metadata
        sensor_data = conditions.get(self.sensor_type)
        self._unique_id = f"{metadata['location']}-{self.sensor_type}"
        self._attr = {}
        self._name = sensor_data.get("label")
        value = sensor_data.get("value")
        if isinstance(value, list):
            # Alert values arrive as a list of dicts; join their titles for
            # the state and their dates as an attribute.
            self._state = " | ".join([str(s.get("title")) for s in value])[:255]
            self._attr.update(
                {ATTR_TIME: " | ".join([str(s.get("date")) for s in value])}
            )
        elif self.sensor_type == "tendency":
            self._state = str(value).capitalize()
        elif value is not None and len(value) > 255:
            self._state = value[:255]
            _LOGGER.info("Value for %s truncated to 255 characters", self._unique_id)
        else:
            self._state = value
        # Temperature-like fields report Celsius even when the feed omits
        # an explicit unit (wind_chill / humidex).
        if sensor_data.get("unit") == "C" or self.sensor_type in [
            "wind_chill",
            "humidex",
        ]:
            self._unit = TEMP_CELSIUS
        else:
            self._unit = sensor_data.get("unit")
        # Timestamp format per the Environment Canada feed: YYYYMMDDHHMMSS.
        timestamp = metadata.get("timestamp")
        if timestamp:
            updated_utc = datetime.strptime(timestamp, "%Y%m%d%H%M%S").isoformat()
        else:
            updated_utc = None
        self._attr.update(
            {
                ATTR_ATTRIBUTION: CONF_ATTRIBUTION,
                ATTR_UPDATED: updated_utc,
                ATTR_LOCATION: metadata.get("location"),
                ATTR_STATION: metadata.get("station"),
            }
        )
|
tchellomello/home-assistant
|
homeassistant/components/environment_canada/sensor.py
|
Python
|
apache-2.0
| 4,749
|
"""Use serial protocol of Acer projector to obtain state of the projector."""
from __future__ import annotations
import logging
import re
from typing import Any
import serial
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import (
CONF_FILENAME,
CONF_NAME,
CONF_TIMEOUT,
STATE_OFF,
STATE_ON,
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import (
CMD_DICT,
CONF_WRITE_TIMEOUT,
DEFAULT_NAME,
DEFAULT_TIMEOUT,
DEFAULT_WRITE_TIMEOUT,
ECO_MODE,
ICON,
INPUT_SOURCE,
LAMP,
LAMP_HOURS,
)
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_FILENAME): cv.isdevice,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
vol.Optional(
CONF_WRITE_TIMEOUT, default=DEFAULT_WRITE_TIMEOUT
): cv.positive_int,
}
)
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType,
) -> None:
    """Connect with serial port and return Acer Projector."""
    projector = AcerSwitch(
        config[CONF_FILENAME],
        config[CONF_NAME],
        config[CONF_TIMEOUT],
        config[CONF_WRITE_TIMEOUT],
    )
    # update_before_add=True so the projector state is polled immediately.
    add_entities([projector], True)
class AcerSwitch(SwitchEntity):
    """Represents an Acer Projector as a switch.

    Talks to the projector over a serial device using Acer's ASCII
    command protocol; command strings are looked up in CMD_DICT.
    """

    _attr_icon = ICON

    def __init__(
        self,
        serial_port: str,
        name: str,
        timeout: int,
        write_timeout: int,
    ) -> None:
        """Init of the Acer projector.

        The serial port is opened eagerly here; _write_read() reopens it
        if it was closed after a communication error.
        """
        self.ser = serial.Serial(
            port=serial_port, timeout=timeout, write_timeout=write_timeout
        )
        self._serial_port = serial_port
        self._attr_name = name
        # Extra state attributes, refreshed on every update(); unknown
        # until the first successful poll.
        self._attributes = {
            LAMP_HOURS: STATE_UNKNOWN,
            INPUT_SOURCE: STATE_UNKNOWN,
            ECO_MODE: STATE_UNKNOWN,
        }

    def _write_read(self, msg: str) -> str:
        """Write to the projector and read the return.

        Returns "" when communication fails; the port is closed so the
        next call reopens it cleanly.
        """
        ret = ""
        # Sometimes the projector won't answer for no reason or the projector
        # was disconnected during runtime.
        # This way the projector can be reconnected and will still work
        try:
            if not self.ser.is_open:
                self.ser.open()
            self.ser.write(msg.encode("utf-8"))
            # Size is an experience value there is no real limit.
            # AFAIK there is no limit and no end character so we will usually
            # need to wait for timeout
            ret = self.ser.read_until(size=20).decode("utf-8")
        except serial.SerialException:
            _LOGGER.error("Problem communicating with %s", self._serial_port)
            self.ser.close()
        return ret

    def _write_read_format(self, msg: str) -> str:
        """Write msg, obtain answer and format output.

        Returns STATE_UNKNOWN when the reply does not match the framing
        below.
        """
        # answers are formatted as ***\answer\r***
        awns = self._write_read(msg)
        if match := re.search(r"\r(.+)\r", awns):
            return match.group(1)
        return STATE_UNKNOWN

    def update(self) -> None:
        """Get the latest state from the projector."""
        awns = self._write_read_format(CMD_DICT[LAMP])
        # "Lamp 1"/"Lamp 0" are the only replies that mark the device
        # available; anything else (timeout, garbage) flags unavailable.
        if awns == "Lamp 1":
            self._attr_is_on = True
            self._attr_available = True
        elif awns == "Lamp 0":
            self._attr_is_on = False
            self._attr_available = True
        else:
            self._attr_available = False
        # Refresh each extra attribute that has a known query command.
        for key in self._attributes:
            if msg := CMD_DICT.get(key):
                awns = self._write_read_format(msg)
                self._attributes[key] = awns
        self._attr_extra_state_attributes = self._attributes

    def turn_on(self, **kwargs: Any) -> None:
        """Turn the projector on."""
        msg = CMD_DICT[STATE_ON]
        self._write_read(msg)
        # Optimistic update; the next poll corrects it if needed.
        self._attr_is_on = True

    def turn_off(self, **kwargs: Any) -> None:
        """Turn the projector off."""
        msg = CMD_DICT[STATE_OFF]
        self._write_read(msg)
        # Optimistic update; the next poll corrects it if needed.
        self._attr_is_on = False
|
jawilson/home-assistant
|
homeassistant/components/acer_projector/switch.py
|
Python
|
apache-2.0
| 4,513
|
# =============================================================================
# Copyright (c) 2016, Cisco Systems, Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# =============================================================================
from schema.base import BaseMigrate
from database import DBSession
# ALTER statements applied by this migration: each adds a column to
# system_option (server base URL plus the LDAP authentication settings).
sql_statements = [
    'alter table system_option add base_url VARCHAR(100)',
    'alter table system_option add enable_ldap_auth BOOLEAN default 0',
    'alter table system_option add ldap_server_url VARCHAR(100)'
]
class SchemaMigrate(BaseMigrate):
    """Schema migration to version 1.

    Adds the base-URL and LDAP-authentication columns to system_option.
    """

    def __init__(self, version):
        BaseMigrate.__init__(self, version)

    def start(self):
        """Apply each ALTER TABLE statement, best-effort.

        Failures are ignored because a column may already exist from a
        previous (partial) run of this migration.
        """
        db_session = DBSession()
        for sql in sql_statements:
            try:
                db_session.execute(sql)
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt and
                # SystemExit still propagate; everything else is treated
                # as "column already exists" and skipped.
                pass
|
kstaniek/csm
|
csmserver/schema/migrate_to_version_1.py
|
Python
|
apache-2.0
| 2,082
|
import threading
import pytest
from tornado import ioloop, web
from dummyserver.server import (
SocketServerThread,
run_tornado_app,
run_loop_in_thread,
DEFAULT_CERTS,
HAS_IPV6,
)
from dummyserver.handlers import TestingApp
from dummyserver.proxy import ProxyHandler
def consume_socket(sock, chunks=65536):
    """Read from *sock* until a blank line ends the HTTP header block.

    :param sock: connected socket to read from.
    :param chunks: maximum number of bytes requested per recv() call.
    :returns: everything read, as a bytearray. Normally it ends with
        b"\\r\\n\\r\\n"; if the peer closes the connection first, returns
        whatever was received up to that point.
    """
    consumed = bytearray()
    while True:
        b = sock.recv(chunks)
        if not b:
            # Peer closed the connection before the terminator arrived.
            # Returning here avoids the busy-loop the original hit when
            # recv() kept yielding b"" on a closed socket.
            break
        consumed += b
        # Note: only the latest chunk is checked, matching the original
        # behavior (the terminator is assumed not to straddle chunks).
        if b.endswith(b"\r\n\r\n"):
            break
    return consumed
class SocketDummyServerTestCase(object):
    """Base class that runs a one-shot socket server for a single request.

    Subclasses pass ``_start_server`` a ``socket_handler`` callable which
    is executed in a background ``SocketServerThread``.
    """

    scheme = "http"
    host = "localhost"

    @classmethod
    def _start_server(cls, socket_handler):
        started = threading.Event()
        cls.server_thread = SocketServerThread(
            socket_handler=socket_handler, ready_event=started, host=cls.host
        )
        cls.server_thread.start()
        started.wait(5)
        if not started.is_set():
            raise Exception("most likely failed to start server")
        cls.port = cls.server_thread.port

    @classmethod
    def start_response_handler(cls, response, num=1, block_send=None):
        """Serve *response* verbatim to the next *num* connections."""
        ready_event = threading.Event()

        def socket_handler(listener):
            for _ in range(num):
                ready_event.set()
                conn = listener.accept()[0]
                consume_socket(conn)
                if block_send:
                    # Hold the response until the test releases us.
                    block_send.wait()
                    block_send.clear()
                conn.send(response)
                conn.close()

        cls._start_server(socket_handler)
        return ready_event

    @classmethod
    def start_basic_handler(cls, **kw):
        """Serve a minimal empty 200 response."""
        return cls.start_response_handler(
            b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n", **kw
        )

    @classmethod
    def teardown_class(cls):
        thread = getattr(cls, "server_thread", None)
        if thread is not None:
            thread.join(0.1)

    def assert_header_received(
        self, received_headers, header_name, expected_value=None
    ):
        """Assert *header_name* appears in the raw header lines.

        When *expected_value* is given, its value is checked as well.
        """
        wanted = header_name.encode("ascii")
        expected = None
        if expected_value is not None:
            expected = expected_value.encode("ascii")
        seen = []
        for raw in received_headers:
            key, value = raw.split(b": ")
            seen.append(key)
            if key == wanted and expected is not None:
                assert value == expected
        assert wanted in seen
class IPV4SocketDummyServerTestCase(SocketDummyServerTestCase):
    """Variant of SocketDummyServerTestCase pinned to IPv4 only."""

    @classmethod
    def _start_server(cls, socket_handler):
        started = threading.Event()
        thread = SocketServerThread(
            socket_handler=socket_handler, ready_event=started, host=cls.host
        )
        # Force the server socket onto IPv4 before the thread starts.
        thread.USE_IPV6 = False
        cls.server_thread = thread
        thread.start()
        started.wait(5)
        if not started.is_set():
            raise Exception("most likely failed to start server")
        cls.port = thread.port
class HTTPDummyServerTestCase(object):
    """A simple HTTP server that runs while your test class runs.

    Inherit from this class and a tornado-backed server starts when the
    tests run and shuts down when they complete. See the TestingApp in
    dummyserver/handlers.py for the requests the server understands.
    """

    scheme = "http"
    host = "localhost"
    host_alt = "127.0.0.1"  # Some tests need two hosts
    certs = DEFAULT_CERTS

    @classmethod
    def _start_server(cls):
        cls.io_loop = ioloop.IOLoop.current()
        application = web.Application([(r".*", TestingApp)])
        cls.server, cls.port = run_tornado_app(
            application, cls.io_loop, cls.certs, cls.scheme, cls.host
        )
        cls.server_thread = run_loop_in_thread(cls.io_loop)

    @classmethod
    def _stop_server(cls):
        # Stop the server, then the loop itself, then wait for the
        # background loop thread to drain.
        cls.io_loop.add_callback(cls.server.stop)
        cls.io_loop.add_callback(cls.io_loop.stop)
        cls.server_thread.join()

    @classmethod
    def setup_class(cls):
        cls._start_server()

    @classmethod
    def teardown_class(cls):
        cls._stop_server()
class HTTPSDummyServerTestCase(HTTPDummyServerTestCase):
    """Same as HTTPDummyServerTestCase, but serving over TLS."""

    scheme = "https"
    host = "localhost"
    certs = DEFAULT_CERTS
class HTTPDummyProxyTestCase(object):
    """Runs an HTTP server, an HTTPS server and an HTTP proxy together.

    All three share a single tornado IOLoop running in one background
    thread.
    """

    http_host = "localhost"
    http_host_alt = "127.0.0.1"
    https_host = "localhost"
    https_host_alt = "127.0.0.1"
    https_certs = DEFAULT_CERTS
    proxy_host = "localhost"
    proxy_host_alt = "127.0.0.1"

    @classmethod
    def setup_class(cls):
        cls.io_loop = ioloop.IOLoop.current()

        http_app = web.Application([(r".*", TestingApp)])
        cls.http_server, cls.http_port = run_tornado_app(
            http_app, cls.io_loop, None, "http", cls.http_host
        )

        # NOTE(review): the https server binds http_host, not https_host;
        # preserved as-is since both are "localhost" here.
        https_app = web.Application([(r".*", TestingApp)])
        cls.https_server, cls.https_port = run_tornado_app(
            https_app, cls.io_loop, cls.https_certs, "https", cls.http_host
        )

        proxy_app = web.Application([(r".*", ProxyHandler)])
        cls.proxy_server, cls.proxy_port = run_tornado_app(
            proxy_app, cls.io_loop, None, "http", cls.proxy_host
        )

        cls.server_thread = run_loop_in_thread(cls.io_loop)

    @classmethod
    def teardown_class(cls):
        # Stop every server before the loop, then join the loop thread.
        for server in (cls.http_server, cls.https_server, cls.proxy_server):
            cls.io_loop.add_callback(server.stop)
        cls.io_loop.add_callback(cls.io_loop.stop)
        cls.server_thread.join()
@pytest.mark.skipif(not HAS_IPV6, reason="IPv6 not available")
class IPv6HTTPDummyServerTestCase(HTTPDummyServerTestCase):
    """HTTPDummyServerTestCase bound to the IPv6 loopback address."""

    host = "::1"
@pytest.mark.skipif(not HAS_IPV6, reason="IPv6 not available")
class IPv6HTTPDummyProxyTestCase(HTTPDummyProxyTestCase):
    """HTTPDummyProxyTestCase with the proxy on the IPv6 loopback."""

    http_host = "localhost"
    http_host_alt = "127.0.0.1"
    https_host = "localhost"
    https_host_alt = "127.0.0.1"
    https_certs = DEFAULT_CERTS
    proxy_host = "::1"
    proxy_host_alt = "127.0.0.1"
|
kawamon/hue
|
desktop/core/ext-py/urllib3-1.25.8/dummyserver/testcase.py
|
Python
|
apache-2.0
| 6,185
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
INDEX_URL = reverse('horizon:project:data_processing.job_executions:index')
DETAILS_URL = reverse(
'horizon:project:data_processing.job_executions:details', args=['id'])
class DataProcessingJobExecutionTests(test.TestCase):
    """Panel tests for Sahara job executions: index, details and delete.

    Each test uses mox stubs: the expected api.sahara calls are recorded
    first, then ReplayAll() switches to replay mode before the view is
    exercised. Recording order matters, so statements must not be moved.
    """
    @test.create_stubs({api.sahara: ('job_execution_list',)})
    def test_index(self):
        # Record: the index view lists job executions exactly once.
        api.sahara.job_execution_list(IsA(http.HttpRequest), {}) \
            .AndReturn(self.job_executions.list())
        self.mox.ReplayAll()
        res = self.client.get(INDEX_URL)
        self.assertTemplateUsed(
            res, 'project/data_processing.job_executions/job_executions.html')
        self.assertContains(res, 'Executions')
    @test.create_stubs({api.sahara: ('job_execution_get',)})
    def test_details(self):
        # Record: the details view fetches one execution by id (unicode
        # because this codebase is Python 2).
        api.sahara.job_execution_get(IsA(http.HttpRequest), IsA(unicode)) \
            .AndReturn(self.job_executions.list()[0])
        self.mox.ReplayAll()
        res = self.client.get(DETAILS_URL)
        self.assertTemplateUsed(
            res, 'project/data_processing.job_executions/details.html')
        self.assertContains(res, 'RUNNING')
    @test.create_stubs({api.sahara: ('job_execution_list',
                                     'job_execution_delete')})
    def test_delete(self):
        job_exec = self.job_executions.first()
        # Record: the table view re-lists executions, then deletes one.
        api.sahara.job_execution_list(IsA(http.HttpRequest), {}) \
            .AndReturn(self.job_executions.list())
        api.sahara.job_execution_delete(IsA(http.HttpRequest), job_exec.id)
        self.mox.ReplayAll()
        # Simulate the table-action POST for deleting this row.
        form_data = {'action': 'job_executions__delete__%s' % job_exec.id}
        res = self.client.post(INDEX_URL, form_data)
        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
        self.assertMessageCount(success=1)
|
orbitfp7/horizon
|
openstack_dashboard/dashboards/project/data_processing/job_executions/tests.py
|
Python
|
apache-2.0
| 2,525
|
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared testing utilities."""
# Avoid the grpc and google.cloud.grpc collision.
from __future__ import absolute_import
class _Monkey(object):
    """Context manager that temporarily replaces attributes on *module*.

    On exit, every patched attribute is restored to its original value.
    """

    def __init__(self, module, **kw):
        self.module = module
        if not kw:  # pragma: NO COVER
            raise ValueError('_Monkey was used with nothing to monkey-patch')
        # Snapshot all originals first, then patch; an AttributeError on
        # a missing name therefore leaves the module untouched.
        self.to_restore = {}
        for key in kw:
            self.to_restore[key] = getattr(module, key)
        for key, value in kw.items():
            setattr(module, key, value)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        for key, value in self.to_restore.items():
            setattr(self.module, key, value)
class _NamedTemporaryFile(object):
    """Context manager providing a closed temp-file path, removed on exit."""

    def __init__(self, suffix=''):
        import os
        import tempfile
        # mkstemp returns an open descriptor; close it right away since
        # callers only need the path.
        handle, self.name = tempfile.mkstemp(suffix=suffix)
        os.close(handle)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        import os
        os.remove(self.name)
def _tempdir_maker():
    """Build the context-manager factory bound below as ``_tempdir``."""
    import contextlib
    import shutil
    import tempfile

    @contextlib.contextmanager
    def _tempdir_mgr():
        """Yield a fresh temporary directory, removing it afterwards."""
        temp_dir = tempfile.mkdtemp()
        try:
            yield temp_dir
        finally:
            # Remove the directory even when the with-body raises; the
            # original skipped cleanup on error and leaked the directory.
            shutil.rmtree(temp_dir)
    return _tempdir_mgr

_tempdir = _tempdir_maker()
del _tempdir_maker
class _GAXBaseAPI(object):
    """Fake GAX API object: keyword arguments become attributes.

    Also provides helpers that manufacture gRPC error exceptions for
    tests.
    """

    _random_gax_error = False

    def __init__(self, **kw):
        self.__dict__.update(kw)

    def _make_grpc_error(self, status_code):
        # Imports deferred so importing this module never requires grpc.
        from grpc._channel import _RPCState
        from google.cloud.exceptions import GrpcRendezvous

        state = _RPCState((), None, None, status_code, 'Some error details.')
        return GrpcRendezvous(state, None, None, None)

    def _make_grpc_not_found(self):
        from grpc import StatusCode
        return self._make_grpc_error(StatusCode.NOT_FOUND)

    def _make_grpc_failed_precondition(self):
        from grpc import StatusCode
        return self._make_grpc_error(StatusCode.FAILED_PRECONDITION)

    def _make_grpc_deadline_exceeded(self):
        from grpc import StatusCode
        return self._make_grpc_error(StatusCode.DEADLINE_EXCEEDED)
class _GAXPageIterator(object):
    """Fake GAX page iterator yielding the pre-supplied *pages*."""

    def __init__(self, *pages, **kwargs):
        self._pages = iter(pages)
        self.page_token = kwargs.get('page_token')

    def next(self):
        """Return the next page; raises StopIteration when exhausted."""
        import six
        return six.next(self._pages)

    # Alias for the Python 3 iterator protocol.
    __next__ = next
|
quom/google-cloud-python
|
core/google/cloud/_testing.py
|
Python
|
apache-2.0
| 3,121
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Save Rendered Template Fields"""
import os
from typing import Optional
import sqlalchemy_jsonfield
from sqlalchemy import Column, String, and_, not_, tuple_
from sqlalchemy.orm import Session
from airflow.configuration import conf
from airflow.models.base import ID_LEN, Base
from airflow.models.taskinstance import TaskInstance
from airflow.serialization.helpers import serialize_template_field
from airflow.settings import json
from airflow.utils.session import provide_session
from airflow.utils.sqlalchemy import UtcDateTime
class RenderedTaskInstanceFields(Base):
    """Save Rendered Template Fields.

    One row per (dag_id, task_id, execution_date) storing the task's
    rendered template fields -- and, when running as a K8s executor pod,
    the rendered pod yaml -- so the UI can display them without
    re-rendering.
    """

    __tablename__ = "rendered_task_instance_fields"

    dag_id = Column(String(ID_LEN), primary_key=True)
    task_id = Column(String(ID_LEN), primary_key=True)
    execution_date = Column(UtcDateTime, primary_key=True)
    rendered_fields = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False)
    k8s_pod_yaml = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True)

    def __init__(self, ti: TaskInstance, render_templates=True):
        """Capture the (optionally re-rendered) template fields of *ti*.

        :param ti: Task Instance whose rendered fields are stored
        :param render_templates: when True, render templates before capture
        """
        self.dag_id = ti.dag_id
        self.task_id = ti.task_id
        self.task = ti.task
        self.execution_date = ti.execution_date
        self.ti = ti
        if render_templates:
            ti.render_templates()
        # Only K8s executor pods have a pod spec worth persisting.
        if os.environ.get("AIRFLOW_IS_K8S_EXECUTOR_POD", None):
            self.k8s_pod_yaml = ti.render_k8s_pod_yaml()
        self.rendered_fields = {
            field: serialize_template_field(getattr(self.task, field)) for field in self.task.template_fields
        }
        self._redact()

    def __repr__(self):
        # Bug fix: the closing ">" was missing from the repr string.
        return f"<{self.__class__.__name__}: {self.dag_id}.{self.task_id} {self.execution_date}>"

    def _redact(self):
        """Mask secrets in the stored fields before they hit the database."""
        from airflow.utils.log.secrets_masker import redact

        if self.k8s_pod_yaml:
            self.k8s_pod_yaml = redact(self.k8s_pod_yaml)

        for field, rendered in self.rendered_fields.items():
            self.rendered_fields[field] = redact(rendered, field)

    @classmethod
    @provide_session
    def get_templated_fields(cls, ti: TaskInstance, session: Session = None) -> Optional[dict]:
        """
        Get templated field for a TaskInstance from the RenderedTaskInstanceFields
        table.

        :param ti: Task Instance
        :param session: SqlAlchemy Session
        :return: Rendered Templated TI field, or None when no row exists
        """
        result = (
            session.query(cls.rendered_fields)
            .filter(
                cls.dag_id == ti.dag_id, cls.task_id == ti.task_id, cls.execution_date == ti.execution_date
            )
            .one_or_none()
        )

        if result:
            rendered_fields = result.rendered_fields
            return rendered_fields
        else:
            return None

    @classmethod
    @provide_session
    def get_k8s_pod_yaml(cls, ti: TaskInstance, session: Session = None) -> Optional[dict]:
        """
        Get rendered Kubernetes Pod Yaml for a TaskInstance from the RenderedTaskInstanceFields
        table.

        :param ti: Task Instance
        :param session: SqlAlchemy Session
        :return: Kubernetes Pod Yaml, or None when no row exists
        """
        result = (
            session.query(cls.k8s_pod_yaml)
            .filter(
                cls.dag_id == ti.dag_id, cls.task_id == ti.task_id, cls.execution_date == ti.execution_date
            )
            .one_or_none()
        )
        return result.k8s_pod_yaml if result else None

    @provide_session
    def write(self, session: Session = None):
        """Write instance to database

        :param session: SqlAlchemy Session
        """
        session.merge(self)

    @classmethod
    @provide_session
    def delete_old_records(
        cls,
        task_id: str,
        dag_id: str,
        num_to_keep=conf.getint("core", "max_num_rendered_ti_fields_per_task", fallback=0),
        session: Session = None,
    ):
        """
        Keep only Last X (num_to_keep) number of records for a task by deleting others

        :param task_id: Task ID
        :param dag_id: Dag ID
        :param num_to_keep: Number of Records to keep
        :param session: SqlAlchemy Session
        """
        # num_to_keep <= 0 means "retention disabled": keep everything.
        if num_to_keep <= 0:
            return

        tis_to_keep_query = (
            session.query(cls.dag_id, cls.task_id, cls.execution_date)
            .filter(cls.dag_id == dag_id, cls.task_id == task_id)
            .order_by(cls.execution_date.desc())
            .limit(num_to_keep)
        )

        if session.bind.dialect.name in ["postgresql", "sqlite"]:
            # Fetch Top X records given dag_id & task_id ordered by Execution Date
            subq1 = tis_to_keep_query.subquery('subq1')

            session.query(cls).filter(
                cls.dag_id == dag_id,
                cls.task_id == task_id,
                tuple_(cls.dag_id, cls.task_id, cls.execution_date).notin_(subq1),
            ).delete(synchronize_session=False)
        elif session.bind.dialect.name in ["mysql"]:
            # Fetch Top X records given dag_id & task_id ordered by Execution Date
            subq1 = tis_to_keep_query.subquery('subq1')

            # Second Subquery
            # Workaround for MySQL Limitation (https://stackoverflow.com/a/19344141/5691525)
            # Limitation: This version of MySQL does not yet support
            # LIMIT & IN/ALL/ANY/SOME subquery
            subq2 = session.query(subq1.c.dag_id, subq1.c.task_id, subq1.c.execution_date).subquery('subq2')

            session.query(cls).filter(
                cls.dag_id == dag_id,
                cls.task_id == task_id,
                tuple_(cls.dag_id, cls.task_id, cls.execution_date).notin_(subq2),
            ).delete(synchronize_session=False)
        else:
            # Fetch Top X records given dag_id & task_id ordered by Execution Date
            tis_to_keep = tis_to_keep_query.all()

            filter_tis = [
                not_(
                    and_(
                        cls.dag_id == ti.dag_id,
                        cls.task_id == ti.task_id,
                        cls.execution_date == ti.execution_date,
                    )
                )
                for ti in tis_to_keep
            ]

            session.query(cls).filter(and_(*filter_tis)).delete(synchronize_session=False)
|
nathanielvarona/airflow
|
airflow/models/renderedtifields.py
|
Python
|
apache-2.0
| 7,099
|
#!/usr/bin/python2.4
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This code is not supported by Google
#
"""urlstats.py analyzes a data file and generates a set of reports.
The file should be exported from the "Status and Reports > Export All URLs"
page on a Google Search Appliance (version 6.0 & 6.2). The exported URL file
contains the following fields separated by tab:
url
crawl_frequency
state
currently_inflight
error_count
contentfeed_datasource_fingerprint
in_index
locked
crawled_securely
last_crawled
content_type
contentsize_in_bytes
content_checksum
fetchtime_in_ms
"""
import getopt
import sys
# command line options
# Bug fix: the short-option string was 'h:', which told getopt that -h
# requires an argument and so broke plain "urlstats.py -h"; help takes
# no argument.
OPTIONS = 'h'
LONG_OPTIONS = ['help',
                'listurlslargerthan=',
                'all',
                'state',
                'server',
                'size',
                'debug']
# add a key value pair for each type of report
REPORT_CFG = {'reportAll': True,
              'reportState': False,
              'reportServer': False,
              'reportSize': False,
              'listurlslargerthan': -1
             }
# set by --debug; makes MyDebug() print diagnostics
DEBUG_MODE = False
def main():
  """Parse command-line flags into REPORT_CFG, then run the report.

  The optional positional argument is the exported URL file; defaults
  to 'all_urls' in the working directory.
  """
  global DEBUG_MODE
  try:
    opts, args = getopt.getopt(sys.argv[1:], OPTIONS, LONG_OPTIONS)
  except getopt.GetoptError, err:
    # print help information and exit:
    print str(err)
    Usage()
    sys.exit(2)
  # Any explicit report flag turns off the default "all reports" mode.
  for o, a in opts:
    if o == '--listurlslargerthan':
      REPORT_CFG['reportAll'] = False
      REPORT_CFG['listurlslargerthan'] = int(a)  # output the large files
    elif o == '--state':
      REPORT_CFG['reportAll'] = False
      REPORT_CFG['reportState'] = True  # generate a report based on URL state
    elif o == '--server':
      REPORT_CFG['reportAll'] = False
      REPORT_CFG['reportServer'] = True  # list of URL grouped by server
    elif o == '--size':
      REPORT_CFG['reportAll'] = False
      REPORT_CFG['reportSize'] = True  # generate a report based on URL size
    elif o == '--debug':
      DEBUG_MODE = True
    elif o in ('-h', '--help'):
      Usage()
      sys.exit()
    else:
      assert False, 'unhandled option'
      sys.exit()
  try:
    log_file = args[0]  # The log file to be analyzed
  except IndexError:
    # No positional argument given: fall back to the default file name.
    log_file = 'all_urls'
  GenReport(log_file)
def GenReport(log_file):
"""Read each line of the log file and generate reports."""
total_url = 0
states = dict()
servers = dict()
# The content sizes in KB that we want to report on
# The for loop below assumes that this list is in ascending order
sizes_kb = [4, 8, 16, 32, 64, 128, 256, 512, 1024, 2*1024, 4*1024, 32*1024]
# initialize the content size map. it is a dictionary with
# key: content size threshold (up to) # value: number of documents
content_size_map = dict([(x*1024, 0) for x in sizes_kb])
# files that are larger than all the value in sizes_kb are considered
# very large
very_large_files = 0
try:
f = open(log_file, 'r')
except IOError:
print 'unable to open file %s' % file
Usage()
sys.exit()
# The url file can be large (>1GB), so we don't want to read
# the entire file into memory. We do more I/O and keep memory
# footprint small.
for line in f:
fields = line.split('\t')
# collect url state information
try:
state = fields[2].strip()
except IndexError, e:
print 'IndexError:', e
else:
total_url += 1
if state in states:
states[state] += 1
else:
states.update({state: 1})
# collect information to group urls by server, prot://host:[port]/
try:
# assume url format protocol://host/path
url_elems = fields[0].split('/', 3)
server = (url_elems[0], url_elems[2])
except IndexError, e:
print 'IndexError:', e
else:
if server in servers:
servers[server] += 1
else:
servers.update({server: 1})
# collect content size in byte
try:
size = int(fields[11])
except ValueError, e:
# We encountered some value that can not be converted to a number.
# Most likely it is the header line, but could be something else.
# We will just skip it unless we are in debug mode.
MyDebug('Unable to convert the string to a number. ValueError:')
MyDebug(e)
except IndexError, e:
# We encountered a line that contains less than 12 fields
# We will just skip it unless we are in debug mode.
MyDebug('Encountered a bad line:')
MyDebug(line)
MyDebug('IndexError:')
MyDebug(e)
else:
for size_kb in sizes_kb:
if size < size_kb*1024:
content_size_map[size_kb*1024] += 1
break
else:
# The for loop fell through, the content size is
# greater than all the size thresholds in the list
# The file is considered very large
very_large_files += 1
if (REPORT_CFG['listurlslargerthan'] != -1 and
size > REPORT_CFG['listurlslargerthan']):
print '%16s\t%s' % (line.split('\t')[11], line.split('\t')[0])
# TODO(Jason): collect extension info
# TODO(Jason): collect content_type info
# TODO(Jason): collect URL length info, such as longer than a threshold
# or way longer than average, and print the extra long urls
f.close()
# remove the header
try:
del states['state']
except KeyError, e:
MyDebug('Did not find a header line.')
else:
total_url -= 1 # removed the header, so decrement by one
# build a list, reversely sorted by number of URLs
states_sorted = states.items()
states_sorted.sort(key=lambda x: (x[1], x[0]), reverse=True)
# build a list, reversely sorted by number of URLs
servers_sorted = servers.items()
servers_sorted.sort(key=lambda x: (x[1], x[0]), reverse=True)
# build a list, reversely sorted by content size threshold
content_size_map_sorted = content_size_map.items()
content_size_map_sorted.sort(key=lambda x: (x[0], x[1]), reverse=True)
# print report
if (REPORT_CFG['reportState'] or
REPORT_CFG['reportServer'] or
REPORT_CFG['reportSize'] or
REPORT_CFG['reportAll']):
PrintSeparatorLine()
PrintTwoCol('Total URLs the GSA discovered', total_url)
PrintSeparatorLine()
# generate a summary of URL state
if REPORT_CFG['reportState'] or REPORT_CFG['reportAll']:
PrintTwoCol('NUMBER OF URLS', 'URL STATE')
PrintTwoCol ('--------------------', '---------------------')
for (state, count) in states_sorted:
PrintTwoCol(str(count).rjust(16), state)
PrintSeparatorLine()
# generate a summary of number of URLs per server
if REPORT_CFG['reportServer'] or REPORT_CFG['reportAll']:
PrintTwoCol('NUMBER OF URLS', 'SERVERS (total: %i)' % len(servers_sorted))
PrintTwoCol ('--------------------', '---------------------')
for (server, count) in servers_sorted:
PrintTwoCol(str(count).rjust(16), '%s//%s' % server)
PrintSeparatorLine()
# generate a summary of URL size
if REPORT_CFG['reportSize'] or REPORT_CFG['reportAll']:
PrintTwoCol('CONTENT SIZE (UP TO)', 'NUMBER OF URLS')
PrintTwoCol ('--------------------', '---------------------')
PrintTwoCol('32MB+', str(very_large_files).rjust(8))
for (size, count) in content_size_map_sorted:
PrintTwoCol(str(size).rjust(16), str(count).rjust(8))
# TODO(Jason): generate a summary of various extensions
# TODO(Jaosn): generate a summary of content_type
# END of GenReport
def MyDebug(s):
  """Print s only when --debug was given on the command line."""
  if DEBUG_MODE:
    print s
def PrintSeparatorLine():
  """Print a row of asterisks separating report sections."""
  print '*********************************************'
def PrintTwoCol(col1, col2):
  """Print col1 right-aligned in a 16-wide column, a tab, then col2."""
  print '%16s\t%s' % (col1, col2)
def Usage():
  """Print the help message describing flags and usage examples."""
  print """
  Usage: urlstats.py [--state|size|listurlslargerthan|debug][FILE]
  Generate a report from FILE, which is the file exported from the
  "Status and Reports > Export All URLs" page in the Admin Console
  on a Google Search Appliance.
  Examples:
  1. To read the input file named all_urls from working directory and generate
     all the reports, but not list the large URLs.
     urlstats.py
  2. To do the same things as example one but read from a different file
     urlstats.py my_url_file.txt
  3. To list URLs larger than 1000 bytes
     urlstats.py --listurlslargerthan=1000
  4. Only print a report about URL state
     urlstats.py --state
  """
if __name__ == '__main__':  # If we're being launched as a standalone app...
  main()
|
groschovskiy/gsa-admin-toolkit
|
urlstats.py
|
Python
|
apache-2.0
| 8,994
|
import datetime
import sys
import threading
import rollbar
from django.conf import settings
from django.core.signals import request_finished
threadlocal = threading.local()
def process(queue):
    """Execute every deferred (func, args, kwargs) task in *queue*.

    In DEBUG/STAGING the run is timed and task exceptions propagate; in
    production, exceptions are reported to rollbar instead.

    NOTE(review): queue.pop() takes from the tail, so tasks execute in
    reverse push order (LIFO) — confirm this is intentional.
    """
    if settings.DEBUG or settings.STAGING:
        started = datetime.datetime.now()
        print('Async tasks: {} to process'.format(len(queue)))
    while queue:
        func, args, kwargs = queue.pop()
        try:
            func(*args, **kwargs)
        except Exception as exc:
            if settings.DEBUG or settings.STAGING:
                raise exc
            rollbar.report_exc_info(sys.exc_info())
    if settings.DEBUG or settings.STAGING:
        print('Completed in {}'.format(datetime.datetime.now() - started))
def get_queue():
    """Return the current thread's pending-task list, creating it lazily."""
    queue = getattr(threadlocal, 'ppqueue', None)
    if queue is None:
        queue = []
        threadlocal.ppqueue = queue
    return queue
def push_task(func, args, kwargs):
    """Queue *func* to be executed once the current request finishes."""
    queue = get_queue()
    queue.append((func, args, kwargs))
def after_request(sender, **kwargs):
    """Signal handler: hand the accumulated task queue to a worker thread."""
    pending = get_queue()
    if not pending:
        return
    worker = threading.Thread(target=process, args=(pending, ))
    worker.start()
    # Install a fresh queue for the next request on this thread.
    threadlocal.ppqueue = []
request_finished.connect(after_request)
def run_after_request(func):
    """Decorator: defer calls to *func* until the request has finished."""
    def wrap(*args, **kwargs):
        # Record the call; process() will execute it post-request.
        push_task(func, args, kwargs)
    return wrap
|
AlmostBetterNetwork/podmaster-host
|
pinecast/post_processing.py
|
Python
|
apache-2.0
| 1,349
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.test.client import Client
from django.http import HttpRequest
from django.contrib.auth import SESSION_KEY
from tardis.tardis_portal.models import User
from tardis.tardis_portal.models import UserProfile
from tardis.tardis_portal.auth.interfaces import GroupProvider
class MockSettings(object):
    """Minimal stand-in for django settings with empty provider tuples."""

    def __init__(self):
        empty = ()
        self.AUTH_PROVIDERS = empty
        self.USER_PROVIDERS = empty
        self.GROUP_PROVIDERS = empty
class MockGroupProvider(GroupProvider):
    """In-memory group provider used to exercise the auth service."""

    def __init__(self):
        self.name = u'mockdb'
        self.groups = [
            {"name": "Group 456", 'id': '2', "members": [u'user1', u'user3']},
            {"name": 'Group 123', 'id': '1', 'members': [u'user1', u'user2']},
            {"name": 'Super Group', 'id': '3', 'members': [u'Group 123', u'user2']},
        ]

    def getGroups(self, user):
        # Usernames arrive as "<provider>_<name>"; match on the bare name.
        bare_name = str(user).split("_")[1]
        for group in self.groups:
            if bare_name in group['members']:
                yield group['id']

    def getGroupById(self, id):
        """Unused by these tests."""
        pass

    def searchGroups(self, **filter):
        # Yield once per (group, matching filter key) pair, mirroring the
        # original nested-loop behavior; members are stripped from results.
        for group in self.groups:
            for key, needle in filter.items():
                if key in group and needle in group[key]:
                    yield {k: v for k, v in group.items() if k != 'members'}

    def getGroupsForEntity(self, id):
        for group in self.groups:
            if id in group['members']:
                yield {k: v for k, v in group.items() if k != 'members'}
class MockRequest(HttpRequest):
    """HttpRequest with a helper for injecting POST fields in tests."""

    def __init__(self):
        super(MockRequest, self).__init__()

    def setPost(self, field, value):
        # Directly mutate the POST QueryDict for the test scenario.
        self.POST[field] = value
class MockAuthProvider():
    """Authenticates by mapping the POSTed username onto a mockdb_* User."""

    def authenticate(self, request):
        username = '%s_%s' % ('mockdb', request.POST['username'])
        return User.objects.get(username=username)
class AuthServiceTestCase(TestCase):
    """Exercise the pluggable AuthService: initialisation, group lookup,
    group search and request authentication against the mock providers
    defined above.
    """
    urls = 'tardis.tardis_portal.tests.urls'

    def setUp(self):
        # Create three users in the "mockdb" namespace matching the
        # membership lists in MockGroupProvider.
        from django.contrib.auth.models import User
        self.user1 = User.objects.create_user('mockdb_user1', '', 'secret')
        self.user2 = User.objects.create_user('mockdb_user2', '', 'secret')
        self.user3 = User.objects.create_user('mockdb_user3', '', 'secret')
        self.userProfile1 = self.user1.userprofile
        self.userProfile2 = self.user2.userprofile
        self.userProfile3 = self.user3.userprofile
        from tardis.tardis_portal.auth import AuthService, auth_service
        s = MockSettings()
        s.GROUP_PROVIDERS = \
            ('tardis.tardis_portal.tests.test_authservice.MockGroupProvider',)
        a = AuthService(settings=s)
        a._manual_init()
        # Remember the singleton's real providers so tearDown can restore them.
        self._auth_service_group_providers = auth_service._group_providers
        # add the local group provider to the singleton auth_service
        auth_service._group_providers = a._group_providers

    def tearDown(self):
        self.user1.delete()
        self.user2.delete()
        self.user3.delete()
        # Restore the singleton auth_service to its pre-test providers.
        from tardis.tardis_portal.auth import auth_service
        auth_service._group_providers = self._auth_service_group_providers
        auth_service._manual_init()

    def testInitialisation(self):
        """AuthService registers exactly the providers named in settings."""
        from tardis.tardis_portal.auth import AuthService
        s = MockSettings()
        s.USER_PROVIDERS = \
            ('tardis.tardis_portal.auth.localdb_auth.DjangoUserProvider',)
        s.GROUP_PROVIDERS = \
            ('tardis.tardis_portal.auth.localdb_auth.DjangoGroupProvider',)
        a = AuthService(settings=s)
        a._manual_init()
        self.assertEqual(len(a._user_providers), 1)
        self.assertEqual(len(a._group_providers), 1)

    def testGroupProvider(self):
        """Logged-in users see the groups the mock provider reports."""
        c = Client()
        login = c.login(username='mockdb_user1', password='secret')
        self.assertTrue(login)
        # assertIn replaces the deprecated assert_ alias, which was
        # removed from unittest in Python 3.12.
        self.assertIn(SESSION_KEY, c.session)
        r = str(c.get('/test/groups/'))
        self.assertEqual(r.count('mockdb'), 2)
        self.assertTrue(',1)' in r)
        self.assertTrue(',2)' in r)
        login = c.login(username='mockdb_user2', password='secret')
        self.assertTrue(login)
        self.assertIn(SESSION_KEY, c.session)
        r = str(c.get('/test/groups/'))
        self.assertEqual(r.count('mockdb'), 2, r)
        self.assertTrue(',1)' in r)
        self.assertTrue(',3)' in r)

    def testGroupSearch(self):
        """searchGroups supports substring match, max_results and sort_by."""
        from tardis.tardis_portal.auth import AuthService
        s = MockSettings()
        s.GROUP_PROVIDERS = \
            ('tardis.tardis_portal.tests.test_authservice.MockGroupProvider',)
        a = AuthService(settings=s)
        a._manual_init()
        # check the correct group provider is registered
        self.assertEqual(len(a._group_providers), 1)
        # test searching for groups by substring
        self.assertEqual(len(a.searchGroups(name='Group')), 3)
        self.assertEqual(len(a.searchGroups(name='123')), 1)
        self.assertEqual(a.searchGroups(name='123')[0]['id'], '1')
        self.assertEqual(a.searchGroups(name='123')[0]['pluginname'], 'mockdb')
        # test limiting the number of results
        self.assertEqual(len(a.searchGroups(name='Group', max_results=1)), 1)
        # test sorting the result
        self.assertEqual(a.searchGroups(name='Group', sort_by='name')[0]['id'],
                         '1')

    def testGetGroupsForEntity(self):
        """getGroupsForEntity resolves memberships for users and groups."""
        from tardis.tardis_portal.auth import AuthService
        s = MockSettings()
        s.GROUP_PROVIDERS = \
            ('tardis.tardis_portal.tests.test_authservice.MockGroupProvider',)
        a = AuthService(settings=s)
        a._manual_init()
        # check the correct group provider is registered
        self.assertEqual(len(a._group_providers), 1)
        self.assertEqual(len([g for g in a.getGroupsForEntity('user1')]),
                         2)
        self.assertEqual(len([g for g in a.getGroupsForEntity('Group 123')]),
                         1)

    def testAuthenticate(self):
        """A configured auth provider resolves a request to the right User."""
        from tardis.tardis_portal.auth import AuthService
        s = MockSettings()
        s.USER_PROVIDERS = ()
        s.GROUP_PROVIDERS = ()
        s.AUTH_PROVIDERS = (('mockauth', 'Mock Auth',
            'tardis.tardis_portal.tests.test_authservice.MockAuthProvider'),)
        a = AuthService(settings=s)
        request = MockRequest()
        request.setPost('username', 'user1')
        request.setPost('authMethod', 'mockdb')
        user = a.authenticate(authMethod='mockauth', request=request)
        realUser = User.objects.get(username='mockdb_user1')
        self.assertEqual(user, realUser)
|
pansapiens/mytardis
|
tardis/tardis_portal/tests/test_authservice.py
|
Python
|
bsd-3-clause
| 6,778
|
# Copyright 2010 Google Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Tests of resumable downloads.
"""
import errno
import os
import re
import boto
from boto.s3.resumable_download_handler import get_cur_file_size
from boto.s3.resumable_download_handler import ResumableDownloadHandler
from boto.exception import ResumableTransferDisposition
from boto.exception import ResumableDownloadException
from .cb_test_harness import CallbackTestHarness
from tests.integration.gs.testcase import GSTestCase
SMALL_KEY_SIZE = 2 * 1024 # 2 KB.
LARGE_KEY_SIZE = 500 * 1024 # 500 KB.
class ResumableDownloadTests(GSTestCase):
    """Resumable download test suite."""

    def make_small_key(self):
        """Create a SMALL_KEY_SIZE key of random bytes; return (data, key)."""
        small_src_key_as_string = os.urandom(SMALL_KEY_SIZE)
        small_src_key = self._MakeKey(data=small_src_key_as_string)
        return small_src_key_as_string, small_src_key

    def make_tracker_file(self, tmpdir=None):
        """Return a tracker-file path inside tmpdir (created if omitted)."""
        if not tmpdir:
            tmpdir = self._MakeTempDir()
        tracker_file = os.path.join(tmpdir, 'tracker')
        return tracker_file

    def make_dst_fp(self, tmpdir=None):
        """Open and return a destination file for a download.

        Opened in binary mode: the key contents are raw bytes
        (os.urandom), which a text-mode file would reject on Python 3.
        """
        if not tmpdir:
            tmpdir = self._MakeTempDir()
        dst_file = os.path.join(tmpdir, 'dstfile')
        return open(dst_file, 'wb')

    def test_non_resumable_download(self):
        """
        Tests that non-resumable downloads work
        """
        dst_fp = self.make_dst_fp()
        small_src_key_as_string, small_src_key = self.make_small_key()
        small_src_key.get_contents_to_file(dst_fp)
        self.assertEqual(SMALL_KEY_SIZE,
                         get_cur_file_size(dst_fp))
        self.assertEqual(small_src_key_as_string,
                         small_src_key.get_contents_as_string())

    def test_download_without_persistent_tracker(self):
        """
        Tests a single resumable download, with no tracker persistence
        """
        res_download_handler = ResumableDownloadHandler()
        dst_fp = self.make_dst_fp()
        small_src_key_as_string, small_src_key = self.make_small_key()
        small_src_key.get_contents_to_file(
            dst_fp, res_download_handler=res_download_handler)
        self.assertEqual(SMALL_KEY_SIZE,
                         get_cur_file_size(dst_fp))
        self.assertEqual(small_src_key_as_string,
                         small_src_key.get_contents_as_string())

    def test_failed_download_with_persistent_tracker(self):
        """
        Tests that failed resumable download leaves a correct tracker file
        """
        harness = CallbackTestHarness()
        tmpdir = self._MakeTempDir()
        tracker_file_name = self.make_tracker_file(tmpdir)
        dst_fp = self.make_dst_fp(tmpdir)
        res_download_handler = ResumableDownloadHandler(
            tracker_file_name=tracker_file_name, num_retries=0)
        small_src_key_as_string, small_src_key = self.make_small_key()
        try:
            small_src_key.get_contents_to_file(
                dst_fp, cb=harness.call,
                res_download_handler=res_download_handler)
            self.fail('Did not get expected ResumableDownloadException')
        except ResumableDownloadException as e:
            # We'll get a ResumableDownloadException at this point because
            # of CallbackTestHarness (above). Check that the tracker file was
            # created correctly.
            self.assertEqual(e.disposition,
                             ResumableTransferDisposition.ABORT_CUR_PROCESS)
            self.assertTrue(os.path.exists(tracker_file_name))
            # Use a context manager so the tracker file handle is not leaked.
            with open(tracker_file_name) as f:
                etag_line = f.readline()
            self.assertEqual(etag_line.rstrip('\n'),
                             small_src_key.etag.strip('"\''))

    def test_retryable_exception_recovery(self):
        """
        Tests handling of a retryable exception
        """
        # Test one of the RETRYABLE_EXCEPTIONS.
        exception = ResumableDownloadHandler.RETRYABLE_EXCEPTIONS[0]
        harness = CallbackTestHarness(exception=exception)
        res_download_handler = ResumableDownloadHandler(num_retries=1)
        dst_fp = self.make_dst_fp()
        small_src_key_as_string, small_src_key = self.make_small_key()
        small_src_key.get_contents_to_file(
            dst_fp, cb=harness.call,
            res_download_handler=res_download_handler)
        # Ensure downloaded object has correct content.
        self.assertEqual(SMALL_KEY_SIZE,
                         get_cur_file_size(dst_fp))
        self.assertEqual(small_src_key_as_string,
                         small_src_key.get_contents_as_string())

    def test_broken_pipe_recovery(self):
        """
        Tests handling of a Broken Pipe (which interacts with an httplib bug)
        """
        exception = IOError(errno.EPIPE, "Broken pipe")
        harness = CallbackTestHarness(exception=exception)
        res_download_handler = ResumableDownloadHandler(num_retries=1)
        dst_fp = self.make_dst_fp()
        small_src_key_as_string, small_src_key = self.make_small_key()
        small_src_key.get_contents_to_file(
            dst_fp, cb=harness.call,
            res_download_handler=res_download_handler)
        # Ensure downloaded object has correct content.
        self.assertEqual(SMALL_KEY_SIZE,
                         get_cur_file_size(dst_fp))
        self.assertEqual(small_src_key_as_string,
                         small_src_key.get_contents_as_string())

    def test_non_retryable_exception_handling(self):
        """
        Tests resumable download that fails with a non-retryable exception
        """
        harness = CallbackTestHarness(
            exception=OSError(errno.EACCES, 'Permission denied'))
        res_download_handler = ResumableDownloadHandler(num_retries=1)
        dst_fp = self.make_dst_fp()
        small_src_key_as_string, small_src_key = self.make_small_key()
        try:
            small_src_key.get_contents_to_file(
                dst_fp, cb=harness.call,
                res_download_handler=res_download_handler)
            self.fail('Did not get expected OSError')
        except OSError as e:
            # Ensure the error was re-raised.
            self.assertEqual(e.errno, 13)

    def test_failed_and_restarted_download_with_persistent_tracker(self):
        """
        Tests resumable download that fails once and then completes,
        with tracker file
        """
        harness = CallbackTestHarness()
        tmpdir = self._MakeTempDir()
        tracker_file_name = self.make_tracker_file(tmpdir)
        dst_fp = self.make_dst_fp(tmpdir)
        small_src_key_as_string, small_src_key = self.make_small_key()
        res_download_handler = ResumableDownloadHandler(
            tracker_file_name=tracker_file_name, num_retries=1)
        small_src_key.get_contents_to_file(
            dst_fp, cb=harness.call,
            res_download_handler=res_download_handler)
        # Ensure downloaded object has correct content.
        self.assertEqual(SMALL_KEY_SIZE,
                         get_cur_file_size(dst_fp))
        self.assertEqual(small_src_key_as_string,
                         small_src_key.get_contents_as_string())
        # Ensure tracker file deleted.
        self.assertFalse(os.path.exists(tracker_file_name))

    def test_multiple_in_process_failures_then_succeed(self):
        """
        Tests resumable download that fails twice in one process, then completes
        """
        res_download_handler = ResumableDownloadHandler(num_retries=3)
        dst_fp = self.make_dst_fp()
        small_src_key_as_string, small_src_key = self.make_small_key()
        small_src_key.get_contents_to_file(
            dst_fp, res_download_handler=res_download_handler)
        # Ensure downloaded object has correct content.
        self.assertEqual(SMALL_KEY_SIZE,
                         get_cur_file_size(dst_fp))
        self.assertEqual(small_src_key_as_string,
                         small_src_key.get_contents_as_string())

    def test_multiple_in_process_failures_then_succeed_with_tracker_file(self):
        """
        Tests resumable download that fails completely in one process,
        then when restarted completes, using a tracker file
        """
        # Set up test harness that causes more failures than a single
        # ResumableDownloadHandler instance will handle, writing enough data
        # before the first failure that some of it survives that process run.
        harness = CallbackTestHarness(
            fail_after_n_bytes=LARGE_KEY_SIZE/2, num_times_to_fail=2)
        larger_src_key_as_string = os.urandom(LARGE_KEY_SIZE)
        larger_src_key = self._MakeKey(data=larger_src_key_as_string)
        tmpdir = self._MakeTempDir()
        tracker_file_name = self.make_tracker_file(tmpdir)
        dst_fp = self.make_dst_fp(tmpdir)
        res_download_handler = ResumableDownloadHandler(
            tracker_file_name=tracker_file_name, num_retries=0)
        try:
            larger_src_key.get_contents_to_file(
                dst_fp, cb=harness.call,
                res_download_handler=res_download_handler)
            self.fail('Did not get expected ResumableDownloadException')
        except ResumableDownloadException as e:
            self.assertEqual(e.disposition,
                             ResumableTransferDisposition.ABORT_CUR_PROCESS)
            # Ensure a tracker file survived.
            self.assertTrue(os.path.exists(tracker_file_name))
        # Try it one more time; this time should succeed.
        larger_src_key.get_contents_to_file(
            dst_fp, cb=harness.call,
            res_download_handler=res_download_handler)
        self.assertEqual(LARGE_KEY_SIZE,
                         get_cur_file_size(dst_fp))
        self.assertEqual(larger_src_key_as_string,
                         larger_src_key.get_contents_as_string())
        self.assertFalse(os.path.exists(tracker_file_name))
        # Ensure some of the file was downloaded both before and after failure.
        self.assertTrue(
            len(harness.transferred_seq_before_first_failure) > 1 and
            len(harness.transferred_seq_after_first_failure) > 1)

    def test_download_with_inital_partial_download_before_failure(self):
        """
        Tests resumable download that successfully downloads some content
        before it fails, then restarts and completes
        """
        # Set up harness to fail download after several hundred KB so download
        # server will have saved something before we retry.
        harness = CallbackTestHarness(
            fail_after_n_bytes=LARGE_KEY_SIZE/2)
        larger_src_key_as_string = os.urandom(LARGE_KEY_SIZE)
        larger_src_key = self._MakeKey(data=larger_src_key_as_string)
        res_download_handler = ResumableDownloadHandler(num_retries=1)
        dst_fp = self.make_dst_fp()
        larger_src_key.get_contents_to_file(
            dst_fp, cb=harness.call,
            res_download_handler=res_download_handler)
        # Ensure downloaded object has correct content.
        self.assertEqual(LARGE_KEY_SIZE,
                         get_cur_file_size(dst_fp))
        self.assertEqual(larger_src_key_as_string,
                         larger_src_key.get_contents_as_string())
        # Ensure some of the file was downloaded both before and after failure.
        self.assertTrue(
            len(harness.transferred_seq_before_first_failure) > 1 and
            len(harness.transferred_seq_after_first_failure) > 1)

    def test_zero_length_object_download(self):
        """
        Tests downloading a zero-length object (exercises boundary conditions).
        """
        res_download_handler = ResumableDownloadHandler()
        dst_fp = self.make_dst_fp()
        k = self._MakeKey()
        k.get_contents_to_file(dst_fp,
                               res_download_handler=res_download_handler)
        self.assertEqual(0, get_cur_file_size(dst_fp))

    def test_download_with_invalid_tracker_etag(self):
        """
        Tests resumable download with a tracker file containing an invalid etag
        """
        tmp_dir = self._MakeTempDir()
        dst_fp = self.make_dst_fp(tmp_dir)
        small_src_key_as_string, small_src_key = self.make_small_key()
        invalid_etag_tracker_file_name = os.path.join(tmp_dir,
                                                      'invalid_etag_tracker')
        # Seed the tracker with a value that cannot be a real etag.
        with open(invalid_etag_tracker_file_name, 'w') as f:
            f.write('3.14159\n')
        res_download_handler = ResumableDownloadHandler(
            tracker_file_name=invalid_etag_tracker_file_name)
        # An error should be printed about the invalid tracker, but then it
        # should run the update successfully.
        small_src_key.get_contents_to_file(
            dst_fp, res_download_handler=res_download_handler)
        self.assertEqual(SMALL_KEY_SIZE, get_cur_file_size(dst_fp))
        self.assertEqual(small_src_key_as_string,
                         small_src_key.get_contents_as_string())

    def test_download_with_inconsistent_etag_in_tracker(self):
        """
        Tests resumable download with an inconsistent etag in tracker file
        """
        tmp_dir = self._MakeTempDir()
        dst_fp = self.make_dst_fp(tmp_dir)
        small_src_key_as_string, small_src_key = self.make_small_key()
        inconsistent_etag_tracker_file_name = os.path.join(tmp_dir,
            'inconsistent_etag_tracker')
        good_etag = small_src_key.etag.strip('"\'')
        # Reversing the real etag guarantees a well-formed but wrong value.
        with open(inconsistent_etag_tracker_file_name, 'w') as f:
            f.write('%s\n' % good_etag[::-1])
        res_download_handler = ResumableDownloadHandler(
            tracker_file_name=inconsistent_etag_tracker_file_name)
        # An error should be printed about the expired tracker, but then it
        # should run the update successfully.
        small_src_key.get_contents_to_file(
            dst_fp, res_download_handler=res_download_handler)
        self.assertEqual(SMALL_KEY_SIZE,
                         get_cur_file_size(dst_fp))
        self.assertEqual(small_src_key_as_string,
                         small_src_key.get_contents_as_string())

    def test_download_with_unwritable_tracker_file(self):
        """
        Tests resumable download with an unwritable tracker file
        """
        # Make dir where tracker_file lives temporarily unwritable.
        tmp_dir = self._MakeTempDir()
        tracker_file_name = os.path.join(tmp_dir, 'tracker')
        save_mod = os.stat(tmp_dir).st_mode
        try:
            os.chmod(tmp_dir, 0)
            res_download_handler = ResumableDownloadHandler(
                tracker_file_name=tracker_file_name)
        except ResumableDownloadException as e:
            self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
            # BaseException.message no longer exists on Python 3; inspect
            # the stringified exception instead.
            self.assertIn('Couldn\'t write URI tracker file', str(e))
        finally:
            # Restore original protection of dir where tracker_file lives.
            os.chmod(tmp_dir, save_mod)
|
catapult-project/catapult
|
third_party/gsutil/gslib/vendored/boto/tests/integration/gs/test_resumable_downloads.py
|
Python
|
bsd-3-clause
| 16,183
|
class DimensionalityError(ValueError):
    """Signal that an object's number of dimensions differs from the
    dimensionality that was expected."""
|
karla3jo/menpo-old
|
menpo/exception.py
|
Python
|
bsd-3-clause
| 137
|
from __future__ import print_function
__author__ = "John Kirkham <kirkhamj@janelia.hhmi.org>"
__date__ = "$Jul 30, 2014 19:35:11 EDT$"
import imp
import nose
import nose.plugins
import nose.plugins.attrib
import numpy
import scipy
import scipy.spatial
import scipy.spatial.distance
import scipy.stats
import nanshe.util.iters
import nanshe.util.xnumpy
import nanshe.imp.segment
import nanshe.syn.data
# Detect whether the optional SPAMS package can be imported; tests that
# need it consult this flag.
# NOTE(review): the `imp` module is deprecated and removed in Python 3.12;
# importlib.util.find_spec is the modern replacement — confirm the file's
# target Python versions before switching.
has_spams = False
try:
    imp.find_module("spams")
except ImportError:
    pass
else:
    has_spams = True
class TestSegment(object):
def test_remove_zeroed_lines_1(self):
a = numpy.ones((1, 100, 101))
erosion_shape = [21, 1]
dilation_shape = [1, 3]
r = numpy.array([[0, 0, 0], [a.shape[1]-2, 3, 4]]).T.copy()
print(r)
ar = a.copy()
for each_r in r:
nanshe.util.xnumpy.index_axis_at_pos(nanshe.util.xnumpy.index_axis_at_pos(ar, 0, each_r[0]), -1, each_r[-1])[:] = 0
b = nanshe.imp.segment.remove_zeroed_lines(ar, erosion_shape=erosion_shape, dilation_shape=dilation_shape)
assert (a == b).all()
def test_remove_zeroed_lines_2(self):
a = numpy.ones((1, 100, 101))
erosion_shape = [21, 1]
dilation_shape = [1, 3]
r = numpy.array([[0, 0, 0], [1, 3, 4]]).T.copy()
print(r)
ar = a.copy()
for each_r in r:
nanshe.util.xnumpy.index_axis_at_pos(nanshe.util.xnumpy.index_axis_at_pos(ar, 0, each_r[0]), -1, each_r[-1])[:] = 0
b = nanshe.imp.segment.remove_zeroed_lines(ar, erosion_shape=erosion_shape, dilation_shape=dilation_shape)
assert (a == b).all()
def test_remove_zeroed_lines_3(self):
a = numpy.ones((1, 100, 101))
p = 0.2
erosion_shape = [21, 1]
dilation_shape = [1, 3]
nr = numpy.random.geometric(p)
r = numpy.array([numpy.repeat(0, nr), numpy.random.random_integers(1, a.shape[1] - 2, nr)]).T.copy()
print(r)
ar = a.copy()
for each_r in r:
nanshe.util.xnumpy.index_axis_at_pos(nanshe.util.xnumpy.index_axis_at_pos(ar, 0, each_r[0]), -1, each_r[-1])[:] = 0
b = nanshe.imp.segment.remove_zeroed_lines(ar, erosion_shape=erosion_shape, dilation_shape=dilation_shape)
assert (a == b).all()
def test_remove_zeroed_lines_4(self):
a = numpy.ones((1, 100, 101))
erosion_shape = [21, 1]
dilation_shape = [1, 3]
r = numpy.array([[0, 0, 0], [a.shape[1], 0, 4]]).T.copy()
print(r)
ar = a.copy()
for each_r in r:
nanshe.util.xnumpy.index_axis_at_pos(nanshe.util.xnumpy.index_axis_at_pos(ar, 0, each_r[0]), -1, each_r[-1])[:] = 0
b = nanshe.imp.segment.remove_zeroed_lines(ar, dilation_shape=dilation_shape, erosion_shape=erosion_shape)
assert (a == b).all()
def test_remove_zeroed_lines_5(self):
a = numpy.ones((1, 100, 101))
erosion_shape = [21, 1]
dilation_shape = [1, 3]
r = numpy.array([[0, 0, 0, 0], [a.shape[1], a.shape[1]-1, 0, 1]]).T.copy()
print(r)
ar = a.copy()
for each_r in r:
nanshe.util.xnumpy.index_axis_at_pos(nanshe.util.xnumpy.index_axis_at_pos(ar, 0, each_r[0]), -1, each_r[-1])[:] = 0
b = nanshe.imp.segment.remove_zeroed_lines(ar, dilation_shape=dilation_shape, erosion_shape=erosion_shape)
assert (a == b).all()
def test_remove_zeroed_lines_6(self):
a = numpy.repeat(numpy.arange(100)[None].T, 101, axis=1)[None].astype(float)
erosion_shape = [21, 1]
dilation_shape = [1, 3]
r = numpy.array([[0, 0, 0], [1, 3, 4]]).T.copy()
print(r)
ar = a.copy()
for each_r in r:
nanshe.util.xnumpy.index_axis_at_pos(nanshe.util.xnumpy.index_axis_at_pos(ar, 0, each_r[0]), -1, each_r[-1])[:] = 0
b = nanshe.imp.segment.remove_zeroed_lines(ar, erosion_shape=erosion_shape, dilation_shape=dilation_shape)
assert numpy.allclose(a, b, rtol=0, atol=1e-13)
def test_remove_zeroed_lines_7(self):
a = numpy.repeat(numpy.arange(100)[None], 101, axis=0)[None].astype(float)
a[0, :, 0] = 1
nanshe.util.xnumpy.index_axis_at_pos(nanshe.util.xnumpy.index_axis_at_pos(a, 0, 0), -1, 0)[:] = 1
erosion_shape = [21, 1]
dilation_shape = [1, 3]
r = numpy.array([[0, 0, 0, 0], [0, 2, 3, 4]]).T.copy()
print(r)
ar = a.copy()
for each_r in r:
nanshe.util.xnumpy.index_axis_at_pos(nanshe.util.xnumpy.index_axis_at_pos(ar, 0, each_r[0]), -1, each_r[-1])[:] = 0
b = nanshe.imp.segment.remove_zeroed_lines(ar, erosion_shape=erosion_shape, dilation_shape=dilation_shape)
assert numpy.allclose(a, b, rtol=0, atol=1e-13)
def test_remove_zeroed_lines_8(self):
a = numpy.ones((1, 100, 101))
erosion_shape = [21, 1]
dilation_shape = [1, 3]
r = numpy.array([[0, 0, 0], [a.shape[1]-2, 3, 4]]).T.copy()
print(r)
ar = a.copy()
for each_r in r:
nanshe.util.xnumpy.index_axis_at_pos(nanshe.util.xnumpy.index_axis_at_pos(ar, 0, each_r[0]), -1, each_r[-1])[:] = 0
b = numpy.zeros_like(a)
nanshe.imp.segment.remove_zeroed_lines(ar, erosion_shape=erosion_shape, dilation_shape=dilation_shape, out=b)
assert (a == b).all()
def test_remove_zeroed_lines_9(self):
a = numpy.ones((1, 100, 101))
erosion_shape = [21, 1]
dilation_shape = [1, 3]
r = numpy.array([[0, 0, 0], [a.shape[1]-2, 3, 4]]).T.copy()
print(r)
ar = a.copy()
for each_r in r:
nanshe.util.xnumpy.index_axis_at_pos(nanshe.util.xnumpy.index_axis_at_pos(ar, 0, each_r[0]), -1, each_r[-1])[:] = 0
b = ar
nanshe.imp.segment.remove_zeroed_lines(b, erosion_shape=erosion_shape, dilation_shape=dilation_shape, out=b)
assert (a == b).all()
@nose.plugins.attrib.attr("3D")
def test_remove_zeroed_lines_10(self):
a = numpy.ones((1, 100, 101, 102))
erosion_shape = [21, 1, 1]
dilation_shape = [1, 3, 1]
r = numpy.array([[0, 0, 0], [a.shape[1]-2, 3, 4], [0, 0, 0]]).T.copy()
print(r)
ar = a.copy()
for each_r in r:
nanshe.util.xnumpy.index_axis_at_pos(nanshe.util.xnumpy.index_axis_at_pos(nanshe.util.xnumpy.index_axis_at_pos(ar, 0, each_r[0]), -1, each_r[-1]), -1, each_r[-2])[:] = 0
b = ar
nanshe.imp.segment.remove_zeroed_lines(b, erosion_shape=erosion_shape, dilation_shape=dilation_shape, out=b)
assert (a == b).all()
def test_estimate_f0_1(self):
spatial_smoothing_gaussian_filter_stdev = 5.0
spatial_smoothing_gaussian_filter_window_size = 5.0
which_quantile = 0.5
temporal_smoothing_gaussian_filter_stdev = 5.0
temporal_smoothing_gaussian_filter_window_size = 5.0
half_window_size = 20
a = numpy.ones((100, 101, 102))
b = nanshe.imp.segment.estimate_f0(
a,
spatial_smoothing_gaussian_filter_stdev=spatial_smoothing_gaussian_filter_stdev,
spatial_smoothing_gaussian_filter_window_size=spatial_smoothing_gaussian_filter_window_size,
which_quantile=which_quantile,
temporal_smoothing_gaussian_filter_stdev=temporal_smoothing_gaussian_filter_stdev,
temporal_smoothing_gaussian_filter_window_size=temporal_smoothing_gaussian_filter_window_size,
half_window_size=half_window_size
)
assert (b == a).all()
def test_estimate_f0_1b(self):
spatial_smoothing_gaussian_filter_stdev = 5.0
spatial_smoothing_gaussian_filter_window_size = 5.0
which_quantile = 0.5
temporal_smoothing_gaussian_filter_stdev = 5.0
temporal_smoothing_gaussian_filter_window_size = 5.0
half_window_size = 20
a = numpy.ones((100, 101, 102))
b = a.copy()
nanshe.imp.segment.estimate_f0(
a,
spatial_smoothing_gaussian_filter_stdev=spatial_smoothing_gaussian_filter_stdev,
spatial_smoothing_gaussian_filter_window_size=spatial_smoothing_gaussian_filter_window_size,
which_quantile=which_quantile,
temporal_smoothing_gaussian_filter_stdev=temporal_smoothing_gaussian_filter_stdev,
temporal_smoothing_gaussian_filter_window_size=temporal_smoothing_gaussian_filter_window_size,
half_window_size=half_window_size,
out=b
)
assert (b == a).all()
def test_estimate_f0_1c(self):
spatial_smoothing_gaussian_filter_stdev = 5.0
spatial_smoothing_gaussian_filter_window_size = 5.0
which_quantile = 0.5
temporal_smoothing_gaussian_filter_stdev = 5.0
temporal_smoothing_gaussian_filter_window_size = 5.0
half_window_size = 20
a = numpy.ones((100, 101, 102))
b = a.copy()
nanshe.imp.segment.estimate_f0(
b,
spatial_smoothing_gaussian_filter_stdev=spatial_smoothing_gaussian_filter_stdev,
spatial_smoothing_gaussian_filter_window_size=spatial_smoothing_gaussian_filter_window_size,
which_quantile=which_quantile,
temporal_smoothing_gaussian_filter_stdev=temporal_smoothing_gaussian_filter_stdev,
temporal_smoothing_gaussian_filter_window_size=temporal_smoothing_gaussian_filter_window_size,
half_window_size=half_window_size,
out=b
)
assert (b == a).all()
def test_estimate_f0_2(self):
spatial_smoothing_gaussian_filter_stdev = 5.0
spatial_smoothing_gaussian_filter_window_size = 5.0
which_quantile = 0.5
temporal_smoothing_gaussian_filter_stdev = 5.0
temporal_smoothing_gaussian_filter_window_size = 5.0
half_window_size = 49
mean = 0.0
stdev = 1.0
a = numpy.random.normal(mean, stdev, (100, 101, 102))
b = nanshe.imp.segment.estimate_f0(
a,
spatial_smoothing_gaussian_filter_stdev=spatial_smoothing_gaussian_filter_stdev,
spatial_smoothing_gaussian_filter_window_size=spatial_smoothing_gaussian_filter_window_size,
which_quantile=which_quantile,
temporal_smoothing_gaussian_filter_stdev=temporal_smoothing_gaussian_filter_stdev,
temporal_smoothing_gaussian_filter_window_size=temporal_smoothing_gaussian_filter_window_size,
half_window_size=half_window_size
)
# Seems to be basically 2 orders of magnitude in reduction. However, it may be a little above exactly two.
# Hence, multiplication by 99 instead of 100.
assert ((99.0*b.std()) < a.std())
@nose.plugins.attrib.attr("3D")
def test_estimate_f0_3(self):
spatial_smoothing_gaussian_filter_stdev = 5.0
spatial_smoothing_gaussian_filter_window_size = 5.0
which_quantile = 0.5
temporal_smoothing_gaussian_filter_stdev = 5.0
temporal_smoothing_gaussian_filter_window_size = 5.0
half_window_size = 20
a = numpy.ones((100, 101, 102, 103))
b = nanshe.imp.segment.estimate_f0(
a,
spatial_smoothing_gaussian_filter_stdev=spatial_smoothing_gaussian_filter_stdev,
spatial_smoothing_gaussian_filter_window_size=spatial_smoothing_gaussian_filter_window_size,
which_quantile=which_quantile,
temporal_smoothing_gaussian_filter_stdev=temporal_smoothing_gaussian_filter_stdev,
temporal_smoothing_gaussian_filter_window_size=temporal_smoothing_gaussian_filter_window_size,
half_window_size=half_window_size
)
assert (b == a).all()
@nose.plugins.attrib.attr("3D")
def test_estimate_f0_4(self):
spatial_smoothing_gaussian_filter_stdev = 5.0
spatial_smoothing_gaussian_filter_window_size = 5.0
which_quantile = 0.5
temporal_smoothing_gaussian_filter_stdev = 5.0
temporal_smoothing_gaussian_filter_window_size = 5.0
half_window_size = 49
mean = 0.0
stdev = 1.0
a = numpy.random.normal(mean, stdev, (100, 101, 102, 103))
b = nanshe.imp.segment.estimate_f0(
a,
spatial_smoothing_gaussian_filter_stdev=spatial_smoothing_gaussian_filter_stdev,
spatial_smoothing_gaussian_filter_window_size=spatial_smoothing_gaussian_filter_window_size,
which_quantile=which_quantile,
temporal_smoothing_gaussian_filter_stdev=temporal_smoothing_gaussian_filter_stdev,
temporal_smoothing_gaussian_filter_window_size=temporal_smoothing_gaussian_filter_window_size,
half_window_size=half_window_size
)
# Seems to be basically 2 orders of magnitude in reduction. However, it may be a little above exactly two.
# Hence, multiplication by 99 instead of 100.
assert ((99.0*b.std()) < a.std())
def test_extract_f0_1(self):
spatial_smoothing_gaussian_filter_stdev = 5.0
spatial_smoothing_gaussian_filter_window_size = 5.0
which_quantile = 0.5
temporal_smoothing_gaussian_filter_stdev = 5.0
temporal_smoothing_gaussian_filter_window_size = 5.0
half_window_size = 20
bias = 100
a = numpy.ones((100, 101, 102))
b = nanshe.imp.segment.extract_f0(
a,
spatial_smoothing_gaussian_filter_stdev=spatial_smoothing_gaussian_filter_stdev,
spatial_smoothing_gaussian_filter_window_size=spatial_smoothing_gaussian_filter_window_size,
which_quantile=which_quantile,
temporal_smoothing_gaussian_filter_stdev=temporal_smoothing_gaussian_filter_stdev,
temporal_smoothing_gaussian_filter_window_size=temporal_smoothing_gaussian_filter_window_size,
half_window_size=half_window_size,
bias=bias
)
assert (b == 0).all()
def test_extract_f0_1b(self):
spatial_smoothing_gaussian_filter_stdev = 5.0
spatial_smoothing_gaussian_filter_window_size = 5.0
which_quantile = 0.5
temporal_smoothing_gaussian_filter_stdev = 5.0
temporal_smoothing_gaussian_filter_window_size = 5.0
half_window_size = 20
bias = 100
a = numpy.ones((100, 101, 102))
b = a.copy()
nanshe.imp.segment.extract_f0(
a,
spatial_smoothing_gaussian_filter_stdev=spatial_smoothing_gaussian_filter_stdev,
spatial_smoothing_gaussian_filter_window_size=spatial_smoothing_gaussian_filter_window_size,
which_quantile=which_quantile,
temporal_smoothing_gaussian_filter_stdev=temporal_smoothing_gaussian_filter_stdev,
temporal_smoothing_gaussian_filter_window_size=temporal_smoothing_gaussian_filter_window_size,
half_window_size=half_window_size,
bias=bias,
out=b
)
assert (b == 0).all()
def test_extract_f0_1c(self):
spatial_smoothing_gaussian_filter_stdev = 5.0
spatial_smoothing_gaussian_filter_window_size = 5.0
which_quantile = 0.5
temporal_smoothing_gaussian_filter_stdev = 5.0
temporal_smoothing_gaussian_filter_window_size = 5.0
half_window_size = 20
bias = 100
a = numpy.ones((100, 101, 102))
b = a.copy()
nanshe.imp.segment.extract_f0(
b,
spatial_smoothing_gaussian_filter_stdev=spatial_smoothing_gaussian_filter_stdev,
spatial_smoothing_gaussian_filter_window_size=spatial_smoothing_gaussian_filter_window_size,
which_quantile=which_quantile,
temporal_smoothing_gaussian_filter_stdev=temporal_smoothing_gaussian_filter_stdev,
temporal_smoothing_gaussian_filter_window_size=temporal_smoothing_gaussian_filter_window_size,
half_window_size=half_window_size,
bias=bias,
out=b
)
assert (b == 0).all()
def test_extract_f0_2(self):
spatial_smoothing_gaussian_filter_stdev = 5.0
spatial_smoothing_gaussian_filter_window_size = 5.0
which_quantile = 0.5
temporal_smoothing_gaussian_filter_stdev = 5.0
temporal_smoothing_gaussian_filter_window_size = 5.0
half_window_size = 49
bias = 100
mean = 0.0
stdev = 1.0
a = numpy.random.normal(mean, stdev, (100, 101, 102))
b = nanshe.imp.segment.extract_f0(
a,
spatial_smoothing_gaussian_filter_stdev=spatial_smoothing_gaussian_filter_stdev,
spatial_smoothing_gaussian_filter_window_size=spatial_smoothing_gaussian_filter_window_size,
which_quantile=which_quantile,
temporal_smoothing_gaussian_filter_stdev=temporal_smoothing_gaussian_filter_stdev,
temporal_smoothing_gaussian_filter_window_size=temporal_smoothing_gaussian_filter_window_size,
half_window_size=half_window_size,
bias=bias
)
# Seems to be basically 2 orders of magnitude in reduction. However, it may be a little above exactly two.
# Hence, multiplication by 99 instead of 100.
assert ((99.0*b.std()) < a.std())
# Turns out that a difference greater than 0.1 will be over 10 standard deviations away.
assert (((a - 100.0*b) < 0.1).all())
@nose.plugins.attrib.attr("3D")
def test_extract_f0_3(self):
spatial_smoothing_gaussian_filter_stdev = 5.0
spatial_smoothing_gaussian_filter_window_size = 5.0
which_quantile = 0.5
temporal_smoothing_gaussian_filter_stdev = 5.0
temporal_smoothing_gaussian_filter_window_size = 5.0
half_window_size = 20
bias = 100
a = numpy.ones((100, 101, 102, 103))
b = nanshe.imp.segment.extract_f0(
a,
spatial_smoothing_gaussian_filter_stdev=spatial_smoothing_gaussian_filter_stdev,
spatial_smoothing_gaussian_filter_window_size=spatial_smoothing_gaussian_filter_window_size,
which_quantile=which_quantile,
temporal_smoothing_gaussian_filter_stdev=temporal_smoothing_gaussian_filter_stdev,
temporal_smoothing_gaussian_filter_window_size=temporal_smoothing_gaussian_filter_window_size,
half_window_size=half_window_size,
bias=bias
)
assert (b == 0).all()
@nose.plugins.attrib.attr("3D")
def test_extract_f0_4(self):
    """F0 extraction on Gaussian noise shrinks the spread ~100x (3D + time)."""
    filter_params = dict(
        spatial_smoothing_gaussian_filter_stdev=5.0,
        spatial_smoothing_gaussian_filter_window_size=5.0,
        which_quantile=0.5,
        temporal_smoothing_gaussian_filter_stdev=5.0,
        temporal_smoothing_gaussian_filter_window_size=5.0,
        half_window_size=49,
        bias=100
    )
    a = numpy.random.normal(0.0, 1.0, (100, 101, 102, 103))
    b = nanshe.imp.segment.extract_f0(a, **filter_params)
    # Seems to be basically 2 orders of magnitude in reduction. However, it
    # may be a little above exactly two. Hence, multiplication by 99 instead
    # of 100.
    assert ((99.0*b.std()) < a.std())
    # Turns out that a difference greater than 0.1 will be over 10 standard
    # deviations away.
    # NOTE(review): one-sided bound (no abs()) — confirm intent.
    assert (((a - 100.0*b) < 0.1).all())
def test_preprocess_data_1(self):
    """Smoke test: full pipeline (normalize, f0, zeroed lines, wavelet).

    Does NOT test accuracy.
    """
    config = {
        "normalize_data": {"renormalized_images": {"ord": 2}},
        "extract_f0": {
            "spatial_smoothing_gaussian_filter_stdev": 5.0,
            "spatial_smoothing_gaussian_filter_window_size": 5.0,
            "which_quantile": 0.5,
            "temporal_smoothing_gaussian_filter_stdev": 5.0,
            "temporal_smoothing_gaussian_filter_window_size": 5.0,
            "half_window_size": 20,
            "bias": 100
        },
        "remove_zeroed_lines": {
            "erosion_shape": [21, 1],
            "dilation_shape": [1, 3]
        },
        "wavelet.transform": {"scale": [3, 4, 4]}
    }
    shape = numpy.array([100, 100, 100])
    centers = numpy.array([[20, 30, 24],
                           [70, 59, 65]])
    radii = numpy.array([5, 6])
    magnitudes = numpy.array([15, 16])
    spheres = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    frames = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * spheres
    # Only checks that the pipeline runs without raising.
    nanshe.imp.segment.preprocess_data(frames.max(axis=0), **config)
def test_preprocess_data_2(self):
    """Smoke test: pipeline without extract_f0 (2D + time).

    Does NOT test accuracy.
    """
    config = {
        "normalize_data": {"renormalized_images": {"ord": 2}},
        "remove_zeroed_lines": {
            "erosion_shape": [21, 1],
            "dilation_shape": [1, 3]
        },
        "wavelet.transform": {"scale": [3, 4, 4]}
    }
    shape = numpy.array([100, 100, 100])
    centers = numpy.array([[20, 30, 24],
                           [70, 59, 65]])
    radii = numpy.array([5, 6])
    magnitudes = numpy.array([15, 16])
    spheres = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    frames = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * spheres
    # Only checks that the pipeline runs without raising.
    nanshe.imp.segment.preprocess_data(frames.max(axis=0), **config)
def test_preprocess_data_3(self):
    """Smoke test: pipeline without remove_zeroed_lines (2D + time).

    Does NOT test accuracy.
    """
    config = {
        "normalize_data": {"renormalized_images": {"ord": 2}},
        "extract_f0": {
            "spatial_smoothing_gaussian_filter_stdev": 5.0,
            "spatial_smoothing_gaussian_filter_window_size": 5.0,
            "which_quantile": 0.5,
            "temporal_smoothing_gaussian_filter_stdev": 5.0,
            "temporal_smoothing_gaussian_filter_window_size": 5.0,
            "half_window_size": 20,
            "bias": 100
        },
        "wavelet.transform": {"scale": [3, 4, 4]}
    }
    shape = numpy.array([100, 100, 100])
    centers = numpy.array([[20, 30, 24],
                           [70, 59, 65]])
    radii = numpy.array([5, 6])
    magnitudes = numpy.array([15, 16])
    spheres = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    frames = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * spheres
    # Only checks that the pipeline runs without raising.
    nanshe.imp.segment.preprocess_data(frames.max(axis=0), **config)
def test_preprocess_data_4(self):
    """Smoke test: pipeline without wavelet.transform (2D + time).

    Does NOT test accuracy.
    """
    config = {
        "normalize_data": {"renormalized_images": {"ord": 2}},
        "extract_f0": {
            "spatial_smoothing_gaussian_filter_stdev": 5.0,
            "spatial_smoothing_gaussian_filter_window_size": 5.0,
            "which_quantile": 0.5,
            "temporal_smoothing_gaussian_filter_stdev": 5.0,
            "temporal_smoothing_gaussian_filter_window_size": 5.0,
            "half_window_size": 20,
            "bias": 100
        },
        "remove_zeroed_lines": {
            "erosion_shape": [21, 1],
            "dilation_shape": [1, 3]
        }
    }
    shape = numpy.array([100, 100, 100])
    centers = numpy.array([[20, 30, 24],
                           [70, 59, 65]])
    radii = numpy.array([5, 6])
    magnitudes = numpy.array([15, 16])
    spheres = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    frames = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * spheres
    # Only checks that the pipeline runs without raising.
    nanshe.imp.segment.preprocess_data(frames.max(axis=0), **config)
@nose.plugins.attrib.attr("3D")
def test_preprocess_data_5(self):
    """Smoke test: normalize + f0 + wavelet on 3D + time data.

    Does NOT test accuracy.
    """
    config = {
        "normalize_data": {"renormalized_images": {"ord": 2}},
        "extract_f0": {
            "spatial_smoothing_gaussian_filter_stdev": 5.0,
            "spatial_smoothing_gaussian_filter_window_size": 5.0,
            "which_quantile": 0.5,
            "temporal_smoothing_gaussian_filter_stdev": 5.0,
            "temporal_smoothing_gaussian_filter_window_size": 5.0,
            "half_window_size": 20,
            "bias": 100
        },
        "wavelet.transform": {"scale": [3, 4, 4, 4]}
    }
    shape = numpy.array([100, 100, 100, 100])
    centers = numpy.array([[20, 30, 24, 85],
                           [70, 59, 65, 17]])
    radii = numpy.array([5, 6])
    magnitudes = numpy.array([15, 16])
    spheres = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    frames = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * spheres
    # Only checks that the pipeline runs without raising.
    nanshe.imp.segment.preprocess_data(frames.max(axis=0), **config)
@nose.plugins.attrib.attr("3D")
def test_preprocess_data_6(self):
    """Smoke test: normalize + wavelet only, on 3D + time data.

    Does NOT test accuracy.
    """
    config = {
        "normalize_data": {"renormalized_images": {"ord": 2}},
        "wavelet.transform": {"scale": [3, 4, 4, 4]}
    }
    shape = numpy.array([100, 100, 100, 100])
    centers = numpy.array([[20, 30, 24, 85],
                           [70, 59, 65, 17]])
    radii = numpy.array([5, 6])
    magnitudes = numpy.array([15, 16])
    spheres = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    frames = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * spheres
    # Only checks that the pipeline runs without raising.
    nanshe.imp.segment.preprocess_data(frames.max(axis=0), **config)
@nose.plugins.attrib.attr("3D")
def test_preprocess_data_7(self):
    """Smoke test: normalize + extract_f0 only, on 3D + time data.

    Does NOT test accuracy.
    """
    config = {
        "normalize_data": {"renormalized_images": {"ord": 2}},
        "extract_f0": {
            "spatial_smoothing_gaussian_filter_stdev": 5.0,
            "spatial_smoothing_gaussian_filter_window_size": 5.0,
            "which_quantile": 0.5,
            "temporal_smoothing_gaussian_filter_stdev": 5.0,
            "temporal_smoothing_gaussian_filter_window_size": 5.0,
            "half_window_size": 20,
            "bias": 100
        }
    }
    shape = numpy.array([100, 100, 100, 100])
    centers = numpy.array([[20, 30, 24, 85],
                           [70, 59, 65, 17]])
    radii = numpy.array([5, 6])
    magnitudes = numpy.array([15, 16])
    spheres = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    frames = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * spheres
    # Only checks that the pipeline runs without raising.
    nanshe.imp.segment.preprocess_data(frames.max(axis=0), **config)
def test_generate_dictionary_00(self):
    """SPAMS dictionary learning recovers disjoint sphere masks (2D, float32)."""
    if not has_spams:
        raise nose.SkipTest(
            "Cannot run this test without SPAMS being installed."
        )
    p = numpy.array([[27, 51],
                     [66, 85],
                     [77, 45]])
    space = numpy.array((100, 100))
    radii = numpy.array((5, 6, 7))
    g = nanshe.syn.data.generate_hypersphere_masks(space, p, radii)
    spams_params = {
        "K": len(g),
        "mode": 2,
        "modeD": 0,
        "iter": 10,
        "batchsize": 256,
        "lambda1": 0.2,
        "lambda2": 0,
        "gamma1": 0,
        "gamma2": 0,
        "posD": True,
        "posAlpha": True,
        "clean": True,
        "numThreads": 1
    }
    d = nanshe.imp.segment.generate_dictionary(
        g.astype(numpy.float32), **{"spams.trainDL": spams_params}
    )
    d = (d != 0)
    assert (g.shape == d.shape)
    assert (g.astype(bool).max(axis=0) == d.astype(bool).max(axis=0)).all()
    # Greedily pair each dictionary atom with the generated mask it equals;
    # every mask must be matched by some atom.
    remaining = list(range(len(g)))
    pairing = dict()
    for atom in nanshe.util.iters.irange(len(d)):
        leftovers = []
        for mask in remaining:
            if (d[atom] == g[mask]).all():
                pairing[atom] = mask
            else:
                leftovers.append(mask)
        remaining = leftovers
    print(remaining)
    assert (len(remaining) == 0)
def test_generate_dictionary_01(self):
    """SPAMS dictionary learning recovers disjoint sphere masks (2D, float64)."""
    if not has_spams:
        raise nose.SkipTest(
            "Cannot run this test without SPAMS being installed."
        )
    p = numpy.array([[27, 51],
                     [66, 85],
                     [77, 45]])
    space = numpy.array((100, 100))
    radii = numpy.array((5, 6, 7))
    g = nanshe.syn.data.generate_hypersphere_masks(space, p, radii)
    spams_params = {
        "K": len(g),
        "mode": 2,
        "modeD": 0,
        "iter": 10,
        "batchsize": 256,
        "lambda1": 0.2,
        "lambda2": 0,
        "gamma1": 0,
        "gamma2": 0,
        "posD": True,
        "posAlpha": True,
        "clean": True,
        "numThreads": 1
    }
    d = nanshe.imp.segment.generate_dictionary(
        g.astype(float), **{"spams.trainDL": spams_params}
    )
    d = (d != 0)
    assert (g.shape == d.shape)
    assert (g.astype(bool).max(axis=0) == d.astype(bool).max(axis=0)).all()
    # Greedily pair each dictionary atom with the generated mask it equals;
    # every mask must be matched by some atom.
    remaining = list(range(len(g)))
    pairing = dict()
    for atom in nanshe.util.iters.irange(len(d)):
        leftovers = []
        for mask in remaining:
            if (d[atom] == g[mask]).all():
                pairing[atom] = mask
            else:
                leftovers.append(mask)
        remaining = leftovers
    print(remaining)
    assert (len(remaining) == 0)
@nose.plugins.attrib.attr("3D")
def test_generate_dictionary_02(self):
    """SPAMS dictionary learning recovers disjoint sphere masks (3D, float32)."""
    if not has_spams:
        raise nose.SkipTest(
            "Cannot run this test without SPAMS being installed."
        )
    p = numpy.array([[27, 51, 87],
                     [66, 85, 55],
                     [77, 45, 26]])
    space = numpy.array((100, 100, 100))
    radii = numpy.array((5, 6, 7))
    g = nanshe.syn.data.generate_hypersphere_masks(space, p, radii)
    spams_params = {
        "K": len(g),
        "mode": 2,
        "modeD": 0,
        "iter": 10,
        "batchsize": 256,
        "lambda1": 0.2,
        "lambda2": 0,
        "gamma1": 0,
        "gamma2": 0,
        "posD": True,
        "posAlpha": True,
        "clean": True,
        "numThreads": 1
    }
    d = nanshe.imp.segment.generate_dictionary(
        g.astype(numpy.float32), **{"spams.trainDL": spams_params}
    )
    d = (d != 0)
    assert (g.shape == d.shape)
    assert (g.astype(bool).max(axis=0) == d.astype(bool).max(axis=0)).all()
    # Greedily pair each dictionary atom with the generated mask it equals;
    # every mask must be matched by some atom.
    remaining = list(range(len(g)))
    pairing = dict()
    for atom in nanshe.util.iters.irange(len(d)):
        leftovers = []
        for mask in remaining:
            if (d[atom] == g[mask]).all():
                pairing[atom] = mask
            else:
                leftovers.append(mask)
        remaining = leftovers
    print(remaining)
    assert (len(remaining) == 0)
@nose.plugins.attrib.attr("3D")
def test_generate_dictionary_03(self):
    """SPAMS dictionary learning recovers disjoint sphere masks (3D, float64)."""
    if not has_spams:
        raise nose.SkipTest(
            "Cannot run this test without SPAMS being installed."
        )
    p = numpy.array([[27, 51, 87],
                     [66, 85, 55],
                     [77, 45, 26]])
    space = numpy.array((100, 100, 100))
    radii = numpy.array((5, 6, 7))
    g = nanshe.syn.data.generate_hypersphere_masks(space, p, radii)
    spams_params = {
        "K": len(g),
        "mode": 2,
        "modeD": 0,
        "iter": 10,
        "batchsize": 256,
        "lambda1": 0.2,
        "lambda2": 0,
        "gamma1": 0,
        "gamma2": 0,
        "posD": True,
        "posAlpha": True,
        "clean": True,
        "numThreads": 1
    }
    d = nanshe.imp.segment.generate_dictionary(
        g.astype(float), **{"spams.trainDL": spams_params}
    )
    d = (d != 0)
    assert (g.shape == d.shape)
    assert (g.astype(bool).max(axis=0) == d.astype(bool).max(axis=0)).all()
    # Greedily pair each dictionary atom with the generated mask it equals;
    # every mask must be matched by some atom.
    remaining = list(range(len(g)))
    pairing = dict()
    for atom in nanshe.util.iters.irange(len(d)):
        leftovers = []
        for mask in remaining:
            if (d[atom] == g[mask]).all():
                pairing[atom] = mask
            else:
                leftovers.append(mask)
        remaining = leftovers
    print(remaining)
    assert (len(remaining) == 0)
def test_generate_dictionary_04(self):
    """SPAMS learning seeded with the masks reproduces them exactly (2D, float32)."""
    if not has_spams:
        raise nose.SkipTest(
            "Cannot run this test without SPAMS being installed."
        )
    p = numpy.array([[27, 51],
                     [66, 85],
                     [77, 45]])
    space = numpy.array((100, 100))
    radii = numpy.array((5, 6, 7))
    g = nanshe.syn.data.generate_hypersphere_masks(space, p, radii)
    spams_params = {
        "mode": 2,
        "modeD": 0,
        "iter": 10,
        "batchsize": 256,
        "lambda1": 0.2,
        "lambda2": 0,
        "gamma1": 0,
        "gamma2": 0,
        "posD": True,
        "posAlpha": True,
        "clean": True,
        "numThreads": 1
    }
    # The masks themselves are passed as the initial dictionary.
    d = nanshe.imp.segment.generate_dictionary(
        g.astype(numpy.float32),
        g.astype(numpy.float32),
        len(g),
        **{"spams.trainDL": spams_params}
    )
    d = (d != 0)
    assert (g.shape == d.shape)
    assert (g.astype(bool).max(axis=0) == d.astype(bool).max(axis=0)).all()
    # Greedily pair each dictionary atom with the generated mask it equals;
    # every mask must be matched by some atom.
    remaining = list(range(len(g)))
    pairing = dict()
    for atom in nanshe.util.iters.irange(len(d)):
        leftovers = []
        for mask in remaining:
            if (d[atom] == g[mask]).all():
                pairing[atom] = mask
            else:
                leftovers.append(mask)
        remaining = leftovers
    print(remaining)
    assert (len(remaining) == 0)
    # Seeded learning should keep the atoms in their original order too.
    assert (g.astype(bool) == d.astype(bool)).all()
def test_generate_dictionary_05(self):
    """SPAMS learning seeded with the masks reproduces them exactly (2D, float64)."""
    if not has_spams:
        raise nose.SkipTest(
            "Cannot run this test without SPAMS being installed."
        )
    p = numpy.array([[27, 51],
                     [66, 85],
                     [77, 45]])
    space = numpy.array((100, 100))
    radii = numpy.array((5, 6, 7))
    g = nanshe.syn.data.generate_hypersphere_masks(space, p, radii)
    spams_params = {
        "mode": 2,
        "modeD": 0,
        "iter": 10,
        "batchsize": 256,
        "lambda1": 0.2,
        "lambda2": 0,
        "gamma1": 0,
        "gamma2": 0,
        "posD": True,
        "posAlpha": True,
        "clean": True,
        "numThreads": 1
    }
    # The masks themselves are passed as the initial dictionary.
    d = nanshe.imp.segment.generate_dictionary(
        g.astype(float),
        g.astype(float),
        len(g),
        **{"spams.trainDL": spams_params}
    )
    d = (d != 0)
    assert (g.shape == d.shape)
    assert (g.astype(bool).max(axis=0) == d.astype(bool).max(axis=0)).all()
    # Greedily pair each dictionary atom with the generated mask it equals;
    # every mask must be matched by some atom.
    remaining = list(range(len(g)))
    pairing = dict()
    for atom in nanshe.util.iters.irange(len(d)):
        leftovers = []
        for mask in remaining:
            if (d[atom] == g[mask]).all():
                pairing[atom] = mask
            else:
                leftovers.append(mask)
        remaining = leftovers
    print(remaining)
    assert (len(remaining) == 0)
    # Seeded learning should keep the atoms in their original order too.
    assert (g.astype(bool) == d.astype(bool)).all()
@nose.plugins.attrib.attr("3D")
def test_generate_dictionary_06(self):
    """SPAMS learning seeded with the masks reproduces them exactly (3D, float32)."""
    if not has_spams:
        raise nose.SkipTest(
            "Cannot run this test without SPAMS being installed."
        )
    p = numpy.array([[27, 51, 87],
                     [66, 85, 55],
                     [77, 45, 26]])
    space = numpy.array((100, 100, 100))
    radii = numpy.array((5, 6, 7))
    g = nanshe.syn.data.generate_hypersphere_masks(space, p, radii)
    spams_params = {
        "mode": 2,
        "modeD": 0,
        "iter": 10,
        "batchsize": 256,
        "lambda1": 0.2,
        "lambda2": 0,
        "gamma1": 0,
        "gamma2": 0,
        "posD": True,
        "posAlpha": True,
        "clean": True,
        "numThreads": 1
    }
    # The masks themselves are passed as the initial dictionary.
    d = nanshe.imp.segment.generate_dictionary(
        g.astype(numpy.float32),
        g.astype(numpy.float32),
        len(g),
        **{"spams.trainDL": spams_params}
    )
    d = (d != 0)
    assert (g.shape == d.shape)
    assert (g.astype(bool).max(axis=0) == d.astype(bool).max(axis=0)).all()
    # Greedily pair each dictionary atom with the generated mask it equals;
    # every mask must be matched by some atom.
    remaining = list(range(len(g)))
    pairing = dict()
    for atom in nanshe.util.iters.irange(len(d)):
        leftovers = []
        for mask in remaining:
            if (d[atom] == g[mask]).all():
                pairing[atom] = mask
            else:
                leftovers.append(mask)
        remaining = leftovers
    print(remaining)
    assert (len(remaining) == 0)
    # Seeded learning should keep the atoms in their original order too.
    assert (g.astype(bool) == d.astype(bool)).all()
@nose.plugins.attrib.attr("3D")
def test_generate_dictionary_07(self):
    """SPAMS learning seeded with the masks reproduces them exactly (3D, float64)."""
    if not has_spams:
        raise nose.SkipTest(
            "Cannot run this test without SPAMS being installed."
        )
    p = numpy.array([[27, 51, 87],
                     [66, 85, 55],
                     [77, 45, 26]])
    space = numpy.array((100, 100, 100))
    radii = numpy.array((5, 6, 7))
    g = nanshe.syn.data.generate_hypersphere_masks(space, p, radii)
    spams_params = {
        "mode": 2,
        "modeD": 0,
        "iter": 10,
        "batchsize": 256,
        "lambda1": 0.2,
        "lambda2": 0,
        "gamma1": 0,
        "gamma2": 0,
        "posD": True,
        "posAlpha": True,
        "clean": True,
        "numThreads": 1
    }
    # The masks themselves are passed as the initial dictionary.
    d = nanshe.imp.segment.generate_dictionary(
        g.astype(float),
        g.astype(float),
        len(g),
        **{"spams.trainDL": spams_params}
    )
    d = (d != 0)
    assert (g.shape == d.shape)
    assert (g.astype(bool).max(axis=0) == d.astype(bool).max(axis=0)).all()
    # Greedily pair each dictionary atom with the generated mask it equals;
    # every mask must be matched by some atom.
    remaining = list(range(len(g)))
    pairing = dict()
    for atom in nanshe.util.iters.irange(len(d)):
        leftovers = []
        for mask in remaining:
            if (d[atom] == g[mask]).all():
                pairing[atom] = mask
            else:
                leftovers.append(mask)
        remaining = leftovers
    print(remaining)
    assert (len(remaining) == 0)
    # Seeded learning should keep the atoms in their original order too.
    assert (g.astype(bool) == d.astype(bool)).all()
def test_generate_dictionary_08(self):
    """sklearn online dictionary learning recovers the sphere masks (2D)."""
    p = numpy.array([[27, 51],
                     [66, 85],
                     [77, 45]])
    space = numpy.array((100, 100))
    radii = numpy.array((5, 6, 7))
    g = nanshe.syn.data.generate_hypersphere_masks(space, p, radii)
    sklearn_params = {
        "n_components": len(g),
        "n_iter": 20,
        "batch_size": 256,
        "alpha": 0.2,
        "n_jobs": 1
    }
    d = nanshe.imp.segment.generate_dictionary(
        g.astype(float),
        **{"sklearn.decomposition.dict_learning_online": sklearn_params}
    )
    d = (d != 0)
    assert (g.shape == d.shape)
    assert (g.astype(bool).max(axis=0) == d.astype(bool).max(axis=0)).all()
    # Greedily pair each dictionary atom with the generated mask it equals;
    # every mask must be matched by some atom.
    remaining = list(range(len(g)))
    pairing = dict()
    for atom in nanshe.util.iters.irange(len(d)):
        leftovers = []
        for mask in remaining:
            if (d[atom] == g[mask]).all():
                pairing[atom] = mask
            else:
                leftovers.append(mask)
        remaining = leftovers
    print(remaining)
    assert (len(remaining) == 0)
@nose.plugins.attrib.attr("3D")
def test_generate_dictionary_09(self):
    """sklearn online dictionary learning recovers the sphere masks (3D)."""
    p = numpy.array([[27, 51, 87],
                     [66, 85, 55],
                     [77, 45, 26]])
    space = numpy.array((100, 100, 100))
    radii = numpy.array((5, 6, 7))
    g = nanshe.syn.data.generate_hypersphere_masks(space, p, radii)
    sklearn_params = {
        "n_components": len(g),
        "n_iter": 20,
        "batch_size": 256,
        "alpha": 0.2,
        "n_jobs": 1
    }
    d = nanshe.imp.segment.generate_dictionary(
        g.astype(float),
        **{"sklearn.decomposition.dict_learning_online": sklearn_params}
    )
    d = (d != 0)
    assert (g.shape == d.shape)
    assert (g.astype(bool).max(axis=0) == d.astype(bool).max(axis=0)).all()
    # Greedily pair each dictionary atom with the generated mask it equals;
    # every mask must be matched by some atom.
    remaining = list(range(len(g)))
    pairing = dict()
    for atom in nanshe.util.iters.irange(len(d)):
        leftovers = []
        for mask in remaining:
            if (d[atom] == g[mask]).all():
                pairing[atom] = mask
            else:
                leftovers.append(mask)
        remaining = leftovers
    print(remaining)
    assert (len(remaining) == 0)
def test_generate_dictionary_10(self):
    """sklearn learning seeded with the masks reproduces them exactly (2D)."""
    p = numpy.array([[27, 51],
                     [66, 85],
                     [77, 45]])
    space = numpy.array((100, 100))
    radii = numpy.array((5, 6, 7))
    g = nanshe.syn.data.generate_hypersphere_masks(space, p, radii)
    sklearn_params = {
        "n_iter": 20,
        "batch_size": 256,
        "alpha": 0.2,
        "n_jobs": 1
    }
    # The masks themselves are passed as the initial dictionary.
    d = nanshe.imp.segment.generate_dictionary(
        g.astype(float),
        g.astype(float),
        len(g),
        **{"sklearn.decomposition.dict_learning_online": sklearn_params}
    )
    d = (d != 0)
    assert (g.shape == d.shape)
    assert (g.astype(bool).max(axis=0) == d.astype(bool).max(axis=0)).all()
    # Greedily pair each dictionary atom with the generated mask it equals;
    # every mask must be matched by some atom.
    remaining = list(range(len(g)))
    pairing = dict()
    for atom in nanshe.util.iters.irange(len(d)):
        leftovers = []
        for mask in remaining:
            if (d[atom] == g[mask]).all():
                pairing[atom] = mask
            else:
                leftovers.append(mask)
        remaining = leftovers
    print(remaining)
    assert (len(remaining) == 0)
    # Seeded learning should keep the atoms in their original order too.
    assert (g.astype(bool) == d.astype(bool)).all()
@nose.plugins.attrib.attr("3D")
def test_generate_dictionary_11(self):
    """sklearn learning seeded with the masks reproduces them exactly (3D)."""
    p = numpy.array([[27, 51, 87],
                     [66, 85, 55],
                     [77, 45, 26]])
    space = numpy.array((100, 100, 100))
    radii = numpy.array((5, 6, 7))
    g = nanshe.syn.data.generate_hypersphere_masks(space, p, radii)
    sklearn_params = {
        "n_iter": 20,
        "batch_size": 256,
        "alpha": 0.2,
        "n_jobs": 1
    }
    # The masks themselves are passed as the initial dictionary.
    d = nanshe.imp.segment.generate_dictionary(
        g.astype(float),
        g.astype(float),
        len(g),
        **{"sklearn.decomposition.dict_learning_online": sklearn_params}
    )
    d = (d != 0)
    assert (g.shape == d.shape)
    assert (g.astype(bool).max(axis=0) == d.astype(bool).max(axis=0)).all()
    # Greedily pair each dictionary atom with the generated mask it equals;
    # every mask must be matched by some atom.
    remaining = list(range(len(g)))
    pairing = dict()
    for atom in nanshe.util.iters.irange(len(d)):
        leftovers = []
        for mask in remaining:
            if (d[atom] == g[mask]).all():
                pairing[atom] = mask
            else:
                leftovers.append(mask)
        remaining = leftovers
    print(remaining)
    assert (len(remaining) == 0)
    # Seeded learning should keep the atoms in their original order too.
    assert (g.astype(bool) == d.astype(bool)).all()
def test_generate_local_maxima_vigra_1(self):
    """vigra-based local-maxima detection finds every blob center (2D)."""
    centers = numpy.array([[27, 51],
                           [66, 85],
                           [77, 45]])
    shape = numpy.array((100, 100))
    radii = numpy.array((5, 6, 7))
    magnitudes = numpy.array((1, 1, 1), dtype=float)
    blobs = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes/3
    )
    found = nanshe.imp.segment.generate_local_maxima_vigra(blobs.max(axis=0))
    assert (numpy.array(found.nonzero()) == centers.T).all()
@nose.plugins.attrib.attr("3D")
def test_generate_local_maxima_vigra_2(self):
    """vigra-based local-maxima detection finds every blob center (3D)."""
    centers = numpy.array([[27, 51, 87],
                           [66, 85, 55],
                           [77, 45, 26]])
    shape = numpy.array((100, 100, 100))
    radii = numpy.array((5, 6, 7))
    magnitudes = numpy.array((1, 1, 1), dtype=float)
    blobs = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes/3
    )
    found = nanshe.imp.segment.generate_local_maxima_vigra(blobs.max(axis=0))
    assert (numpy.array(found.nonzero()) == centers.T).all()
def test_generate_local_maxima_scikit_image_1(self):
    """scikit-image local-maxima detection finds every blob center (2D)."""
    p = numpy.array([[27, 51],
                     [66, 85],
                     [77, 45]])
    space = numpy.array((100, 100))
    radii = numpy.array((5, 6, 7))
    magnitudes = numpy.array((1, 1, 1), dtype=float)
    g = nanshe.syn.data.generate_gaussian_images(
        space, p, radii/3.0, magnitudes/3
    )
    m = nanshe.imp.segment.generate_local_maxima_scikit_image(g.max(axis=0))
    # Bug fix: the result was previously discarded without any assertion, so
    # the test verified nothing. Mirror the check used by the 3D variant
    # (test_generate_local_maxima_scikit_image_2) and the vigra siblings.
    assert (numpy.array(m.nonzero()) == p.T).all()
@nose.plugins.attrib.attr("3D")
def test_generate_local_maxima_scikit_image_2(self):
    """scikit-image local-maxima detection finds every blob center (3D)."""
    centers = numpy.array([[27, 51, 87],
                           [66, 85, 55],
                           [77, 45, 26]])
    shape = numpy.array((100, 100, 100))
    radii = numpy.array((5, 6, 7))
    magnitudes = numpy.array((1, 1, 1), dtype=float)
    blobs = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes/3
    )
    found = nanshe.imp.segment.generate_local_maxima_scikit_image(
        blobs.max(axis=0)
    )
    assert (numpy.array(found.nonzero()) == centers.T).all()
def test_generate_local_maxima_1(self):
    """Generic local-maxima detection finds every blob center (2D)."""
    centers = numpy.array([[27, 51],
                           [66, 85],
                           [77, 45]])
    shape = numpy.array((100, 100))
    radii = numpy.array((5, 6, 7))
    magnitudes = numpy.array((1, 1, 1), dtype=float)
    blobs = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes/3
    )
    found = nanshe.imp.segment.generate_local_maxima(blobs.max(axis=0))
    assert (numpy.array(found.nonzero()) == centers.T).all()
@nose.plugins.attrib.attr("3D")
def test_generate_local_maxima_2(self):
    """Generic local-maxima detection finds every blob center (3D)."""
    centers = numpy.array([[27, 51, 87],
                           [66, 85, 55],
                           [77, 45, 26]])
    shape = numpy.array((100, 100, 100))
    radii = numpy.array((5, 6, 7))
    magnitudes = numpy.array((1, 1, 1), dtype=float)
    blobs = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes/3
    )
    found = nanshe.imp.segment.generate_local_maxima(blobs.max(axis=0))
    assert (numpy.array(found.nonzero()) == centers.T).all()
def test_extended_region_local_maxima_properties_1(self):
    """Disjoint blobs: one maximum per label, props match generation (2D)."""
    p = numpy.array([[27, 51],
                     [66, 85],
                     [77, 45]])
    space = numpy.array((100, 100))
    radii = numpy.array((5, 6, 7))
    magnitudes = numpy.array((1, 1, 1), dtype=float)
    g = nanshe.syn.data.generate_gaussian_images(
        space, p, radii/3.0, magnitudes/3
    )
    # Threshold into masks and zero everything outside them.
    m = (g > 0.00065)
    g *= m
    e = nanshe.imp.segment.extended_region_local_maxima_properties(
        g.max(axis=0),
        nanshe.util.xnumpy.enumerate_masks_max(m, axis=0)[0]
    )
    # Exactly one local maximum per labeled region.
    assert (numpy.bincount(e["label"])[1:] == 1).all()
    assert (len(e) == len(p))
    assert (e["local_max"] == p).all()
    assert (e["area"] == numpy.apply_over_axes(
        numpy.sum, m, axes=range(1, m.ndim)
    ).squeeze().astype(float)).all()
    # Symmetric blobs: centroids coincide with the maxima.
    assert (e["centroid"] == e["local_max"]).all()
    assert (e["intensity"] == g.max(axis=0)[tuple(p.T)]).all()
def test_extended_region_local_maxima_properties_2(self):
    """Overlapping blobs: two maxima share one merged label (2D)."""
    # The first two points are close enough that their thresholded masks
    # merge into a single region below.
    p = numpy.array([[27, 51],
                     [32, 53],
                     [77, 45]])
    space = numpy.array((100, 100))
    radii = numpy.array((5, 6, 7))
    magnitudes = numpy.array((1, 1, 1), dtype=float)
    g = nanshe.syn.data.generate_gaussian_images(
        space, p, radii/3.0, magnitudes/3
    )
    # Sum the first two images into one frame; keep the third separate.
    g = numpy.array([g[0] + g[1], g[2]])
    m = (g > 0.00065)
    g *= m
    e = nanshe.imp.segment.extended_region_local_maxima_properties(
        g.max(axis=0),
        nanshe.util.xnumpy.enumerate_masks_max(m, axis=0)[0]
    )
    # Label 1 (the merged region) holds two local maxima; label 2 holds one.
    assert (numpy.bincount(e["label"])[1:] == numpy.array([2, 1])).all()
    assert (len(e) == len(p))
    assert (e["local_max"] == p).all()
    # Rows 0 and 1 share the merged region, so only rows 0 and 2 have
    # distinct areas to compare against the mask sums.
    assert (e["area"][[0, 2]] == numpy.apply_over_axes(numpy.sum, m, axes=range(1, m.ndim)).squeeze().astype(float)).all()
    # Not exactly equal due to floating point round off error
    # NOTE(review): both rows 0 and 1 compare against m[0]'s centroid since
    # both maxima lie in the merged region; the bound is one-sided (no
    # abs()) — confirm intent before tightening.
    assert ((e["centroid"][0] - numpy.array(m[0].nonzero()).mean(axis=1)) < 1e-14).all()
    # Not exactly equal due to floating point round off error
    assert ((e["centroid"][1] - numpy.array(m[0].nonzero()).mean(axis=1)) < 1e-14).all()
    assert (e["centroid"][2] == e["local_max"][2]).all()
    assert (e["intensity"] == g.max(axis=0)[tuple(p.T)]).all()
@nose.plugins.attrib.attr("3D")
def test_extended_region_local_maxima_properties_3(self):
    """Disjoint blobs: one maximum per label, props match generation (3D)."""
    p = numpy.array([[27, 51, 87],
                     [66, 85, 55],
                     [77, 45, 26]])
    space = numpy.array((100, 100, 100))
    radii = numpy.array((5, 6, 7))
    magnitudes = numpy.array((1, 1, 1), dtype=float)
    g = nanshe.syn.data.generate_gaussian_images(
        space, p, radii/3.0, magnitudes/3
    )
    # Threshold into masks and zero everything outside them.
    m = (g > 0.00065)
    g *= m
    e = nanshe.imp.segment.extended_region_local_maxima_properties(
        g.max(axis=0),
        nanshe.util.xnumpy.enumerate_masks_max(m, axis=0)[0]
    )
    # Exactly one local maximum per labeled region.
    assert (numpy.bincount(e["label"])[1:] == 1).all()
    assert (len(e) == len(p))
    assert (e["local_max"] == p).all()
    assert (e["area"] == numpy.apply_over_axes(
        numpy.sum, m, axes=range(1, m.ndim)
    ).squeeze().astype(float)).all()
    # Symmetric blobs: centroids coincide with the maxima.
    assert (e["centroid"] == e["local_max"]).all()
    assert (e["intensity"] == g.max(axis=0)[tuple(p.T)]).all()
@nose.plugins.attrib.attr("3D")
def test_extended_region_local_maxima_properties_4(self):
    """Overlapping blobs: two maxima share one merged label (3D)."""
    # The first two points are close enough that their thresholded masks
    # merge into a single region below.
    p = numpy.array([[27, 51, 87],
                     [66, 85, 55],
                     [77, 45, 26]])
    space = numpy.array((100, 100, 100))
    radii = numpy.array((5, 6, 7))
    magnitudes = numpy.array((1, 1, 1), dtype=float)
    g = nanshe.syn.data.generate_gaussian_images(
        space, p, radii/3.0, magnitudes/3
    )
    # Sum the first two images into one frame; keep the third separate.
    g = numpy.array([g[0] + g[1], g[2]])
    m = (g > 0.00065)
    g *= m
    e = nanshe.imp.segment.extended_region_local_maxima_properties(
        g.max(axis=0),
        nanshe.util.xnumpy.enumerate_masks_max(m, axis=0)[0]
    )
    # Label 1 (the merged region) holds two local maxima; label 2 holds one.
    assert (numpy.bincount(e["label"])[1:] == numpy.array([2, 1])).all()
    assert (len(e) == len(p))
    assert (e["local_max"] == p).all()
    # Rows 0 and 1 share the merged region, so only rows 0 and 2 have
    # distinct areas to compare against the mask sums.
    assert (e["area"][[0, 2]] == numpy.apply_over_axes(numpy.sum, m, axes=range(1, m.ndim)).squeeze().astype(float)).all()
    # Not exactly equal due to floating point round off error
    # NOTE(review): both rows 0 and 1 compare against m[0]'s centroid since
    # both maxima lie in the merged region; the bound is one-sided (no
    # abs()) — confirm intent before tightening.
    assert ((e["centroid"][0] - numpy.array(m[0].nonzero()).mean(axis=1)) < 1e-14).all()
    # Not exactly equal due to floating point round off error
    assert ((e["centroid"][1] - numpy.array(m[0].nonzero()).mean(axis=1)) < 1e-14).all()
    assert (e["centroid"][2] == e["local_max"][2]).all()
    assert (e["intensity"] == g.max(axis=0)[tuple(p.T)]).all()
def test_remove_low_intensity_local_maxima_1(self):
    """A cutoff of 1.0 discards every local maximum (2D)."""
    shape = numpy.array((100, 100))
    centers = numpy.array([[23, 36],
                           [58, 64]])
    radii = numpy.array((5, 10))
    magnitudes = numpy.array((1, 1), dtype=float)
    masks = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    images = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * masks
    labels = nanshe.util.xnumpy.enumerate_masks_max(masks, axis=0)[0]
    e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
    filtered = nanshe.imp.segment.remove_low_intensity_local_maxima(e, 1.0)
    assert (len(centers) == len(e.props))
    assert (0 == len(filtered.props))
def test_remove_low_intensity_local_maxima_2(self):
    """The smallest below-peak fraction keeps every local maximum (2D)."""
    shape = numpy.array((100, 100))
    centers = numpy.array([[23, 36],
                           [58, 64]])
    radii = numpy.array((5, 10))
    magnitudes = numpy.array((1, 1), dtype=float)
    masks = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    images = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * masks
    labels = nanshe.util.xnumpy.enumerate_masks_max(masks, axis=0)[0]
    e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
    # Fraction of each region's pixels strictly below its peak intensity,
    # sorted ascending.
    flat = images.max(axis=0)
    fractions = numpy.sort(numpy.array([
        float((flat[mask.nonzero()] < flat[mask].max()).sum()) / float(mask.sum())
        for mask in masks
    ]))
    filtered = nanshe.imp.segment.remove_low_intensity_local_maxima(
        e, fractions[0]
    )
    assert (len(centers) == len(e.props))
    assert (len(e.props) == len(filtered.props))
def test_remove_low_intensity_local_maxima_3(self):
    """The larger below-peak fraction removes exactly one maximum (2D)."""
    shape = numpy.array((100, 100))
    centers = numpy.array([[23, 36],
                           [58, 64]])
    radii = numpy.array((5, 10))
    magnitudes = numpy.array((1, 1), dtype=float)
    masks = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    images = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * masks
    labels = nanshe.util.xnumpy.enumerate_masks_max(masks, axis=0)[0]
    e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
    # Fraction of each region's pixels strictly below its peak intensity,
    # sorted ascending.
    flat = images.max(axis=0)
    fractions = numpy.sort(numpy.array([
        float((flat[mask.nonzero()] < flat[mask].max()).sum()) / float(mask.sum())
        for mask in masks
    ]))
    filtered = nanshe.imp.segment.remove_low_intensity_local_maxima(
        e, fractions[1]
    )
    assert (len(centers) == len(e.props))
    assert ((len(e.props) - 1) == len(filtered.props))
def test_remove_low_intensity_local_maxima_4(self):
    """Just above the larger fraction removes both maxima (2D)."""
    shape = numpy.array((100, 100))
    centers = numpy.array([[23, 36],
                           [58, 64]])
    radii = numpy.array((5, 10))
    magnitudes = numpy.array((1, 1), dtype=float)
    masks = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    images = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * masks
    labels = nanshe.util.xnumpy.enumerate_masks_max(masks, axis=0)[0]
    e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
    # Fraction of each region's pixels strictly below its peak intensity,
    # sorted ascending.
    flat = images.max(axis=0)
    fractions = numpy.sort(numpy.array([
        float((flat[mask.nonzero()] < flat[mask].max()).sum()) / float(mask.sum())
        for mask in masks
    ]))
    # Nudge past the largest fraction so every region fails the cutoff.
    filtered = nanshe.imp.segment.remove_low_intensity_local_maxima(
        e, fractions[1] + numpy.finfo(float).eps
    )
    assert (len(centers) == len(e.props))
    assert ((len(e.props) - 2) == len(filtered.props))
@nose.plugins.attrib.attr("3D")
def test_remove_low_intensity_local_maxima_5(self):
    """A cutoff of 1.0 discards every local maximum (3D)."""
    shape = numpy.array((100, 100, 100))
    centers = numpy.array([[23, 36, 21],
                           [58, 64, 62]])
    radii = numpy.array((5, 10))
    magnitudes = numpy.array((1, 1), dtype=float)
    masks = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    images = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * masks
    labels = nanshe.util.xnumpy.enumerate_masks_max(masks, axis=0)[0]
    e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
    filtered = nanshe.imp.segment.remove_low_intensity_local_maxima(e, 1.0)
    assert (len(centers) == len(e.props))
    assert (0 == len(filtered.props))
@nose.plugins.attrib.attr("3D")
def test_remove_low_intensity_local_maxima_6(self):
    """The smallest below-peak fraction keeps every local maximum (3D)."""
    shape = numpy.array((100, 100, 100))
    centers = numpy.array([[23, 36, 21],
                           [58, 64, 62]])
    radii = numpy.array((5, 10))
    magnitudes = numpy.array((1, 1), dtype=float)
    masks = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    images = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * masks
    labels = nanshe.util.xnumpy.enumerate_masks_max(masks, axis=0)[0]
    e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
    # Fraction of each region's voxels strictly below its peak intensity,
    # sorted ascending.
    flat = images.max(axis=0)
    fractions = numpy.sort(numpy.array([
        float((flat[mask.nonzero()] < flat[mask].max()).sum()) / float(mask.sum())
        for mask in masks
    ]))
    filtered = nanshe.imp.segment.remove_low_intensity_local_maxima(
        e, fractions[0]
    )
    assert (len(centers) == len(e.props))
    assert (len(e.props) == len(filtered.props))
@nose.plugins.attrib.attr("3D")
def test_remove_low_intensity_local_maxima_7(self):
    """The larger below-peak fraction removes exactly one maximum (3D)."""
    shape = numpy.array((100, 100, 100))
    centers = numpy.array([[23, 36, 21],
                           [58, 64, 62]])
    radii = numpy.array((5, 10))
    magnitudes = numpy.array((1, 1), dtype=float)
    masks = nanshe.syn.data.generate_hypersphere_masks(shape, centers, radii)
    images = nanshe.syn.data.generate_gaussian_images(
        shape, centers, radii/3.0, magnitudes
    ) * masks
    labels = nanshe.util.xnumpy.enumerate_masks_max(masks, axis=0)[0]
    e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
    # Fraction of each region's voxels strictly below its peak intensity,
    # sorted ascending.
    flat = images.max(axis=0)
    fractions = numpy.sort(numpy.array([
        float((flat[mask.nonzero()] < flat[mask].max()).sum()) / float(mask.sum())
        for mask in masks
    ]))
    filtered = nanshe.imp.segment.remove_low_intensity_local_maxima(
        e, fractions[1]
    )
    assert (len(centers) == len(e.props))
    assert ((len(e.props) - 1) == len(filtered.props))
@nose.plugins.attrib.attr("3D")
def test_remove_low_intensity_local_maxima_8(self):
space = numpy.array((100, 100, 100))
radii = numpy.array((5, 10))
magnitudes = numpy.array((1, 1), dtype=float)
points = numpy.array([[23, 36, 21],
[58, 64, 62]])
masks = nanshe.syn.data.generate_hypersphere_masks(
space, points, radii
)
images = nanshe.syn.data.generate_gaussian_images(
space, points, radii/3.0, magnitudes
) * masks
labels = nanshe.util.xnumpy.enumerate_masks_max(masks, axis=0)[0]
e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
percentage_pixels_below_max = numpy.zeros((len(masks),), float)
for i in nanshe.util.iters.irange(len(masks)):
pixels_below_max = (images.max(axis=0)[masks[i].nonzero()] < images.max(axis=0)[masks[i]].max()).sum()
pixels = masks[i].sum()
percentage_pixels_below_max[i] = float(pixels_below_max) / float(pixels)
percentage_pixels_below_max = numpy.sort(percentage_pixels_below_max)
e2 = nanshe.imp.segment.remove_low_intensity_local_maxima(e, percentage_pixels_below_max[1] + \
numpy.finfo(float).eps)
assert (len(points) == len(e.props))
assert ((len(e.props) - 2) == len(e2.props))
def test_remove_too_close_local_maxima_1(self):
space = numpy.array((100, 100))
radii = numpy.array((5, 5))
magnitudes = numpy.array((1, 1), dtype=float)
points = numpy.array([[63, 69],
[58, 64]])
masks = nanshe.syn.data.generate_hypersphere_masks(
space, points, radii
)
images = nanshe.syn.data.generate_gaussian_images(
space, points, radii/3.0, magnitudes
) * masks
labels = masks.max(axis=0).astype(int)
e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
dist = scipy.spatial.distance.pdist(points).max()
i = 0
while (dist + i * numpy.finfo(type(dist)).eps) == dist:
i += 1
dist += i * numpy.finfo(type(dist)).eps
e2 = nanshe.imp.segment.remove_too_close_local_maxima(e, dist)
assert (len(points) == len(e.props))
assert (1 == len(e2.props))
def test_remove_too_close_local_maxima_2(self):
space = numpy.array((100, 100))
radii = numpy.array((5, 5))
magnitudes = numpy.array((1, 1), dtype=float)
points = numpy.array([[63, 69],
[58, 64]])
masks = nanshe.syn.data.generate_hypersphere_masks(
space, points, radii
)
images = nanshe.syn.data.generate_gaussian_images(
space, points, radii/3.0, magnitudes
) * masks
labels = nanshe.util.xnumpy.enumerate_masks_max(masks, axis=0)[0]
e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
dist = scipy.spatial.distance.pdist(points).max()
i = 0
while (dist + i * numpy.finfo(type(dist)).eps) == dist:
i += 1
dist += i * numpy.finfo(type(dist)).eps
e2 = nanshe.imp.segment.remove_too_close_local_maxima(e, dist)
assert (len(points) == len(e.props))
assert (len(points) == len(e2.props))
def test_remove_too_close_local_maxima_3(self):
space = numpy.array((100, 100))
radii = numpy.array((5, 5))
magnitudes = numpy.array((1, 1.01), dtype=float)
points = numpy.array([[63, 69],
[58, 64]])
masks = nanshe.syn.data.generate_hypersphere_masks(
space, points, radii
)
images = nanshe.syn.data.generate_gaussian_images(
space, points, radii/3.0, magnitudes
) * masks
labels = masks.max(axis=0).astype(int)
e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
dist = scipy.spatial.distance.pdist(points).max()
i = 0
while (dist + i * numpy.finfo(type(dist)).eps) == dist:
i += 1
dist += i * numpy.finfo(type(dist)).eps
e2 = nanshe.imp.segment.remove_too_close_local_maxima(e, dist)
assert (len(points) == len(e.props))
assert (1 == len(e2.props))
assert (points[magnitudes == magnitudes.max()] == e2.props["local_max"][0]).all()
def test_remove_too_close_local_maxima_4(self):
space = numpy.array((100, 100))
radii = numpy.array((5, 5))
magnitudes = numpy.array((1.01, 1), dtype=float)
points = numpy.array([[63, 69],
[58, 64]])
masks = nanshe.syn.data.generate_hypersphere_masks(
space, points, radii
)
images = nanshe.syn.data.generate_gaussian_images(
space, points, radii/3.0, magnitudes
) * masks
labels = masks.max(axis=0).astype(int)
e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
dist = scipy.spatial.distance.pdist(points).max()
i = 0
while (dist + i * numpy.finfo(type(dist)).eps) == dist:
i += 1
dist += i * numpy.finfo(type(dist)).eps
e2 = nanshe.imp.segment.remove_too_close_local_maxima(e, dist)
assert (len(points) == len(e.props))
assert (1 == len(e2.props))
assert (points[magnitudes == magnitudes.max()] == e2.props["local_max"][0]).all()
@nose.plugins.attrib.attr("3D")
def test_remove_too_close_local_maxima_5(self):
space = numpy.array((100, 100, 100))
radii = numpy.array((5, 5))
magnitudes = numpy.array((1, 1), dtype=float)
points = numpy.array([[63, 69, 26],
[58, 64, 21]])
masks = nanshe.syn.data.generate_hypersphere_masks(
space, points, radii
)
images = nanshe.syn.data.generate_gaussian_images(
space, points, radii/3.0, magnitudes
) * masks
labels = masks.max(axis=0).astype(int)
e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
dist = scipy.spatial.distance.pdist(points).max()
i = 0
while (dist + i * numpy.finfo(type(dist)).eps) == dist:
i += 1
dist += i * numpy.finfo(type(dist)).eps
e2 = nanshe.imp.segment.remove_too_close_local_maxima(e, dist)
assert (len(points) == len(e.props))
assert (1 == len(e2.props))
@nose.plugins.attrib.attr("3D")
def test_remove_too_close_local_maxima_6(self):
space = numpy.array((100, 100, 100))
radii = numpy.array((5, 5))
magnitudes = numpy.array((1, 1), dtype=float)
points = numpy.array([[63, 69, 26],
[58, 64, 21]])
masks = nanshe.syn.data.generate_hypersphere_masks(
space, points, radii
)
images = nanshe.syn.data.generate_gaussian_images(
space, points, radii/3.0, magnitudes
) * masks
labels = nanshe.util.xnumpy.enumerate_masks_max(masks, axis=0)[0]
e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
dist = scipy.spatial.distance.pdist(points).max()
i = 0
while (dist + i * numpy.finfo(type(dist)).eps) == dist:
i += 1
dist += i * numpy.finfo(type(dist)).eps
e2 = nanshe.imp.segment.remove_too_close_local_maxima(e, dist)
assert (len(points) == len(e.props))
assert (len(points) == len(e2.props))
@nose.plugins.attrib.attr("3D")
def test_remove_too_close_local_maxima_7(self):
space = numpy.array((100, 100, 100))
radii = numpy.array((5, 5))
magnitudes = numpy.array((1, 1.01), dtype=float)
points = numpy.array([[63, 69, 26],
[58, 64, 21]])
masks = nanshe.syn.data.generate_hypersphere_masks(
space, points, radii
)
images = nanshe.syn.data.generate_gaussian_images(
space, points, radii/3.0, magnitudes
) * masks
labels = masks.max(axis=0).astype(int)
e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
dist = scipy.spatial.distance.pdist(points).max()
i = 0
while (dist + i * numpy.finfo(type(dist)).eps) == dist:
i += 1
dist += i * numpy.finfo(type(dist)).eps
e2 = nanshe.imp.segment.remove_too_close_local_maxima(e, dist)
assert (len(points) == len(e.props))
assert (1 == len(e2.props))
assert (points[magnitudes == magnitudes.max()] == e2.props["local_max"][0]).all()
@nose.plugins.attrib.attr("3D")
def test_remove_too_close_local_maxima_8(self):
space = numpy.array((100, 100, 100))
radii = numpy.array((5, 5))
magnitudes = numpy.array((1.01, 1), dtype=float)
points = numpy.array([[63, 69, 26],
[58, 64, 21]])
masks = nanshe.syn.data.generate_hypersphere_masks(
space, points, radii
)
images = nanshe.syn.data.generate_gaussian_images(
space, points, radii/3.0, magnitudes
) * masks
labels = masks.max(axis=0).astype(int)
e = nanshe.imp.segment.ExtendedRegionProps(images.max(axis=0), labels)
dist = scipy.spatial.distance.pdist(points).max()
i = 0
while (dist + i * numpy.finfo(type(dist)).eps) == dist:
i += 1
dist += i * numpy.finfo(type(dist)).eps
e2 = nanshe.imp.segment.remove_too_close_local_maxima(e, dist)
assert (len(points) == len(e.props))
assert (1 == len(e2.props))
assert (points[magnitudes == magnitudes.max()] == e2.props["local_max"][0]).all()
def test_wavelet_thresholding_1(self):
params = {
"significance_threshold" : 3.0,
"wavelet_scale" : 5,
"noise_threshold" : 3.0
}
shape = numpy.array((500, 500))
neuron_centers = numpy.array([[177, 52], [127, 202], [343, 271]])
original_neurons_image = nanshe.syn.data.generate_gaussian_images(shape, neuron_centers, (50.0/3.0,)*len(neuron_centers), (1.0/3.0,)*len(neuron_centers)).sum(axis=0)
original_neurons_mask = (original_neurons_image >= 0.00014218114898827068)
wtt_image, wtt_mask = nanshe.imp.segment.wavelet_thresholding(
original_neurons_image, **params
)
assert (wtt_mask[-2] == original_neurons_mask).all()
assert ((wtt_mask[-1] & original_neurons_mask) == original_neurons_mask).all()
def test_match_regions_properties_1(self):
props = numpy.array(
[
(1, 1990.0, [3.7402010050251255, 127.0, 202.0], 0.9990127357638044, 39.484721299262105),
(2, 1988.0, [3.7399396378269616, 177.0, 52.021126760563384], 0.9990128314664918, 39.49948424388854),
(3, 1990.0, [3.7402010050251255, 343.0, 271.0], 0.9990127357638044, 39.484721299262105)
],
dtype=[
('label', '<i8'),
('area', '<f8'),
('centroid', '<f8', (3,)),
('eccentricity', '<f8'),
('major_axis_length', '<f8')
]
)
params = {
"area": {
"min" : 1990, "max" : 2000
}
}
matches = nanshe.imp.segment.match_regions_properties(props, params)
assert len(matches) == len(props)
assert (matches == numpy.array([ True, False, True])).all()
    def test_wavelet_denoising_1(self):
        """Denoise a sum of three synthetic Gaussian neurons and check the
        recovered masks/images reproduce the analytic originals exactly.
        """
        # Full parameter set forwarded to wavelet_denoising; shape
        # constraints are tuned to accept exactly these synthetic neurons.
        params = {
            "remove_low_intensity_local_maxima" : {
                "percentage_pixels_below_max" : 0
            },
            "wavelet.transform" : {
                "scale" : 5
            },
            "accepted_region_shape_constraints" : {
                "major_axis_length" : {
                    "max" : 25.0,
                    "min" : 0.0
                }
            },
            "accepted_neuron_shape_constraints" : {
                "eccentricity" : {
                    "max" : 0.9,
                    "min" : 0.0
                },
                "area" : {
                    "max" : 600,
                    "min" : 30
                }
            },
            "estimate_noise" : {
                "significance_threshold" : 3.0
            },
            "significant_mask" : {
                "noise_threshold" : 3.0
            },
            "remove_too_close_local_maxima" : {
                "min_local_max_distance" : 100.0
            },
            "use_watershed" : True
        }
        shape = numpy.array((500, 500))
        neuron_centers = numpy.array([[177, 52], [127, 202], [343, 271]])
        # Sum of three Gaussians with spread 50/3 and magnitude 1/3 each.
        original_neuron_image = nanshe.syn.data.generate_gaussian_images(shape, neuron_centers, (50.0/3.0,)*len(neuron_centers), (1.0/3.0,)*len(neuron_centers)).sum(axis=0)
        # Magic threshold chosen so the mask matches the synthetic supports.
        original_neurons_mask = (original_neuron_image >= 0.00014218114898827068)
        neurons = nanshe.imp.segment.wavelet_denoising(original_neuron_image, **params)
        assert (len(neuron_centers) == len(neurons))
        # Union of recovered masks/images must equal the analytic originals.
        assert (original_neurons_mask == neurons["mask"].max(axis=0)).all()
        assert ((original_neurons_mask*original_neuron_image) == neurons["image"].max(axis=0)).all()
    def test_wavelet_denoising_2(self):
        """Denoise a max-projection of three Gaussian neurons with permissive
        shape constraints and compare the recovered per-neuron images to the
        synthetic originals after matching them up by similarity.
        """
        params = {
            "remove_low_intensity_local_maxima" : {
                "percentage_pixels_below_max" : 0
            },
            "wavelet.transform" : {
                "scale" : 5
            },
            "accepted_region_shape_constraints" : {
                "major_axis_length" : {
                    "max" : 150.0,
                    "min" : 0.0
                }
            },
            "accepted_neuron_shape_constraints" : {
                "eccentricity" : {
                    "max" : 0.9,
                    "min" : 0.0
                },
                "area" : {
                    "max" : 10000,
                    "min" : 0
                }
            },
            "estimate_noise" : {
                "significance_threshold" : 3.0
            },
            "significant_mask" : {
                "noise_threshold" : 3.0
            },
            "remove_too_close_local_maxima" : {
                "min_local_max_distance" : 100.0
            },
            "use_watershed" : True
        }
        shape = numpy.array((500, 500))
        neuron_centers = numpy.array([[127, 202], [177, 52], [343, 271]])
        neuron_radii = numpy.array((50.0,)*len(neuron_centers))
        neuron_magnitudes = numpy.array((1.0/3.0,)*len(neuron_centers))
        neuron_spreads = neuron_radii / 3.0
        neuron_images = nanshe.syn.data.generate_gaussian_images(shape, neuron_centers, neuron_spreads, neuron_magnitudes)
        # Truncate each Gaussian at its 3-sigma level (pdf value raised to
        # the number of dimensions) to get compact supports.
        neuron_masks = (neuron_images >= (neuron_magnitudes.max() * scipy.stats.norm.pdf(3 * neuron_spreads.max(), scale=neuron_spreads.max())**len(shape)))
        neuron_images *= neuron_masks
        neurons = nanshe.imp.segment.wavelet_denoising(neuron_images.max(axis=0), **params)
        # Resort neuron image order based on most similar.
        result_neurons_distance = scipy.spatial.distance.cdist(neuron_images.reshape(neurons.shape + (-1,)), neurons["image"].reshape(neurons.shape + (-1,)))
        neuron_centers_old = neuron_centers
        neuron_radii_old = neuron_radii
        neuron_magnitudes_old = neuron_magnitudes
        neuron_images_old = neuron_images
        neuron_masks_old = neuron_masks
        neuron_centers = numpy.zeros(neuron_centers_old.shape, dtype=neuron_centers_old.dtype)
        neuron_radii = numpy.zeros(neuron_radii_old.shape, dtype=neuron_radii_old.dtype)
        neuron_magnitudes = numpy.zeros(neuron_magnitudes_old.shape, dtype=neuron_magnitudes_old.dtype)
        neuron_images = numpy.zeros(neuron_images_old.shape, dtype=neuron_images_old.dtype)
        neuron_masks = numpy.zeros(neuron_masks_old.shape, dtype=neuron_masks_old.dtype)
        # NOTE(review): i1 indexes ground-truth rows while i2 is the closest
        # *result* column; this relies on the match being a permutation —
        # presumably guaranteed by the synthetic setup, but worth confirming.
        for i1, i2 in enumerate(result_neurons_distance.argmin(axis=1)):
            neuron_centers[i1] = neuron_centers_old[i2]
            neuron_radii[i1] = neuron_radii_old[i2]
            neuron_magnitudes[i1] = neuron_magnitudes_old[i2]
            neuron_images[i1] = neuron_images_old[i2]
            neuron_masks[i1] = neuron_masks_old[i2]
        # Drop references to the pre-sort arrays.
        neuron_centers_old = None
        neuron_radii_old = None
        neuron_magnitudes_old = None
        neuron_images_old = None
        neuron_masks_old = None
        assert (len(neuron_centers) == len(neurons))
        # Recovered images agree with the originals to within 1e-4.
        assert (numpy.abs(neurons["image"].max(axis=0) - neuron_images.max(axis=0)).max() < 1.0e-4)
        assert (numpy.abs(neurons["image"] - neuron_images).max() < 1.0e-4)
    @nose.plugins.attrib.attr("3D")
    def test_wavelet_denoising_3(self):
        """3-D analogue of test_wavelet_denoising_2: denoise a max-projection
        of three 3-D Gaussian neurons and compare recovered images to the
        originals after similarity matching.
        """
        params = {
            "remove_low_intensity_local_maxima" : {
                "percentage_pixels_below_max" : 0
            },
            "wavelet.transform" : {
                "scale" : 5
            },
            "accepted_region_shape_constraints" : {
                "major_axis_length" : {
                    "max" : 30.0,
                    "min" : 0.0
                }
            },
            "accepted_neuron_shape_constraints" : {
                "eccentricity" : {
                    "max" : 0.9,
                    "min" : 0.0
                },
                "area" : {
                    "max" : 30000,
                    "min" : 10000
                }
            },
            "estimate_noise" : {
                "significance_threshold" : 3.0
            },
            "significant_mask" : {
                "noise_threshold" : 3.0
            },
            "remove_too_close_local_maxima" : {
                "min_local_max_distance" : 100.0
            },
            "use_watershed" : True
        }
        shape = numpy.array((100, 100, 100))
        neuron_centers = numpy.array([[21, 17, 46], [46, 71, 83], [77, 52, 17]])
        neuron_radii = numpy.array((10.0,)*len(neuron_centers))
        neuron_magnitudes = numpy.array((1.0/3.0,)*len(neuron_centers))
        neuron_spreads = neuron_radii / 3.0
        neuron_images = nanshe.syn.data.generate_gaussian_images(shape, neuron_centers, neuron_spreads, neuron_magnitudes)
        # Truncate each Gaussian at its 3-sigma level for compact supports.
        neuron_masks = (neuron_images >= (neuron_magnitudes.max() * scipy.stats.norm.pdf(3 * neuron_spreads.max(), scale=neuron_spreads.max())**len(shape)))
        neuron_images *= neuron_masks
        neurons = nanshe.imp.segment.wavelet_denoising(neuron_images.max(axis=0), **params)
        # Resort neuron image order based on most similar.
        result_neurons_distance = scipy.spatial.distance.cdist(neuron_images.reshape(neurons.shape + (-1,)), neurons["image"].reshape(neurons.shape + (-1,)))
        neuron_centers_old = neuron_centers
        neuron_radii_old = neuron_radii
        neuron_magnitudes_old = neuron_magnitudes
        neuron_images_old = neuron_images
        neuron_masks_old = neuron_masks
        neuron_centers = numpy.zeros(neuron_centers_old.shape, dtype=neuron_centers_old.dtype)
        neuron_radii = numpy.zeros(neuron_radii_old.shape, dtype=neuron_radii_old.dtype)
        neuron_magnitudes = numpy.zeros(neuron_magnitudes_old.shape, dtype=neuron_magnitudes_old.dtype)
        neuron_images = numpy.zeros(neuron_images_old.shape, dtype=neuron_images_old.dtype)
        neuron_masks = numpy.zeros(neuron_masks_old.shape, dtype=neuron_masks_old.dtype)
        # NOTE(review): relies on the argmin match being a permutation of the
        # ground-truth indices — presumably guaranteed by the synthetic setup.
        for i1, i2 in enumerate(result_neurons_distance.argmin(axis=1)):
            neuron_centers[i1] = neuron_centers_old[i2]
            neuron_radii[i1] = neuron_radii_old[i2]
            neuron_magnitudes[i1] = neuron_magnitudes_old[i2]
            neuron_images[i1] = neuron_images_old[i2]
            neuron_masks[i1] = neuron_masks_old[i2]
        # Drop references to the pre-sort arrays.
        neuron_centers_old = None
        neuron_radii_old = None
        neuron_magnitudes_old = None
        neuron_images_old = None
        neuron_masks_old = None
        assert (len(neuron_centers) == len(neurons))
        # Tighter tolerance than the 2-D case (1e-6 vs 1e-4).
        assert (numpy.abs(neurons["image"].max(axis=0) - neuron_images.max(axis=0)).max() < 1.0e-6)
        assert (numpy.abs(neurons["image"] - neuron_images).max() < 1.0e-6)
def test_extract_neurons_1(self):
image = 5 * numpy.ones((100, 100))
xy = numpy.indices(image.shape)
circle_centers = numpy.array([[25, 25], [74, 74]])
circle_radii = numpy.array([25, 25])
circle_offsets = nanshe.util.xnumpy.expand_view(circle_centers, image.shape) - \
nanshe.util.xnumpy.expand_view(xy, reps_before=len(circle_centers))
circle_offsets_squared = circle_offsets**2
circle_masks = (circle_offsets_squared.sum(axis=1)**.5 < nanshe.util.xnumpy.expand_view(circle_radii, image.shape))
circle_images = circle_masks * image
circle_mask_mean = numpy.zeros((len(circle_masks), image.ndim,))
circle_mask_cov = numpy.zeros((len(circle_masks), image.ndim, image.ndim,))
for circle_mask_i in nanshe.util.iters.irange(len(circle_masks)):
each_circle_mask_points = numpy.array(circle_masks[circle_mask_i].nonzero(), dtype=float)
circle_mask_mean[circle_mask_i] = each_circle_mask_points.mean(axis=1)
circle_mask_cov[circle_mask_i] = numpy.cov(each_circle_mask_points)
neurons = nanshe.imp.segment.extract_neurons(image, circle_masks)
assert (len(circle_masks) == len(neurons))
assert (circle_masks == neurons["mask"]).all()
assert (circle_images == neurons["image"]).all()
assert (numpy.apply_over_axes(numpy.sum, circle_masks, range(1, circle_masks.ndim)) == neurons["area"]).all()
assert (numpy.apply_over_axes(numpy.max, circle_images, range(1, circle_masks.ndim)) == neurons["max_F"]).all()
assert (circle_mask_mean == neurons["gaussian_mean"]).all()
assert (circle_mask_cov == neurons["gaussian_cov"]).all()
assert (neurons["centroid"] == neurons["gaussian_mean"]).all()
@nose.plugins.attrib.attr("3D")
def test_extract_neurons_2(self):
image = 5 * numpy.ones((100, 100, 100))
xyz = numpy.indices(image.shape)
circle_centers = numpy.array([[25, 25, 25], [74, 74, 74]])
circle_radii = numpy.array([25, 25])
circle_offsets = nanshe.util.xnumpy.expand_view(circle_centers, image.shape) - \
nanshe.util.xnumpy.expand_view(xyz, reps_before=len(circle_centers))
circle_offsets_squared = circle_offsets**2
circle_masks = (circle_offsets_squared.sum(axis=1)**.5 < nanshe.util.xnumpy.expand_view(circle_radii, image.shape))
circle_images = circle_masks * image
circle_mask_mean = numpy.zeros((len(circle_masks), image.ndim,))
circle_mask_cov = numpy.zeros((len(circle_masks), image.ndim, image.ndim,))
for circle_mask_i in nanshe.util.iters.irange(len(circle_masks)):
each_circle_mask_points = numpy.array(circle_masks[circle_mask_i].nonzero(), dtype=float)
circle_mask_mean[circle_mask_i] = each_circle_mask_points.mean(axis=1)
circle_mask_cov[circle_mask_i] = numpy.cov(each_circle_mask_points)
neurons = nanshe.imp.segment.extract_neurons(image, circle_masks)
assert (len(circle_masks) == len(neurons))
assert (circle_masks == neurons["mask"]).all()
assert (circle_images == neurons["image"]).all()
assert (numpy.apply_over_axes(numpy.sum, circle_masks, range(1, circle_masks.ndim)) == neurons["area"]).all()
assert (numpy.apply_over_axes(numpy.max, circle_images, range(1, circle_masks.ndim)) == neurons["max_F"]).all()
assert (circle_mask_mean == neurons["gaussian_mean"]).all()
assert (circle_mask_cov == neurons["gaussian_cov"]).all()
assert (neurons["centroid"] == neurons["gaussian_mean"]).all()
def test_fuse_neurons_1(self):
fraction_mean_neuron_max_threshold = 0.01
image = 5 * numpy.ones((100, 100))
xy = numpy.indices(image.shape)
circle_centers = numpy.array([[25, 25], [74, 74]])
circle_radii = numpy.array([25, 25])
circle_offsets = nanshe.util.xnumpy.expand_view(circle_centers, image.shape) - \
nanshe.util.xnumpy.expand_view(xy, reps_before=len(circle_centers))
circle_offsets_squared = circle_offsets**2
circle_masks = (circle_offsets_squared.sum(axis=1)**.5 < nanshe.util.xnumpy.expand_view(circle_radii, image.shape))
circle_mask_mean = numpy.zeros((len(circle_masks), image.ndim,))
circle_mask_cov = numpy.zeros((len(circle_masks), image.ndim, image.ndim,))
for circle_mask_i in nanshe.util.iters.irange(len(circle_masks)):
each_circle_mask_points = numpy.array(circle_masks[circle_mask_i].nonzero(), dtype=float)
circle_mask_mean[circle_mask_i] = each_circle_mask_points.mean(axis=1)
circle_mask_cov[circle_mask_i] = numpy.cov(each_circle_mask_points)
neurons = nanshe.imp.segment.extract_neurons(image, circle_masks)
fused_neurons = nanshe.imp.segment.fuse_neurons(neurons[0], neurons[1],
fraction_mean_neuron_max_threshold)
assert (neurons["mask"].sum(axis=0) == fused_neurons["mask"]).all()
assert (neurons["image"].mean(axis=0) == fused_neurons["image"]).all()
assert (numpy.array(neurons["area"].sum()) == fused_neurons["area"])
assert (fused_neurons["image"].max() == fused_neurons["max_F"])
assert (neurons["gaussian_mean"].mean(axis=0) == fused_neurons["gaussian_mean"]).all()
assert (fused_neurons["centroid"] == fused_neurons["gaussian_mean"]).all()
@nose.plugins.attrib.attr("3D")
def test_fuse_neurons_2(self):
fraction_mean_neuron_max_threshold = 0.01
image = 5 * numpy.ones((100, 100, 100))
xy = numpy.indices(image.shape)
circle_centers = numpy.array([[25, 25, 25], [74, 74, 74]])
circle_radii = numpy.array([25, 25])
circle_offsets = nanshe.util.xnumpy.expand_view(circle_centers, image.shape) - \
nanshe.util.xnumpy.expand_view(xy, reps_before=len(circle_centers))
circle_offsets_squared = circle_offsets**2
circle_masks = (circle_offsets_squared.sum(axis=1)**.5 < nanshe.util.xnumpy.expand_view(circle_radii, image.shape))
circle_mask_mean = numpy.zeros((len(circle_masks), image.ndim,))
circle_mask_cov = numpy.zeros((len(circle_masks), image.ndim, image.ndim,))
for circle_mask_i in nanshe.util.iters.irange(len(circle_masks)):
each_circle_mask_points = numpy.array(circle_masks[circle_mask_i].nonzero(), dtype=float)
circle_mask_mean[circle_mask_i] = each_circle_mask_points.mean(axis=1)
circle_mask_cov[circle_mask_i] = numpy.cov(each_circle_mask_points)
neurons = nanshe.imp.segment.extract_neurons(image, circle_masks)
fused_neurons = nanshe.imp.segment.fuse_neurons(neurons[0], neurons[1],
fraction_mean_neuron_max_threshold)
assert (neurons["mask"].sum(axis=0) == fused_neurons["mask"]).all()
assert (neurons["image"].mean(axis=0) == fused_neurons["image"]).all()
assert (numpy.array(neurons["area"].sum()) == fused_neurons["area"])
assert (fused_neurons["image"].max() == fused_neurons["max_F"])
assert (neurons["gaussian_mean"].mean(axis=0) == fused_neurons["gaussian_mean"]).all()
assert (fused_neurons["centroid"] == fused_neurons["gaussian_mean"]).all()
def test_merge_neuron_sets_1(self):
alignment_min_threshold = 0.6
overlap_min_threshold = 0.6
fuse_neurons = {"fraction_mean_neuron_max_threshold" : 0.01}
image = 5 * numpy.ones((100, 100))
xy = numpy.indices(image.shape)
circle_centers = numpy.array([[25, 25], [74, 74]])
circle_radii = numpy.array([25, 25])
circle_offsets = nanshe.util.xnumpy.expand_view(circle_centers, image.shape) - \
nanshe.util.xnumpy.expand_view(xy, reps_before=len(circle_centers))
circle_offsets_squared = circle_offsets**2
circle_masks = (circle_offsets_squared.sum(axis=1)**.5 < nanshe.util.xnumpy.expand_view(circle_radii, image.shape))
neurons = nanshe.imp.segment.extract_neurons(image, circle_masks)
merged_neurons = nanshe.imp.segment.merge_neuron_sets(neurons[:1], neurons[1:], alignment_min_threshold, overlap_min_threshold, fuse_neurons=fuse_neurons)
assert (len(neurons) == len(circle_centers))
assert (neurons == merged_neurons).all()
def test_merge_neuron_sets_2(self):
alignment_min_threshold = 0.6
overlap_min_threshold = 0.6
fuse_neurons = {"fraction_mean_neuron_max_threshold" : 0.01}
image = 5 * numpy.ones((100, 100))
xy = numpy.indices(image.shape)
circle_centers = numpy.array([[25, 25]])
circle_radii = numpy.array([25])
circle_offsets = nanshe.util.xnumpy.expand_view(circle_centers, image.shape) - \
nanshe.util.xnumpy.expand_view(xy, reps_before=len(circle_centers))
circle_offsets_squared = circle_offsets**2
circle_masks = (circle_offsets_squared.sum(axis=1)**.5 < nanshe.util.xnumpy.expand_view(circle_radii, image.shape))
neurons = nanshe.imp.segment.extract_neurons(image, circle_masks)
merged_neurons = nanshe.imp.segment.merge_neuron_sets(neurons, neurons, alignment_min_threshold, overlap_min_threshold, fuse_neurons=fuse_neurons)
assert (len(neurons) == len(circle_centers))
assert (neurons == merged_neurons).all()
@nose.plugins.attrib.attr("3D")
def test_merge_neuron_sets_3(self):
alignment_min_threshold = 0.6
overlap_min_threshold = 0.6
fuse_neurons = {"fraction_mean_neuron_max_threshold" : 0.01}
image = 5 * numpy.ones((100, 100, 100))
xyz = numpy.indices(image.shape)
circle_centers = numpy.array([[25, 25, 25], [74, 74, 74]])
circle_radii = numpy.array([25, 25])
circle_offsets = nanshe.util.xnumpy.expand_view(circle_centers, image.shape) - \
nanshe.util.xnumpy.expand_view(xyz, reps_before=len(circle_centers))
circle_offsets_squared = circle_offsets**2
circle_masks = (circle_offsets_squared.sum(axis=1)**.5 < nanshe.util.xnumpy.expand_view(circle_radii, image.shape))
neurons = nanshe.imp.segment.extract_neurons(image, circle_masks)
merged_neurons = nanshe.imp.segment.merge_neuron_sets(neurons[:1], neurons[1:], alignment_min_threshold, overlap_min_threshold, fuse_neurons=fuse_neurons)
assert (len(neurons) == len(circle_centers))
assert (neurons == merged_neurons).all()
@nose.plugins.attrib.attr("3D")
def test_merge_neuron_sets_4(self):
alignment_min_threshold = 0.6
overlap_min_threshold = 0.6
fuse_neurons = {"fraction_mean_neuron_max_threshold" : 0.01}
image = 5 * numpy.ones((100, 100, 100))
xyz = numpy.indices(image.shape)
circle_centers = numpy.array([[25, 25, 25]])
circle_radii = numpy.array([25])
circle_offsets = nanshe.util.xnumpy.expand_view(circle_centers, image.shape) - \
nanshe.util.xnumpy.expand_view(xyz, reps_before=len(circle_centers))
circle_offsets_squared = circle_offsets**2
circle_masks = (circle_offsets_squared.sum(axis=1)**.5 < nanshe.util.xnumpy.expand_view(circle_radii, image.shape))
neurons = nanshe.imp.segment.extract_neurons(image, circle_masks)
merged_neurons = nanshe.imp.segment.merge_neuron_sets(neurons, neurons, alignment_min_threshold, overlap_min_threshold, fuse_neurons=fuse_neurons)
assert (len(neurons) == len(circle_centers))
assert (neurons == merged_neurons).all()
    def test_postprocess_data_1(self):
        """Run the full post-processing pipeline on three basis images built
        from six synthetic spheres and verify every original center is
        recovered as a neuron local maximum.
        """
        # Pipeline configuration: per-basis wavelet denoising followed by a
        # merge of the per-basis neuron sets.
        config = {
            "wavelet_denoising" : {
                "remove_low_intensity_local_maxima" : {
                    "percentage_pixels_below_max" : 0.0
                },
                "wavelet.transform" : {
                    "scale" : 4
                },
                "accepted_region_shape_constraints" : {
                    "major_axis_length" : {
                        "max" : 25.0,
                        "min" : 0.0
                    }
                },
                "accepted_neuron_shape_constraints" : {
                    "eccentricity" : {
                        "max" : 0.9,
                        "min" : 0.0
                    },
                    "area" : {
                        "max" : 600,
                        "min" : 30
                    }
                },
                "estimate_noise" : {
                    "significance_threshold" : 3.0
                },
                "significant_mask" : {
                    "noise_threshold" : 3.0
                },
                "remove_too_close_local_maxima" : {
                    "min_local_max_distance" : 10.0
                },
                "use_watershed" : True
            },
            "merge_neuron_sets" : {
                "alignment_min_threshold" : 0.6,
                "fuse_neurons" : {
                    "fraction_mean_neuron_max_threshold" : 0.01
                },
                "overlap_min_threshold" : 0.6
            }
        }
        space = numpy.array([100, 100])
        radii = numpy.array([7, 6, 6, 6, 7, 6])
        magnitudes = numpy.array([15, 16, 15, 17, 16, 16])
        points = numpy.array([[30, 24],
                              [59, 65],
                              [21, 65],
                              [13, 12],
                              [72, 16],
                              [45, 32]])
        masks = nanshe.syn.data.generate_hypersphere_masks(space, points, radii)
        images = nanshe.syn.data.generate_gaussian_images(space, points, radii/3.0, magnitudes) * masks
        # Distribute the six spheres across three basis images.
        bases_indices = [[1,3,4], [0,2], [5]]
        bases_masks = numpy.zeros((len(bases_indices),) + masks.shape[1:], dtype=masks.dtype)
        bases_images = numpy.zeros((len(bases_indices),) + images.shape[1:], dtype=images.dtype)
        for i, each_basis_indices in enumerate(bases_indices):
            bases_masks[i] = masks[list(each_basis_indices)].max(axis=0)
            bases_images[i] = images[list(each_basis_indices)].max(axis=0)
        neurons = nanshe.imp.segment.postprocess_data(bases_images, **config)
        assert (len(points) == len(neurons))
        # Coordinates where each neuron's image attains its max_F value.
        neuron_max_matches = nanshe.util.xnumpy.all_permutations_equal(neurons["max_F"], neurons["image"])
        neuron_max_matches = neuron_max_matches.max(axis=0).max(axis=0)
        neuron_points = numpy.array(neuron_max_matches.nonzero()).T.copy()
        # Greedy one-to-one matching of recovered peaks to original centers.
        matched = dict()
        unmatched_points = numpy.arange(len(points))
        for i in nanshe.util.iters.irange(len(neuron_points)):
            new_unmatched_points = []
            for j in unmatched_points:
                if not (neuron_points[i] == points[j]).all():
                    new_unmatched_points.append(j)
                else:
                    matched[i] = j
            unmatched_points = new_unmatched_points
        # Every original center must have been matched by some neuron peak.
        assert (len(unmatched_points) == 0)
def test_postprocess_data_2(self):
config = {
"wavelet_denoising" : {
"remove_low_intensity_local_maxima" : {
"percentage_pixels_below_max" : 0.0
},
"wavelet.transform" : {
"scale" : 4
},
"accepted_region_shape_constraints" : {
"major_axis_length" : {
"max" : 25.0,
"min" : 0.0
}
},
"accepted_neuron_shape_constraints" : {
"eccentricity" : {
"max" : 0.9,
"min" : 0.0
},
"area" : {
"max" : 600,
"min" : 30
}
},
"estimate_noise" : {
"significance_threshold" : 3.0
},
"significant_mask" : {
"noise_threshold" : 3.0
},
"remove_too_close_local_maxima" : {
"min_local_max_distance" : 10.0
},
"use_watershed" : True
},
"merge_neuron_sets" : {
"alignment_min_threshold" : 0.6,
"fuse_neurons" : {
"fraction_mean_neuron_max_threshold" : 0.01
},
"overlap_min_threshold" : 0.6
}
}
space = numpy.array([100, 100])
radii = numpy.array([25])
magnitudes = numpy.array([15])
points = numpy.array([[25, 25]])
masks = nanshe.syn.data.generate_hypersphere_masks(space, numpy.vstack([points, points]), numpy.hstack([radii, radii]))
images = nanshe.syn.data.generate_gaussian_images(space, numpy.vstack([points, points]), numpy.hstack([radii, radii])/3.0, numpy.hstack([magnitudes, magnitudes])) * masks
print(masks.shape)
bases_indices = [[0], [1]]
bases_masks = numpy.zeros((len(bases_indices),) + masks.shape[1:], dtype=masks.dtype)
bases_images = numpy.zeros((len(bases_indices),) + images.shape[1:], dtype=images.dtype)
for i, each_basis_indices in enumerate(bases_indices):
bases_masks[i] = masks[list(each_basis_indices)].max(axis=0)
bases_images[i] = images[list(each_basis_indices)].max(axis=0)
neurons = nanshe.imp.segment.postprocess_data(bases_images, **config)
assert (len(points) == len(neurons))
neuron_max_matches = nanshe.util.xnumpy.all_permutations_equal(neurons["max_F"], neurons["image"])
neuron_max_matches = neuron_max_matches.max(axis=0).max(axis=0)
neuron_points = numpy.array(neuron_max_matches.nonzero()).T.copy()
matched = dict()
unmatched_points = numpy.arange(len(points))
for i in nanshe.util.iters.irange(len(neuron_points)):
new_unmatched_points = []
for j in unmatched_points:
if not (neuron_points[i] == points[j]).all():
new_unmatched_points.append(j)
else:
matched[i] = j
unmatched_points = new_unmatched_points
assert (len(unmatched_points) == 0)
    @nose.plugins.attrib.attr("3D")
    def test_postprocess_data_3(self):
        """3-D analogue of test_postprocess_data_1: recover six synthetic
        spheres distributed across three basis volumes.
        """
        # Same pipeline configuration as the 2-D test, with shape/area and
        # distance limits scaled up for 3-D regions.
        config = {
            "wavelet_denoising" : {
                "remove_low_intensity_local_maxima" : {
                    "percentage_pixels_below_max" : 0.0
                },
                "wavelet.transform" : {
                    "scale" : 4
                },
                "accepted_region_shape_constraints" : {
                    "major_axis_length" : {
                        "max" : 30.0,
                        "min" : 0.0
                    }
                },
                "accepted_neuron_shape_constraints" : {
                    "eccentricity" : {
                        "max" : 0.9,
                        "min" : 0.0
                    },
                    "area" : {
                        "max" : 6000.0,
                        "min" : 1000.0
                    }
                },
                "estimate_noise" : {
                    "significance_threshold" : 3.0
                },
                "significant_mask" : {
                    "noise_threshold" : 3.0
                },
                "remove_too_close_local_maxima" : {
                    "min_local_max_distance" : 20.0
                },
                "use_watershed" : True
            },
            "merge_neuron_sets" : {
                "alignment_min_threshold" : 0.6,
                "fuse_neurons" : {
                    "fraction_mean_neuron_max_threshold" : 0.01
                },
                "overlap_min_threshold" : 0.6
            }
        }
        space = numpy.array([100, 100, 100])
        radii = numpy.array([7, 6, 6, 6, 7, 6])
        magnitudes = numpy.array([15, 16, 15, 17, 16, 16])
        points = numpy.array([[30, 24, 68],
                              [59, 65, 47],
                              [21, 65, 21],
                              [13, 12, 21],
                              [72, 16, 67],
                              [45, 32, 27]])
        masks = nanshe.syn.data.generate_hypersphere_masks(space, points, radii)
        images = nanshe.syn.data.generate_gaussian_images(space, points, radii/3.0, magnitudes) * masks
        # Distribute the six spheres across three basis volumes.
        bases_indices = [[1,3,4], [0,2], [5]]
        bases_masks = numpy.zeros((len(bases_indices),) + masks.shape[1:], dtype=masks.dtype)
        bases_images = numpy.zeros((len(bases_indices),) + images.shape[1:], dtype=images.dtype)
        for i, each_basis_indices in enumerate(bases_indices):
            bases_masks[i] = masks[list(each_basis_indices)].max(axis=0)
            bases_images[i] = images[list(each_basis_indices)].max(axis=0)
        neurons = nanshe.imp.segment.postprocess_data(bases_images, **config)
        assert (len(points) == len(neurons))
        # Coordinates where each neuron's image attains its max_F value.
        neuron_max_matches = nanshe.util.xnumpy.all_permutations_equal(neurons["max_F"], neurons["image"])
        neuron_max_matches = neuron_max_matches.max(axis=0).max(axis=0)
        neuron_points = numpy.array(neuron_max_matches.nonzero()).T.copy()
        # Greedy one-to-one matching of recovered peaks to original centers.
        matched = dict()
        unmatched_points = numpy.arange(len(points))
        for i in nanshe.util.iters.irange(len(neuron_points)):
            new_unmatched_points = []
            for j in unmatched_points:
                if not (neuron_points[i] == points[j]).all():
                    new_unmatched_points.append(j)
                else:
                    matched[i] = j
            unmatched_points = new_unmatched_points
        # Every original center must have been matched by some neuron peak.
        assert (len(unmatched_points) == 0)
    @nose.plugins.attrib.attr("3D")
    def test_postprocess_data_4(self):
        # Runs postprocess_data on two bases that contain the *same* single
        # large sphere, and checks that the merge step fuses the duplicates
        # into exactly one extracted neuron located at the ground-truth point.
        # Postprocessing config: wavelet denoising per basis, then merging of
        # the per-basis neuron sets.  Shape thresholds are sized for one
        # radius-25 sphere in a 100^3 volume.
        config = {
            "wavelet_denoising" : {
                "remove_low_intensity_local_maxima" : {
                    "percentage_pixels_below_max" : 0.0
                },
                "wavelet.transform" : {
                    "scale" : 4
                },
                "accepted_region_shape_constraints" : {
                    "major_axis_length" : {
                        "max" : 30.0,
                        "min" : 0.0
                    }
                },
                "accepted_neuron_shape_constraints" : {
                    "eccentricity" : {
                        "max" : 0.9,
                        "min" : 0.0
                    },
                    "area" : {
                        "max" : 70000.0,
                        "min" : 10000.0
                    }
                },
                "estimate_noise" : {
                    "significance_threshold" : 3.0
                },
                "significant_mask" : {
                    "noise_threshold" : 3.0
                },
                "remove_too_close_local_maxima" : {
                    "min_local_max_distance" : 20.0
                },
                "use_watershed" : True
            },
            "merge_neuron_sets" : {
                "alignment_min_threshold" : 0.6,
                "fuse_neurons" : {
                    "fraction_mean_neuron_max_threshold" : 0.01
                },
                "overlap_min_threshold" : 0.6
            }
        }
        # Synthetic ground truth: one sphere, duplicated (vstack/hstack) so
        # that each of the two bases below receives an identical copy.
        space = numpy.array([100, 100, 100])
        radii = numpy.array([25])
        magnitudes = numpy.array([15])
        points = numpy.array([[25, 25, 25]])
        masks = nanshe.syn.data.generate_hypersphere_masks(space, numpy.vstack([points, points]), numpy.hstack([radii, radii]))
        images = nanshe.syn.data.generate_gaussian_images(space, numpy.vstack([points, points]), numpy.hstack([radii, radii])/3.0, numpy.hstack([magnitudes, magnitudes])) * masks
        # Basis 0 gets the first copy, basis 1 the second.
        bases_indices = [[0], [1]]
        bases_masks = numpy.zeros((len(bases_indices),) + masks.shape[1:], dtype=masks.dtype)
        bases_images = numpy.zeros((len(bases_indices),) + images.shape[1:], dtype=images.dtype)
        # Each basis is the max-projection of its member masks/images.
        for i, each_basis_indices in enumerate(bases_indices):
            bases_masks[i] = masks[list(each_basis_indices)].max(axis=0)
            bases_images[i] = images[list(each_basis_indices)].max(axis=0)
        neurons = nanshe.imp.segment.postprocess_data(bases_images, **config)
        # The duplicated sphere must have been merged into a single neuron.
        assert (len(points) == len(neurons))
        # Locate each neuron's peak ("max_F") inside its own image, reducing
        # over the permutation axes to get a boolean volume of peak positions.
        neuron_max_matches = nanshe.util.xnumpy.all_permutations_equal(neurons["max_F"], neurons["image"])
        neuron_max_matches = neuron_max_matches.max(axis=0).max(axis=0)
        neuron_points = numpy.array(neuron_max_matches.nonzero()).T.copy()
        # Greedily match extracted peak coordinates to ground-truth points;
        # every ground-truth point must be accounted for.
        matched = dict()
        unmatched_points = numpy.arange(len(points))
        for i in nanshe.util.iters.irange(len(neuron_points)):
            new_unmatched_points = []
            for j in unmatched_points:
                if not (neuron_points[i] == points[j]).all():
                    new_unmatched_points.append(j)
                else:
                    matched[i] = j
            unmatched_points = new_unmatched_points
        assert (len(unmatched_points) == 0)
|
DudLab/nanshe
|
tests/test_nanshe/test_imp/test_segment.py
|
Python
|
bsd-3-clause
| 110,829
|
from itertools import chain
from nineml.user.component import Property, Component, Prototype, Definition
from nineml.exceptions import (
NineMLUsageError, NineMLNameError, name_error, NineMLUnitMismatchError)
from nineml.base import (
ContainerObject, DynamicPortsObject)
class Initial(Property):
    """
    Represents the initial state of a state variable

    An Initial is structurally identical to a Property (a name paired with
    a quantity); it differs only in that it is attached to a state variable
    rather than to a parameter.
    """
    nineml_type = "Initial"

    @classmethod
    def _child_accessor_name(cls):
        # Name used by the container machinery to derive the accessor names
        # (e.g. ``initial_value``/``initial_values`` on DynamicsProperties).
        return 'initial_value'
class DynamicsProperties(Component, DynamicPortsObject):
    """
    A DynamicsProperties may be regarded as a parameterized instance of a
    nineml.abstraction.Dynamics.

    Parameters
    ----------
    name : str
        a name for the component_class.
    definition : Definition
        the URL of an abstraction layer component_class class definition,
        a Definition or a Prototype instance.
    properties : List[Property]|Dict[str,Quantity]
        a dictionary containing (value, units) pairs or a list of Property
        objects for the component_class's properties.
    initial_values : List[Initial]|Dict[str,Quantity]
        a dictionary containing (value, units) pairs or a list of Initial
        objects for the component_class's state variables.
    initial_regime : str | None
        name of the regime the dynamics starts in; if None the regime with
        the most time derivatives is chosen (see the initial_regime setter).
    check_initial_values : bool
        whether to validate the supplied initial values against the state
        variables of the component class at construction time.
    """
    nineml_type = 'DynamicsProperties'
    nineml_children = Component.nineml_children + (Initial,)

    def __init__(self, name, definition, properties=None, initial_values=None,
                 initial_regime=None,
                 check_initial_values=False):
        # None stands in for an empty mapping to avoid the shared
        # mutable-default-argument pitfall; explicitly passing {} behaves
        # exactly as before.
        if properties is None:
            properties = {}
        if initial_values is None:
            initial_values = {}
        super(DynamicsProperties, self).__init__(
            name=name, definition=definition, properties=properties)
        if isinstance(initial_values, dict):
            # Normalise a {name: quantity} mapping into Initial objects.
            initial_values = (Initial(name, qty)
                              for name, qty in initial_values.items())
        self.add(*initial_values)
        if check_initial_values:
            self.check_initial_values()
        self.initial_regime = initial_regime

    @property
    def component_classes(self):
        """
        Returns the component class wrapped in an iterator for duck typing
        with Selection objects
        """
        return iter([self.component_class])

    def flatten(self, name=None):
        """Return a clone of self with its definitions cloned as well."""
        return self.clone(name=name, clone_definitions=True)

    def get_nineml_type(self):
        return self.nineml_type

    def check_initial_values(self):
        """
        Checks that every state variable of the component class has an
        initial value whose dimension matches the variable's dimension.

        Raises
        ------
        NineMLUsageError
            If an initial value is missing or its dimension differs from
            that of the corresponding state variable.
        """
        for var in self.definition.component_class.state_variables:
            try:
                initial_value = self.initial_value(var.name)
            # initial_value() raises NineMLNameError (via the @name_error
            # decorator) for missing names; KeyError is retained for
            # backwards compatibility with the previous behaviour.
            except (KeyError, NineMLNameError):
                raise NineMLUsageError(
                    "Initial value not specified for {}".format(var.name))
            initial_units = initial_value.units
            initial_dimension = initial_units.dimension
            var_dimension = var.dimension
            if initial_dimension != var_dimension:
                raise NineMLUsageError(
                    "Dimensions for '{}' initial value, {}, in '{}' don't "
                    "match that of its definition in '{}', {}."
                    .format(var.name, initial_dimension, self.name,
                            self.component_class.name, var_dimension))

    def __getinitargs__(self):
        # Support for pickling.
        return (self.name, self.definition, self._properties,
                self._initial_values, self._url)

    def __getitem__(self, name):
        try:
            return self.initial_value(name).quantity
        except NineMLNameError:
            # BUG FIX: the fallback lookup previously discarded its result
            # (missing 'return'), so subscript access to a property (rather
            # than an initial value) always returned None.
            return super(DynamicsProperties, self).__getitem__(name)

    def __setitem__(self, name, qty):
        try:
            self.initial_value(name).quantity = qty
        except NineMLNameError:
            super(DynamicsProperties, self).__setitem__(name, qty)

    @property
    def initial_values(self):
        # Iterate over the initial values, falling back to the prototype's
        # values for any names not overridden locally.
        if isinstance(self.definition, Prototype):
            comp = self.definition.component
            return (
                (self._initial_values[n]
                 if n in self._initial_values else comp.initial_value(n))
                for n in set(chain(self._initial_values,
                                   comp.initial_value_names)))
        else:
            return iter(self._initial_values.values())

    @name_error
    def initial_value(self, name):
        try:
            return self._initial_values[name]
        except KeyError:
            try:
                # Fall back to the prototype's initial value, if any.
                return self.definition.component.initial_value(name)
            except AttributeError:
                raise NineMLNameError(
                    "No initial value named '{}' in component class"
                    .format(name))

    @property
    def initial_regime(self):
        return self._initial_regime

    @initial_regime.setter
    def initial_regime(self, regime_name):
        if regime_name is None:
            # If regime not provided pick the regime with the most time derivs.
            # this is a bit of a hack until the state-layer is implemented
            regime_name = max(self.component_class.regimes,
                              key=lambda x: x.num_time_derivatives).name
        elif regime_name not in self.component_class.regime_names:
            raise NineMLUsageError(
                "Specified initial regime, '{}', is not a name of a regime in "
                "'{}' Dynamics class (available '{}')"
                .format(regime_name, self.component_class.name,
                        "', '".join(self.component_class.regime_names)))
        self._initial_regime = regime_name

    def set(self, prop):
        """
        Sets a property or, failing that, an initial value of the same name
        after checking its dimension against the state variable's.
        """
        try:
            super(DynamicsProperties, self).set(prop)
        except NineMLNameError:
            try:
                state_variable = self.component_class.state_variable(prop.name)
            except NineMLNameError:
                raise NineMLNameError(
                    "'{}' Dynamics does not have a Parameter or StateVariable "
                    "named '{}'".format(self.component_class.name, prop.name))
            if prop.units.dimension != state_variable.dimension:
                raise NineMLUnitMismatchError(
                    "Dimensions for '{}' initial value ('{}') don't match that"
                    " of state variable in component class ('{}')."
                    .format(prop.name, prop.units.dimension.name,
                            state_variable.dimension.name))
            self._initial_values[prop.name] = prop

    @property
    def initial_value_names(self):
        if isinstance(self.definition, Prototype):
            return (p.name for p in self.initial_values)
        else:
            return iter(self._initial_values.keys())

    @property
    def num_initial_values(self):
        return len(list(self.initial_values))

    @property
    def attributes_with_units(self):
        # Include the initial values themselves plus the properties of any
        # random distributions used in their values.
        return chain(
            super(DynamicsProperties, self).attributes_with_units,
            self.initial_values, *[
                v.value.distribution.properties for v in self.initial_values
                if v.value.is_random()])

    def elements(self, local=False):
        """
        Overrides the elements method in ContainerObject base class to allow
        for "local" kwarg to only iterate the members that are declared in
        this instance (i.e. not the prototype)
        """
        if local:
            return chain(iter(self._properties.values()),
                         iter(self._initial_values.values()))
        else:
            return ContainerObject.elements(self)

    def serialize_node(self, node, **options):
        super(DynamicsProperties, self).serialize_node(node, **options)
        node.children(iter(self._initial_values.values()), **options)

    @classmethod
    def unserialize_node(cls, node, **options):
        name = node.attr('name', **options)
        definition = node.child((Definition, Prototype), **options)
        properties = node.children(Property, **options)
        initial_values = node.children(Initial, **options)
        return cls(name, definition, properties=properties,
                   initial_values=initial_values)

    def serialize_node_v1(self, node, **options):
        self.serialize_node(node, **options)

    @classmethod
    def unserialize_node_v1(cls, node, **options):
        return cls.unserialize_node(node, **options)

    # -- Port interface: delegated wholesale to the component class. --------

    def analog_receive_port(self, name):
        return self.component_class.analog_receive_port(name)

    @property
    def analog_receive_ports(self):
        return self.component_class.analog_receive_ports

    @property
    def analog_receive_port_names(self):
        return self.component_class.analog_receive_port_names

    @property
    def num_analog_receive_ports(self):
        return self.component_class.num_analog_receive_ports

    def analog_send_port(self, name):
        return self.component_class.analog_send_port(name)

    @property
    def analog_send_ports(self):
        return self.component_class.analog_send_ports

    @property
    def analog_send_port_names(self):
        return self.component_class.analog_send_port_names

    @property
    def num_analog_send_ports(self):
        return self.component_class.num_analog_send_ports

    def analog_reduce_port(self, name):
        return self.component_class.analog_reduce_port(name)

    @property
    def analog_reduce_ports(self):
        return self.component_class.analog_reduce_ports

    @property
    def analog_reduce_port_names(self):
        return self.component_class.analog_reduce_port_names

    @property
    def num_analog_reduce_ports(self):
        return self.component_class.num_analog_reduce_ports

    def event_receive_port(self, name):
        return self.component_class.event_receive_port(name)

    @property
    def event_receive_ports(self):
        return self.component_class.event_receive_ports

    @property
    def event_receive_port_names(self):
        return self.component_class.event_receive_port_names

    @property
    def num_event_receive_ports(self):
        return self.component_class.num_event_receive_ports

    def event_send_port(self, name):
        return self.component_class.event_send_port(name)

    @property
    def event_send_ports(self):
        return self.component_class.event_send_ports

    @property
    def event_send_port_names(self):
        return self.component_class.event_send_port_names

    @property
    def num_event_send_ports(self):
        return self.component_class.num_event_send_ports
|
INCF/lib9ML
|
nineml/user/dynamics.py
|
Python
|
bsd-3-clause
| 10,378
|
#------------------------------------------------------------------------------
#
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
#
#------------------------------------------------------------------------------
""" A status bar manager realizes itself in a status bar control.
"""
# Major package imports.
import wx
# Enthought library imports.
from traits.api import Any, HasTraits, List, Property, Str, Unicode
class StatusBarManager(HasTraits):
    """ A status bar manager realizes itself in a status bar control. """

    # The message displayed in the first field of the status bar.
    message = Property

    # The messages to be displayed in the status bar fields.
    messages = List(Unicode)

    # The toolkit-specific control that represents the status bar.
    status_bar = Any

    ###########################################################################
    # 'StatusBarManager' interface.
    ###########################################################################

    def create_status_bar(self, parent):
        """ Creates a status bar. """

        if self.status_bar is None:
            self.status_bar = wx.StatusBar(parent)
            self.status_bar._pyface_control = self
            if len(self.messages) > 1:
                self.status_bar.SetFieldsCount(len(self.messages))
                self._set_status_texts()
            else:
                self.status_bar.SetStatusText(self.message)

        return self.status_bar

    ###########################################################################
    # Private interface.
    ###########################################################################

    def _set_status_texts(self):
        """ Pushes each message into its corresponding status bar field.

        This loop was previously duplicated in three places
        (create_status_bar, _messages_changed, _messages_items_changed).
        """

        for index, text in enumerate(self.messages):
            self.status_bar.SetStatusText(text, index)

    ###########################################################################
    # Property handlers.
    ###########################################################################

    def _get_message(self):
        """ Property getter: the text of the first field ('' if empty). """

        if len(self.messages) > 0:
            message = self.messages[0]
        else:
            message = ''

        return message

    def _set_message(self, value):
        """ Property setter: updates the first field and fires the trait
        change event for 'message'. """

        if len(self.messages) > 0:
            old = self.messages[0]
            self.messages[0] = value
        else:
            old = ''
            self.messages.append(value)

        self.trait_property_changed('message', old, value)

        return

    ###########################################################################
    # Trait event handlers.
    ###########################################################################

    def _messages_changed(self):
        """ Sets the text displayed on the status bar. """

        if self.status_bar is not None:
            self._set_status_texts()

        return

    def _messages_items_changed(self):
        """ Sets the text displayed on the status bar. """

        if self.status_bar is not None:
            self._set_status_texts()

        return
|
geggo/pyface
|
pyface/ui/wx/action/status_bar_manager.py
|
Python
|
bsd-3-clause
| 3,465
|
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utilities to process compressed files."""
import contextlib
import logging
import os
import struct
import tempfile
import zipfile
@contextlib.contextmanager
def UnzipToTemp(zip_path, inner_path):
  """Extract a |inner_path| from a |zip_path| file to an auto-deleted temp file.

  Args:
    zip_path: Path to the zip file.
    inner_path: Path to the file within |zip_path| to extract.

  Yields:
    The path of the temp created (and auto-deleted when context exits).
  """
  _, suffix = os.path.splitext(inner_path)
  # Can't use NamedTemporaryFile() because it deletes via __del__, which will
  # trigger in both this and the fork()'ed processes.
  # mkstemp() runs *before* the try block so that a failure here cannot reach
  # the cleanup code with |temp_file| unbound (the previous version raised
  # NameError from its finally clause in that case).
  fd, temp_file = tempfile.mkstemp(suffix=suffix)
  try:
    try:
      logging.debug('Extracting %s', inner_path)
      with zipfile.ZipFile(zip_path) as z:
        os.write(fd, z.read(inner_path))
    finally:
      # Always close the descriptor, even if reading the zip entry raises
      # (the previous version leaked |fd| on that path).
      os.close(fd)
    yield temp_file
  finally:
    os.unlink(temp_file)
def ReadZipInfoExtraFieldLength(zip_file, zip_info):
  """Reads the value of |extraLength| from |zip_info|'s local file header.

  |zip_info| has an |extra| field, but it's read from the central directory.
  Android's zipalign tool sets the extra field only in local file headers.
  """
  # The local file header layout is fixed: the 16-bit little-endian
  # extra-field length sits 28 bytes past the start of the header.
  # Refer to https://en.wikipedia.org/wiki/Zip_(file_format)#File_headers
  extra_length_offset = zip_info.header_offset + 28
  zip_file.fp.seek(extra_length_offset)
  (extra_length,) = struct.unpack('<H', zip_file.fp.read(2))
  return extra_length
def MeasureApkSignatureBlock(zip_file):
  """Measures the size of the v2 / v3 signing block.

  Refer to: https://source.android.com/security/apksigning/v2
  """
  # Locate the "end of central directory" (EOCD) record, which sits at the
  # very end of the archive, just before any archive comment.
  eocd_offset = -22 - len(zip_file.comment)
  zip_file.fp.seek(eocd_offset, os.SEEK_END)
  magic = zip_file.fp.read(4)
  assert magic == b'PK\005\006', 'failed to find end-of-central-directory'
  # The EOCD stores the absolute offset of the central directory 16 bytes in.
  zip_file.fp.seek(eocd_offset + 16, os.SEEK_END)
  (central_directory_start,) = struct.unpack('<I', zip_file.fp.read(4))
  # Find where the final entry's compressed data ends. Its local header is
  # 30 fixed bytes + filename + the extra field, whose length must be read
  # from the local header itself (zipalign writes it only there).
  final_info = max(zip_file.infolist(), key=lambda info: info.header_offset)
  zip_file.fp.seek(final_info.header_offset + 28)
  (extra_length,) = struct.unpack('<H', zip_file.fp.read(2))
  local_header_size = 30 + len(final_info.filename) + extra_length
  final_entry_end = (final_info.header_offset + local_header_size +
                     final_info.compress_size)
  # Any gap between the last entry and the central directory is the block.
  return central_directory_start - final_entry_end
|
scheib/chromium
|
tools/binary_size/libsupersize/zip_util.py
|
Python
|
bsd-3-clause
| 2,567
|
from __future__ import unicode_literals
from django.contrib.gis.geos import HAS_GEOS
from django.contrib.gis.tests.utils import no_oracle
from django.db import connection
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import override_settings
from django.utils import timezone
if HAS_GEOS:
from django.contrib.gis.db.models import Collect, Count, Extent, F, Union
from django.contrib.gis.geometry.backend import Geometry
from django.contrib.gis.geos import GEOSGeometry, Point, MultiPoint
from .models import City, Location, DirectoryEntry, Parcel, Book, Author, Article, Event
@skipUnlessDBFeature("gis_enabled")
class RelatedGeoModelTest(TestCase):
    # Regression tests for GeoDjango queries that traverse relations:
    # select_related, GeoQuerySet aggregates (Extent/Union/Collect),
    # F() expressions, values()/values_list(), defer()/only() and pickling.
    fixtures = ['initial']

    def test02_select_related(self):
        "Testing `select_related` on geographic models (see #7126)."
        qs1 = City.objects.all()
        qs2 = City.objects.select_related()
        qs3 = City.objects.select_related('location')
        # Reference data for what's in the fixtures.
        cities = (
            ('Aurora', 'TX', -97.516111, 33.058333),
            ('Roswell', 'NM', -104.528056, 33.387222),
            ('Kecksburg', 'PA', -79.460734, 40.18476),
        )
        # All three querysets must return identical data regardless of how
        # (or whether) the Location relation is joined in.
        for qs in (qs1, qs2, qs3):
            for ref, c in zip(cities, qs):
                nm, st, lon, lat = ref
                self.assertEqual(nm, c.name)
                self.assertEqual(st, c.state)
                self.assertEqual(Point(lon, lat), c.location.point)

    @skipUnlessDBFeature("has_transform_method")
    def test03_transform_related(self):
        "Testing the `transform` GeoQuerySet method on related geographic models."
        # All the transformations are to state plane coordinate systems using
        # US Survey Feet (thus a tolerance of 0 implies error w/in 1 survey foot).
        tol = 0

        def check_pnt(ref, pnt):
            self.assertAlmostEqual(ref.x, pnt.x, tol)
            self.assertAlmostEqual(ref.y, pnt.y, tol)
            self.assertEqual(ref.srid, pnt.srid)

        # Each city transformed to the SRID of their state plane coordinate system.
        transformed = (('Kecksburg', 2272, 'POINT(1490553.98959621 314792.131023984)'),
                       ('Roswell', 2257, 'POINT(481902.189077221 868477.766629735)'),
                       ('Aurora', 2276, 'POINT(2269923.2484839 7069381.28722222)'),
                       )
        for name, srid, wkt in transformed:
            # Doing this implicitly sets `select_related` select the location.
            # TODO: Fix why this breaks on Oracle.
            qs = list(City.objects.filter(name=name).transform(srid, field_name='location__point'))
            check_pnt(GEOSGeometry(wkt, srid), qs[0].location.point)

    @skipUnlessDBFeature("supports_extent_aggr")
    def test04a_related_extent_aggregate(self):
        "Testing the `extent` GeoQuerySet aggregates on related geographic models."
        # This combines the Extent and Union aggregates into one query
        aggs = City.objects.aggregate(Extent('location__point'))
        # One for all locations, one that excludes New Mexico (Roswell).
        all_extent = (-104.528056, 29.763374, -79.460734, 40.18476)
        txpa_extent = (-97.516111, 29.763374, -79.460734, 40.18476)
        e1 = City.objects.extent(field_name='location__point')
        e2 = City.objects.exclude(state='NM').extent(field_name='location__point')
        e3 = aggs['location__point__extent']
        # The tolerance value is to four decimal places because of differences
        # between the Oracle and PostGIS spatial backends on the extent calculation.
        tol = 4
        for ref, e in [(all_extent, e1), (txpa_extent, e2), (all_extent, e3)]:
            for ref_val, e_val in zip(ref, e):
                self.assertAlmostEqual(ref_val, e_val, tol)

    @skipUnlessDBFeature("has_unionagg_method")
    def test04b_related_union_aggregate(self):
        "Testing the `unionagg` GeoQuerySet aggregates on related geographic models."
        # This combines the Extent and Union aggregates into one query
        aggs = City.objects.aggregate(Union('location__point'))
        # These are the points that are components of the aggregate geographic
        # union that is returned. Each point # corresponds to City PK.
        p1 = Point(-104.528056, 33.387222)
        p2 = Point(-97.516111, 33.058333)
        p3 = Point(-79.460734, 40.18476)
        p4 = Point(-96.801611, 32.782057)
        p5 = Point(-95.363151, 29.763374)
        # The second union aggregate is for a union
        # query that includes limiting information in the WHERE clause (in other
        # words a `.filter()` precedes the call to `.unionagg()`).
        ref_u1 = MultiPoint(p1, p2, p4, p5, p3, srid=4326)
        ref_u2 = MultiPoint(p2, p3, srid=4326)
        u1 = City.objects.unionagg(field_name='location__point')
        u2 = City.objects.exclude(
            name__in=('Roswell', 'Houston', 'Dallas', 'Fort Worth'),
        ).unionagg(field_name='location__point')
        u3 = aggs['location__point__union']
        self.assertEqual(type(u1), MultiPoint)
        self.assertEqual(type(u3), MultiPoint)
        # Ordering of points in the result of the union is not defined and
        # implementation-dependent (DB backend, GEOS version)
        self.assertSetEqual(set([p.ewkt for p in ref_u1]), set([p.ewkt for p in u1]))
        self.assertSetEqual(set([p.ewkt for p in ref_u2]), set([p.ewkt for p in u2]))
        self.assertSetEqual(set([p.ewkt for p in ref_u1]), set([p.ewkt for p in u3]))

    def test05_select_related_fk_to_subclass(self):
        "Testing that calling select_related on a query over a model with an FK to a model subclass works"
        # Regression test for #9752.
        list(DirectoryEntry.objects.all().select_related())

    def test06_f_expressions(self):
        "Testing F() expressions on GeometryFields."
        # Constructing a dummy parcel border and getting the City instance for
        # assigning the FK.
        b1 = GEOSGeometry(
            'POLYGON((-97.501205 33.052520,-97.501205 33.052576,'
            '-97.501150 33.052576,-97.501150 33.052520,-97.501205 33.052520))',
            srid=4326
        )
        pcity = City.objects.get(name='Aurora')
        # First parcel has incorrect center point that is equal to the City;
        # it also has a second border that is different from the first as a
        # 100ft buffer around the City.
        c1 = pcity.location.point
        c2 = c1.transform(2276, clone=True)
        b2 = c2.buffer(100)
        Parcel.objects.create(name='P1', city=pcity, center1=c1, center2=c2, border1=b1, border2=b2)
        # Now creating a second Parcel where the borders are the same, just
        # in different coordinate systems. The center points are also the
        # same (but in different coordinate systems), and this time they
        # actually correspond to the centroid of the border.
        c1 = b1.centroid
        c2 = c1.transform(2276, clone=True)
        Parcel.objects.create(name='P2', city=pcity, center1=c1, center2=c2, border1=b1, border2=b1)
        # Should return the second Parcel, which has the center within the
        # border.
        qs = Parcel.objects.filter(center1__within=F('border1'))
        self.assertEqual(1, len(qs))
        self.assertEqual('P2', qs[0].name)
        if connection.features.supports_transform:
            # This time center2 is in a different coordinate system and needs
            # to be wrapped in transformation SQL.
            qs = Parcel.objects.filter(center2__within=F('border1'))
            self.assertEqual(1, len(qs))
            self.assertEqual('P2', qs[0].name)
        # Should return the first Parcel, which has the center point equal
        # to the point in the City ForeignKey.
        qs = Parcel.objects.filter(center1=F('city__location__point'))
        self.assertEqual(1, len(qs))
        self.assertEqual('P1', qs[0].name)
        if connection.features.supports_transform:
            # This time the city column should be wrapped in transformation SQL.
            qs = Parcel.objects.filter(border2__contains=F('city__location__point'))
            self.assertEqual(1, len(qs))
            self.assertEqual('P1', qs[0].name)

    def test07_values(self):
        "Testing values() and values_list() and GeoQuerySets."
        # GeoQuerySet and GeoValuesQuerySet, and GeoValuesListQuerySet respectively.
        gqs = Location.objects.all()
        gvqs = Location.objects.values()
        gvlqs = Location.objects.values_list()
        # Incrementing through each of the models, dictionaries, and tuples
        # returned by the different types of GeoQuerySets.
        for m, d, t in zip(gqs, gvqs, gvlqs):
            # The values should be Geometry objects and not raw strings returned
            # by the spatial database.
            self.assertIsInstance(d['point'], Geometry)
            self.assertIsInstance(t[1], Geometry)
            self.assertEqual(m.point, d['point'])
            self.assertEqual(m.point, t[1])

    @override_settings(USE_TZ=True)
    def test_07b_values(self):
        "Testing values() and values_list() with aware datetime. See #21565."
        Event.objects.create(name="foo", when=timezone.now())
        list(Event.objects.values_list('when'))

    def test08_defer_only(self):
        "Testing defer() and only() on Geographic models."
        qs = Location.objects.all()
        def_qs = Location.objects.defer('point')
        for loc, def_loc in zip(qs, def_qs):
            self.assertEqual(loc.point, def_loc.point)

    def test09_pk_relations(self):
        "Ensuring correct primary key column is selected across relations. See #10757."
        # The expected ID values -- notice the last two location IDs
        # are out of order. Dallas and Houston have location IDs that differ
        # from their PKs -- this is done to ensure that the related location
        # ID column is selected instead of ID column for the city.
        city_ids = (1, 2, 3, 4, 5)
        loc_ids = (1, 2, 3, 5, 4)
        ids_qs = City.objects.order_by('id').values('id', 'location__id')
        for val_dict, c_id, l_id in zip(ids_qs, city_ids, loc_ids):
            self.assertEqual(val_dict['id'], c_id)
            self.assertEqual(val_dict['location__id'], l_id)

    def test10_combine(self):
        "Testing the combination of two GeoQuerySets. See #10807."
        buf1 = City.objects.get(name='Aurora').location.point.buffer(0.1)
        buf2 = City.objects.get(name='Kecksburg').location.point.buffer(0.1)
        qs1 = City.objects.filter(location__point__within=buf1)
        qs2 = City.objects.filter(location__point__within=buf2)
        combined = qs1 | qs2
        names = [c.name for c in combined]
        self.assertEqual(2, len(names))
        self.assertTrue('Aurora' in names)
        self.assertTrue('Kecksburg' in names)

    def test11_geoquery_pickle(self):
        "Ensuring GeoQuery objects are unpickled correctly. See #10839."
        import pickle
        from django.contrib.gis.db.models.sql import GeoQuery
        qs = City.objects.all()
        q_str = pickle.dumps(qs.query)
        q = pickle.loads(q_str)
        self.assertEqual(GeoQuery, q.__class__)

    # TODO: fix on Oracle -- get the following error because the SQL is ordered
    # by a geometry object, which Oracle apparently doesn't like:
    #  ORA-22901: cannot compare nested table or VARRAY or LOB attributes of an object type
    @no_oracle
    def test12a_count(self):
        "Testing `Count` aggregate use with the `GeoManager` on geo-fields."
        # The City, 'Fort Worth' uses the same location as Dallas.
        dallas = City.objects.get(name='Dallas')
        # Count annotation should be 2 for the Dallas location now.
        loc = Location.objects.annotate(num_cities=Count('city')).get(id=dallas.location.id)
        self.assertEqual(2, loc.num_cities)

    def test12b_count(self):
        "Testing `Count` aggregate use with the `GeoManager` on non geo-fields. See #11087."
        # Should only be one author (Trevor Paglen) returned by this query, and
        # the annotation should have 3 for the number of books, see #11087.
        # Also testing with a `GeoValuesQuerySet`, see #11489.
        qs = Author.objects.annotate(num_books=Count('books')).filter(num_books__gt=1)
        vqs = Author.objects.values('name').annotate(num_books=Count('books')).filter(num_books__gt=1)
        self.assertEqual(1, len(qs))
        self.assertEqual(3, qs[0].num_books)
        self.assertEqual(1, len(vqs))
        self.assertEqual(3, vqs[0]['num_books'])

    def test13c_count(self):
        "Testing `Count` aggregate with `.values()`.  See #15305."
        qs = Location.objects.filter(id=5).annotate(num_cities=Count('city')).values('id', 'point', 'num_cities')
        self.assertEqual(1, len(qs))
        self.assertEqual(2, qs[0]['num_cities'])
        self.assertIsInstance(qs[0]['point'], GEOSGeometry)

    # TODO: The phantom model does appear on Oracle.
    @no_oracle
    def test13_select_related_null_fk(self):
        "Testing `select_related` on a nullable ForeignKey via `GeoManager`. See #11381."
        Book.objects.create(title='Without Author')
        b = Book.objects.select_related('author').get(title='Without Author')
        # Should be `None`, and not a 'dummy' model.
        self.assertEqual(None, b.author)

    @skipUnlessDBFeature("supports_collect_aggr")
    def test14_collect(self):
        "Testing the `collect` GeoQuerySet method and `Collect` aggregate."
        # Reference query:
        # SELECT AsText(ST_Collect("relatedapp_location"."point")) FROM "relatedapp_city" LEFT OUTER JOIN
        #    "relatedapp_location" ON ("relatedapp_city"."location_id" = "relatedapp_location"."id")
        #    WHERE "relatedapp_city"."state" = 'TX';
        ref_geom = GEOSGeometry(
            'MULTIPOINT(-97.516111 33.058333,-96.801611 32.782057,'
            '-95.363151 29.763374,-96.801611 32.782057)'
        )
        c1 = City.objects.filter(state='TX').collect(field_name='location__point')
        c2 = City.objects.filter(state='TX').aggregate(Collect('location__point'))['location__point__collect']
        for coll in (c1, c2):
            # Even though Dallas and Ft. Worth share same point, Collect doesn't
            # consolidate -- that's why 4 points in MultiPoint.
            self.assertEqual(4, len(coll))
            self.assertTrue(ref_geom.equals(coll))

    def test15_invalid_select_related(self):
        "Testing doing select_related on the related name manager of a unique FK. See #13934."
        qs = Article.objects.select_related('author__article')
        # This triggers TypeError when `get_default_columns` has no `local_only`
        # keyword. The TypeError is swallowed if QuerySet is actually
        # evaluated as list generation swallows TypeError in CPython.
        str(qs.query)

    def test16_annotated_date_queryset(self):
        "Ensure annotated date querysets work if spatial backend is used. See #14648."
        birth_years = [dt.year for dt in
                       list(Author.objects.annotate(num_books=Count('books')).dates('dob', 'year'))]
        birth_years.sort()
        self.assertEqual([1950, 1974], birth_years)
# TODO: Related tests for KML, GML, and distance lookups.
|
sublime1809/django
|
django/contrib/gis/tests/relatedapp/tests.py
|
Python
|
bsd-3-clause
| 15,405
|
##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import sys
import weakref
import gc
import os
import shutil
import stat
import inspect
import functools
import six
import imath
import IECore
import Gaffer
import GafferTest
class ScriptNodeTest( GafferTest.TestCase ) :
	def setUp( self ) :

		GafferTest.TestCase.setUp( self )

		# Reset the class-level slots used by some tests to capture results
		# of executed scripts, so state can't leak between test methods.
		ScriptNodeTest.lastNode = None
		ScriptNodeTest.lastScript = None
		ScriptNodeTest.lastResult = None
	def test( self ) :

		# Basic construction : default name and the type of the fileName plug.
		s = Gaffer.ScriptNode()
		self.assertEqual( s.getName(), "ScriptNode" )
		self.assertEqual( s["fileName"].typeName(), "Gaffer::StringPlug" )
def testExecution( self ) :
s = Gaffer.ScriptNode()
s.execute( "script.addChild( Gaffer.Node( 'child' ) )" )
self.assertEqual( s["child"].typeName(), "Gaffer::Node" )
def testSelection( self ) :
    """selection() is a Set restricted to children of the script."""
    s = Gaffer.ScriptNode()
    self.assertIsInstance( s.selection(), Gaffer.Set )
    n = Gaffer.Node()
    # nodes which aren't children of the script can't be selected
    self.assertRaises( Exception, s.selection().add, n )
    s.addChild( n )
    s.selection().add( n )
    self.assertIn( n, s.selection() )
    # removing a node from the script also deselects it
    s.removeChild( n )
    self.assertNotIn( n, s.selection() )
def testFocus( self ) :
    """Focus tracking : setFocus()/getFocus(), focusSet() and focusChangedSignal()."""
    s = Gaffer.ScriptNode()
    cs = GafferTest.CapturingSlot( s.focusChangedSignal() )
    f = s.focusSet()
    self.assertIsInstance( f, Gaffer.Set )
    self.assertEqual( f.size(), 0 )
    s["n1"] = Gaffer.Node()
    s["n2"] = Gaffer.Node()
    n3 = Gaffer.Node()  # deliberately not parented to the script
    s.setFocus( s["n1"] )
    self.assertEqual( s.getFocus(), s["n1"] )
    self.assertEqual( set( f ), { s["n1"] } )
    self.assertEqual( len( cs ), 1 )
    self.assertTrue( cs[0][0].isSame( s ) )
    self.assertTrue( cs[0][1].isSame( s["n1"] ) )
    # refocusing replaces the previous focus and signals again
    s.setFocus( s["n2"] )
    self.assertEqual( s.getFocus(), s["n2"] )
    self.assertEqual( set( f ), { s["n2"] } )
    self.assertEqual( len( cs ), 2 )
    self.assertTrue( cs[1][0].isSame( s ) )
    self.assertTrue( cs[1][1].isSame( s["n2"] ) )
    # nodes outside the script are rejected, leaving focus unchanged
    with six.assertRaisesRegex( self, Exception, "Node is not a child of this script" ) :
        s.setFocus( n3 )
    self.assertEqual( set( f ), { s["n2"] } )
    self.assertEqual( len( cs ), 2 )
    # as are things which aren't nodes at all
    with six.assertRaisesRegex( self, Exception, "Python argument types in.*" ) :
        s.setFocus( Gaffer.Plug() )
    self.assertEqual( set( f ), { s["n2"] } )
    self.assertEqual( len( cs ), 2 )
    # None clears the focus and signals with a None node
    s.setFocus( None )
    self.assertEqual( s.getFocus(), None )
    self.assertEqual( set( f ), set() )
    self.assertEqual( len( cs ), 3 )
    self.assertTrue( cs[2][0].isSame( s ) )
    self.assertEqual( cs[2][1], None )
def testSerialisation( self ) :
    """serialise()/execute() round-trips plug values and connections."""
    s = Gaffer.ScriptNode()
    s["a1"] = GafferTest.AddNode()
    s["a1"]["op1"].setValue( 5 )
    s["a1"]["op2"].setValue( 6 )
    s["a2"] = GafferTest.AddNode()
    s["a2"]["op1"].setInput( s["a1"]["sum"] )
    s["a2"]["op2"].setValue( 10 )
    s2 = Gaffer.ScriptNode()
    se = s.serialise()
    s2.execute( se )
    # the connection must point at the recreated nodes, not the originals
    self.assertTrue( s2["a2"]["op1"].getInput().isSame( s2["a1"]["sum"] ) )
def testDynamicPlugSerialisation( self ) :
    """Dynamic plugs (values, connections, direction, type) survive serialisation."""
    s1 = Gaffer.ScriptNode()
    s1["n1"] = GafferTest.AddNode()
    s1["n2"] = GafferTest.AddNode()
    # the Dynamic flag is what tells the serialiser to recreate these plugs
    s1["n1"]["dynamicPlug"] = Gaffer.IntPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s1["n1"]["dynamicPlug"].setInput( s1["n2"]["sum"] )
    s1["n1"]["dynamicPlug2"] = Gaffer.IntPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s1["n1"]["dynamicPlug2"].setValue( 100 )
    s1["n1"]["dynamicStringPlug"] = Gaffer.StringPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s1["n1"]["dynamicStringPlug"].setValue( "hiThere" )
    s1["n1"]["dynamicOutPlug"] = Gaffer.IntPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic, direction=Gaffer.Plug.Direction.Out )
    s1["n1"]["dynamicColorOutPlug"] = Gaffer.Color3fPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic, direction=Gaffer.Plug.Direction.Out )
    s2 = Gaffer.ScriptNode()
    s2.execute( s1.serialise() )
    self.assertTrue( s2["n1"]["dynamicPlug"].getInput().isSame( s2["n2"]["sum"] ) )
    self.assertEqual( s2["n1"]["dynamicPlug2"].getValue(), 100 )
    self.assertEqual( s2["n1"]["dynamicStringPlug"].getValue(), "hiThere" )
    self.assertIsInstance( s2["n1"]["dynamicOutPlug"], Gaffer.IntPlug )
    self.assertIsInstance( s2["n1"]["dynamicColorOutPlug"], Gaffer.Color3fPlug )
def testLifetime( self ) :
    """A script with no outside references must be garbage collectable."""
    s = Gaffer.ScriptNode()
    w = weakref.ref( s )
    del s
    IECore.RefCounted.collectGarbage()
    # the weak reference must have expired
    self.assertEqual( w(), None )
def testSaveAndLoad( self ) :
    """save()/load() round-trips values and connections via the fileName plug.

    Fix : the original assigned s["a2"] twice, discarding the first
    AddNode instance; the redundant assignment has been removed.
    """
    s = Gaffer.ScriptNode()
    s["a1"] = GafferTest.AddNode()
    s["a2"] = GafferTest.AddNode()
    s["a1"]["op1"].setValue( 5 )
    s["a1"]["op2"].setValue( 6 )
    s["a2"]["op1"].setInput( s["a1"]["sum"] )
    s["a2"]["op2"].setValue( 10 )
    s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s.save()
    s2 = Gaffer.ScriptNode()
    s2["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s2.load()
    # the connection must point at the reloaded nodes
    self.assertTrue( s2["a2"]["op1"].getInput().isSame( s2["a1"]["sum"] ) )
def testLoadClearsFirst( self ) :
    """load() must clear existing nodes rather than merging.

    If load() failed to clear, the loaded "a1" would presumably clash with
    the existing one and be renamed "a2", so we assert no "a2" exists.
    """
    s = Gaffer.ScriptNode()
    s["a1"] = GafferTest.AddNode()
    s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s.save()
    s.load()
    self.assertNotIn( "a2", s )
def testSaveFailureHandling( self ) :
    """Saving to an unwritable location raises rather than failing silently."""
    script = Gaffer.ScriptNode()
    script["a1"] = GafferTest.AddNode()
    script["fileName"].setValue( "/this/directory/doesnt/exist" )
    self.assertRaises( Exception, script.save )
def testLoadFailureHandling( self ) :
    """Loading a nonexistent file raises rather than failing silently."""
    script = Gaffer.ScriptNode()
    script["fileName"].setValue( "/this/file/doesnt/exist" )
    self.assertRaises( Exception, script.load )
def testCopyPaste( self ) :
    """Copy from one script and paste into another via the shared ApplicationRoot."""
    app = Gaffer.ApplicationRoot()
    s1 = Gaffer.ScriptNode()
    s2 = Gaffer.ScriptNode()
    # both scripts must share an application for the clipboard to be shared
    app["scripts"]["s1"] = s1
    app["scripts"]["s2"] = s2
    n1 = GafferTest.AddNode()
    s1["n1"] = n1
    s1.copy()
    s2.paste()
    # the source keeps its node and the destination gains a copy
    self.assertTrue( s1["n1"].isInstanceOf( GafferTest.AddNode.staticTypeId() ) )
    self.assertTrue( s2["n1"].isInstanceOf( GafferTest.AddNode.staticTypeId() ) )
def testSerialisationWithKeywords( self ) :
    """Plug names that are python keywords must still serialise/execute cleanly."""
    s = Gaffer.ScriptNode()
    s["n1"] = GafferTest.KeywordPlugNode()
    se = s.serialise()
    s2 = Gaffer.ScriptNode()
    # success is simply executing without raising
    s2.execute( se )
def testSerialisationWithNodeKeywords( self ) :
    """Node names that are python keywords ("in") must serialise correctly."""
    s = Gaffer.ScriptNode()
    s["in"] = Gaffer.Node()
    se = s.serialise()
    s2 = Gaffer.ScriptNode()
    s2.execute( se )
    self.assertEqual( s2["in"].typeName(), "Gaffer::Node" )
def testDeriveAndOverrideAcceptsChild( self ) :
    """Python subclasses can override acceptsChild() to restrict children."""
    class MyScriptNode( Gaffer.ScriptNode ) :
        def __init__( self, name ) :
            Gaffer.ScriptNode.__init__( self, name )
        def acceptsChild( self, child ) :
            # only AddNodes are welcome
            return isinstance( child, GafferTest.AddNode )
    IECore.registerRunTimeTyped( MyScriptNode )
    n = MyScriptNode( "s" )
    c1 = GafferTest.AddNode()
    c2 = Gaffer.Node()
    n.addChild( c1 )
    self.assertTrue( c1.parent() is n )
    self.assertTrue( c1.scriptNode() is n )
    # rejected children raise and remain unparented
    self.assertRaises( RuntimeError, n.addChild, c2 )
    self.assertIsNone( c2.parent() )
def testExecutionExceptions( self ) :
    """Errors in executed text propagate to the caller as exceptions."""
    script = Gaffer.ScriptNode()
    self.assertRaises( RuntimeError, script.execute, "raise ValueError" )
def testVariableScope( self ) :
    """Each execute() call gets a fresh variable scope."""
    # if a variable gets made in one execution, it shouldn't persist in the next.
    n = Gaffer.ScriptNode()
    n.execute( "a = 10" )
    six.assertRaisesRegex( self, Exception, "NameError: name 'a' is not defined", n.execute, "a * 10" )
def testClassScope( self ) :
    """Names defined in executed text are visible inside classes defined there."""
    # this works in a normal python console, so it damn well better work
    # in a script editor.
    s = inspect.cleandoc(
        """
        class A() :
            def __init__( self ) :
                print( A )
        a = A()
        """
    )
    n = Gaffer.ScriptNode()
    # success is simply executing without a NameError
    n.execute( s )
def testDeselectionOnDelete( self ) :
    """Deleting a node removes it from the selection."""
    s = Gaffer.ScriptNode()
    n1 = GafferTest.AddNode()
    n2 = GafferTest.AddNode()
    s["n1"] = n1
    s["n2"] = n2
    s.selection().add( n1 )
    self.assertIn( n1, s.selection() )
    del s["n1"]
    self.assertNotIn( n1, s.selection() )
def testContext( self ) :
    """context() returns the live context object shared by the script."""
    script = Gaffer.ScriptNode()
    context = script.context()
    context.setFrame( 10.0 )
    self.assertTrue( script.context().isSame( context ) )
    self.assertEqual( script.context().getFrame(), 10.0 )
def testFrameRange( self ) :
    """Frame range plugs exist, default to 1-100, and clamp so start <= end.

    NOTE(review) : a second method named testFrameRange is defined later
    in this class, shadowing this one so it never runs; one of the two
    should be renamed.
    """
    s = Gaffer.ScriptNode()
    self.assertIsInstance( s["frameRange"]["start"], Gaffer.IntPlug )
    self.assertIsInstance( s["frameRange"]["end"], Gaffer.IntPlug )
    self.assertEqual( s["frameRange"]["start"].getValue(), 1 )
    self.assertEqual( s["frameRange"]["end"].getValue(), 100 )
    # raising start above end drags end up with it
    s["frameRange"]["start"].setValue( 110 )
    self.assertEqual( s["frameRange"]["start"].getValue(), 110 )
    self.assertEqual( s["frameRange"]["end"].getValue(), 110 )
    s["frameRange"]["end"].setValue( 200 )
    self.assertEqual( s["frameRange"]["start"].getValue(), 110 )
    self.assertEqual( s["frameRange"]["end"].getValue(), 200 )
    # lowering end below start drags start down with it
    s["frameRange"]["end"].setValue( 100 )
    self.assertEqual( s["frameRange"]["start"].getValue(), 100 )
    self.assertEqual( s["frameRange"]["end"].getValue(), 100 )
def testFrameRangeLoadAndSave( self ) :
    """Frame range values survive a save()/load() round trip."""
    s = Gaffer.ScriptNode()
    s["frameRange"]["start"].setValue( 110 )
    s["frameRange"]["end"].setValue( 200 )
    self.assertEqual( s["frameRange"]["start"].getValue(), 110 )
    self.assertEqual( s["frameRange"]["end"].getValue(), 200 )
    s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s.save()
    s2 = Gaffer.ScriptNode()
    s2["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s2.load()
    self.assertEqual( s2["frameRange"]["start"].getValue(), 110 )
    self.assertEqual( s2["frameRange"]["end"].getValue(), 200 )
def testApplicationRoot( self ) :
    """applicationRoot() is None until the script is parented to an application."""
    s = Gaffer.ScriptNode()
    self.assertIsNone( s.applicationRoot() )
    a = Gaffer.ApplicationRoot()
    a["scripts"]["one"] = s
    self.assertTrue( s.applicationRoot().isSame( a ) )
def testLifeTimeAfterExecution( self ) :
    """Executed text must not create cycles that keep the script alive."""
    # the ScriptNode used to keep an internal dictionary
    # as the context for all script execution. this created the
    # danger of circular references keeping it alive forever.
    # that is no longer the case, but this test remains to ensure
    # that the same problem doesn't crop up in the future.
    a = Gaffer.ApplicationRoot()
    a["scripts"]["s"] = Gaffer.ScriptNode()
    a["scripts"]["s"].execute( "script.addChild( Gaffer.Node( \"a\" ) )" )
    # deliberately create a variable referring back to the script
    a["scripts"]["s"].execute( "circularRef = script.getChild( \"a\" ).parent()" )
    w = weakref.ref( a["scripts"]["s"] )
    del a["scripts"]["s"]
    IECore.RefCounted.collectGarbage()
    self.assertEqual( w(), None )
def testDeleteNodes( self ) :
    """deleteNodes() with no arguments removes every node."""
    script = Gaffer.ScriptNode()
    for name in ( "n", "n1", "n2" ) :
        script[name] = Gaffer.Node()
    self.assertEqual( len( script.children( Gaffer.Node ) ), 3 )
    script.deleteNodes()
    self.assertEqual( len( script.children( Gaffer.Node ) ), 0 )
def testDeleteManyNodes( self ) :
    """deleteNodes() copes with a large number of children."""
    script = Gaffer.ScriptNode()
    for index in range( 0, 10000 ) :
        script["c%d" % index] = Gaffer.Node()
    script.deleteNodes()
    self.assertEqual( len( script.children( Gaffer.Node ) ), 0 )
def testDeleteNodesDoesntRemovePlugs( self ) :
    """deleteNodes() must leave the script's own plugs intact."""
    script = Gaffer.ScriptNode()
    script.deleteNodes()
    self.assertIn( "fileName", script )
def testDeleteNodesWithFilter( self ) :
    """deleteNodes( filter ) removes only the nodes in the filter set."""
    s = Gaffer.ScriptNode()
    s["n"] = Gaffer.Node()
    s["n1"] = Gaffer.Node()
    s["n2"] = Gaffer.Node()
    self.assertEqual( len( s.children( Gaffer.Node ) ), 3 )
    s.deleteNodes( filter = Gaffer.StandardSet( [ s["n1"] ] ) )
    self.assertEqual( len( s.children( Gaffer.Node ) ), 2 )
    self.assertIn( "n", s )
    self.assertNotIn( "n1", s )
    self.assertIn( "n2", s )
def testDeleteNodesMaintainsConnections( self ) :
    """Deleting nodes reconnects pass-through inputs, unless reconnect = False."""
    s = Gaffer.ScriptNode()
    n1 = GafferTest.AddNode()
    n2 = GafferTest.MultiplyNode()
    n3 = GafferTest.AddNode()
    n4 = GafferTest.AddNode()
    s.addChild( n1 )
    s.addChild( n2 )
    s.addChild( n3 )
    s.addChild( n4 )
    # build a diamond : n1 feeds n2 and n3, which feed n4
    n2["op1"].setInput( n1["sum"] )
    n2["op2"].setInput( n1["sum"] )
    n3["op1"].setInput( n1["sum"] )
    n3["op2"].setInput( n1["sum"] )
    n4["op1"].setInput( n2["product"] )
    n4["op2"].setInput( n3["sum"] )
    self.assertTrue( n2["op1"].getInput().isSame( n1["sum"] ) )
    self.assertTrue( n2["op2"].getInput().isSame( n1["sum"] ) )
    self.assertTrue( n3["op1"].getInput().isSame( n1["sum"] ) )
    self.assertTrue( n3["op2"].getInput().isSame( n1["sum"] ) )
    self.assertTrue( n4["op1"].getInput().isSame( n2["product"] ) )
    self.assertTrue( n4["op2"].getInput().isSame( n3["sum"] ) )
    # delete the middle of the diamond; n4's AddNode input is rerouted to n1
    s.deleteNodes( filter = Gaffer.StandardSet( [ n2, n3 ] ) )
    self.assertEqual( n2["op1"].getInput(), None )
    self.assertEqual( n2["op2"].getInput(), None )
    self.assertEqual( n3["op1"].getInput(), None )
    self.assertEqual( n3["op2"].getInput(), None )
    # None because MultiplyOp does not define enabledPlug()
    self.assertEqual( n4["op1"].getInput(), None )
    self.assertTrue( n4["op2"].getInput().isSame( n1["sum"] ) )
    # rebuild the diamond and delete again with reconnect = False :
    # this time nothing is rerouted
    n2["op1"].setInput( n1["sum"] )
    n2["op2"].setInput( n1["sum"] )
    n3["op1"].setInput( n1["sum"] )
    n3["op2"].setInput( n1["sum"] )
    n4["op1"].setInput( n2["product"] )
    n4["op2"].setInput( n3["sum"] )
    s.addChild( n2 )
    s.addChild( n3 )
    s.deleteNodes( filter = Gaffer.StandardSet( [ n2, n3 ] ), reconnect = False )
    self.assertEqual( n2["op1"].getInput(), None )
    self.assertEqual( n2["op2"].getInput(), None )
    self.assertEqual( n3["op1"].getInput(), None )
    self.assertEqual( n3["op2"].getInput(), None )
    self.assertEqual( n4["op1"].getInput(), None )
    self.assertEqual( n4["op2"].getInput(), None )
def testDeleteNodesWithEnabledPlugsWithoutCorrespondingInput( self ) :
    """No reconnection is attempted when correspondingInput() returns None."""
    class MyAddNode( GafferTest.AddNode ) :
        def correspondingInput( self, output ) :
            # no pass-through input, so deleteNodes() can't reroute
            return None
    s = Gaffer.ScriptNode()
    n1 = GafferTest.AddNode()
    n2 = MyAddNode()
    n3 = GafferTest.AddNode()
    s.addChild( n1 )
    s.addChild( n2 )
    s.addChild( n3 )
    n2["op1"].setInput( n1["sum"] )
    n2["op2"].setInput( n1["sum"] )
    n3["op1"].setInput( n2["sum"] )
    n3["op2"].setInput( n2["sum"] )
    self.assertTrue( n2["op1"].getInput().isSame( n1["sum"] ) )
    self.assertTrue( n2["op2"].getInput().isSame( n1["sum"] ) )
    self.assertTrue( n3["op1"].getInput().isSame( n2["sum"] ) )
    self.assertTrue( n3["op2"].getInput().isSame( n2["sum"] ) )
    s.deleteNodes( filter = Gaffer.StandardSet( [ n2 ] ) )
    # everything downstream is simply disconnected
    self.assertEqual( n2["op1"].getInput(), None )
    self.assertEqual( n2["op2"].getInput(), None )
    self.assertEqual( n3["op1"].getInput(), None )
    self.assertEqual( n3["op2"].getInput(), None )
def testReconnectionToFloatingNodes( self ) :
    """Nodes outside the script are not reconnected by deleteNodes()."""
    s = Gaffer.ScriptNode()
    n1 = GafferTest.AddNode()
    n2 = GafferTest.AddNode()
    n3 = GafferTest.AddNode()
    # note : n3 is deliberately never added to the script
    s.addChild( n1 )
    s.addChild( n2 )
    n2["op1"].setInput( n1["sum"] )
    n2["op2"].setInput( n1["sum"] )
    n3["op1"].setInput( n2["sum"] )
    n3["op2"].setInput( n2["sum"] )
    self.assertTrue( n2["op1"].getInput().isSame( n1["sum"] ) )
    self.assertTrue( n2["op2"].getInput().isSame( n1["sum"] ) )
    self.assertTrue( n3["op1"].getInput().isSame( n2["sum"] ) )
    self.assertTrue( n3["op2"].getInput().isSame( n2["sum"] ) )
    s.deleteNodes( filter = Gaffer.StandardSet( [ n2 ] ) )
    # the inputs to n3 shouldn't have been reconnected, as it's not a descendant of the script:
    self.assertEqual( n3["op1"].getInput(), None )
    self.assertEqual( n3["op2"].getInput(), None )
def testDynamicPlugSaveAndLoad( self ) :
    """Dynamic plugs on the script itself survive save()/load()."""
    s = Gaffer.ScriptNode()
    s["customSetting"] = Gaffer.IntPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s["customSetting"].setValue( 100 )
    s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s.save()
    s2 = Gaffer.ScriptNode()
    s2["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s2.load()
    self.assertEqual( s2["customSetting"].getValue(), 100 )
def testSerialiseCircularConnections( self ) :
    """Mutually connected nodes serialise and reload correctly."""
    s = Gaffer.ScriptNode()
    s["n1"] = Gaffer.Node()
    s["n2"] = Gaffer.Node()
    s["n1"]["in"] = Gaffer.Plug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s["n1"]["out"] = Gaffer.Plug( direction = Gaffer.Plug.Direction.Out, flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s["n2"]["in"] = Gaffer.Plug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s["n2"]["out"] = Gaffer.Plug( direction = Gaffer.Plug.Direction.Out, flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    # each node feeds the other - a cycle at the node level
    s["n1"]["in"].setInput( s["n2"]["out"] )
    s["n2"]["in"].setInput( s["n1"]["out"] )
    s2 = Gaffer.ScriptNode()
    s2.execute( s.serialise() )
    self.assertTrue( s2["n1"]["in"].getInput().isSame( s2["n2"]["out"] ) )
    self.assertTrue( s2["n2"]["in"].getInput().isSame( s2["n1"]["out"] ) )
def testSerialiseWithFilter( self ) :
    """serialise( filter ) includes only the filtered nodes, dropping external connections."""
    s = Gaffer.ScriptNode()
    s["n1"] = GafferTest.AddNode()
    s["n2"] = GafferTest.AddNode()
    s["n2"]["op1"].setInput( s["n1"]["sum"] )
    s2 = Gaffer.ScriptNode()
    s2.execute( s.serialise( filter = Gaffer.StandardSet( [ s["n2"] ] ) ) )
    self.assertTrue( "n2" in s2 )
    self.assertTrue( "n1" not in s2 )
    # the connection to the omitted n1 is necessarily dropped
    self.assertEqual( s2["n2"]["op1"].getInput(), None )
def testCopyIgnoresNestedSelections( self ) :
    """copy() only honours selected nodes that are direct children of the script."""
    a = Gaffer.ApplicationRoot()
    s = Gaffer.ScriptNode()
    a["scripts"].addChild( s )
    s["n1"] = GafferTest.AddNode()
    s["n2"] = GafferTest.AddNode()
    s["b"] = Gaffer.Box()
    s["b"]["n1"] = GafferTest.AddNode()
    # select one top-level node and one nested inside the box
    s.selection().add( s["n1"] )
    s.selection().add( s["b"]["n1"] )
    s.copy( filter = s.selection() )
    s2 = Gaffer.ScriptNode()
    a["scripts"].addChild( s2 )
    s2.paste()
    # only the top-level selection was copied
    self.assertTrue( "n1" in s2 )
    self.assertTrue( "b" not in s2 )
    # with only the nested node selected, nothing is copied at all
    s.selection().clear()
    s.selection().add( s["b"]["n1"] )
    s.copy( filter = s.selection() )
    s2 = Gaffer.ScriptNode()
    a["scripts"].addChild( s2 )
    s2.paste()
    self.assertTrue( "b" not in s2 )
    self.assertTrue( "n1" not in s2 )
def testCopyPasteWithSpecificSourceParent( self ) :
    """copy( parent ) takes children of that parent, not of the script root."""
    a = Gaffer.ApplicationRoot()
    s = Gaffer.ScriptNode()
    a["scripts"].addChild( s )
    s["n1"] = GafferTest.AddNode()
    s["n2"] = GafferTest.AddNode()
    s["b"] = Gaffer.Box()
    s["b"]["n3"] = GafferTest.AddNode()
    s["b"]["n4"] = GafferTest.AddNode()
    # selection mixes a root-level node and a box child; only the
    # box child matches the parent argument below
    s.selection().add( s["n1"] )
    s.selection().add( s["b"]["n3"] )
    s.copy( parent=s["b"], filter = s.selection() )
    s2 = Gaffer.ScriptNode()
    a["scripts"].addChild( s2 )
    s2.paste()
    self.assertTrue( "n1" not in s2 )
    self.assertTrue( "n2" not in s2 )
    self.assertTrue( "b" not in s2 )
    self.assertTrue( "n3" in s2 )
    self.assertTrue( "n4" not in s2 )
def testCopyPasteWithSpecificDestinationParent( self ) :
    """paste( parent ) creates the nodes under that parent and selects them."""
    a = Gaffer.ApplicationRoot()
    s = Gaffer.ScriptNode()
    a["scripts"].addChild( s )
    s["n1"] = GafferTest.AddNode()
    s["n2"] = GafferTest.AddNode()
    s.selection().add( s["n1"] )
    s.copy( filter = s.selection() )
    s2 = Gaffer.ScriptNode()
    a["scripts"].addChild( s2 )
    s2["b"] = Gaffer.Box()
    s2.paste( parent = s2["b"] )
    # pasted node lives in the box, not at the script root
    self.assertTrue( "n1" not in s2 )
    self.assertTrue( "n2" not in s2 )
    self.assertTrue( "n1" in s2["b"] )
    self.assertTrue( "n2" not in s2["b"] )
    # pasting selects the newly created nodes
    self.assertEqual( len( s2.selection() ), 1 )
    self.assertTrue( s2["b"]["n1"] in s2.selection() )
def testCutWithSpecificSourceParent( self ) :
    """cut( parent ) removes only matching children of that parent and copies them."""
    a = Gaffer.ApplicationRoot()
    s = Gaffer.ScriptNode()
    a["scripts"].addChild( s )
    s["n1"] = GafferTest.AddNode()
    s["n2"] = GafferTest.AddNode()
    s["b"] = Gaffer.Box()
    s["b"]["n3"] = GafferTest.AddNode()
    s["b"]["n4"] = GafferTest.AddNode()
    # selection mixes a root-level node and a box child; only the
    # box child matches the parent argument below
    s.selection().add( s["n1"] )
    s.selection().add( s["b"]["n3"] )
    s.cut( parent=s["b"], filter = s.selection() )
    # only n3 was removed from the source
    self.assertTrue( "n1" in s )
    self.assertTrue( "n2" in s )
    self.assertTrue( "b" in s )
    self.assertTrue( "n3" not in s["b"] )
    self.assertTrue( "n4" in s["b"] )
    s2 = Gaffer.ScriptNode()
    a["scripts"].addChild( s2 )
    s2.paste()
    # and only n3 arrives at the destination
    self.assertTrue( "n1" not in s2 )
    self.assertTrue( "n2" not in s2 )
    self.assertTrue( "b" not in s2 )
    self.assertTrue( "n3" in s2 )
    self.assertTrue( "n4" not in s2 )
def testActionSignal( self ) :
    """actionSignal() fires for undoable actions with Do/Undo/Redo stages."""
    s = Gaffer.ScriptNode()
    cs = GafferTest.CapturingSlot( s.actionSignal() )
    # shouldn't trigger anything, because it's not in an undo scope
    s.addChild( Gaffer.Node() )
    self.assertEqual( len( cs ), 0 )
    # should trigger something, because it's in an undo scope
    with Gaffer.UndoScope( s ) :
        s.addChild( Gaffer.Node( "a" ) )
    self.assertEqual( len( cs ), 1 )
    self.assertTrue( cs[0][0].isSame( s ) )
    self.assertTrue( isinstance( cs[0][1], Gaffer.Action ) )
    self.assertEqual( cs[0][2], Gaffer.Action.Stage.Do )
    # undo should trigger as well
    s.undo()
    self.assertEqual( len( cs ), 2 )
    self.assertTrue( cs[1][0].isSame( s ) )
    self.assertTrue( cs[1][1].isSame( cs[0][1] ) )
    self.assertEqual( cs[1][2], Gaffer.Action.Stage.Undo )
    # as should redo
    s.redo()
    self.assertEqual( len( cs ), 3 )
    self.assertTrue( cs[2][0].isSame( s ) )
    self.assertTrue( cs[2][1].isSame( cs[0][1] ) )
    self.assertEqual( cs[2][2], Gaffer.Action.Stage.Redo )
def testLoadingMovedScriptDoesntKeepOldFileName( self ) :
    """Loading a moved file keeps the new name, not a name stored in the file."""
    s = Gaffer.ScriptNode()
    s["n"] = Gaffer.Node()
    s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s.save()
    shutil.move( self.temporaryDirectory() + "/test.gfr", self.temporaryDirectory() + "/test2.gfr" )
    s = Gaffer.ScriptNode()
    s["fileName"].setValue( self.temporaryDirectory() + "/test2.gfr" )
    s.load()
    self.assertEqual( s["fileName"].getValue(), self.temporaryDirectory() + "/test2.gfr" )
def testUnsavedChanges( self ) :
    """The unsavedChanges plug tracks undoable edits and resets on save()/load()."""
    s = Gaffer.ScriptNode()
    self.assertEqual( s["unsavedChanges"].getValue(), False )
    # the unsaved changes flag only reacts to undoable changes
    # so this shouldn't set the flag
    s["nonUndoableNode"] = GafferTest.AddNode()
    self.assertEqual( s["unsavedChanges"].getValue(), False )
    # but this should.
    with Gaffer.UndoScope( s ) :
        s["node"] = GafferTest.AddNode()
    self.assertEqual( s["unsavedChanges"].getValue(), True )
    s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s.save()
    self.assertEqual( s["unsavedChanges"].getValue(), False )
    # undoable value edits set the flag
    with Gaffer.UndoScope( s ) :
        s["node"]["op1"].setValue( 10 )
    self.assertEqual( s["unsavedChanges"].getValue(), True )
    s.save()
    self.assertEqual( s["unsavedChanges"].getValue(), False )
    with Gaffer.UndoScope( s ) :
        s["node"]["op1"].setValue( 20 )
    self.assertEqual( s["unsavedChanges"].getValue(), True )
    s.save()
    self.assertEqual( s["unsavedChanges"].getValue(), False )
    # undo and redo also count as changes
    s.undo()
    self.assertEqual( s["unsavedChanges"].getValue(), True )
    s.save()
    self.assertEqual( s["unsavedChanges"].getValue(), False )
    s.redo()
    self.assertEqual( s["unsavedChanges"].getValue(), True )
    s.save()
    self.assertEqual( s["unsavedChanges"].getValue(), False )
    with Gaffer.UndoScope( s ) :
        s["node2"] = GafferTest.AddNode()
    self.assertEqual( s["unsavedChanges"].getValue(), True )
    s.save()
    self.assertEqual( s["unsavedChanges"].getValue(), False )
    # as do connection edits
    with Gaffer.UndoScope( s ) :
        s["node2"]["op1"].setInput( s["node"]["sum"] )
    self.assertEqual( s["unsavedChanges"].getValue(), True )
    s.save()
    self.assertEqual( s["unsavedChanges"].getValue(), False )
    # loading resets the flag
    s.load()
    self.assertEqual( s["unsavedChanges"].getValue(), False )
def testSerialiseToFile( self ) :
    """serialiseToFile() writes a loadable file, optionally filtered to a node set."""
    s = Gaffer.ScriptNode()
    s["n1"] = GafferTest.AddNode()
    s["n2"] = GafferTest.AddNode()
    s["n2"]["op1"].setInput( s["n1"]["sum"] )
    s.serialiseToFile( self.temporaryDirectory() + "/test.gfr" )
    s2 = Gaffer.ScriptNode()
    s2["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s2.load()
    self.assertTrue( "n1" in s2 )
    self.assertTrue( "n2" in s2 )
    self.assertTrue( s2["n2"]["op1"].getInput().isSame( s2["n1"]["sum"] ) )
    # with a filter, only the filtered nodes are written
    s.serialiseToFile( self.temporaryDirectory() + "/test.gfr", filter = Gaffer.StandardSet( [ s["n2"] ] ) )
    s3 = Gaffer.ScriptNode()
    s3["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s3.load()
    self.assertTrue( "n1" not in s3 )
    self.assertTrue( "n2" in s3 )
def testExecuteFile( self ) :
    """executeFile() runs a serialised file; missing files raise."""
    s = Gaffer.ScriptNode()
    s["n1"] = GafferTest.AddNode()
    s["n2"] = GafferTest.AddNode()
    s["n2"]["op1"].setInput( s["n1"]["sum"] )
    s.serialiseToFile( self.temporaryDirectory() + "/test.gfr" )
    s2 = Gaffer.ScriptNode()
    self.assertRaises( RuntimeError, s2.executeFile, "thisFileDoesntExist.gfr" )
    s2.executeFile( self.temporaryDirectory() + "/test.gfr" )
    self.assertTrue( s2["n2"]["op1"].getInput().isSame( s2["n1"]["sum"] ) )
def testUndoAndRedoOrder( self ) :
    """Undo replays actions in reverse order; redo replays them in original order."""
    s = Gaffer.ScriptNode()
    s["n"] = Gaffer.Node()
    s["n"]["p"] = Gaffer.IntPlug()
    # record every value the plug passes through
    values = []
    def f( plug ) :
        values.append( plug.getValue() )
    s["n"].plugSetSignal().connect( f, scoped = False )
    with Gaffer.UndoScope( s ) :
        s["n"]["p"].setValue( 10 )
        s["n"]["p"].setValue( 20 )
    self.assertEqual( values, [ 10, 20 ] )
    # undo : 20 -> 10 -> 0 (reverse order)
    s.undo()
    self.assertEqual( values, [ 10, 20, 10, 0 ] )
    # redo : 0 -> 10 -> 20 (original order)
    s.redo()
    self.assertEqual( values, [ 10, 20, 10, 0, 10, 20 ] )
def testUndoAddedSignal( self ) :
    """undoAddedSignal() fires per undo entry; merged undos don't fire again."""
    s = Gaffer.ScriptNode()
    cs = GafferTest.CapturingSlot( s.undoAddedSignal() )
    # not in an undo scope, so no undo entry and no signal
    s["n"] = Gaffer.Node()
    self.assertEqual( cs, [] )
    with Gaffer.UndoScope( s ) :
        s["n"]["p"] = Gaffer.IntPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
        s["n"]["p"].setValue( 100 )
    self.assertEqual( len( cs ), 1 )
    self.assertTrue( cs[0][0].isSame( s ) )
    with Gaffer.UndoScope( s, mergeGroup = "test" ) :
        s["n"]["p"].setValue( 200 )
    self.assertEqual( len( cs ), 2 )
    self.assertTrue( cs[1][0].isSame( s ) )
    with Gaffer.UndoScope( s, mergeGroup = "test" ) :
        s["n"]["p"].setValue( 300 )
    # undo was merged, so a new one wasn't added
    self.assertEqual( len( cs ), 2 )
    self.assertTrue( cs[1][0].isSame( s ) )
def testCustomVariables( self ) :
    """Entries in the variables plug appear in the context and survive save()/load()."""
    s = Gaffer.ScriptNode()
    p = Gaffer.NameValuePlug( "test", IECore.IntData( 10 ), flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s["variables"].addChild( p )
    self.assertEqual( s.context().get( "test" ), 10 )
    # edits to the plug are reflected in the context
    p["value"].setValue( 20 )
    self.assertEqual( s.context().get( "test" ), 20 )
    s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s.save()
    s2 = Gaffer.ScriptNode()
    s2["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s2.load()
    self.assertEqual( s2["variables"][p.getName()]["value"].getValue(), 20 )
    self.assertEqual( s2.context().get( "test" ), 20 )
def testFileNameVariables( self ) :
    """The script:name context variable tracks the file name stem."""
    script = Gaffer.ScriptNode()
    self.assertEqual( script.context().get( "script:name" ), "" )
    script["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    self.assertEqual( script.context().get( "script:name" ), "test" )
def testReloadWithCustomVariables( self ) :
    """load() restores saved variable values, discarding unsaved edits."""
    s = Gaffer.ScriptNode()
    s["variables"].addChild( Gaffer.NameValuePlug( "test", IECore.IntData( 10 ), flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic ) )
    s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s.save()
    # modify after saving; loading should revert it
    s["variables"][0]["value"].setValue( 100 )
    s.load()
    self.assertEqual( len( s["variables"] ), 1 )
    self.assertEqual( s["variables"][0]["value"].getValue(), 10 )
def testLoadCustomVariablesWithDefaultValues( self ) :
    """Variables left at their default values still round-trip through save()/load()."""
    s = Gaffer.ScriptNode()
    p = Gaffer.NameValuePlug( "test", IECore.IntData( 10 ), flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s["variables"].addChild( p )
    self.assertEqual( s.context().get( "test" ), 10 )
    s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s.save()
    s2 = Gaffer.ScriptNode()
    s2["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s2.load()
    self.assertEqual( s2["variables"][p.getName()]["value"].getValue(), 10 )
    self.assertEqual( s2.context().get( "test" ), 10 )
def testCurrentActionStage( self ) :
    """currentActionStage() reports Do/Undo/Redo during actions and Invalid otherwise."""
    s = Gaffer.ScriptNode()
    s["n"] = GafferTest.AddNode()
    actionStages = []
    def f( plug ) :
        actionStages.append( s.currentActionStage() )
        if s.currentActionStage() != Gaffer.Action.Stage.Invalid :
            # undo/redo must not be available while an action is in flight
            self.assertFalse( s.undoAvailable() )
            self.assertFalse( s.redoAvailable() )
    s["n"].plugSetSignal().connect( f, scoped = False )
    self.assertEqual( s.currentActionStage(), Gaffer.Action.Stage.Invalid )
    self.assertEqual( len( actionStages ), 0 )
    # outside an undo scope, edits are not actions
    s["n"]["op1"].setValue( 10 )
    self.assertEqual( len( actionStages ), 1 )
    self.assertEqual( actionStages[-1], Gaffer.Action.Stage.Invalid )
    with Gaffer.UndoScope( s ) :
        s["n"]["op1"].setValue( 11 )
    self.assertEqual( len( actionStages ), 2 )
    self.assertEqual( actionStages[-1], Gaffer.Action.Stage.Do )
    s.undo()
    self.assertEqual( len( actionStages ), 3 )
    self.assertEqual( actionStages[-1], Gaffer.Action.Stage.Undo )
    s.redo()
    self.assertEqual( len( actionStages ), 4 )
    self.assertEqual( actionStages[-1], Gaffer.Action.Stage.Redo )
    s.undo()
    self.assertEqual( len( actionStages ), 5 )
    self.assertEqual( actionStages[-1], Gaffer.Action.Stage.Undo )
    # back to Invalid once the action completes
    self.assertEqual( s.currentActionStage(), Gaffer.Action.Stage.Invalid )
def testUndoListDoesntCreateReferenceCycles( self ) :
    """Recorded undo actions must not hold references back to the script.

    The script's refCount() is captured up front and checked after each
    kind of undoable action - it must never grow.
    """
    s = Gaffer.ScriptNode()
    c = s.refCount()
    s["n"] = Gaffer.Node()
    s["n"]["p"] = Gaffer.IntPlug()
    # renaming the script itself
    with Gaffer.UndoScope( s ) :
        s.setName( "somethingElse" )
    self.assertEqual( s.getName(), "somethingElse" )
    self.assertEqual( s.refCount(), c )
    # renaming a child
    with Gaffer.UndoScope( s ) :
        s["n"].setName( "n2" )
    self.assertEqual( s["n2"].getName(), "n2" )
    self.assertEqual( s.refCount(), c )
    # registering metadata on the script, a node and a plug
    with Gaffer.UndoScope( s ) :
        Gaffer.Metadata.registerValue( s, "test", 10 )
        Gaffer.Metadata.registerValue( s["n2"], "test", 10 )
        Gaffer.Metadata.registerValue( s["n2"]["p"], "test", 10 )
    self.assertEqual( s.refCount(), c )
    # adding children
    with Gaffer.UndoScope( s ) :
        s["n3"] = Gaffer.Node()
        n4 = Gaffer.Node( "n4" )
        s.addChild( n4 )
    self.assertEqual( s.refCount(), c )
    # reparenting plugs between nodes and the script
    with Gaffer.UndoScope( s ) :
        s["n3"].addChild( s["n2"]["p"] )
    self.assertEqual( s.refCount(), c )
    with Gaffer.UndoScope( s ) :
        s.addChild( s["n3"]["p"] )
    self.assertEqual( s.refCount(), c )
    with Gaffer.UndoScope( s ) :
        s["n3"].addChild( s["p"] )
    self.assertEqual( s.refCount(), c )
    # removing a child
    with Gaffer.UndoScope( s ) :
        s.removeChild( s["n2"] )
    self.assertEqual( s.refCount(), c )
def testErrorTolerantExecution( self ) :
    """continueOnError executes past a failing line, reporting it as a message."""
    s = Gaffer.ScriptNode()
    s["n"] = GafferTest.AddNode()
    with IECore.CapturingMessageHandler() as c :
        s.execute( 'parent["n"]["op1"].setValue( 101 )\niWillFail()\nparent["n"]["op2"].setValue( 102 )', continueOnError=True )
    # lines before and after the failure both ran
    self.assertEqual( s["n"]["op1"].getValue(), 101 )
    self.assertEqual( s["n"]["op2"].getValue(), 102 )
    # and the failure was reported with its line number
    self.assertEqual( len( c.messages ), 1 )
    self.assertEqual( c.messages[0].level, IECore.Msg.Level.Error )
    self.assertTrue( "Line 2" in c.messages[0].context )
    self.assertTrue( "name 'iWillFail' is not defined" in c.messages[0].message )
def testExecuteReturnValue( self ) :
    """execute() returns True only when errors occurred (with continueOnError)."""
    s = Gaffer.ScriptNode()
    self.assertEqual( s.execute( "a = 10" ), False )
    self.assertEqual( s.execute( "a = 10", continueOnError=True ), False )
    with IECore.CapturingMessageHandler() : # suppress error reporting, to avoid confusing test output
        self.assertEqual( s.execute( "a = iDontExist", continueOnError=True ), True )
def testExecuteExceptionsIncludeLineNumber( self ) :
    """Exceptions raised during execute() identify the offending line."""
    s = Gaffer.ScriptNode()
    six.assertRaisesRegex( self, RuntimeError, "Line 2 .* name 'iDontExist' is not defined", s.execute, "a = 10\na=iDontExist" )
def testFileVersioning( self ) :
    """Serialiser version metadata is written to files and read back on load."""
    s = Gaffer.ScriptNode()
    # a fresh script has no version metadata
    self.assertEqual( Gaffer.Metadata.value( s, "serialiser:milestoneVersion" ), None )
    self.assertEqual( Gaffer.Metadata.value( s, "serialiser:majorVersion" ), None )
    self.assertEqual( Gaffer.Metadata.value( s, "serialiser:minorVersion" ), None )
    self.assertEqual( Gaffer.Metadata.value( s, "serialiser:patchVersion" ), None )
    # serialising doesn't stamp the in-memory script either
    s.serialiseToFile( self.temporaryDirectory() + "/test.gfr" )
    self.assertEqual( Gaffer.Metadata.value( s, "serialiser:milestoneVersion" ), None )
    self.assertEqual( Gaffer.Metadata.value( s, "serialiser:majorVersion" ), None )
    self.assertEqual( Gaffer.Metadata.value( s, "serialiser:minorVersion" ), None )
    self.assertEqual( Gaffer.Metadata.value( s, "serialiser:patchVersion" ), None )
    # but loading picks up the versions written to the file
    s2 = Gaffer.ScriptNode()
    s2["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s2.load()
    self.assertEqual( Gaffer.Metadata.value( s2, "serialiser:milestoneVersion" ), Gaffer.About.milestoneVersion() )
    self.assertEqual( Gaffer.Metadata.value( s2, "serialiser:majorVersion" ), Gaffer.About.majorVersion() )
    self.assertEqual( Gaffer.Metadata.value( s2, "serialiser:minorVersion" ), Gaffer.About.minorVersion() )
    self.assertEqual( Gaffer.Metadata.value( s2, "serialiser:patchVersion" ), Gaffer.About.patchVersion() )
    # the same applies when saving via the fileName plug
    s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s.save()
    s2.load()
    self.assertEqual( Gaffer.Metadata.value( s2, "serialiser:milestoneVersion" ), Gaffer.About.milestoneVersion() )
    self.assertEqual( Gaffer.Metadata.value( s2, "serialiser:majorVersion" ), Gaffer.About.majorVersion() )
    self.assertEqual( Gaffer.Metadata.value( s2, "serialiser:minorVersion" ), Gaffer.About.minorVersion() )
    self.assertEqual( Gaffer.Metadata.value( s2, "serialiser:patchVersion" ), Gaffer.About.patchVersion() )
def testFileVersioningUpdatesOnSave( self ) :
    """Saving a file loaded from an old version stamps it with the current version."""
    s = Gaffer.ScriptNode()
    # fixture written by an older serialiser (version 0.14.0.0)
    s["fileName"].setValue( os.path.dirname( __file__ ) + "/scripts/previousSerialisationVersion.gfr" )
    s.load()
    self.assertEqual( Gaffer.Metadata.value( s, "serialiser:milestoneVersion" ), 0 )
    self.assertEqual( Gaffer.Metadata.value( s, "serialiser:majorVersion" ), 14 )
    self.assertEqual( Gaffer.Metadata.value( s, "serialiser:minorVersion" ), 0 )
    self.assertEqual( Gaffer.Metadata.value( s, "serialiser:patchVersion" ), 0 )
    s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s.save()
    s2 = Gaffer.ScriptNode()
    s2["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
    s2.load()
    # re-saving upgraded the stored version to the current one
    self.assertEqual( Gaffer.Metadata.value( s2, "serialiser:milestoneVersion" ), Gaffer.About.milestoneVersion() )
    self.assertEqual( Gaffer.Metadata.value( s2, "serialiser:majorVersion" ), Gaffer.About.majorVersion() )
    self.assertEqual( Gaffer.Metadata.value( s2, "serialiser:minorVersion" ), Gaffer.About.minorVersion() )
    self.assertEqual( Gaffer.Metadata.value( s2, "serialiser:patchVersion" ), Gaffer.About.patchVersion() )
def testFramesPerSecond( self ) :
    """framesPerSecond plug drives the context and serialises."""
    s = Gaffer.ScriptNode()
    self.assertEqual( s["framesPerSecond"].getValue(), 24.0 )
    self.assertEqual( s.context().getFramesPerSecond(), 24.0 )
    s["framesPerSecond"].setValue( 48.0 )
    self.assertEqual( s.context().getFramesPerSecond(), 48.0 )
    s2 = Gaffer.ScriptNode()
    s2.execute( s.serialise() )
    self.assertEqual( s2["framesPerSecond"].getValue(), 48.0 )
    self.assertEqual( s2.context().getFramesPerSecond(), 48.0 )
def testFrame( self ) :
	"""The "frame" plug and the context frame stay in sync in both
	directions, and the frame survives serialisation."""

	s = Gaffer.ScriptNode()
	self.assertEqual( s["frame"].getValue(), 1 )
	self.assertEqual( s.context().getFrame(), 1.0 )

	# Plug -> context.
	s["frame"].setValue( 2.0 )
	self.assertEqual( s.context().getFrame(), 2.0 )

	# Context -> plug.
	s.context().setFrame( 4.0 )
	self.assertEqual( s["frame"].getValue(), 4.0 )

	s2 = Gaffer.ScriptNode()
	s2.execute( s.serialise() )
	self.assertEqual( s2["frame"].getValue(), 4.0 )
	self.assertEqual( s2.context().getFrame(), 4.0 )
def testFrameRange( self ) :
	"""The frame range plugs mirror into "frameRange:start"/"frameRange:end"
	context variables, keep start <= end, and survive serialisation."""

	s = Gaffer.ScriptNode()
	self.assertEqual( s["frameRange"]["start"].getValue(), 1 )
	self.assertEqual( s["frameRange"]["end"].getValue(), 100 )
	self.assertEqual( s.context().get( "frameRange:start" ), 1 )
	self.assertEqual( s.context().get( "frameRange:end" ), 100 )

	s["frameRange"]["start"].setValue( 20 )
	s["frameRange"]["end"].setValue( 50 )
	self.assertEqual( s.context().get( "frameRange:start" ), 20 )
	self.assertEqual( s.context().get( "frameRange:end" ), 50 )

	# frame range remains valid
	# Setting end below start clamps start down to end.
	s["frameRange"]["end"].setValue( 15 )
	self.assertEqual( s["frameRange"]["start"].getValue(), 15 )
	self.assertEqual( s["frameRange"]["end"].getValue(), 15 )
	self.assertEqual( s.context().get( "frameRange:start" ), 15 )
	self.assertEqual( s.context().get( "frameRange:end" ), 15 )

	s["frameRange"]["end"].setValue( 150 )

	s2 = Gaffer.ScriptNode()
	s2.execute( s.serialise() )
	self.assertEqual( s2["frameRange"]["start"].getValue(), 15 )
	self.assertEqual( s2["frameRange"]["end"].getValue(), 150 )
	self.assertEqual( s2.context().get( "frameRange:start" ), 15 )
	self.assertEqual( s2.context().get( "frameRange:end" ), 150 )
def testLineNumberForExecutionSyntaxError( self ) :
	"""A syntax error during execute() reports the offending line number."""

	s = Gaffer.ScriptNode()
	six.assertRaisesRegex( self,
		IECore.Exception,
		"^Line 2",
		s.execute,
		inspect.cleandoc(
			"""
			a = 10
			i am a syntax error
			b = 20
			"""
		)
	)
def testFileNameInExecutionError( self ) :
	"""Errors raised while loading or executing a file should report the
	file name and line number, both when raising and when continuing on
	error via a message handler."""

	fileName = self.temporaryDirectory() + "/test.gfr"
	with open( fileName, "w" ) as f :
		f.write( "a = 10\n" )
		f.write( "a = iDontExist\n" )

	s = Gaffer.ScriptNode()
	s["fileName"].setValue( fileName )

	# Exercise both `load()` and `executeFile()` code paths.
	for method in ( s.load, functools.partial( s.executeFile, fileName ) ) :

		six.assertRaisesRegex( self,
			RuntimeError,
			"Line 2 of " + fileName + " : NameError: name 'iDontExist' is not defined",
			method
		)

		with IECore.CapturingMessageHandler() as mh :
			method( continueOnError = True )

		self.assertEqual( len( mh.messages ), 1 )
		self.assertEqual( mh.messages[0].context, "Line 2 of " + fileName )
		self.assertTrue( "NameError: name 'iDontExist' is not defined" in mh.messages[0].message )
def testIsExecuting( self ) :
	"""isExecuting() is False normally, True for the full duration of
	execute() (including nested Reference loads), and is reset even when
	execution raises."""

	s = Gaffer.ScriptNode()
	self.assertFalse( s.isExecuting() )

	# Record isExecuting() every time a child is added.
	self.__wasExecuting = []
	def f( script, child ) :
		self.__wasExecuting.append( script.isExecuting() )
	s.childAddedSignal().connect( f, scoped = False )

	s["n"] = GafferTest.AddNode()

	# add a reference so we guarantee it works with nested loads
	s["n1"] = GafferTest.AddNode()
	s["n2"] = GafferTest.AddNode()
	s["n2"]["op1"].setInput( s["n1"]["sum"] )

	b = Gaffer.Box.create( s, Gaffer.StandardSet( [ s["n1"] ] ) )
	Gaffer.PlugAlgo.promote( b["n1"]["op1"] )
	b.exportForReference( self.temporaryDirectory() + "/test.grf" )

	s["r"] = Gaffer.Reference()
	s["r"].load( self.temporaryDirectory() + "/test.grf" )
	s["r"]["op1"].setInput( s["n"]["sum"] )

	s["x"] = GafferTest.AddNode()

	# None of the interactive edits above count as "executing".
	self.assertFalse( any( self.__wasExecuting ) )
	self.__wasExecuting = []

	# connecting n to r and then to x guarantees the order of serialisation
	s["x"]["op1"].setInput( s["r"]["sum"] )
	ss = s.serialise( filter = Gaffer.StandardSet( [ s["n"], s["r"], s["x"] ] ) )
	s.execute( ss )
	self.assertTrue( all( self.__wasExecuting ) )

	# A failed execution must still reset the flag.
	self.__wasExecuting = []
	self.assertRaises( RuntimeError, s.execute, ss + "\nsyntaxError" )
	self.assertFalse( s.isExecuting() )
def testReconnectionOfChildPlug( self ) :
	"""deleteNodes() splices pass-through connections back together, both
	for top-level plugs and for nested child plugs, using the node's
	correspondingInput() mapping."""

	class NestedPlugsNode( Gaffer.DependencyNode ) :

		# Minimal node with nested in/out plugs, each output corresponding
		# to the input of the same name.
		def __init__( self, name = "NestedOutputNode" ) :
			Gaffer.DependencyNode.__init__( self, name )
			self["in"] = Gaffer.Plug()
			self["in"]["a"] = Gaffer.IntPlug()
			self["in"]["b"] = Gaffer.IntPlug()
			self["out"] = Gaffer.Plug( direction = Gaffer.Plug.Direction.Out )
			self["out"]["a"] = Gaffer.IntPlug( direction = Gaffer.Plug.Direction.Out )
			self["out"]["b"] = Gaffer.IntPlug( direction = Gaffer.Plug.Direction.Out )

		def correspondingInput( self, output ) :
			if output.isSame( self["out"] ) :
				return self["in"]
			if output.isSame( self["out"]["a"] ) :
				return self["in"]["a"]
			if output.isSame( self["out"]["b"] ) :
				return self["in"]["b"]
			return Gaffer.DependencyNode.correspondingInput( self, output )

	s = Gaffer.ScriptNode()
	s["n1"] = NestedPlugsNode()
	s["n2"] = NestedPlugsNode()
	s["n3"] = NestedPlugsNode()

	# check top level connection
	s["n2"]["in"].setInput( s["n1"]["out"] )
	s["n3"]["in"].setInput( s["n2"]["out"] )
	s.deleteNodes( filter = Gaffer.StandardSet( [ s["n2"] ] ) )
	self.assertTrue( s["n3"]["in"].getInput().isSame( s["n1"]["out"] ) )

	# check connection for nested plug
	s["n4"] = NestedPlugsNode()
	s["n3"]["in"].setInput( None )
	s["n3"]["in"]["a"].setInput( s["n1"]["out"]["a"] )
	s["n4"]["in"]["a"].setInput( s["n3"]["out"]["a"] )
	s.deleteNodes( filter = Gaffer.StandardSet( [ s["n3"] ] ) )
	self.assertTrue( s["n4"]["in"]["a"].getInput().isSame( s["n1"]["out"]["a"] ) )
def testPasteWithContinueOnError( self ) :
	"""paste() raises on errors by default (applying nothing), but with
	continueOnError it reports the error and applies what it can."""

	app = Gaffer.ApplicationRoot()
	script = Gaffer.ScriptNode()
	app["scripts"]["s"] = script

	app.setClipboardContents( IECore.StringData(
		inspect.cleandoc(
			"""
			iAmAnError
			parent.addChild( Gaffer.Node() )
			"""
		)
	) )

	six.assertRaisesRegex( self, RuntimeError, "iAmAnError", script.paste )
	# Nothing was applied by the failed paste.
	self.assertEqual( len( script.children( Gaffer.Node ) ), 0 )

	with IECore.CapturingMessageHandler() as mh :
		script.paste( continueOnError = True )

	self.assertEqual( len( mh.messages ), 1 )
	self.assertEqual( mh.messages[0].level, IECore.Msg.Level.Error )
	self.assertTrue( "iAmAnError" in mh.messages[0].message )
	# The valid line after the error was still executed.
	self.assertEqual( len( script.children( Gaffer.Node ) ), 1 )
def testErrorTolerantExecutionWithSyntaxError( self ) :
	"""A syntax error under continueOnError is reported as a message
	rather than raised."""

	script = Gaffer.ScriptNode()
	with IECore.CapturingMessageHandler() as mh :
		script.execute( "import", continueOnError = True )

	self.assertEqual( len( mh.messages ), 1 )
	self.assertIn( "SyntaxError: invalid syntax", mh.messages[0].message )
def testImport( self ) :
	"""importFile() brings in nodes and their connections, but not
	script-level settings such as custom plugs or variables."""

	s1 = Gaffer.ScriptNode()

	s1["n1"] = GafferTest.AddNode()
	s1["n2"] = GafferTest.AddNode()
	s1["n2"]["op1"].setInput( s1["n1"]["sum"] )

	s1["p"] = Gaffer.Plug()
	s1["frameRange"]["start"].setValue( -10 )
	s1["frameRange"]["end"].setValue( 101 )
	s1["variables"].addChild( Gaffer.NameValuePlug( "test", "test" ) )

	fileName = self.temporaryDirectory() + "/toImport.gfr"
	s1.serialiseToFile( fileName )

	s2 = Gaffer.ScriptNode()
	s2.importFile( fileName )

	# Nodes and connections come through...
	self.assertIn( "n1", s2 )
	self.assertIn( "n2", s2 )
	self.assertTrue( s2["n2"]["op1"].getInput().isSame( s2["n1"]["sum"] ) )
	# ...but script-level customisations do not.
	self.assertNotIn( "p", s2 )
	self.assertEqual( len( s2["variables"] ), 0 )
def testReadOnlyMetadata( self ) :
	"""Loading from a read-only file marks the script read-only; pointing
	the script at a different file clears the flag."""

	fileName = self.temporaryDirectory() + "/test.gfr"

	s = Gaffer.ScriptNode()
	self.assertFalse( Gaffer.MetadataAlgo.getReadOnly( s ) )

	s["fileName"].setValue( fileName )
	self.assertFalse( Gaffer.MetadataAlgo.getReadOnly( s ) )

	s.save()
	self.assertFalse( Gaffer.MetadataAlgo.getReadOnly( s ) )

	s = Gaffer.ScriptNode()
	s["fileName"].setValue( fileName )
	s.load()
	self.assertFalse( Gaffer.MetadataAlgo.getReadOnly( s ) )

	# Make the file read-only on disk and reload.
	os.chmod( s["fileName"].getValue(), stat.S_IREAD | stat.S_IRGRP | stat.S_IROTH )

	s = Gaffer.ScriptNode()
	s["fileName"].setValue( fileName )
	s.load()
	self.assertTrue( Gaffer.MetadataAlgo.getReadOnly( s ) )

	s["fileName"].setValue( self.temporaryDirectory() + "/test2.gfr" )
	self.assertFalse( Gaffer.MetadataAlgo.getReadOnly( s ) )
def testDisableContextVariable( self ) :
	"""Disabling a variables-plug entry removes it from the context, and
	the enabled state round-trips through serialisation."""

	s = Gaffer.ScriptNode()
	p = Gaffer.NameValuePlug( "test", 10, defaultEnabled = True, name = "test", flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
	s["variables"].addChild( p )
	self.assertEqual( s.context()["test"], 10 )

	p["enabled"].setValue( False )
	self.assertNotIn( "test", s.context() )

	s2 = Gaffer.ScriptNode()
	s2.execute( s.serialise() )
	self.assertNotIn( "test", s2.context() )

	s2["variables"]["test"]["enabled"].setValue( True )
	self.assertEqual( s2.context()["test"], 10 )
def testDeleteContextVariable( self ) :
	"""Removing a variables-plug entry removes it from the context."""

	s = Gaffer.ScriptNode()
	p = Gaffer.NameValuePlug( "test", 10, defaultEnabled = True, name = "test", flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
	s["variables"].addChild( p )
	self.assertEqual( s.context()["test"], 10 )

	s["variables"].removeChild( p )
	self.assertNotIn( "test", s.context() )
def testCompoundNumericContextVariable( self ) :
	"""Compound numeric variables track edits to individual components."""

	s = Gaffer.ScriptNode()
	p = Gaffer.NameValuePlug( "test", imath.V3i( 1, 2, 3 ), flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
	s["variables"].addChild( p )
	self.assertEqual( s.context()["test"], imath.V3i( 1, 2, 3 ) )

	p["value"]["y"].setValue( 10 )
	self.assertEqual( s.context()["test"], imath.V3i( 1, 10, 3 ) )
def testDuplicateContextVariables( self ) :
	"""When the same variable is declared twice, the last enabled entry
	in the list wins."""

	# We don't want people to specify the same context variable twice,
	# but if they do, we want to implement a simple rule : last enabled
	# one in the list wins. This is the same rule used for CompoundDataPlugs
	# everywhere.

	s = Gaffer.ScriptNode()
	p1 = Gaffer.NameValuePlug( "test", 1, defaultEnabled = True, flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
	p2 = Gaffer.NameValuePlug( "test", 2, defaultEnabled = True, flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
	s["variables"].addChild( p1 )
	s["variables"].addChild( p2 )
	self.assertEqual( s.context()["test"], 2 )

	# Editing the shadowed entry has no visible effect.
	p1["value"].setValue( 10 )
	self.assertEqual( s.context()["test"], 2 )

	# Disabling the winner exposes the earlier entry.
	p2["enabled"].setValue( False )
	self.assertEqual( s.context()["test"], 10 )
	p2["enabled"].setValue( True )
	self.assertEqual( s.context()["test"], 2 )

	s["variables"].removeChild( p2 )
	self.assertEqual( s.context()["test"], 10 )
	s["variables"].removeChild( p1 )
	self.assertNotIn( "test", s.context() )
def testExternalContextVariable( self ) :
	"""Variables set directly on the context (not via the variables plug)
	must survive edits to the variables plug."""

	s = Gaffer.ScriptNode()

	# We don't really want people to manipulate the context directly like
	# this; we want them to use `ScriptNode::variablesPlug()` instead. But
	# it seems plausible that people would do this to implement a sort of
	# non-persistent "context pinning" for interactive use. Until we support
	# such a feature natively, make sure that we don't remove variables we
	# know nothing about.
	s.context()["externalTest"] = 10

	p = Gaffer.NameValuePlug( "test", 1, defaultEnabled = True, flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
	s["variables"].addChild( p )
	self.assertEqual( s.context()["externalTest"], 10 )
	self.assertEqual( s.context()["test"], 1 )

	p["enabled"].setValue( False )
	self.assertEqual( s.context()["externalTest"], 10 )
	self.assertNotIn( "test", s.context() )

	p["enabled"].setValue( True )
	self.assertEqual( s.context()["externalTest"], 10 )
	self.assertEqual( s.context()["test"], 1 )

	s["variables"].removeChild( p )
	self.assertEqual( s.context()["externalTest"], 10 )
	self.assertNotIn( "test", s.context() )
def testChangeContextVariableName( self ) :
	"""Renaming a variable moves its value to the new name; an empty name
	means the variable is not published to the context."""

	s = Gaffer.ScriptNode()
	p = Gaffer.NameValuePlug( "", 1, defaultEnabled = True, flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
	s["variables"].addChild( p )
	self.assertNotIn( "", s.context() )

	p["name"].setValue( "test" )
	self.assertEqual( s.context()["test"], 1 )

	p["name"].setValue( "testTwo" )
	self.assertEqual( s.context()["testTwo"], 1 )
	self.assertNotIn( "test", s.context() )

	p["name"].setValue( "" )
	self.assertNotIn( "testTwo", s.context() )
def testCancellationDuringLoad( self ) :
	"""load() honours a Canceller on the current context."""

	s = Gaffer.ScriptNode()
	s["fileName"].setValue( os.path.join( os.path.dirname( __file__ ), "scripts", "previousSerialisationVersion.gfr" ) )

	context = Gaffer.Context()
	canceller = IECore.Canceller()
	with Gaffer.Context( context, canceller ) :
		canceller.cancel()
		with self.assertRaises( IECore.Cancelled ) :
			s.load()
def testCancellationDuringExecute( self ) :
	"""execute() checks for cancellation only in the line-by-line
	(continueOnError) mode."""

	s = Gaffer.ScriptNode()

	context = Gaffer.Context()
	canceller = IECore.Canceller()
	with Gaffer.Context( context, canceller ) :
		canceller.cancel()
		# Execution is done all in one go, and there's no point cancelling
		# at the end when we've done all of the work anyway.
		s.execute( "script.addChild( Gaffer.Node() )", continueOnError = False )
		with self.assertRaises( IECore.Cancelled ) :
			# Execution is done line-by-line, so making regular cancellation
			# checks makes sense.
			s.execute( "script.addChild( Gaffer.Node() )", continueOnError = True )
def testCancellationDuringSerialise( self ) :
	"""serialise() honours a Canceller on the current context."""

	s = Gaffer.ScriptNode()

	context = Gaffer.Context()
	canceller = IECore.Canceller()
	with Gaffer.Context( context, canceller ) :
		canceller.cancel()
		with self.assertRaises( IECore.Cancelled ) :
			s.serialise()
def testFrameChangeSignalling( self ) :
	"""Changing the frame (via context or plug) emits changedSignal once
	per actual change; setting the same frame again emits nothing."""

	s = Gaffer.ScriptNode()
	cs = GafferTest.CapturingSlot( s.context().changedSignal() )

	s.context().setFrame( 10 )
	self.assertEqual( cs, [ ( s.context(), "frame" ) ] )
	self.assertEqual( s.context().getFrame(), 10 )

	s.context().setFrame( 20 )
	self.assertEqual( cs, [ ( s.context(), "frame" ) ] * 2 )
	self.assertEqual( s.context().getFrame(), 20 )

	s.context().setFrame( 30 )
	self.assertEqual( cs, [ ( s.context(), "frame" ) ] * 3 )
	self.assertEqual( s.context().getFrame(), 30 )

	# No-op change : no signal.
	s.context().setFrame( 30 )
	self.assertEqual( cs, [ ( s.context(), "frame" ) ] * 3 )
	self.assertEqual( s.context().getFrame(), 30 )

	# Changes made via the plug signal in the same way.
	s["frame"].setValue( 40 )
	self.assertEqual( cs, [ ( s.context(), "frame" ) ] * 4 )
	self.assertEqual( s.context().getFrame(), 40 )

	s["frame"].setValue( 50 )
	self.assertEqual( cs, [ ( s.context(), "frame" ) ] * 5 )
	self.assertEqual( s.context().getFrame(), 50 )

	s["frame"].setValue( 50 )
	self.assertEqual( cs, [ ( s.context(), "frame" ) ] * 5 )
	self.assertEqual( s.context().getFrame(), 50 )
def testDeletingNodeRemovesFocus( self ) :
	"""Deleting the focussed node (directly or from within a Box) clears
	the focus and emits the focus/set signals exactly once each."""

	s = Gaffer.ScriptNode()
	n = Gaffer.Node()
	s["n"] = n

	# Set focus, and check all expected signals are emitted.

	focusChanges = GafferTest.CapturingSlot( s.focusChangedSignal() )
	memberRemovals = GafferTest.CapturingSlot( s.focusSet().memberRemovedSignal() )
	memberAdditions = GafferTest.CapturingSlot( s.focusSet().memberAddedSignal() )

	s.setFocus( n )
	self.assertEqual( s.getFocus(), n )
	self.assertEqual( focusChanges, [ ( s, n ) ] )
	self.assertEqual( memberRemovals, [] )
	self.assertEqual( memberAdditions, [ ( s.focusSet(), n ) ] )

	# Delete focus node, and check focus is lost and all expected signals
	# are emitted.

	del focusChanges[:]
	del memberRemovals[:]
	del memberAdditions[:]

	del s["n"]
	self.assertIsNone( s.getFocus() )
	self.assertEqual( focusChanges, [ ( s, None ) ] )
	self.assertEqual( memberRemovals, [ ( s.focusSet(), n ) ] )
	self.assertEqual( memberAdditions, [] )

	del focusChanges[:]
	del memberRemovals[:]
	del memberAdditions[:]

	# Repeat, but this time with focus node inside a box.

	s["b"] = Gaffer.Box()
	s["b"]["n"] = n
	s.setFocus( n )
	self.assertEqual( s.getFocus(), n )
	self.assertEqual( focusChanges, [ ( s, n ) ] )
	self.assertEqual( memberRemovals, [] )
	self.assertEqual( memberAdditions, [ ( s.focusSet(), n ) ] )

	del focusChanges[:]
	del memberRemovals[:]
	del memberAdditions[:]

	del s["b"]["n"]
	self.assertIsNone( s.getFocus() )
	self.assertEqual( focusChanges, [ ( s, None ) ] )
	self.assertEqual( memberRemovals, [ ( s.focusSet(), n ) ] )
	self.assertEqual( memberAdditions, [] )
if __name__ == "__main__":
	# Allow the test file to be run directly.
	unittest.main()
|
hradec/gaffer
|
python/GafferTest/ScriptNodeTest.py
|
Python
|
bsd-3-clause
| 52,927
|
import os
import struct
import subprocess
import sys
from pkg_resources import resource_filename
from binascii import hexlify
from binascii import unhexlify
def find_binary(prefixes, name, args):
    """Return the first directory from `prefixes` in which the executable
    `name` can be launched with `args`.

    If the binary cannot be launched from any prefix, prints an error and
    terminates the process with status -2 (via os._exit).
    """
    for prefix in prefixes:
        try:
            subprocess.call([os.path.join(prefix, name)] + args)
        except OSError:
            # Not runnable from this prefix - try the next candidate.
            continue
        return prefix
    # Fix: the previous message also printed the last candidate `prefix`,
    # which was misleading - the binary was not found in *any* prefix.
    print >> sys.stderr, "%r not found in your PATH nor LINUX_TOOLS_PATH" % (name,)
    os._exit(-2)
def bpf_compile(assembly):
    """Compile BPF assembly text by piping it through the external
    `bpf_asm` tool, returning the tool's decimal "op,op op ..." output.

    Exits the process if the tool is missing or its output does not look
    like compiled bytecode.
    """
    # Candidate locations for the bundled / installed linux_tools binaries.
    prefixes = ["",
                resource_filename(__name__, "."),
                resource_filename(__name__, os.path.join("..","linux_tools")),
                resource_filename(__name__, "linux_tools"),
                ".",
                "linux_tools",
                os.path.dirname(sys.argv[0]),
                os.path.realpath(os.path.dirname(sys.argv[0])),
                os.getenv("LINUX_TOOLS_PATH", "."),
                ]
    prefix = find_binary(prefixes, "bpf_asm", ['/dev/null'])

    out, err = subprocess.Popen([os.path.join(prefix, "bpf_asm")],
                                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE).communicate(assembly)

    # Valid bpf_asm output consists solely of digits, commas and whitespace.
    if set(out) - set(" ,0123456789\n") or not out:
        print >> sys.stderr, "Compiling failed with:\n%s\n" % (out.strip() + err.strip())
        os._exit(-3)
    return out.strip()
def _looks_like_ip(l2, off):
if len(l2) - off >= 20:
ipver, _, total_length = struct.unpack_from('!BBH', l2, off)
if (ipver & 0xF0 == 0x40 and (ipver & 0x0f) >= 5):
return 4
if len(l2) - off >= 40:
vertos, _, _, pay_len, proto, ttl = struct.unpack_from('!BBHHBB', l2, off)
if (vertos & 0xF0 == 0x60 and pay_len + off + 40 == len(l2)
and ttl > 0):
return 6
return None
def find_ip_offset(l2, max_off=40):
    """Return the byte offset of the IP header within layer-2 frame `l2`,
    or None if no plausible header is found within `max_off` bytes.

    Prefers an offset immediately preceded by the matching IPv4/IPv6
    ethertype; falls back to a bare scan for anything IP-shaped.
    """
    # first look for both ethernet and ip header
    for off in xrange(2, max_off+2, 2):
        if l2[off-2:off] == '\x08\x00' and _looks_like_ip(l2, off) == 4:
            return off
        if l2[off-2:off] == '\x86\xdd' and _looks_like_ip(l2, off) == 6:
            return off
    # okay, just look for ip header
    for off in xrange(0, max_off, 2):
        if _looks_like_ip(l2, off):
            return off
    return None
def scrub_byte(data, minval, maxval, ip_constant):
    """Deterministically rotate a single byte within [minval, maxval).

    `data` is a one-character string/bytes value. If its ordinal falls
    outside the half-open range it is returned untouched; otherwise it is
    rotated by `ip_constant` positions within the range, so values of the
    same class (e.g. lowercase letters) map onto the same class.
    """
    val = ord(data)
    if not (minval <= val < maxval):
        return data
    # Equivalent to the old hexlify/int round trip, which was just ord(),
    # and drops a pointless str.rstrip(b"\n") that mixed str/bytes types.
    rotated = (val - minval + ip_constant) % (maxval - minval) + minval
    return unhexlify(format(rotated, '02x'))
def scrub_dns_name(data, ip_ihl, ip_hdr_off, entropy):
    """Obfuscate the query name inside a DNS-over-UDP packet in place.

    `data` is a mutable list of single characters, `ip_ihl` the IP header
    length in bytes, `ip_hdr_off` the offset of the IP header, and
    `entropy` a list of rotation constants indexed by name offset.
    """
    # UDP
    dns_hdr_off = ip_ihl + ip_hdr_off + 8 # 8 is UDP header size
    str_len_offset = 0  # NOTE(review): unused; kept for compatibility.
    name_offset = 0
    # Walk the DNS name's length-prefixed labels; a zero length terminates.
    while True:
        try:
            str_len_off = ord(data[dns_hdr_off + 12 + name_offset]) # 12 is the offset inside the DNS packet
        except IndexError:
            print >> sys.stderr, "OOps, it seems this UDP packet is not properly formed DNS, break while True"
            break
        if str_len_off == 0:
            break
        idx = 0
        while idx < str_len_off:
            try:
                rtr = data[dns_hdr_off + 12 + name_offset + idx + 1]
            except IndexError:
                print >> sys.stderr, "OOps, it seems this UDP packet is not properly formed DNS, break while idx"
                # NOTE(review): this only exits the inner loop; the outer
                # loop then hits the same truncation point and terminates.
                break
            # Rotate within each character class so the name stays printable.
            rtr = scrub_byte(rtr, ord('a'), ord('z') + 1, entropy[name_offset % len(entropy)])
            rtr = scrub_byte(rtr, ord('A'), ord('Z') + 1, entropy[name_offset % len(entropy)])
            rtr = scrub_byte(rtr, ord('0'), ord('9') + 1, entropy[name_offset % len(entropy)])
            data[dns_hdr_off + 12 + name_offset + idx + 1] = rtr
            idx = idx + 1
        name_offset = name_offset + str_len_off + 1
def do_scrub(l2, ip_hdr_off):
    """Anonymise a single layer-2 frame and return the scrubbed copy.

    Zeroes the MAC addresses, rotates the IP source/destination address
    bytes with a fixed entropy table, and obfuscates DNS names in UDP
    payloads. Only ethernet frames (optionally VLAN-tagged) are handled.
    """
    entropy = [11,2,9,7,5,10,17,19,1,3,15]
    data = list(l2)
    if ip_hdr_off == 18:
        #Ethernet with vlan
        # Strip the 802.1Q tag and rewrite the ethertype.
        # NOTE(review): the ethertype is forced to 0x0800 (IPv4) even if
        # the inner packet is IPv6 - confirm intended.
        data[12] = '\x08'
        data[13] = '\x00'
        del data[14:18]
        ip_hdr_off = 14
    if ip_hdr_off not in (14, 16):
        raise Exception("ip_hdr_off=%i Not ethernet, not sure how to scrub MACS" % ip_hdr_off)
    # Zero both MAC addresses (everything before the ethertype).
    for i in xrange(ip_hdr_off-2):
        data[i] = '\x00'
    ipver = ord(data[ip_hdr_off])
    if ipver & 0xF0 == 0x40:
        # IPV4
        # Scrubing IPs
        ip_ihl = (ipver & 0x0F)*4
        # Source + destination addresses : 4 + 4 bytes at offset 12.
        for i in xrange(ip_hdr_off+12, ip_hdr_off+12+4+4, 1):
            data[i] = scrub_byte(data[i], 0, 256, entropy[i % len(entropy)])
        if ord(data[ip_hdr_off+9]) == 0x11:
            # UDP
            scrub_dns_name(data, ip_ihl, ip_hdr_off, entropy)
    elif ipver & 0xF0 == 0x60:
        # IPV6
        # Source + destination addresses : 16 + 16 bytes at offset 8.
        for i in xrange(ip_hdr_off+8, ip_hdr_off+8+16+16, 1):
            data[i] = scrub_byte(data[i], 0, 256, entropy[i % len(entropy)])
        if ord(data[ip_hdr_off+6]) == 0x11:
            # UDP
            scrub_dns_name(data, 40, ip_hdr_off, entropy)
    return ''.join(data)
|
nivertech/bpftools
|
bpftools/utils.py
|
Python
|
bsd-3-clause
| 5,334
|
from lib.common import helpers
class Module:
    """Empire module wrapper for PowerView's Get-DomainComputer, which
    queries Active Directory for computer objects."""

    def __init__(self, mainMenu, params=[]):
        # NOTE(review): mutable default `params=[]` is shared across calls;
        # harmless here because it is only iterated, never mutated.

        # Metadata consumed by the Empire menus and reporting.
        self.info = {
            'Name': 'Get-DomainComputer',

            'Author': ['@harmj0y'],

            'Description': ('Queries the domain for current computer objects. Part of PowerView.'),

            'Background' : True,

            'OutputExtension' : None,

            'NeedsAdmin' : False,

            'OpsecSafe' : True,

            'Language' : 'powershell',

            'MinLanguageVersion' : '2',

            'Comments': [
                'https://github.com/PowerShellMafia/PowerSploit/blob/dev/Recon/'
            ]
        }

        # any options needed by the module, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Agent' : {
                'Description' : 'Agent to run module on.',
                'Required' : True,
                'Value' : ''
            },
            'Identity' : {
                'Description' : 'A SamAccountName, DistinguishedName, SID, GUID, or a dns host name, wildcards accepted.',
                'Required' : False,
                'Value' : ''
            },
            'Unconstrained' : {
                'Description' : 'Switch. Return computer objects that have unconstrained delegation.',
                'Required' : False,
                'Value' : ''
            },
            'TrustedToAuth' : {
                'Description' : 'Switch. Return computer objects that are trusted to authenticate for other principals.',
                'Required' : False,
                'Value' : ''
            },
            'Printers' : {
                'Description' : 'Switch. Return only printers.',
                'Required' : False,
                'Value' : ''
            },
            'SPN' : {
                'Description' : 'Return computers with a specific service principal name, wildcards accepted.',
                'Required' : False,
                'Value' : ''
            },
            'OperatingSystem' : {
                'Description' : 'Return computers with a specific operating system, wildcards accepted.',
                'Required' : False,
                'Value' : ''
            },
            'ServicePack' : {
                'Description' : 'Return computers with the specified service pack, wildcards accepted.',
                'Required' : False,
                'Value' : ''
            },
            'SiteName' : {
                'Description' : 'Return computers in the specific AD Site name, wildcards accepted.',
                'Required' : False,
                'Value' : ''
            },
            'Ping' : {
                'Description' : "Switch. Ping each host to ensure it's up before enumerating.",
                'Required' : False,
                'Value' : ''
            },
            'Domain' : {
                'Description' : 'The domain to use for the query, defaults to the current domain.',
                'Required' : False,
                'Value' : ''
            },
            'LDAPFilter' : {
                'Description' : 'Specifies an LDAP query string that is used to filter Active Directory objects.',
                'Required' : False,
                'Value' : ''
            },
            'Properties' : {
                'Description' : 'Specifies the properties of the output object to retrieve from the server.',
                'Required' : False,
                'Value' : ''
            },
            'SearchBase' : {
                'Description' : 'The LDAP source to search through, e.g. "LDAP://OU=secret,DC=testlab,DC=local" Useful for OU queries.',
                'Required' : False,
                'Value' : ''
            },
            'Server' : {
                'Description' : 'Specifies an active directory server (domain controller) to bind to',
                'Required' : False,
                'Value' : ''
            },
            'SearchScope' : {
                'Description' : 'Specifies the scope to search under, Base/OneLevel/Subtree (default of Subtree)',
                'Required' : False,
                'Value' : ''
            },
            'ResultPageSize' : {
                'Description' : 'Specifies the PageSize to set for the LDAP searcher object.',
                'Required' : False,
                'Value' : ''
            },
            'ServerTimeLimit' : {
                'Description' : 'Specifies the maximum amount of time the server spends searching. Default of 120 seconds.',
                'Required' : False,
                'Value' : ''
            },
            'Tombstone' : {
                'Description' : 'Switch. Specifies that the search should also return deleted/tombstoned objects.',
                'Required' : False,
                'Value' : 'False'
            }
        }

        # save off a copy of the mainMenu object to access external functionality
        # like listeners/agent handlers/etc.
        self.mainMenu = mainMenu

        for param in params:
            # parameter format is [Name, Value]
            option, value = param
            if option in self.options:
                self.options[option]['Value'] = value

    def generate(self, obfuscate=False, obfuscationCommand=""):
        """Build the PowerShell script for this module : the minimal
        PowerView source for Get-DomainComputer plus an invocation line
        assembled from the configured options. Returns "" on failure to
        read the module source.
        """
        moduleName = self.info["Name"]

        # read in the common powerview.ps1 module source code
        moduleSource = self.mainMenu.installPath + "/data/module_source/situational_awareness/network/powerview.ps1"
        try:
            f = open(moduleSource, 'r')
        except:
            print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
            return ""

        moduleCode = f.read()
        f.close()

        # get just the code needed for the specified function
        script = helpers.generate_dynamic_powershell_script(moduleCode, moduleName)

        script += moduleName + " "

        # Append each configured option; "true" values become bare switches.
        for option,values in self.options.iteritems():
            if option.lower() != "agent":
                if values['Value'] and values['Value'] != '':
                    if values['Value'].lower() == "true":
                        # if we're just adding a switch
                        script += " -" + str(option)
                    else:
                        script += " -" + str(option) + " " + str(values['Value'])

        script += ' | Out-String | %{$_ + \"`n\"};"`n'+str(moduleName)+' completed!"'
        if obfuscate:
            script = helpers.obfuscate(self.mainMenu.installPath, psScript=script, obfuscationCommand=obfuscationCommand)
        return script
|
bneg/Empire
|
lib/modules/powershell/situational_awareness/network/powerview/get_computer.py
|
Python
|
bsd-3-clause
| 7,131
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0 that
# can be found in the LICENSE file.
"""Manages subcommands in a script.
Each subcommand should look like this:
@usage('[pet name]')
def CMDpet(parser, args):
'''Prints a pet.
Many people likes pet. This command prints a pet for your pleasure.
'''
parser.add_option('--color', help='color of your pet')
options, args = parser.parse_args(args)
if len(args) != 1:
parser.error('A pet name is required')
pet = args[0]
if options.color:
print('Nice %s %s' % (options.color, pet))
else:
print('Nice %s' % pet)
return 0
Explanation:
- usage decorator alters the 'usage: %prog' line in the command's help.
- docstring is used to both short help line and long help line.
- parser can be augmented with arguments.
- return the exit code.
- Every function in the specified module with a name starting with 'CMD' will
be a subcommand.
- The module's docstring will be used in the default 'help' page.
- If a command has no docstring, it will not be listed in the 'help' page.
Useful to keep compatibility commands around or aliases.
- If a command is an alias to another one, it won't be documented. E.g.:
CMDoldname = CMDnewcmd
will result in oldname not being documented but supported and redirecting to
newcmd. Make it a real function that calls the old function if you want it
to be documented.
"""
import difflib
import sys
import textwrap
def usage(more):
    """Adds a 'usage_more' property to a CMD function.

    The value is appended to the 'usage: %prog <name> [options]' line of
    the command's help output.
    """
    def decorate(command_fn):
        command_fn.usage_more = more
        return command_fn
    return decorate
def epilog(text):
    """Adds an 'epilog' property to a CMD function.

    It will be shown in the epilog. Usually useful for examples.
    """
    def decorate(command_fn):
        command_fn.epilog = text
        return command_fn
    return decorate
def CMDhelp(parser, args):
    """Prints list of commands or help for a specific command."""
    # This is the default help implementation. It can be disabled or overriden if
    # wanted.
    if not any(i in ('-h', '--help') for i in args):
        args = args + ['--help']
    # parse_args() prints the help text and exits the process itself.
    _, args = parser.parse_args(args)
    # Never gets there.
    assert False
def _get_color_module():
"""Returns the colorama module if available.
If so, assumes colors are supported and return the module handle.
"""
return sys.modules.get('colorama') or sys.modules.get('third_party.colorama')
class CommandDispatcher(object):
def __init__(self, module):
"""module is the name of the main python module where to look for commands.
The python builtin variable __name__ MUST be used for |module|. If the
script is executed in the form 'python script.py', __name__ == '__main__'
and sys.modules['script'] doesn't exist. On the other hand if it is unit
tested, __main__ will be the unit test's module so it has to reference to
itself with 'script'. __name__ always match the right value.
"""
self.module = sys.modules[module]
def enumerate_commands(self):
"""Returns a dict of command and their handling function.
The commands must be in the '__main__' modules. To import a command from a
submodule, use:
from mysubcommand import CMDfoo
Automatically adds 'help' if not already defined.
A command can be effectively disabled by defining a global variable to None,
e.g.:
CMDhelp = None
"""
cmds = dict(
(fn[3:], getattr(self.module, fn))
for fn in dir(self.module) if fn.startswith('CMD'))
cmds.setdefault('help', CMDhelp)
return cmds
def find_nearest_command(self, name):
"""Retrieves the function to handle a command.
It automatically tries to guess the intended command by handling typos or
incomplete names.
"""
commands = self.enumerate_commands()
if name in commands:
return commands[name]
# An exact match was not found. Try to be smart and look if there's
# something similar.
commands_with_prefix = [c for c in commands if c.startswith(name)]
if len(commands_with_prefix) == 1:
return commands[commands_with_prefix[0]]
# A #closeenough approximation of levenshtein distance.
def close_enough(a, b):
return difflib.SequenceMatcher(a=a, b=b).ratio()
hamming_commands = sorted(
((close_enough(c, name), c) for c in commands),
reverse=True)
if (hamming_commands[0][0] - hamming_commands[1][0]) < 0.3:
# Too ambiguous.
return
if hamming_commands[0][0] < 0.8:
# Not similar enough. Don't be a fool and run a random command.
return
return commands[hamming_commands[0][1]]
def _gen_commands_list(self):
"""Generates the short list of supported commands."""
commands = self.enumerate_commands()
docs = sorted(
(name, self._create_command_summary(name, handler))
for name, handler in commands.iteritems())
# Skip commands without a docstring.
docs = [i for i in docs if i[1]]
# Then calculate maximum length for alignment:
length = max(len(c) for c in commands)
# Look if color is supported.
colors = _get_color_module()
green = reset = ''
if colors:
green = colors.Fore.GREEN
reset = colors.Fore.RESET
return (
'Commands are:\n' +
''.join(
' %s%-*s%s %s\n' % (green, length, name, reset, doc)
for name, doc in docs))
def _add_command_usage(self, parser, command):
"""Modifies an OptionParser object with the function's documentation."""
name = command.__name__[3:]
if name == 'help':
name = '<command>'
# Use the module's docstring as the description for the 'help' command if
# available.
parser.description = (self.module.__doc__ or '').rstrip()
if parser.description:
parser.description += '\n\n'
parser.description += self._gen_commands_list()
# Do not touch epilog.
else:
# Use the command's docstring if available. For commands, unlike module
# docstring, realign.
lines = (command.__doc__ or '').rstrip().splitlines()
if lines[:1]:
rest = textwrap.dedent('\n'.join(lines[1:]))
parser.description = '\n'.join((lines[0], rest))
else:
parser.description = lines[0]
if parser.description:
parser.description += '\n'
parser.epilog = getattr(command, 'epilog', None)
if parser.epilog:
parser.epilog = '\n' + parser.epilog.strip() + '\n'
more = getattr(command, 'usage_more', '')
parser.set_usage(
'usage: %%prog %s [options]%s' % (name, '' if not more else ' ' + more))
@staticmethod
def _create_command_summary(name, command):
"""Creates a oneline summary from the command's docstring."""
if name != command.__name__[3:]:
# Skip aliases.
return ''
doc = command.__doc__ or ''
line = doc.split('\n', 1)[0].rstrip('.')
if not line:
return line
return (line[0].lower() + line[1:]).strip()
def execute(self, parser, args):
    """Dispatches execution to the right command.

    Fallbacks to 'help' if not disabled. Returns the chosen command's
    return value, or 2 when no command (not even 'help') can handle args.
    """
    # Unconditionally disable format_description() and format_epilog().
    # Technically, a formatter should be used but it's not worth (yet) the
    # trouble.
    parser.format_description = lambda _: parser.description or ''
    parser.format_epilog = lambda _: parser.epilog or ''
    if args:
        if args[0] in ('-h', '--help') and len(args) > 1:
            # Inverse the argument order so 'tool --help cmd' is rewritten to
            # 'tool cmd --help'.
            args = [args[1], args[0]] + args[2:]
        command = self.find_nearest_command(args[0])
        if command:
            if command.__name__ == 'CMDhelp' and len(args) > 1:
                # Inverse the arguments order so 'tool help cmd' is rewritten to
                # 'tool cmd --help'. Do it here since we want 'tool hel cmd' to work
                # too.
                args = [args[1], '--help'] + args[2:]
                # Re-resolve: the new args[0] may name a different command;
                # fall back to the previously found one if it doesn't.
                command = self.find_nearest_command(args[0]) or command
            # "fix" the usage and the description now that we know the subcommand.
            self._add_command_usage(parser, command)
            return command(parser, args[1:])
    # No args, or args[0] matched no command: default to the 'help'
    # command when one is registered.
    cmdhelp = self.enumerate_commands().get('help')
    if cmdhelp:
        # Not a known command. Default to help.
        self._add_command_usage(parser, cmdhelp)
        return cmdhelp(parser, args)
    # Nothing can be done.
    return 2
|
sgraham/nope
|
tools/swarming_client/third_party/depot_tools/subcommand.py
|
Python
|
bsd-3-clause
| 8,534
|
"""
CI, but with that all important Docker twist
"""
|
RickyCook/DockCI
|
dockci/__init__.py
|
Python
|
isc
| 53
|
"""
Support for LimitlessLED bulbs.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.limitlessled/
"""
import logging
import voluptuous as vol
from homeassistant.const import (CONF_NAME, CONF_HOST, CONF_PORT, CONF_TYPE)
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH, ATTR_RGB_COLOR,
ATTR_TRANSITION, EFFECT_COLORLOOP, EFFECT_WHITE, FLASH_LONG,
SUPPORT_BRIGHTNESS, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH,
SUPPORT_RGB_COLOR, SUPPORT_TRANSITION, Light, PLATFORM_SCHEMA)
import homeassistant.helpers.config_validation as cv
# Third-party package required by this platform (installed by Home Assistant).
REQUIREMENTS = ['limitlessled==1.0.2']

_LOGGER = logging.getLogger(__name__)

# Configuration keys used by PLATFORM_SCHEMA, rewrite_legacy() and
# setup_platform().
CONF_BRIDGES = 'bridges'
CONF_GROUPS = 'groups'
CONF_NUMBER = 'number'
CONF_VERSION = 'version'

# Defaults applied when the user omits optional settings.
DEFAULT_LED_TYPE = 'rgbw'
DEFAULT_PORT = 8899
DEFAULT_TRANSITION = 0
DEFAULT_VERSION = 5

# Supported group (bulb) types.
LED_TYPE = ['rgbw', 'white']

# RGB colors whose minimum channel exceeds 256 - RGB_BOUNDARY are treated
# as plain white (see LimitlessLEDRGBWGroup.turn_on).
RGB_BOUNDARY = 40

WHITE = [255, 255, 255]

# Feature bitmasks reported via the supported_features properties.
SUPPORT_LIMITLESSLED_WHITE = (SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP |
                              SUPPORT_TRANSITION)
SUPPORT_LIMITLESSLED_RGB = (SUPPORT_BRIGHTNESS | SUPPORT_EFFECT |
                            SUPPORT_FLASH | SUPPORT_RGB_COLOR |
                            SUPPORT_TRANSITION)

# Config schema: a list of bridges, each with a host and a list of groups.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_BRIDGES): vol.All(cv.ensure_list, [
        {
            vol.Required(CONF_HOST): cv.string,
            vol.Optional(CONF_VERSION,
                         default=DEFAULT_VERSION): cv.positive_int,
            vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
            vol.Required(CONF_GROUPS): vol.All(cv.ensure_list, [
                {
                    vol.Required(CONF_NAME): cv.string,
                    vol.Optional(CONF_TYPE, default=DEFAULT_LED_TYPE):
                        vol.In(LED_TYPE),
                    vol.Required(CONF_NUMBER): cv.positive_int,
                }
            ]),
        },
    ]),
})
def rewrite_legacy(config):
    """Rewrite legacy configuration to new format."""
    # The oldest format put bridge options at the top level of the config;
    # in that case treat the whole config dict as a single bridge.
    legacy_bridges = config.get(CONF_BRIDGES, [config])
    converted = []
    for bridge in legacy_bridges:
        if 'groups' in bridge:
            group_list = bridge['groups']
        else:
            _LOGGER.warning("Legacy configuration format detected")
            # Legacy bridges describe up to four groups via numbered keys
            # such as 'group_1_name' / 'group_1_type'.
            group_list = []
            for number in range(1, 5):
                name_key = 'group_%d_name' % number
                if name_key not in bridge:
                    continue
                group_list.append({
                    'number': number,
                    'type': bridge.get('group_%d_type' % number,
                                       DEFAULT_LED_TYPE),
                    'name': bridge.get(name_key)
                })
        converted.append({
            'host': bridge.get(CONF_HOST),
            'groups': group_list
        })
    return {'bridges': converted}
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the LimitlessLED lights.

    Builds one Bridge per configured host, registers its groups on the
    bridge, and adds a light entity per group via add_devices().
    """
    from limitlessled.bridge import Bridge

    # Two legacy configuration formats are supported to maintain backwards
    # compatibility.
    config = rewrite_legacy(config)

    # Use the expanded configuration format.
    lights = []
    for bridge_conf in config.get(CONF_BRIDGES):
        bridge = Bridge(bridge_conf.get(CONF_HOST),
                        port=bridge_conf.get(CONF_PORT, DEFAULT_PORT),
                        version=bridge_conf.get(CONF_VERSION, DEFAULT_VERSION))
        for group_conf in bridge_conf.get(CONF_GROUPS):
            group = bridge.add_group(
                group_conf.get(CONF_NUMBER),
                group_conf.get(CONF_NAME),
                group_conf.get(CONF_TYPE, DEFAULT_LED_TYPE))
            # factory() picks the white or RGBW entity class per group type.
            lights.append(LimitlessLEDGroup.factory(group))
    add_devices(lights)
def state(new_state):
    """Decorator wrapping a group state change in a command pipeline.

    Specify True (turn on) or False (turn off).
    """
    def decorator(function):
        """Wrap *function* so it contributes steps to a shared pipeline."""
        # pylint: disable=no-member,protected-access
        def wrapper(self, **kwargs):
            """Build, populate and enqueue the pipeline for this change."""
            from limitlessled.pipeline import Pipeline
            pipeline = Pipeline()
            transition_time = kwargs.get(ATTR_TRANSITION, DEFAULT_TRANSITION)
            # Stop any repeating pipeline (e.g. a running color loop) first.
            if self.repeating:
                self.repeating = False
                self.group.stop()
            # Power the group on when it should be on but is not yet.
            if new_state is True and not self.is_on:
                pipeline.on()
            # Let the wrapped method add group-type-specific steps.
            function(self, transition_time, pipeline, **kwargs)
            # Record the new state and hand the pipeline to the group.
            self._is_on = new_state
            self.group.enqueue(pipeline)
            self.update_ha_state()
        return wrapper
    return decorator
class LimitlessLEDGroup(Light):
    """A single LimitlessLED group exposed as a Home Assistant light."""

    def __init__(self, group):
        """Initialize a group."""
        self.group = group
        self.repeating = False
        self._is_on = False
        self._brightness = None

    @staticmethod
    def factory(group):
        """Return the entity subclass matching the group's bulb type."""
        from limitlessled.group.rgbw import RgbwGroup
        from limitlessled.group.white import WhiteGroup
        if isinstance(group, WhiteGroup):
            return LimitlessLEDWhiteGroup(group)
        if isinstance(group, RgbwGroup):
            return LimitlessLEDRGBWGroup(group)

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def name(self):
        """Return the name of the group."""
        return self.group.name

    @property
    def is_on(self):
        """Return true if device is on."""
        return self._is_on

    @property
    def brightness(self):
        """Return the brightness property."""
        return self._brightness

    @state(False)
    def turn_off(self, transition_time, pipeline, **kwargs):
        """Fade the group to zero brightness and switch it off."""
        if not self.is_on:
            return
        pipeline.transition(transition_time, brightness=0.0).off()
class LimitlessLEDWhiteGroup(LimitlessLEDGroup):
    """Representation of a LimitlessLED White group."""

    def __init__(self, group):
        """Initialize White group."""
        super().__init__(group)
        # Drive the bulb into a known state so the cached values match it.
        # NOTE: the attribute assignments below have device side effects;
        # the order is significant.
        self.group.on = True
        self.group.temperature = 1.0
        self.group.brightness = 0.0
        self._brightness = _to_hass_brightness(1.0)
        self._temperature = _to_hass_temperature(self.group.temperature)
        self.group.on = False

    @property
    def color_temp(self):
        """Return the temperature property."""
        return self._temperature

    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_LIMITLESSLED_WHITE

    @state(True)
    def turn_on(self, transition_time, pipeline, **kwargs):
        """Turn on (or adjust property of) a group."""
        # Keep the cached values unless the service call overrides them.
        self._brightness = kwargs.get(ATTR_BRIGHTNESS, self._brightness)
        self._temperature = kwargs.get(ATTR_COLOR_TEMP, self._temperature)
        pipeline.transition(
            transition_time,
            brightness=_from_hass_brightness(self._brightness),
            temperature=_from_hass_temperature(self._temperature)
        )
class LimitlessLEDRGBWGroup(LimitlessLEDGroup):
    """Representation of a LimitlessLED RGBW group."""

    def __init__(self, group):
        """Initialize RGBW group."""
        super().__init__(group)
        # Drive the bulb into a known state so the cached values match it.
        # NOTE: the attribute assignments below have device side effects;
        # the order is significant.
        self.group.on = True
        self.group.white()
        self._color = WHITE
        self.group.brightness = 0.0
        self._brightness = _to_hass_brightness(1.0)
        self.group.on = False

    @property
    def rgb_color(self):
        """Return the color property."""
        return self._color

    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_LIMITLESSLED_RGB

    @state(True)
    def turn_on(self, transition_time, pipeline, **kwargs):
        """Turn on (or adjust property of) a group."""
        from limitlessled.presets import COLORLOOP
        # Keep the cached values unless the service call overrides them.
        self._brightness = kwargs.get(ATTR_BRIGHTNESS, self._brightness)
        self._color = kwargs.get(ATTR_RGB_COLOR, self._color)
        # Near-white colors are treated as true white (special bulb mode).
        if min(self._color) > 256 - RGB_BOUNDARY:
            pipeline.white()
            self._color = WHITE
        # Set up transition.
        pipeline.transition(
            transition_time,
            brightness=_from_hass_brightness(self._brightness),
            color=_from_hass_color(self._color)
        )
        # Flash.
        if ATTR_FLASH in kwargs:
            duration = 1 if kwargs[ATTR_FLASH] == FLASH_LONG else 0
            pipeline.flash(duration=duration)
        # Add effects.
        if ATTR_EFFECT in kwargs:
            effect = kwargs[ATTR_EFFECT]
            if effect == EFFECT_COLORLOOP:
                self.repeating = True
                pipeline.append(COLORLOOP)
            if effect == EFFECT_WHITE:
                pipeline.white()
                self._color = WHITE
def _from_hass_temperature(temperature):
"""Convert Home Assistant color temperature units to percentage."""
return (temperature - 154) / 346
def _to_hass_temperature(temperature):
"""Convert percentage to Home Assistant color temperature units."""
return int(temperature * 346) + 154
def _from_hass_brightness(brightness):
"""Convert Home Assistant brightness units to percentage."""
return brightness / 255
def _to_hass_brightness(brightness):
"""Convert percentage to Home Assistant brightness units."""
return int(brightness * 255)
def _from_hass_color(color):
    """Convert Home Assistant RGB list to Color tuple."""
    from limitlessled import Color
    # Color(*color) unpacks any sequence directly; the intermediate
    # tuple() conversion in the original was redundant.
    return Color(*color)
def _to_hass_color(color):
"""Convert from Color tuple to Home Assistant RGB list."""
return list([int(c) for c in color])
|
srcLurker/home-assistant
|
homeassistant/components/light/limitlessled.py
|
Python
|
mit
| 10,472
|
# Demonstrations of folding a sequence with reduce()-style functions.
# Python 2 script (print statements).
# NOTE(review): myreduce() is not defined or imported in this file --
# presumably it lives elsewhere in the project; confirm before running.

# EXAMPLE 1: custom myreduce() with product and quotient lambdas.
# ==============================================================================
print myreduce((lambda x, y: x * y), [1, 2, 3, 4])
print myreduce((lambda x, y: x / y), [1, 2, 3, 4])
# EXAMPLE 2: built-in reduce() summing a list.
# ==============================================================================
reduce(lambda x, y: x + y, [47, 11, 42, 13])
# EXAMPLE 3: reduce() with a max-of-two lambda to find the largest element.
# ==============================================================================
f = lambda a, b: a if (a > b) else b
reduce(f, [47, 11, 42, 102, 13])
# EXAMPLE 4: summing 1..100 with reduce().
# ==============================================================================
reduce(lambda x, y: x + y, range(1, 101))
# EXAMPLE 5: functools.reduce() concatenating strings.
# ==============================================================================
import functools
L = ['Testing ', 'shows ', 'the ', 'presence', ', ','not ', 'the ', 'absence ', 'of ', 'bugs']
print functools.reduce((lambda x,y: x + y), L)
print
# EXAMPLE 6: Simulating 'reduce' with join.
# ==============================================================================
print ''.join(L)
print
# EXAMPLE 7: Simulating 'reduce' with operator.
# ==============================================================================
import functools, operator
print functools.reduce(operator.add, L)
print
|
rolandovillca/python_introduction_basic
|
collections/reducer.py
|
Python
|
mit
| 1,256
|
from asposewords import Settings
from com.aspose.words import Document
from com.aspose.words import SaveFormat
from java.io import ByteArrayOutputStream
from java.io import FileInputStream
from java.io import FileOutputStream
class LoadAndSaveToStream:
    """Jython/Aspose.Words demo: load a .doc from a Java input stream and
    save it as RTF through a byte-array output stream.

    The whole demo runs from __init__, so constructing the class executes it.
    """
    def __init__(self):
        """Load Document.doc, convert to RTF and write 'Document Out.rtf'."""
        dataDir = Settings.dataDir + 'quickstart/'
        # Open the stream. Read only access is enough for Aspose.Words to load a document.
        stream = FileInputStream(dataDir + 'Document.doc')
        # Load the entire document into memory.
        doc = Document(stream)
        # You can close the stream now, it is no longer needed because the document is in memory.
        stream.close()
        # ... do something with the document
        # Convert the document to a different format and save to stream.
        dstStream = ByteArrayOutputStream()
        doc.save(dstStream, SaveFormat.RTF)
        # Flush the in-memory bytes out to a real file.
        output = FileOutputStream(dataDir + "Document Out.rtf")
        output.write(dstStream.toByteArray())
        output.close()
        print "Document loaded from stream and then saved successfully."
# Script entry point: constructing the class runs the whole demo
# (all work happens in __init__).
if __name__ == '__main__':
    LoadAndSaveToStream()
|
asposewords/Aspose_Words_Java
|
Plugins/Aspose_Words_Java_for_Jython/asposewords/quickstart/LoadAndSaveToStream.py
|
Python
|
mit
| 1,194
|