| repo_name | ref | path | copies | content |
|---|---|---|---|---|
kerr-huang/SL4A
|
refs/heads/master
|
python/src/Demo/zlib/zlibdemo.py
|
43
|
#!/usr/bin/env python
# Takes an optional filename, defaulting to this file itself.
# Reads the file and compresses the content using level 1 and level 9
# compression, printing a summary of the results.
import zlib, sys
def main():
if len(sys.argv) > 1:
filename = sys.argv[1]
else:
filename = sys.argv[0]
print 'Reading', filename
f = open(filename, 'rb') # Get the data to compress
s = f.read()
f.close()
# First, we'll compress the string in one step
comptext = zlib.compress(s, 1)
decomp = zlib.decompress(comptext)
print '1-step compression: (level 1)'
print ' Original:', len(s), 'Compressed:', len(comptext),
print 'Uncompressed:', len(decomp)
# Now, let's compress the string in stages; set chunk to work in smaller steps
chunk = 256
compressor = zlib.compressobj(9)
decompressor = zlib.decompressobj()
comptext = decomp = ''
for i in range(0, len(s), chunk):
comptext = comptext+compressor.compress(s[i:i+chunk])
# Don't forget to call flush()!!
comptext = comptext + compressor.flush()
for i in range(0, len(comptext), chunk):
decomp = decomp + decompressor.decompress(comptext[i:i+chunk])
decomp=decomp+decompressor.flush()
print 'Progressive compression (level 9):'
print ' Original:', len(s), 'Compressed:', len(comptext),
print 'Uncompressed:', len(decomp)
if __name__ == '__main__':
main()
|
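The demo above is Python 2 (print statements, str-based buffers). As a minimal sketch of the same one-shot vs. incremental comparison for Python 3, assuming only the standard-library zlib module and bytes throughout:

```python
import zlib

data = b"The quick brown fox jumps over the lazy dog. " * 200

# One-shot compression at level 1.
one_shot = zlib.compress(data, 1)
assert zlib.decompress(one_shot) == data

# Incremental compression at level 9, feeding 256-byte chunks.
compressor = zlib.compressobj(9)
chunks = []
for i in range(0, len(data), 256):
    chunks.append(compressor.compress(data[i:i + 256]))
chunks.append(compressor.flush())  # flush() emits the remaining buffered output
incremental = b"".join(chunks)

decompressor = zlib.decompressobj()
restored = decompressor.decompress(incremental) + decompressor.flush()
assert restored == data

print("original:", len(data), "level 1:", len(one_shot), "level 9:", len(incremental))
```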
kaffeel/oppia
|
refs/heads/develop
|
extensions/interactions/CodeRepl/CodeRepl.py
|
4
|
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from extensions.interactions import base
class CodeRepl(base.BaseInteraction):
"""Interaction that allows programs to be input."""
name = 'Code Editor'
description = 'Allows learners to enter code and get it evaluated.'
display_mode = base.DISPLAY_MODE_SUPPLEMENTAL
is_trainable = True
_dependency_ids = ['jsrepl', 'codemirror']
answer_type = 'CodeEvaluation'
# Language options 'lua' and 'scheme' have been removed for possible
# later re-release.
_customization_arg_specs = [{
'name': 'language',
'description': 'Programming language',
'schema': {
'type': 'unicode',
'choices': [
'coffeescript', 'javascript', 'python', 'ruby',
]
},
'default_value': 'python'
}, {
'name': 'placeholder',
'description': 'Initial code displayed',
'schema': {
'type': 'unicode',
'ui_config': {
'coding_mode': 'none',
},
},
'default_value': '# Type your code here.'
}, {
'name': 'preCode',
'description': 'Code to prepend to the learner\'s submission',
'schema': {
'type': 'unicode',
'ui_config': {
'coding_mode': 'none',
},
},
'default_value': ''
}, {
'name': 'postCode',
'description': 'Code to append after the learner\'s submission',
'schema': {
'type': 'unicode',
'ui_config': {
'coding_mode': 'none',
},
},
'default_value': ''
}]
|
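The `_customization_arg_specs` list above is plain data, so the defaults and the allowed language choices can be read back with ordinary dict access. A minimal standalone sketch (not Oppia's own helper code) that collects the default value per argument and validates a language choice against the schema's `choices`:

```python
# Hypothetical standalone helpers; Oppia's real validation lives elsewhere.
SPECS = [
    {'name': 'language',
     'schema': {'type': 'unicode',
                'choices': ['coffeescript', 'javascript', 'python', 'ruby']},
     'default_value': 'python'},
    {'name': 'placeholder',
     'schema': {'type': 'unicode'},
     'default_value': '# Type your code here.'},
]

def default_customization_args(specs):
    """Map each customization arg name to its default value."""
    return {spec['name']: spec['default_value'] for spec in specs}

def is_valid_choice(specs, name, value):
    """True if value is allowed for the named arg (or the schema lists no choices)."""
    for spec in specs:
        if spec['name'] == name:
            return value in spec['schema'].get('choices', [value])
    return False

print(default_customization_args(SPECS))
print(is_valid_choice(SPECS, 'language', 'python'))  # True
print(is_valid_choice(SPECS, 'language', 'lua'))     # False
```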
carlvlewis/bokeh
|
refs/heads/master
|
bokeh/charts/builder/tests/test_bar_builder.py
|
33
|
""" This is the Bokeh charts testing interface.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2014, Continuum Analytics, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import absolute_import
from collections import OrderedDict
import unittest
import numpy as np
import pandas as pd
from bokeh.charts import Bar
from bokeh.charts.builder.tests._utils import create_chart
from bokeh.models import Range1d, FactorRange
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
class TestBar(unittest.TestCase):
def test_supported_input(self):
xyvalues = OrderedDict()
xyvalues['python'] = [2, 5]
xyvalues['pypy'] = [12, 40]
xyvalues['jython'] = [22, 30]
for i, _xy in enumerate([xyvalues,
dict(xyvalues),
pd.DataFrame(xyvalues)]):
bar = create_chart(Bar, _xy)
builder = bar._builders[0]
np.testing.assert_array_equal(builder._data['pypy'], np.array(xyvalues['pypy']))
np.testing.assert_array_equal(builder._data['python'], np.array(xyvalues['python']))
np.testing.assert_array_equal(builder._data['jython'], np.array(xyvalues['jython']))
# mid values should always be y/2
np.testing.assert_array_equal(builder._data['midpython'], np.array([1, 2.5]))
np.testing.assert_array_equal(builder._data['midpypy'], np.array([6, 20]))
np.testing.assert_array_equal(builder._data['midjython'], np.array([11, 15]))
# stacked values should be 0 as base and + y/2 of the column
# skipping plain dict case as stacked values randomly fails due to
# dictionary unordered nature
if i != 1:
np.testing.assert_array_equal(builder._data['stackedpython'], np.array([1, 2.5]))
np.testing.assert_array_equal(builder._data['stackedpypy'], np.array([8, 25]))
np.testing.assert_array_equal(builder._data['stackedjython'], np.array([25, 60]))
np.testing.assert_array_equal(builder._data['cat'], np.array(['0', '1']))
np.testing.assert_array_equal(builder._data['width'], np.array([0.8, 0.8]))
np.testing.assert_array_equal(builder._data['width_cat'], np.array([0.2, 0.2]))
lvalues = [[2, 5], [12, 40], [22, 30]]
for i, _xy in enumerate([lvalues, np.array(lvalues)]):
bar = create_chart(Bar, _xy)
builder = bar._builders[0]
np.testing.assert_array_equal(builder._data['0'], np.array(lvalues[0]))
np.testing.assert_array_equal(builder._data['1'], np.array(lvalues[1]))
np.testing.assert_array_equal(builder._data['2'], np.array(lvalues[2]))
# mid values should always be y/2
np.testing.assert_array_equal(builder._data['mid0'], np.array([1, 2.5]))
np.testing.assert_array_equal(builder._data['mid1'], np.array([6, 20]))
np.testing.assert_array_equal(builder._data['mid2'], np.array([11, 15]))
# stacked values should be 0 as base and + y/2 of the column
np.testing.assert_array_equal(builder._data['stacked0'], np.array([1, 2.5]))
np.testing.assert_array_equal(builder._data['stacked1'], np.array([8, 25]))
np.testing.assert_array_equal(builder._data['stacked2'], np.array([25, 60]))
np.testing.assert_array_equal(builder._data['cat'], np.array(['0', '1']))
np.testing.assert_array_equal(builder._data['width'], np.array([0.8, 0.8]))
np.testing.assert_array_equal(builder._data['width_cat'], np.array([0.2, 0.2]))
def test_all_positive_input(self):
source = OrderedDict()
source['percent change 1'] = [1, 13]
source['percent change 2'] = [12, 40]
bar_chart = create_chart(Bar, source)
self.assertEqual(bar_chart._builders[0].y_range.start, 0)
self.assertEqual(bar_chart._builders[0].y_range.end, 40 * 1.1)
def test_all_negative_input(self):
source = OrderedDict()
source['percent change 1'] = [-1, -13]
source['percent change 2'] = [-12, -40]
bar_chart = create_chart(Bar, source)
# We want the start to be negative, so that data points downwards
self.assertEqual(bar_chart._builders[0].y_range.start, -40 * 1.1)
self.assertEqual(bar_chart._builders[0].y_range.end, 0)
def test_mixed_sign_input(self):
source = OrderedDict()
source['percent change 1'] = [-1, -13]
source['percent change 2'] = [12, 40]
bar_chart = create_chart(Bar, source)
self.assertEqual(bar_chart._builders[0].y_range.start, -13 * 1.1)
self.assertEqual(bar_chart._builders[0].y_range.end, 40 * 1.1)
def test_set_custom_continuous_range(self):
# Users can specify their own y_range for cases where the
# default guess is not what's desired.
source = OrderedDict()
source['percent change 1'] = [25, -13]
source['percent change 2'] = [-12, -40]
custom_y_range = Range1d(50, -50)
bar_chart = create_chart(Bar, source, continuous_range=custom_y_range)
self.assertEqual(bar_chart._builders[0].y_range, custom_y_range)
def test_invalid_continuous_range_raises_error(self):
source = OrderedDict({'p': [0, 1]})
bad_y_range = range(0, 50) # Not a Range object
with self.assertRaises(ValueError):
create_chart(Bar, source, continuous_range=bad_y_range)
def test_non_range1d_continuous_range_raises_value_error(self):
source = OrderedDict({'p': [0, 1]})
non_1d_range = FactorRange(factors=['a', 'b'])
with self.assertRaises(ValueError):
create_chart(Bar, source, continuous_range=non_1d_range)
|
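The assertions above encode two small pieces of arithmetic: each `mid*` column is y/2, and each `stacked*` column is the running total of the previous categories plus y/2 of the current one. A minimal sketch of that arithmetic, independent of Bokeh, using the same example data:

```python
# Reproduce the expected mid/stacked values from the test data by hand.
values = {'python': [2, 5], 'pypy': [12, 40], 'jython': [22, 30]}

mid = {k: [v / 2 for v in vs] for k, vs in values.items()}

stacked = {}
running = [0, 0]
for name in ['python', 'pypy', 'jython']:       # category order matters
    stacked[name] = [base + v / 2 for base, v in zip(running, values[name])]
    running = [base + v for base, v in zip(running, values[name])]

print(mid['pypy'])        # [6.0, 20.0]
print(stacked['pypy'])    # [8.0, 25.0]   == python + pypy/2
print(stacked['jython'])  # [25.0, 60.0]  == python + pypy + jython/2
```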
eusi/MissionPlanerHM
|
refs/heads/master
|
Lib/site-packages/numpy/matrixlib/tests/test_multiarray.py
|
54
|
import numpy as np
from numpy.testing import *
class TestView(TestCase):
def test_type(self):
x = np.array([1,2,3])
assert(isinstance(x.view(np.matrix),np.matrix))
def test_keywords(self):
x = np.array([(1,2)],dtype=[('a',np.int8),('b',np.int8)])
# We must be specific about the endianness here:
y = x.view(dtype='<i2', type=np.matrix)
assert_array_equal(y,[[513]])
assert(isinstance(y,np.matrix))
assert_equal(y.dtype, np.dtype('<i2'))
if __name__ == "__main__":
run_module_suite()
|
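The `assert_array_equal(y, [[513]])` above follows directly from the byte layout: two adjacent int8 fields holding 1 and 2, reinterpreted as one little-endian int16, give 1 + 2*256 = 513. A small sketch of that reinterpretation with plain ndarrays (no `np.matrix`), which pins the endianness with the dtype string:

```python
import numpy as np

# Two adjacent int8 fields: a=1 (0x01) and b=2 (0x02).
x = np.array([(1, 2)], dtype=[('a', np.int8), ('b', np.int8)])

# Reinterpret the same two bytes as one little-endian int16.
y = x.view(dtype='<i2')
print(y)  # [513], because 0x0201 == 1 + 2 * 256
# The test above gets [[513]] rather than [513] only because type=np.matrix forces 2-D.
```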
andmos/ansible
|
refs/heads/devel
|
test/units/modules/network/dellos9/test_dellos9_facts.py
|
56
|
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from units.compat.mock import patch
from units.modules.utils import set_module_args
from .dellos9_module import TestDellos9Module, load_fixture
from ansible.modules.network.dellos9 import dellos9_facts
class TestDellos9Facts(TestDellos9Module):
module = dellos9_facts
def setUp(self):
super(TestDellos9Facts, self).setUp()
self.mock_run_command = patch(
'ansible.modules.network.dellos9.dellos9_facts.run_commands')
self.run_command = self.mock_run_command.start()
def tearDown(self):
super(TestDellos9Facts, self).tearDown()
self.mock_run_command.stop()
def load_fixtures(self, commands=None):
def load_from_file(*args, **kwargs):
module, commands = args
output = list()
for item in commands:
try:
obj = json.loads(item)
command = obj['command']
except ValueError:
command = item
if '|' in command:
command = str(command).replace('|', '')
filename = str(command).replace(' ', '_')
filename = filename.replace('/', '7')
output.append(load_fixture(filename))
return output
self.run_command.side_effect = load_from_file
def test_dellos9_facts_gather_subset_default(self):
set_module_args(dict())
result = self.execute_module()
ansible_facts = result['ansible_facts']
self.assertIn('hardware', ansible_facts['ansible_net_gather_subset'])
self.assertIn('default', ansible_facts['ansible_net_gather_subset'])
self.assertIn('interfaces', ansible_facts['ansible_net_gather_subset'])
self.assertEquals('dellos9_sw1', ansible_facts['ansible_net_hostname'])
self.assertIn('fortyGigE 0/24', ansible_facts['ansible_net_interfaces'].keys())
self.assertEquals(3128820, ansible_facts['ansible_net_memtotal_mb'])
self.assertEquals(3125722, ansible_facts['ansible_net_memfree_mb'])
def test_dellos9_facts_gather_subset_config(self):
set_module_args({'gather_subset': 'config'})
result = self.execute_module()
ansible_facts = result['ansible_facts']
self.assertIn('default', ansible_facts['ansible_net_gather_subset'])
self.assertIn('config', ansible_facts['ansible_net_gather_subset'])
self.assertEquals('dellos9_sw1', ansible_facts['ansible_net_hostname'])
self.assertIn('ansible_net_config', ansible_facts)
def test_dellos9_facts_gather_subset_hardware(self):
set_module_args({'gather_subset': 'hardware'})
result = self.execute_module()
ansible_facts = result['ansible_facts']
self.assertIn('default', ansible_facts['ansible_net_gather_subset'])
self.assertIn('hardware', ansible_facts['ansible_net_gather_subset'])
self.assertEquals(['flash', 'fcmfs', 'nfsmount', 'ftp', 'tftp', 'scp', 'http', 'https'], ansible_facts['ansible_net_filesystems'])
self.assertEquals(3128820, ansible_facts['ansible_net_memtotal_mb'])
self.assertEquals(3125722, ansible_facts['ansible_net_memfree_mb'])
def test_dellos9_facts_gather_subset_interfaces(self):
set_module_args({'gather_subset': 'interfaces'})
result = self.execute_module()
ansible_facts = result['ansible_facts']
self.assertIn('default', ansible_facts['ansible_net_gather_subset'])
self.assertIn('interfaces', ansible_facts['ansible_net_gather_subset'])
self.assertIn('fortyGigE 0/24', ansible_facts['ansible_net_interfaces'].keys())
self.assertEquals(['Ma 0/0'], list(ansible_facts['ansible_net_neighbors'].keys()))
self.assertIn('ansible_net_interfaces', ansible_facts)
|
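The fixture loader above derives a file name from each CLI command by dropping pipe characters and replacing spaces and slashes. A standalone sketch of just that mapping, which makes it easy to see which fixture file a given command resolves to:

```python
def command_to_fixture_name(command):
    """Mirror the filename mangling used by load_from_file above."""
    if '|' in command:
        command = command.replace('|', '')
    name = command.replace(' ', '_')
    return name.replace('/', '7')

print(command_to_fixture_name('show version'))              # show_version
print(command_to_fixture_name('show interfaces | grep x'))  # show_interfaces__grep_x
```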
CiscoSystems/tempest
|
refs/heads/master
|
tempest/api/image/__init__.py
|
12133432
| |
ChainsAutomation/chains
|
refs/heads/master
|
lib/chains/services/phidgetskit/__init__.py
|
1
|
from __future__ import absolute_import
import time
from Phidgets.PhidgetException import *
from Phidgets.Events.Events import *
from Phidgets.Services.InterfaceKit import *
from chains.service import Service
from chains.common import log
def bint(arg):
if type(arg) == type(True):
if arg: return 1
else: return 0
elif type(arg) == type('') or type(arg) == type(u''):
if arg == '1' or arg == 'True': return 1
else: return 0
elif type(arg) == type(1):
if arg > 0: return 1
else: return 0
else:
log.info('Unknown bint arg converted to 0: %s (%s)' % (arg, type(arg)))
return 0
class PhidgetsKitService(Service):
pmethods = [
'getServiceLabel', 'getServiceName', 'getServiceType',
'getServiceVersion', 'getInputCount', 'getInputState',
'getLibraryVersion' , 'getOutputCount', 'getOutputState',
'getRatiometric', 'getSensorChangeTrigger', 'getSensorCount',
'getSensorRawValue', 'getSensorValue', 'getSerialNum',
'isAttached', 'setSensorChangeTrigger',
'setRadiometric',
]
# ==== Implementation of Service base ====
def onInit(self):
self.dev = InterfaceKit()
self.setEventHandlers()
def setThresholds(self):
for k in [0,1,2,3,4,5,6,7]:
val = None
try:
val = self.config.get('trigger%s' % k)
except:
pass
if not val:
continue
self.dev.setSensorChangeTrigger(k, int(val))
def onStart(self):
try:
serial = self.config.get('serial')
if serial:
serial = int(serial)
except KeyError:
pass
if not serial:
serial = -1
try:
log.info("Open Phidget with serial: %s" % serial)
self.dev.openPhidget(serial)
# self.dev.openPhidget()
log.info("Waiting for Phidget to be attached...")
self.dev.waitForAttach(100000)
self.phidgetsId = self.dev.getSerialNum()
log.info("Phidget with serial: %s attached" % self.phidgetsId)
self.diginput = self.dev.getInputCount()
log.info("Phidget %s has %s digital inputs" % (self.phidgetsId, self.diginput))
self.digoutput = self.dev.getOutputCount()
log.info("Phidget %s has %s digital outputs" % (self.phidgetsId, self.digoutput))
self.analog = self.dev.getSensorCount()
log.info("Phidget %s has %s analog inputs" % (self.phidgetsId, self.analog))
self.ifname = self.dev.getServiceName()
log.info("Phidget %s has name: %s" % (self.phidgetsId, self.ifname))
self.setThresholds()
# make sure dev is closed again if error
except PhidgetException as e:
self.close()
self.phidgetsId = None
# but still let the exception continue down the stack
# (and set code|message that are not seen with tostring for PE)
raise Exception("PhidgetsException, code: %s, msg: %s" % (e.code, e.message))
def onShutdown(self):
#time.sleep(0.1) # hack for not responding when setting outputs just before close
self.dev.closePhidget()
def runAction(self, cmd, args):
if cmd == 'setOutput':
# arg 1 is f.ex. o2, we only want 2, as an int, not a string
args[0] = int(args[0][1:])
# arg 2 is 0 or 1, as int. but be forgiving (bint)
if len(args) < 2:
args.append(0)
args[1] = bint(args[1])
#log.info("setOutput ARGS: %s" % (args,))
res = self.dev.setOutputState(args[0], args[1])
log.debug("ifkit.setOutputState: %s, %s = %s" % (args[0], args[1], res))
else:
if cmd in self.pmethods:
args2 = []
for a in args:
try: a = int(a)
except: pass
args2.append(a)
fun = getattr(self.dev, cmd)
if fun: return fun(*args2)
raise Exception('Unknown command: %s' % cmd)
def isOpen(self):
return self.dev.isAttached()
def onDescribe(self):
allinputs = ['i0','i1','i2','i3','i4','i5','i6','i7','i8','i9','i10','i11','i12','i13','i14','i15']
allsensors = ['s0','s1','s2','s3','s4','s5','s6','s7','s8','s9','s10','s11','s12','s13','s14','s15']
alloutputs = ['o0','o1','o2','o3','o4','o5','o6','o7','o8','o9','o10','o11','o12','o13','o14','o15']
inputs = allinputs[:self.diginput]
sensors = allsensors[:self.analog]
outputs = alloutputs[:self.digoutput]
events = []
if sensors:
sensorevents = ('sensorChange', ('key','str',sensors,'Sensor port'), ('value','int') )
events.append(sensorevents)
if inputs:
inputevents = ('inputChange', ('key','str',inputs,'Input port'), ('value','bool') )
events.append(inputevents)
desc = {
'info': self.ifname,
#'info': self.dev.getServiceLabel() + ' : ' + self.getServiceType() + ' : ' + self.getServiceName(),
'commands': [
('setOutput', [('key','str',outputs,'Output port'), ('value','bool')], 'Set an input on/off')
],
'events': events,
'serial': self.phidgetsId,
'analog_sensors': self.analog,
'digital_inputs': self.diginput,
'digital_outputs': self.digoutput
}
for f in self.pmethods:
args = [] # todo
desc['commands'].append((f, args, f))
return desc
# ==== Event handlers for PhidgetsEvents ====
def onSensorChange(self, e):
self._onEvent('sensor', e)
return 0
def onInputChange(self, e):
self._onEvent('input', e)
return 0
def onOutputChange(self, e):
self._onEvent('output', e)
return 0
def onAttach(self, e):
log.debug("InterfaceKit attached: %i" % (e.service.getSerialNum()))
self.sendEvent('status', {'value': 'attached'})
self.setThresholds()
return 0
def onDetach(self, e):
log.debug("InterfaceKit attached: %i" % (e.service.getSerialNum()))
self.sendEvent('status', {'value': 'detached'})
return 0
def onError(self, e):
log.error("Phidget Error %s: %s" % (e.eCode, e.description))
return 0
def _onEvent(self, type, e):
pre = ''
if type == 'sensor':
pre = 's'
elif type == 'input':
pre = 'i'
elif type == 'output':
pre = 'o'
else:
raise Exception('Unknown type: %s' % type)
key = '%s%s' % (pre, e.index)
event = {}
# event = {
# 'key': '%s%s' % (pre,e.index)
# #'service': self.config['id'],
# }
if type == 'sensor':
event['value'] = e.value
sensorType = self.config.get('type%s' % e.index)
if sensorType:
event['value'] = self.parseValue(event['value'], sensorType)
event['type'] = sensorType
else:
event['value'] = e.state
self.sendEvent(key, event)
# ==== Helper functions =====
def setEventHandlers(self):
self.dev.setOnAttachHandler(self.onAttach)
self.dev.setOnDetachHandler(self.onDetach)
self.dev.setOnErrorhandler(self.onError)
self.dev.setOnInputChangeHandler(self.onInputChange)
self.dev.setOnOutputChangeHandler(self.onOutputChange)
self.dev.setOnSensorChangeHandler(self.onSensorChange)
def parseValue(self, value, type):
if value is None:
return None
# Temperature - Celcius
if type == 'temperature':
return ( (float(value)/1000) * 222.22 ) - 61.111
# Humidity - Relative humidity percent (RH%)
elif type == 'humidity':
return ( (float(value)/1000) * 190.6 ) - 40.2
# Magnet - Gauss
elif type == 'magnet':
return 500 - value
# DC Current (DC amps)
elif type == 'amp_dc':
return ( (float(value) / 1000) * 50 ) - 25
# AC Current (RMS amps)
elif type == 'amp_ac':
return (float(value) / 1000) * 27.75
# Sonar distance (cm)
elif type == 'sonar':
return float(value) * 1.296
elif type == 'ir5mm':
val = 0
if int(value) > 0:
val = 1
return val
|
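The `parseValue` conversions above map the Phidgets analog range (roughly 0-1000) onto physical units. A minimal standalone sketch of the temperature and humidity formulas, useful for sanity-checking a raw reading without a device attached (the example raw values are made up):

```python
def temperature_c(raw):
    """Phidgets temperature sensor: raw 0-1000 -> degrees Celsius."""
    return ((float(raw) / 1000) * 222.22) - 61.111

def humidity_rh(raw):
    """Phidgets humidity sensor: raw 0-1000 -> relative humidity (%)."""
    return ((float(raw) / 1000) * 190.6) - 40.2

print(round(temperature_c(400), 2))  # 27.78
print(round(humidity_rh(500), 2))    # 55.1
```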
t4ls/ShinyServer
|
refs/heads/master
|
ext/node/lib/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/sun_tool.py
|
314
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""These functions are executed via gyp-sun-tool when using the Makefile
generator."""
import fcntl
import os
import struct
import subprocess
import sys
def main(args):
executor = SunTool()
executor.Dispatch(args)
class SunTool(object):
"""This class performs all the SunOS tooling steps. The methods can either be
executed directly, or dispatched from an argument list."""
def Dispatch(self, args):
"""Dispatches a string command to a method."""
if len(args) < 1:
raise Exception("Not enough arguments")
method = "Exec%s" % self._CommandifyName(args[0])
getattr(self, method)(*args[1:])
def _CommandifyName(self, name_string):
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
return name_string.title().replace('-', '')
def ExecFlock(self, lockfile, *cmd_list):
"""Emulates the most basic behavior of Linux's flock(1)."""
# Rely on exception handling to report errors.
# Note that the stock python on SunOS has a bug
# where fcntl.flock(fd, LOCK_EX) always fails
# with EBADF, that's why we use this F_SETLK
# hack instead.
fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
fcntl.fcntl(fd, fcntl.F_SETLK, op)
return subprocess.call(cmd_list)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
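`Dispatch` above converts a hyphenated command name into a method name with `str.title()` plus a dash strip, so `flock` resolves to `ExecFlock`. A tiny sketch of that lookup on its own:

```python
def commandify(name):
    """copy-info-plist -> CopyInfoPlist, flock -> Flock."""
    return name.title().replace('-', '')

print('Exec%s' % commandify('flock'))            # ExecFlock
print('Exec%s' % commandify('copy-info-plist'))  # ExecCopyInfoPlist
```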
pepsi7959/ProtocolBuffer-c
|
refs/heads/master
|
gtest/test/gtest_filter_unittest.py
|
2826
|
#!/usr/bin/env python
#
# Copyright 2005 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test test filters.
A user can specify which test(s) in a Google Test program to run via either
the GTEST_FILTER environment variable or the --gtest_filter flag.
This script tests such functionality by invoking
gtest_filter_unittest_ (a program written with Google Test) with different
environments and command line flags.
Note that test sharding may also influence which tests are filtered. Therefore,
we test that here also.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import sets
import sys
import gtest_test_utils
# Constants.
# Checks if this platform can pass empty environment variables to child
# processes. We set an env variable to an empty string and invoke a python
# script in a subprocess to print whether the variable is STILL in
# os.environ. We then use 'eval' to parse the child's output so that an
# exception is thrown if the input is anything other than 'True' or 'False'.
os.environ['EMPTY_VAR'] = ''
child = gtest_test_utils.Subprocess(
[sys.executable, '-c', 'import os; print \'EMPTY_VAR\' in os.environ'])
CAN_PASS_EMPTY_ENV = eval(child.output)
# Check if this platform can unset environment variables in child processes.
# We set an env variable to a non-empty string, unset it, and invoke
# a python script in a subprocess to print whether the variable
# is NO LONGER in os.environ.
# We use 'eval' to parse the child's output so that an exception
# is thrown if the input is neither 'True' nor 'False'.
os.environ['UNSET_VAR'] = 'X'
del os.environ['UNSET_VAR']
child = gtest_test_utils.Subprocess(
[sys.executable, '-c', 'import os; print \'UNSET_VAR\' not in os.environ'])
CAN_UNSET_ENV = eval(child.output)
# Checks if we should test with an empty filter. This doesn't
# make sense on platforms that cannot pass empty env variables (Win32)
# and on platforms that cannot unset variables (since we cannot tell
# the difference between "" and NULL -- Borland and Solaris < 5.10)
CAN_TEST_EMPTY_FILTER = (CAN_PASS_EMPTY_ENV and CAN_UNSET_ENV)
# The environment variable for specifying the test filters.
FILTER_ENV_VAR = 'GTEST_FILTER'
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'
# The command line flag for specifying the test filters.
FILTER_FLAG = 'gtest_filter'
# The command line flag for including disabled tests.
ALSO_RUN_DISABED_TESTS_FLAG = 'gtest_also_run_disabled_tests'
# Command to run the gtest_filter_unittest_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_filter_unittest_')
# Regex for determining whether parameterized tests are enabled in the binary.
PARAM_TEST_REGEX = re.compile(r'/ParamTest')
# Regex for parsing test case names from Google Test's output.
TEST_CASE_REGEX = re.compile(r'^\[\-+\] \d+ tests? from (\w+(/\w+)?)')
# Regex for parsing test names from Google Test's output.
TEST_REGEX = re.compile(r'^\[\s*RUN\s*\].*\.(\w+(/\w+)?)')
# The command line flag to tell Google Test to output the list of tests it
# will run.
LIST_TESTS_FLAG = '--gtest_list_tests'
# Indicates whether Google Test supports death tests.
SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess(
[COMMAND, LIST_TESTS_FLAG]).output
# Full names of all tests in gtest_filter_unittests_.
PARAM_TESTS = [
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
'SeqQ/ParamTest.TestX/0',
'SeqQ/ParamTest.TestX/1',
'SeqQ/ParamTest.TestY/0',
'SeqQ/ParamTest.TestY/1',
]
DISABLED_TESTS = [
'BarTest.DISABLED_TestFour',
'BarTest.DISABLED_TestFive',
'BazTest.DISABLED_TestC',
'DISABLED_FoobarTest.Test1',
'DISABLED_FoobarTest.DISABLED_Test2',
'DISABLED_FoobarbazTest.TestA',
]
if SUPPORTS_DEATH_TESTS:
DEATH_TESTS = [
'HasDeathTest.Test1',
'HasDeathTest.Test2',
]
else:
DEATH_TESTS = []
# All the non-disabled tests.
ACTIVE_TESTS = [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
'BazTest.TestOne',
'BazTest.TestA',
'BazTest.TestB',
] + DEATH_TESTS + PARAM_TESTS
param_tests_present = None
# Utilities.
environ = os.environ.copy()
def SetEnvVar(env_var, value):
"""Sets the env variable to 'value'; unsets it when 'value' is None."""
if value is not None:
environ[env_var] = value
elif env_var in environ:
del environ[env_var]
def RunAndReturnOutput(args = None):
"""Runs the test program and returns its output."""
return gtest_test_utils.Subprocess([COMMAND] + (args or []),
env=environ).output
def RunAndExtractTestList(args = None):
"""Runs the test program and returns its exit code and a list of tests run."""
p = gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ)
tests_run = []
test_case = ''
test = ''
for line in p.output.split('\n'):
match = TEST_CASE_REGEX.match(line)
if match is not None:
test_case = match.group(1)
else:
match = TEST_REGEX.match(line)
if match is not None:
test = match.group(1)
tests_run.append(test_case + '.' + test)
return (tests_run, p.exit_code)
def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):
"""Runs the given function and arguments in a modified environment."""
try:
original_env = environ.copy()
environ.update(extra_env)
return function(*args, **kwargs)
finally:
environ.clear()
environ.update(original_env)
def RunWithSharding(total_shards, shard_index, command):
"""Runs a test program shard and returns exit code and a list of tests run."""
extra_env = {SHARD_INDEX_ENV_VAR: str(shard_index),
TOTAL_SHARDS_ENV_VAR: str(total_shards)}
return InvokeWithModifiedEnv(extra_env, RunAndExtractTestList, command)
# The unit test.
class GTestFilterUnitTest(gtest_test_utils.TestCase):
"""Tests the env variable or the command line flag to filter tests."""
# Utilities.
def AssertSetEqual(self, lhs, rhs):
"""Asserts that two sets are equal."""
for elem in lhs:
self.assert_(elem in rhs, '%s in %s' % (elem, rhs))
for elem in rhs:
self.assert_(elem in lhs, '%s in %s' % (elem, lhs))
def AssertPartitionIsValid(self, set_var, list_of_sets):
"""Asserts that list_of_sets is a valid partition of set_var."""
full_partition = []
for slice_var in list_of_sets:
full_partition.extend(slice_var)
self.assertEqual(len(set_var), len(full_partition))
self.assertEqual(sets.Set(set_var), sets.Set(full_partition))
def AdjustForParameterizedTests(self, tests_to_run):
"""Adjust tests_to_run in case value parameterized tests are disabled."""
global param_tests_present
if not param_tests_present:
return list(sets.Set(tests_to_run) - sets.Set(PARAM_TESTS))
else:
return tests_to_run
def RunAndVerify(self, gtest_filter, tests_to_run):
"""Checks that the binary runs correct set of tests for a given filter."""
tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
# First, tests using the environment variable.
# Windows removes empty variables from the environment when passing it
# to a new process. This means it is impossible to pass an empty filter
# into a process using the environment variable. However, we can still
# test the case when the variable is not supplied (i.e., gtest_filter is
# None).
# pylint: disable-msg=C6403
if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
SetEnvVar(FILTER_ENV_VAR, gtest_filter)
tests_run = RunAndExtractTestList()[0]
SetEnvVar(FILTER_ENV_VAR, None)
self.AssertSetEqual(tests_run, tests_to_run)
# pylint: enable-msg=C6403
# Next, tests using the command line flag.
if gtest_filter is None:
args = []
else:
args = ['--%s=%s' % (FILTER_FLAG, gtest_filter)]
tests_run = RunAndExtractTestList(args)[0]
self.AssertSetEqual(tests_run, tests_to_run)
def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run,
args=None, check_exit_0=False):
"""Checks that binary runs correct tests for the given filter and shard.
Runs all shards of gtest_filter_unittest_ with the given filter, and
verifies that the right set of tests were run. The union of tests run
on each shard should be identical to tests_to_run, without duplicates.
Args:
gtest_filter: A filter to apply to the tests.
total_shards: A total number of shards to split test run into.
tests_to_run: A set of tests expected to run.
args : Arguments to pass to the test binary.
check_exit_0: When set to a true value, make sure that all shards
return 0.
"""
tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
# Windows removes empty variables from the environment when passing it
# to a new process. This means it is impossible to pass an empty filter
# into a process using the environment variable. However, we can still
# test the case when the variable is not supplied (i.e., gtest_filter is
# None).
# pylint: disable-msg=C6403
if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
SetEnvVar(FILTER_ENV_VAR, gtest_filter)
partition = []
for i in range(0, total_shards):
(tests_run, exit_code) = RunWithSharding(total_shards, i, args)
if check_exit_0:
self.assertEqual(0, exit_code)
partition.append(tests_run)
self.AssertPartitionIsValid(tests_to_run, partition)
SetEnvVar(FILTER_ENV_VAR, None)
# pylint: enable-msg=C6403
def RunAndVerifyAllowingDisabled(self, gtest_filter, tests_to_run):
"""Checks that the binary runs correct set of tests for the given filter.
Runs gtest_filter_unittest_ with the given filter, and enables
disabled tests. Verifies that the right set of tests were run.
Args:
gtest_filter: A filter to apply to the tests.
tests_to_run: A set of tests expected to run.
"""
tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
# Construct the command line.
args = ['--%s' % ALSO_RUN_DISABED_TESTS_FLAG]
if gtest_filter is not None:
args.append('--%s=%s' % (FILTER_FLAG, gtest_filter))
tests_run = RunAndExtractTestList(args)[0]
self.AssertSetEqual(tests_run, tests_to_run)
def setUp(self):
"""Sets up test case.
Determines whether value-parameterized tests are enabled in the binary and
sets the flags accordingly.
"""
global param_tests_present
if param_tests_present is None:
param_tests_present = PARAM_TEST_REGEX.search(
RunAndReturnOutput()) is not None
def testDefaultBehavior(self):
"""Tests the behavior of not specifying the filter."""
self.RunAndVerify(None, ACTIVE_TESTS)
def testDefaultBehaviorWithShards(self):
"""Tests the behavior without the filter, with sharding enabled."""
self.RunAndVerifyWithSharding(None, 1, ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, 2, ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) - 1, ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS), ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) + 1, ACTIVE_TESTS)
def testEmptyFilter(self):
"""Tests an empty filter."""
self.RunAndVerify('', [])
self.RunAndVerifyWithSharding('', 1, [])
self.RunAndVerifyWithSharding('', 2, [])
def testBadFilter(self):
"""Tests a filter that matches nothing."""
self.RunAndVerify('BadFilter', [])
self.RunAndVerifyAllowingDisabled('BadFilter', [])
def testFullName(self):
"""Tests filtering by full name."""
self.RunAndVerify('FooTest.Xyz', ['FooTest.Xyz'])
self.RunAndVerifyAllowingDisabled('FooTest.Xyz', ['FooTest.Xyz'])
self.RunAndVerifyWithSharding('FooTest.Xyz', 5, ['FooTest.Xyz'])
def testUniversalFilters(self):
"""Tests filters that match everything."""
self.RunAndVerify('*', ACTIVE_TESTS)
self.RunAndVerify('*.*', ACTIVE_TESTS)
self.RunAndVerifyWithSharding('*.*', len(ACTIVE_TESTS) - 3, ACTIVE_TESTS)
self.RunAndVerifyAllowingDisabled('*', ACTIVE_TESTS + DISABLED_TESTS)
self.RunAndVerifyAllowingDisabled('*.*', ACTIVE_TESTS + DISABLED_TESTS)
def testFilterByTestCase(self):
"""Tests filtering by test case name."""
self.RunAndVerify('FooTest.*', ['FooTest.Abc', 'FooTest.Xyz'])
BAZ_TESTS = ['BazTest.TestOne', 'BazTest.TestA', 'BazTest.TestB']
self.RunAndVerify('BazTest.*', BAZ_TESTS)
self.RunAndVerifyAllowingDisabled('BazTest.*',
BAZ_TESTS + ['BazTest.DISABLED_TestC'])
def testFilterByTest(self):
"""Tests filtering by test name."""
self.RunAndVerify('*.TestOne', ['BarTest.TestOne', 'BazTest.TestOne'])
def testFilterDisabledTests(self):
"""Select only the disabled tests to run."""
self.RunAndVerify('DISABLED_FoobarTest.Test1', [])
self.RunAndVerifyAllowingDisabled('DISABLED_FoobarTest.Test1',
['DISABLED_FoobarTest.Test1'])
self.RunAndVerify('*DISABLED_*', [])
self.RunAndVerifyAllowingDisabled('*DISABLED_*', DISABLED_TESTS)
self.RunAndVerify('*.DISABLED_*', [])
self.RunAndVerifyAllowingDisabled('*.DISABLED_*', [
'BarTest.DISABLED_TestFour',
'BarTest.DISABLED_TestFive',
'BazTest.DISABLED_TestC',
'DISABLED_FoobarTest.DISABLED_Test2',
])
self.RunAndVerify('DISABLED_*', [])
self.RunAndVerifyAllowingDisabled('DISABLED_*', [
'DISABLED_FoobarTest.Test1',
'DISABLED_FoobarTest.DISABLED_Test2',
'DISABLED_FoobarbazTest.TestA',
])
def testWildcardInTestCaseName(self):
"""Tests using wildcard in the test case name."""
self.RunAndVerify('*a*.*', [
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
'BazTest.TestOne',
'BazTest.TestA',
'BazTest.TestB', ] + DEATH_TESTS + PARAM_TESTS)
def testWildcardInTestName(self):
"""Tests using wildcard in the test name."""
self.RunAndVerify('*.*A*', ['FooTest.Abc', 'BazTest.TestA'])
def testFilterWithoutDot(self):
"""Tests a filter that has no '.' in it."""
self.RunAndVerify('*z*', [
'FooTest.Xyz',
'BazTest.TestOne',
'BazTest.TestA',
'BazTest.TestB',
])
def testTwoPatterns(self):
"""Tests filters that consist of two patterns."""
self.RunAndVerify('Foo*.*:*A*', [
'FooTest.Abc',
'FooTest.Xyz',
'BazTest.TestA',
])
# An empty pattern + a non-empty one
self.RunAndVerify(':*A*', ['FooTest.Abc', 'BazTest.TestA'])
def testThreePatterns(self):
"""Tests filters that consist of three patterns."""
self.RunAndVerify('*oo*:*A*:*One', [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BazTest.TestOne',
'BazTest.TestA',
])
# The 2nd pattern is empty.
self.RunAndVerify('*oo*::*One', [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BazTest.TestOne',
])
# The last 2 patterns are empty.
self.RunAndVerify('*oo*::', [
'FooTest.Abc',
'FooTest.Xyz',
])
def testNegativeFilters(self):
self.RunAndVerify('*-BazTest.TestOne', [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
'BazTest.TestA',
'BazTest.TestB',
] + DEATH_TESTS + PARAM_TESTS)
self.RunAndVerify('*-FooTest.Abc:BazTest.*', [
'FooTest.Xyz',
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
] + DEATH_TESTS + PARAM_TESTS)
self.RunAndVerify('BarTest.*-BarTest.TestOne', [
'BarTest.TestTwo',
'BarTest.TestThree',
])
# Tests without leading '*'.
self.RunAndVerify('-FooTest.Abc:FooTest.Xyz:BazTest.*', [
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
] + DEATH_TESTS + PARAM_TESTS)
# Value parameterized tests.
self.RunAndVerify('*/*', PARAM_TESTS)
# Value parameterized tests filtering by the sequence name.
self.RunAndVerify('SeqP/*', [
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
])
# Value parameterized tests filtering by the test name.
self.RunAndVerify('*/0', [
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestY/0',
'SeqQ/ParamTest.TestX/0',
'SeqQ/ParamTest.TestY/0',
])
def testFlagOverridesEnvVar(self):
"""Tests that the filter flag overrides the filtering env. variable."""
SetEnvVar(FILTER_ENV_VAR, 'Foo*')
args = ['--%s=%s' % (FILTER_FLAG, '*One')]
tests_run = RunAndExtractTestList(args)[0]
SetEnvVar(FILTER_ENV_VAR, None)
self.AssertSetEqual(tests_run, ['BarTest.TestOne', 'BazTest.TestOne'])
def testShardStatusFileIsCreated(self):
"""Tests that the shard file is created if specified in the environment."""
shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
'shard_status_file')
self.assert_(not os.path.exists(shard_status_file))
extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
try:
InvokeWithModifiedEnv(extra_env, RunAndReturnOutput)
finally:
self.assert_(os.path.exists(shard_status_file))
os.remove(shard_status_file)
def testShardStatusFileIsCreatedWithListTests(self):
"""Tests that the shard file is created with the "list_tests" flag."""
shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
'shard_status_file2')
self.assert_(not os.path.exists(shard_status_file))
extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
try:
output = InvokeWithModifiedEnv(extra_env,
RunAndReturnOutput,
[LIST_TESTS_FLAG])
finally:
# This assertion ensures that Google Test enumerated the tests as
# opposed to running them.
self.assert_('[==========]' not in output,
'Unexpected output during test enumeration.\n'
'Please ensure that LIST_TESTS_FLAG is assigned the\n'
'correct flag value for listing Google Test tests.')
self.assert_(os.path.exists(shard_status_file))
os.remove(shard_status_file)
if SUPPORTS_DEATH_TESTS:
def testShardingWorksWithDeathTests(self):
"""Tests integration with death tests and sharding."""
gtest_filter = 'HasDeathTest.*:SeqP/*'
expected_tests = [
'HasDeathTest.Test1',
'HasDeathTest.Test2',
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
]
for flag in ['--gtest_death_test_style=threadsafe',
'--gtest_death_test_style=fast']:
self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests,
check_exit_0=True, args=[flag])
self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests,
check_exit_0=True, args=[flag])
if __name__ == '__main__':
gtest_test_utils.Main()
|
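The tests above exercise gtest's filter syntax: colon-separated glob patterns, with everything after a '-' treated as negative patterns. A minimal sketch of that matching logic in plain Python (fnmatch standing in for gtest's own wildcard code), handy for predicting which tests a filter such as `Foo*.*:*A*` would select:

```python
from fnmatch import fnmatchcase

def gtest_filter_matches(full_name, gtest_filter):
    """Approximate gtest's filter: 'pos1:pos2-neg1:neg2' with * and ? wildcards.
    An unset filter is treated as '*'; corner cases such as a literally empty
    filter string are not modelled here."""
    if gtest_filter is None:
        gtest_filter = '*'
    positive, _, negative = gtest_filter.partition('-')
    pos_patterns = [p for p in positive.split(':') if p] or ['*']
    neg_patterns = [p for p in negative.split(':') if p]
    return (any(fnmatchcase(full_name, p) for p in pos_patterns)
            and not any(fnmatchcase(full_name, p) for p in neg_patterns))

tests = ['FooTest.Abc', 'FooTest.Xyz', 'BarTest.TestOne', 'BazTest.TestA']
print([t for t in tests if gtest_filter_matches(t, 'Foo*.*:*A*')])
# ['FooTest.Abc', 'FooTest.Xyz', 'BazTest.TestA']
print([t for t in tests if gtest_filter_matches(t, '*-BazTest.*')])
# ['FooTest.Abc', 'FooTest.Xyz', 'BarTest.TestOne']
```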
BaconPancakes/valor
|
refs/heads/master
|
lib/pip/_vendor/requests/packages/chardet/euctwprober.py
|
2993
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCTWDistributionAnalysis
from .mbcssm import EUCTWSMModel
class EUCTWProber(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(EUCTWSMModel)
self._mDistributionAnalyzer = EUCTWDistributionAnalysis()
self.reset()
def get_charset_name(self):
return "EUC-TW"
|
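The prober above is normally not used directly; the public entry point of the chardet package is `detect`, which runs all registered probers (including this EUC-TW one) and reports the best guess. A minimal usage sketch against the top-level `chardet` package rather than the requests-vendored copy:

```python
import chardet

# Some CJK text, encoded to bytes so detection has something to work on.
raw = (u'\u4e2d\u6587\u6e2c\u8a66 ' * 50).encode('utf-8')

result = chardet.detect(raw)
print(result['encoding'], result['confidence'])  # expected: utf-8 (or a close guess)
```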
srajag/nova
|
refs/heads/master
|
nova/tests/virt/vmwareapi/test_imagecache.py
|
1
|
# Copyright (c) 2014 VMware, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import datetime
import mock
from oslo.config import cfg
from nova.openstack.common import timeutils
from nova import test
from nova.tests.virt.vmwareapi import fake
from nova.virt.vmwareapi import ds_util
from nova.virt.vmwareapi import imagecache
from nova.virt.vmwareapi import vim_util
from nova.virt.vmwareapi import vmops
CONF = cfg.CONF
class ImageCacheManagerTestCase(test.NoDBTestCase):
def setUp(self):
super(ImageCacheManagerTestCase, self).setUp()
self._session = mock.Mock(name='session')
self._imagecache = imagecache.ImageCacheManager(self._session,
'fake-base-folder')
self._time = datetime.datetime(2012, 11, 22, 12, 00, 00)
self._file_name = 'ts-2012-11-22-12-00-00'
fake.reset()
def tearDown(self):
super(ImageCacheManagerTestCase, self).tearDown()
fake.reset()
def test_timestamp_cleanup(self):
def fake_get_timestamp(ds_browser, ds_path):
self.assertEqual('fake-ds-browser', ds_browser)
self.assertEqual('[fake-ds] fake-path', str(ds_path))
if not self.exists:
return
ts = '%s%s' % (imagecache.TIMESTAMP_PREFIX,
timeutils.strtime(at=self._time,
fmt=imagecache.TIMESTAMP_FORMAT))
return ts
with contextlib.nested(
mock.patch.object(self._imagecache, '_get_timestamp',
fake_get_timestamp),
mock.patch.object(ds_util, 'file_delete')
) as (_get_timestamp, _file_delete):
self.exists = False
self._imagecache.timestamp_cleanup(
'fake-dc-ref', 'fake-ds-browser',
ds_util.DatastorePath('fake-ds', 'fake-path'))
self.assertEqual(0, _file_delete.call_count)
self.exists = True
self._imagecache.timestamp_cleanup(
'fake-dc-ref', 'fake-ds-browser',
ds_util.DatastorePath('fake-ds', 'fake-path'))
expected_ds_path = ds_util.DatastorePath(
'fake-ds', 'fake-path', self._file_name)
_file_delete.assert_called_once_with(self._session,
expected_ds_path, 'fake-dc-ref')
def test_get_timestamp(self):
def fake_get_sub_folders(session, ds_browser, ds_path):
self.assertEqual('fake-ds-browser', ds_browser)
self.assertEqual('[fake-ds] fake-path', str(ds_path))
if self.exists:
files = set()
files.add(self._file_name)
return files
with contextlib.nested(
mock.patch.object(ds_util, 'get_sub_folders',
fake_get_sub_folders)
):
self.exists = True
ts = self._imagecache._get_timestamp(
'fake-ds-browser',
ds_util.DatastorePath('fake-ds', 'fake-path'))
self.assertEqual(self._file_name, ts)
self.exists = False
ts = self._imagecache._get_timestamp(
'fake-ds-browser',
ds_util.DatastorePath('fake-ds', 'fake-path'))
self.assertIsNone(ts)
def test_get_timestamp_filename(self):
timeutils.set_time_override(override_time=self._time)
fn = self._imagecache._get_timestamp_filename()
self.assertEqual(self._file_name, fn)
def test_get_datetime_from_filename(self):
t = self._imagecache._get_datetime_from_filename(self._file_name)
self.assertEqual(self._time, t)
def test_get_ds_browser(self):
cache = self._imagecache._ds_browser
ds_browser = mock.Mock()
moref = fake.ManagedObjectReference('datastore-100')
self.assertIsNone(cache.get(moref.value))
mock_get_method = mock.Mock(return_value=ds_browser)
with mock.patch.object(vim_util, 'get_dynamic_property',
mock_get_method):
ret = self._imagecache._get_ds_browser(moref)
mock_get_method.assert_called_once_with(mock.ANY, moref,
'Datastore', 'browser')
self.assertIs(ds_browser, ret)
self.assertIs(ds_browser, cache.get(moref.value))
def test_list_base_images(self):
def fake_get_dynamic_property(vim, mobj, type, property_name):
return 'fake-ds-browser'
def fake_get_sub_folders(session, ds_browser, ds_path):
files = set()
files.add('image-ref-uuid')
return files
with contextlib.nested(
mock.patch.object(vim_util, 'get_dynamic_property',
fake_get_dynamic_property),
mock.patch.object(ds_util, 'get_sub_folders',
fake_get_sub_folders)
) as (_get_dynamic, _get_sub_folders):
fake_ds_ref = fake.ManagedObjectReference('fake-ds-ref')
datastore = ds_util.Datastore(name='ds', ref=fake_ds_ref)
ds_path = datastore.build_path('base_folder')
images = self._imagecache._list_datastore_images(
ds_path, datastore)
originals = set()
originals.add('image-ref-uuid')
self.assertEqual({'originals': originals,
'unexplained_images': []},
images)
def test_age_cached_images(self):
def fake_get_ds_browser(ds_ref):
return 'fake-ds-browser'
def fake_get_timestamp(ds_browser, ds_path):
self._get_timestamp_called += 1
path = str(ds_path)
if path == '[fake-ds] fake-path/fake-image-1':
# No time stamp exists
return
if path == '[fake-ds] fake-path/fake-image-2':
# Timestamp that will be valid => no deletion
return 'ts-2012-11-22-10-00-00'
if path == '[fake-ds] fake-path/fake-image-3':
# Timestamp that will be invalid => deletion
return 'ts-2012-11-20-12-00-00'
self.fail()
def fake_mkdir(session, ts_path, dc_ref):
self.assertEqual(
'[fake-ds] fake-path/fake-image-1/ts-2012-11-22-12-00-00',
str(ts_path))
def fake_file_delete(session, ds_path, dc_ref):
self.assertEqual('[fake-ds] fake-path/fake-image-3', str(ds_path))
def fake_timestamp_cleanup(dc_ref, ds_browser, ds_path):
self.assertEqual('[fake-ds] fake-path/fake-image-4', str(ds_path))
with contextlib.nested(
mock.patch.object(self._imagecache, '_get_ds_browser',
fake_get_ds_browser),
mock.patch.object(self._imagecache, '_get_timestamp',
fake_get_timestamp),
mock.patch.object(ds_util, 'mkdir',
fake_mkdir),
mock.patch.object(ds_util, 'file_delete',
fake_file_delete),
mock.patch.object(self._imagecache, 'timestamp_cleanup',
fake_timestamp_cleanup),
) as (_get_ds_browser, _get_timestamp, _mkdir, _file_delete,
_timestamp_cleanup):
timeutils.set_time_override(override_time=self._time)
datastore = ds_util.Datastore(name='ds', ref='fake-ds-ref')
dc_info = vmops.DcInfo(ref='dc_ref', name='name',
vmFolder='vmFolder')
self._get_timestamp_called = 0
self._imagecache.originals = set(['fake-image-1', 'fake-image-2',
'fake-image-3', 'fake-image-4'])
self._imagecache.used_images = set(['fake-image-4'])
self._imagecache._age_cached_images(
'fake-context', datastore, dc_info,
ds_util.DatastorePath('fake-ds', 'fake-path'))
self.assertEqual(3, self._get_timestamp_called)
def test_update(self):
def fake_list_datastore_images(ds_path, datastore):
return {'unexplained_images': [],
'originals': self.images}
def fake_age_cached_images(context, datastore,
dc_info, ds_path):
self.assertEqual('[ds] fake-base-folder', str(ds_path))
self.assertEqual(self.images,
self._imagecache.used_images)
self.assertEqual(self.images,
self._imagecache.originals)
with contextlib.nested(
mock.patch.object(self._imagecache, '_list_datastore_images',
fake_list_datastore_images),
mock.patch.object(self._imagecache,
'_age_cached_images',
fake_age_cached_images)
) as (_list_base, _age_and_verify):
instances = [{'image_ref': '1',
'host': CONF.host,
'name': 'inst-1',
'uuid': '123',
'vm_state': '',
'task_state': ''},
{'image_ref': '2',
'host': CONF.host,
'name': 'inst-2',
'uuid': '456',
'vm_state': '',
'task_state': ''}]
self.images = set(['1', '2'])
datastore = ds_util.Datastore(name='ds', ref='fake-ds-ref')
dc_info = vmops.DcInfo(ref='dc_ref', name='name',
vmFolder='vmFolder')
datastores_info = [(datastore, dc_info)]
self._imagecache.update('context', instances, datastores_info)
|
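The aging logic being tested keys off marker folders named `ts-YYYY-MM-DD-HH-MM-SS`: an image with no marker gets one, and an image whose marker is older than the cutoff gets deleted. A standalone sketch of parsing such a marker and deciding staleness; the prefix, format, and cutoff below mirror the fixtures in the test, not nova's configuration:

```python
from datetime import datetime, timedelta

TIMESTAMP_PREFIX = 'ts-'
TIMESTAMP_FORMAT = '%Y-%m-%d-%H-%M-%S'

def timestamp_to_datetime(name):
    """'ts-2012-11-20-12-00-00' -> datetime(2012, 11, 20, 12, 0, 0)."""
    return datetime.strptime(name[len(TIMESTAMP_PREFIX):], TIMESTAMP_FORMAT)

def is_stale(name, now, max_age):
    return timestamp_to_datetime(name) < now - max_age

now = datetime(2012, 11, 22, 12, 0, 0)
print(is_stale('ts-2012-11-22-10-00-00', now, timedelta(days=1)))  # False -> kept
print(is_stale('ts-2012-11-20-12-00-00', now, timedelta(days=1)))  # True  -> deleted
```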
KunihikoKido/sublime-elasticsearch-client
|
refs/heads/master
|
commands/multiple_percolate.py
|
1
|
from .base import BaseCommand
class MultiplePercolateCommand(BaseCommand):
command_name = "elasticsearch:multiple-percolate"
def run_request(self):
options = dict(
index=self.settings.index,
doc_type=self.settings.doc_type,
body=self.get_text()
)
return self.client.mpercolate(**options)
|
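`get_text()` is expected to return the multi-percolate body, which in the Elasticsearch versions this client targets is newline-delimited JSON: a `{"percolate": ...}` header line followed by a `{"doc": ...}` line per entry. A hedged sketch of what such a body might look like; the index, type, and field names are made up:

```python
import json

# Hypothetical multi-percolate body: header line + document line per entry.
entries = [
    ({'percolate': {'index': 'my-index', 'type': 'my-type'}},
     {'doc': {'message': 'a tweet about elasticsearch'}}),
    ({'percolate': {'index': 'my-index', 'type': 'my-type'}},
     {'doc': {'message': 'another document to percolate'}}),
]

body = '\n'.join(json.dumps(line) for pair in entries for line in pair) + '\n'
print(body)
```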
carlmw/oscar-wager
|
refs/heads/master
|
django/contrib/localflavor/nl/nl_provinces.py
|
528
|
from django.utils.translation import ugettext_lazy as _
PROVINCE_CHOICES = (
('DR', _('Drenthe')),
('FL', _('Flevoland')),
('FR', _('Friesland')),
('GL', _('Gelderland')),
('GR', _('Groningen')),
('LB', _('Limburg')),
('NB', _('Noord-Brabant')),
('NH', _('Noord-Holland')),
('OV', _('Overijssel')),
('UT', _('Utrecht')),
('ZE', _('Zeeland')),
('ZH', _('Zuid-Holland')),
)
|
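`PROVINCE_CHOICES` follows Django's standard two-tuple choices convention, so it can be dropped straight into a form or model field. A minimal sketch, assuming a configured project on a Django version that still ships `django.contrib.localflavor`:

```python
from django import forms
from django.contrib.localflavor.nl.nl_provinces import PROVINCE_CHOICES

class AddressForm(forms.Form):
    # Renders as a <select> of Dutch provinces; submitted values are the two-letter codes.
    province = forms.ChoiceField(choices=PROVINCE_CHOICES)
```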
scrypter/azure-quickstart-templates
|
refs/heads/master
|
scrapy-on-ubuntu/myspider.py
|
255
|
from scrapy import Spider, Item, Field
class Post(Item):
title = Field()
class BlogSpider(Spider):
name, start_urls = 'blogspider', ['http://blog.scrapinghub.com']
def parse(self, response):
return [Post(title=e.extract()) for e in response.css("h2 a::text")]
|
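The spider above can be driven without a full Scrapy project by handing the class to a `CrawlerProcess`. A minimal sketch, assuming `BlogSpider` is importable in the current module and using the older `FEED_URI`/`FEED_FORMAT` feed settings; the output file name is arbitrary:

```python
from scrapy.crawler import CrawlerProcess

# Scrape the blog and write the extracted Post items to posts.json.
process = CrawlerProcess(settings={'FEED_FORMAT': 'json', 'FEED_URI': 'posts.json'})
process.crawl(BlogSpider)
process.start()  # blocks until the crawl finishes
```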
youprofit/scikit-image
|
refs/heads/master
|
skimage/novice/_novice.py
|
19
|
import os
import imghdr
from collections import namedtuple
import numpy as np
from .. import io, img_as_ubyte
from ..transform import resize
from ..color import color_dict
from ..io.util import file_or_url_context, is_url
import six
from six.moves.urllib import request
urlopen = request.urlopen
# Convert colors from `skimage.color` to uint8 and allow access through
# dict or a named tuple.
color_dict = dict((name, tuple(int(255 * c + 0.5) for c in rgb))
for name, rgb in six.iteritems(color_dict))
colors = namedtuple('colors', color_dict.keys())(**color_dict)
def open(path):
"""Return Picture object from the given image path."""
return Picture(path)
def _verify_picture_index(index):
"""Raise error if picture index is not a 2D index/slice."""
if not (isinstance(index, tuple) and len(index) == 2):
raise IndexError("Expected 2D index but got {0!r}".format(index))
if all(isinstance(i, int) for i in index):
return index
# In case we need to fix the array index, convert tuple to list.
index = list(index)
for i, dim_slice in enumerate(index):
# If either index is a slice, ensure index object returns 2D array.
if isinstance(dim_slice, int):
index[i] = dim_slice = slice(dim_slice, dim_slice + 1)
return tuple(index)
def rgb_transpose(array):
"""Return RGB array with first 2 axes transposed."""
return np.transpose(array, (1, 0, 2))
def array_to_xy_origin(image):
"""Return view of image transformed from array to Cartesian origin."""
return rgb_transpose(image[::-1])
def xy_to_array_origin(image):
"""Return view of image transformed from Cartesian to array origin."""
return rgb_transpose(image[:, ::-1])
class Pixel(object):
"""A single pixel in a Picture.
Attributes
----------
pic : Picture
The Picture object that this pixel references.
array : array_like
Byte array with raw image data (RGB).
x : int
Horizontal coordinate of this pixel (left = 0).
y : int
Vertical coordinate of this pixel (bottom = 0).
rgb : tuple
RGB tuple with red, green, and blue components (0-255)
alpha : int
Transparency component (0-255), 255 (opaque) by default
"""
def __init__(self, pic, array, x, y, rgb, alpha=255):
self._picture = pic
self._x = x
self._y = y
self._red = self._validate(rgb[0])
self._green = self._validate(rgb[1])
self._blue = self._validate(rgb[2])
self._alpha = self._validate(alpha)
@property
def x(self):
"""Horizontal location of this pixel in the parent image(left = 0)."""
return self._x
@property
def y(self):
"""Vertical location of this pixel in the parent image (bottom = 0)."""
return self._y
@property
def red(self):
"""The red component of the pixel (0-255)."""
return self._red
@red.setter
def red(self, value):
self._red = self._validate(value)
self._setpixel()
@property
def green(self):
"""The green component of the pixel (0-255)."""
return self._green
@green.setter
def green(self, value):
self._green = self._validate(value)
self._setpixel()
@property
def blue(self):
"""The blue component of the pixel (0-255)."""
return self._blue
@blue.setter
def blue(self, value):
self._blue = self._validate(value)
self._setpixel()
@property
def alpha(self):
"""The transparency component of the pixel (0-255)."""
return self._alpha
@alpha.setter
def alpha(self, value):
self._alpha = self._validate(value)
self._setpixel()
@property
def rgb(self):
"""The RGB color components of the pixel (3 values 0-255)."""
return (self.red, self.green, self.blue)
@rgb.setter
def rgb(self, value):
if len(value) == 4:
self.rgba = value
else:
self._red, self._green, self._blue \
= (self._validate(v) for v in value)
self._alpha = 255
self._setpixel()
@property
def rgba(self):
"""The RGB color and transparency components of the pixel
(4 values 0-255).
"""
return (self.red, self.green, self.blue, self.alpha)
@rgba.setter
def rgba(self, value):
self._red, self._green, self._blue, self._alpha \
= (self._validate(v) for v in value)
self._setpixel()
def _validate(self, value):
"""Verifies that the pixel value is in [0, 255]."""
try:
value = int(value)
if (value < 0) or (value > 255):
raise ValueError()
except ValueError:
msg = "Expected an integer between 0 and 255, but got {0} instead!"
raise ValueError(msg.format(value))
return value
def _setpixel(self):
# RGB + alpha
self._picture.xy_array[self._x, self._y] = self.rgba
self._picture._array_modified()
def __eq__(self, other):
if isinstance(other, Pixel):
return self.rgba == other.rgba
def __repr__(self):
args = self.red, self.green, self.blue, self.alpha
return "Pixel(red={0}, green={1}, blue={2}, alpha={3})".format(*args)
class Picture(object):
"""A 2-D picture made up of pixels.
Attributes
----------
path : str
Path to an image file to load / URL of an image
array : array
Raw RGB or RGBA image data [0-255], with origin at top-left.
xy_array : array
Raw RGB or RGBA image data [0-255], with origin at bottom-left.
Examples
--------
Load an image from a file:
>>> from skimage import novice
>>> from skimage import data
>>> picture = novice.open(data.data_dir + '/chelsea.png')
Load an image from a URL (the URL must start with ``http(s)://`` or
``ftp(s)://``):
>>> picture = novice.open('http://scikit-image.org/_static/img/logo.png')
Create a blank 100 pixel wide, 200 pixel tall white image:
>>> pic = Picture.from_size((100, 200), color=(255, 255, 255))
Use numpy to make an RGB byte array (shape is height x width x 3):
>>> import numpy as np
>>> data = np.zeros(shape=(200, 100, 3), dtype=np.uint8)
>>> data[:, :, 0] = 255 # Set red component to maximum
>>> pic = Picture(array=data)
Get the bottom-left pixel:
>>> pic[0, 0]
Pixel(red=255, green=0, blue=0, alpha=255)
Get the top row of the picture:
>>> pic[:, pic.height-1]
Picture(100 x 1)
Set the bottom-left pixel to black:
>>> pic[0, 0] = (0, 0, 0)
Set the top row to red:
>>> pic[:, pic.height-1] = (255, 0, 0)
"""
def __init__(self, path=None, array=None, xy_array=None):
self._modified = False
self._path = None
self._format = None
n_args = len([a for a in [path, array, xy_array] if a is not None])
if n_args != 1:
msg = "Must provide a single keyword arg (path, array, xy_array)."
            raise ValueError(msg)
elif path is not None:
if not is_url(path):
path = os.path.abspath(path)
self._path = path
with file_or_url_context(path) as context:
self.array = img_as_ubyte(io.imread(context))
self._format = imghdr.what(context)
elif array is not None:
self.array = array
elif xy_array is not None:
self.xy_array = xy_array
# Force RGBA internally (use max alpha)
if self.array.shape[-1] == 3:
self.array = np.insert(self.array, 3, values=255, axis=2)
@staticmethod
def from_size(size, color='black'):
"""Return a Picture of the specified size and a uniform color.
Parameters
----------
size : tuple
Width and height of the picture in pixels.
color : tuple or str
RGB or RGBA tuple with the fill color for the picture [0-255] or
a valid key in `color_dict`.
"""
if isinstance(color, six.string_types):
color = color_dict[color]
rgb_size = tuple(size) + (len(color),)
color = np.array(color, dtype=np.uint8)
array = np.ones(rgb_size, dtype=np.uint8) * color
# Force RGBA internally (use max alpha)
if array.shape[-1] == 3:
array = np.insert(array, 3, values=255, axis=2)
return Picture(array=array)
@property
def array(self):
"""Image data stored as numpy array."""
return self._array
@array.setter
def array(self, array):
self._array = array
self._xy_array = array_to_xy_origin(array)
@property
def xy_array(self):
"""Image data stored as numpy array with origin at the bottom-left."""
return self._xy_array
@xy_array.setter
def xy_array(self, array):
self._xy_array = array
self._array = xy_to_array_origin(array)
def save(self, path):
"""Saves the picture to the given path.
Parameters
----------
path : str
Path (with file extension) where the picture is saved.
"""
io.imsave(path, self.array)
self._modified = False
self._path = os.path.abspath(path)
self._format = imghdr.what(path)
@property
def path(self):
"""The path to the picture."""
return self._path
@property
def modified(self):
"""True if the picture has changed."""
return self._modified
def _array_modified(self):
self._modified = True
self._path = None
@property
def format(self):
"""The image format of the picture."""
return self._format
@property
def size(self):
"""The size (width, height) of the picture."""
return self.xy_array.shape[:2]
@size.setter
def size(self, value):
# Don't resize if no change in size
if (value[0] != self.width) or (value[1] != self.height):
# skimage dimensions are flipped: y, x
new_size = (int(value[1]), int(value[0]))
new_array = resize(self.array, new_size, order=0,
preserve_range=True)
self.array = new_array.astype(np.uint8)
self._array_modified()
@property
def width(self):
"""The width of the picture."""
return self.size[0]
@width.setter
def width(self, value):
self.size = (value, self.height)
@property
def height(self):
"""The height of the picture."""
return self.size[1]
@height.setter
def height(self, value):
self.size = (self.width, value)
def show(self):
"""Display the image."""
io.imshow(self.array)
io.show()
def _makepixel(self, x, y):
"""Create a Pixel object for a given x, y location."""
rgb = self.xy_array[x, y]
return Pixel(self, self.array, x, y, rgb)
def _get_channel(self, channel):
"""Return a specific dimension out of the raw image data slice."""
return self._array[:, :, channel]
def _set_channel(self, channel, value):
"""Set a specific dimension in the raw image data slice."""
self._array[:, :, channel] = value
@property
def red(self):
"""The red component of the pixel (0-255)."""
return self._get_channel(0).ravel()
@red.setter
def red(self, value):
self._set_channel(0, value)
@property
def green(self):
"""The green component of the pixel (0-255)."""
return self._get_channel(1).ravel()
@green.setter
def green(self, value):
self._set_channel(1, value)
@property
def blue(self):
"""The blue component of the pixel (0-255)."""
return self._get_channel(2).ravel()
@blue.setter
def blue(self, value):
self._set_channel(2, value)
@property
def alpha(self):
"""The transparency component of the pixel (0-255)."""
return self._get_channel(3).ravel()
@alpha.setter
def alpha(self, value):
self._set_channel(3, value)
@property
def rgb(self):
"""The RGB color components of the pixel (3 values 0-255)."""
return self.xy_array[:, :, :3]
@rgb.setter
def rgb(self, value):
self.xy_array[:, :, :3] = value
@property
def rgba(self):
"""The RGBA color components of the pixel (4 values 0-255)."""
return self.xy_array
@rgba.setter
def rgba(self, value):
self.xy_array[:] = value
def __iter__(self):
"""Iterates over all pixels in the image."""
for x in range(self.width):
for y in range(self.height):
yield self._makepixel(x, y)
def __getitem__(self, xy_index):
"""Return `Picture`s for slices and `Pixel`s for indexes."""
xy_index = _verify_picture_index(xy_index)
if all(isinstance(index, int) for index in xy_index):
return self._makepixel(*xy_index)
else:
return Picture(xy_array=self.xy_array[xy_index])
def __setitem__(self, xy_index, value):
xy_index = _verify_picture_index(xy_index)
if isinstance(value, tuple):
self[xy_index].rgb = value
elif isinstance(value, Picture):
self.xy_array[xy_index] = value.xy_array
else:
raise TypeError("Invalid value type")
self._array_modified()
def __eq__(self, other):
if not isinstance(other, Picture):
raise NotImplementedError()
return np.all(self.array == other.array)
def __repr__(self):
return "Picture({0} x {1})".format(*self.size)
def _repr_png_(self):
return self._repr_image_format('png')
def _repr_jpeg_(self):
return self._repr_image_format('jpeg')
def _repr_image_format(self, format_str):
str_buffer = six.BytesIO()
io.imsave(str_buffer, self.array, format_str=format_str)
return_str = str_buffer.getvalue()
str_buffer.close()
return return_str
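# A minimal sketch of whole-channel access on a Picture (illustrative only;
# it builds on the class above and uses the 'black' color_dict key that
# from_size() already defaults to):
#
#     pic = Picture.from_size((3, 3), color='black')
#     pic.red = 255                 # broadcast into the red channel of every pixel
#     for pixel in pic:             # __iter__ yields a Pixel per (x, y) location
#         assert pixel.rgb == (255, 0, 0)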
if __name__ == '__main__':
import doctest
doctest.testmod()
|
adlius/osf.io
|
refs/heads/develop
|
api_tests/requests/views/__init__.py
|
12133432
| |
nkoech/csacompendium
|
refs/heads/master
|
csacompendium/research/api/nitrogenapplied/__init__.py
|
12133432
| |
balloob/home-assistant
|
refs/heads/dev
|
tests/components/cloudflare/conftest.py
|
5
|
"""Define fixtures available for all tests."""
from pytest import fixture
from . import _get_mock_cfupdate
from tests.async_mock import patch
@fixture
def cfupdate(hass):
"""Mock the CloudflareUpdater for easier testing."""
mock_cfupdate = _get_mock_cfupdate()
with patch(
"homeassistant.components.cloudflare.CloudflareUpdater",
return_value=mock_cfupdate,
) as mock_api:
yield mock_api
@fixture
def cfupdate_flow(hass):
"""Mock the CloudflareUpdater for easier config flow testing."""
mock_cfupdate = _get_mock_cfupdate()
with patch(
"homeassistant.components.cloudflare.config_flow.CloudflareUpdater",
return_value=mock_cfupdate,
) as mock_api:
yield mock_api
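# A minimal sketch of how a test might consume these fixtures (hypothetical
# test function, not part of this conftest):
#
#     async def test_user_flow(hass, cfupdate_flow):
#         # cfupdate_flow is the patched CloudflareUpdater factory;
#         # cfupdate_flow.return_value is the mock handed to the config flow.
#         assert cfupdate_flow.return_value is not None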
|
aericson/Djax
|
refs/heads/master
|
example/triggermap.py
|
2
|
"""
Example of trigger integration for Djax.
"""
from djax.triggers import trigger, affinity_trigger
from example.models import Article
triggers = (
trigger(r'^$','pageview','home'),
    affinity_trigger(r'^article/(?P<article_id>\d+)/$','affinity','article',pk='article_id',model=Article),
trigger(r'^topic/(?P<topic_slug>[\w-]+)/$','affinity','rtag',tag='$topic_slug'),
)
|
SimeonFritz/aima-python
|
refs/heads/master
|
submissions/Conklin/myCSPs.py
|
18
|
import csp
rgb = ['R', 'G', 'B']
# domains = {
# 'AM': rgb,
# 'ES': rgb,
# 'LK': rgb,
# 'RB': rgb,
# 'FL': rgb,
# 'G': rgb,
# 'S': rgb,
# 'M': rgb,
# 'BL': rgb,
# 'C': rgb,
# 'H': rgb
# }
domains = {
'Dragonblight': rgb,
'Borean Tundra': rgb,
'Scholozar Basin': rgb,
'Wintergrasp': rgb,
'Crystalsong': rgb,
'Icecrown': rgb,
'Storm Peaks': rgb,
'Zul Drak': rgb,
'Grizzly Hills': rgb,
'Howling Fjord': rgb
}
variables = domains.keys()
# neighbors = {
# 'AM': ['LK', 'ES'],
# 'ES': ['BL', 'M'],
# 'LK': ['RB', 'FL', 'AM'],
# 'RB': ['LK', 'FL', 'H'],
# 'FL': ['G', 'LK', 'RB'],
# 'G': ['FL', 'S'],
# 'S': ['G', 'M'],
# 'M': ['ES', 'BL', 'S'],
# 'BL': ['ES', 'C', 'M'],
# 'C': ['BL', 'H'],
# 'H': ['C', 'RB']
# }
neighbors = {
'Borean Tundra': {'Scholozar Basin', 'Wintergrasp', 'Dragonblight'},
'Dragonblight': {'Wintergrasp', 'Icecrown', 'Crystalsong', 'Zul Drak', 'Grizzly Hills'},
'Wintergrasp': {'Borean Tundra', 'Scholozar Basin', 'Icecrown', 'Dragonblight'},
'Scholozar Basin': {'Borean Tundra', 'Icecrown', 'Wintergrasp'},
'Crystalsong': {'Icecrown', 'Dragonblight', 'Storm Peaks', 'Zul Drak'},
'Icecrown': {'Scholozar Basin', 'Wintergrasp', 'Dragonblight', 'Crystalsong', 'Storm Peaks'},
'Storm Peaks': {'Icecrown', 'Crystalsong', 'Zul Drak'},
'Zul Drak': {'Storm Peaks', 'Crystalsong', 'Dragonblight', 'Grizzly Hills'},
'Grizzly Hills': {'Zul Drak', 'Dragonblight', 'Howling Fjord'},
'Howling Fjord': {'Grizzly Hills'}
}
def constraints(A, a, B, b):
if A == B: # e.g. NSW == NSW
return True
if a == b: # e.g. WA = G and SA = G
return False
return True
myNorthrend = csp.CSP(variables, domains, neighbors, constraints)
myCSPs = [
{'csp': myNorthrend,
# 'select_unassigned_variable':csp.mrv,
}
]
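# A minimal sketch of actually solving the map-colouring CSP above (assumes
# the aima-python csp module's backtracking_search(), which ships alongside
# the csp.CSP and csp.mrv names already referenced in this file):
if __name__ == '__main__':
    solution = csp.backtracking_search(myNorthrend)
    print(solution)  # e.g. {'Dragonblight': 'R', 'Borean Tundra': 'G', ...}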
|
dnozay/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.3/django/conf/locale/sk/formats.py
|
232
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'G:i:s'
DATETIME_FORMAT = 'j. F Y G:i:s'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y G:i:s'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
'%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = ' '
NUMBER_GROUPING = 3
|
petercable/mi-dataset
|
refs/heads/master
|
mi/dataset/parser/flort_dj_cspp.py
|
3
|
#!/usr/bin/env python
"""
@package mi.dataset.parser
@file marine-integrations/mi/dataset/parser/flort_dj_cspp.py
@author Jeremy Amundson
@brief Parser for the flort_dj_cspp dataset driver
Release notes:
initial release
"""
__author__ = 'Jeremy Amundson'
__license__ = 'Apache 2.0'
import numpy
from mi.core.log import get_logger
log = get_logger()
from mi.core.common import BaseEnum
import re
from mi.core.instrument.data_particle import DataParticle
from mi.dataset.parser.common_regexes import INT_REGEX, FLOAT_REGEX, MULTIPLE_TAB_REGEX, END_OF_LINE_REGEX
from mi.dataset.parser.cspp_base import \
CsppParser, \
CsppMetadataDataParticle, \
MetadataRawDataKey, \
Y_OR_N_REGEX, encode_y_or_n
# A regex to match a date in MM/DD/YY format
FORMATTED_DATE_REGEX = r'\d{2}/\d{2}/\d{2}'
# A regex to match a time stamp in HH:MM:SS format
TIME_REGEX = r'\d{2}:\d{2}:\d{2}'
# A regular expression that should match a flort_dj data record
DATA_REGEX = '(' + FLOAT_REGEX + ')' + MULTIPLE_TAB_REGEX # Profiler Timestamp
DATA_REGEX += '(' + FLOAT_REGEX + ')' + MULTIPLE_TAB_REGEX # Depth
DATA_REGEX += '(' + Y_OR_N_REGEX + ')' + MULTIPLE_TAB_REGEX # suspect timestamp
DATA_REGEX += '(' + FORMATTED_DATE_REGEX + ')' + MULTIPLE_TAB_REGEX # date string
DATA_REGEX += '(' + TIME_REGEX + ')' + MULTIPLE_TAB_REGEX # time string
DATA_REGEX += '(' + INT_REGEX + ')' + MULTIPLE_TAB_REGEX # measurement_wavelength_beta
DATA_REGEX += '(' + INT_REGEX + ')' + MULTIPLE_TAB_REGEX # raw_signal_beta
DATA_REGEX += '(' + INT_REGEX + ')' + MULTIPLE_TAB_REGEX # measurement_wavelength_chl
DATA_REGEX += '(' + INT_REGEX + ')' + MULTIPLE_TAB_REGEX # raw_signal_chl
DATA_REGEX += '(' + INT_REGEX + ')' + MULTIPLE_TAB_REGEX # measurement_wavelength_cdom
DATA_REGEX += '(' + INT_REGEX + ')' + MULTIPLE_TAB_REGEX # raw_signal_cdom
DATA_REGEX += '(' + INT_REGEX + ')' + END_OF_LINE_REGEX # raw_internal_temp
IGNORE_REGEX = FLOAT_REGEX + MULTIPLE_TAB_REGEX # Profiler Timestamp
IGNORE_REGEX += FLOAT_REGEX + MULTIPLE_TAB_REGEX # Depth
IGNORE_REGEX += Y_OR_N_REGEX + MULTIPLE_TAB_REGEX # Suspect Timestamp
IGNORE_REGEX += r'[^\t]*' + END_OF_LINE_REGEX # any text after the Suspect Timestamp
IGNORE_MATCHER = re.compile(IGNORE_REGEX)
class DataMatchesGroupNumber(BaseEnum):
"""
An enum for group match indices for a data record only chunk.
"""
PROFILER_TIMESTAMP = 1
PRESSURE = 2
SUSPECT_TIMESTAMP = 3
DATE = 4
TIME = 5
BETA = 6
RAW_BETA = 7
CHLOROPHYLL = 8
RAW_CHLOROPHYLL = 9
CDOM = 10
RAW_CDOM = 11
TEMP = 12
class DataParticleType(BaseEnum):
"""
The data particle types that a flort_dj_cspp parser could generate
"""
METADATA_RECOVERED = 'flort_dj_cspp_metadata_recovered'
INSTRUMENT_RECOVERED = 'flort_dj_cspp_instrument_recovered'
METADATA_TELEMETERED = 'flort_dj_cspp_metadata'
INSTRUMENT_TELEMETERED = 'flort_dj_cspp_instrument'
class FlortDjCsppParserDataParticleKey(BaseEnum):
"""
The data particle keys associated with flort_dj_cspp data particle parameters
"""
PROFILER_TIMESTAMP = 'profiler_timestamp'
PRESSURE = 'pressure_depth'
SUSPECT_TIMESTAMP = 'suspect_timestamp'
DATE = 'date_string'
TIME = 'time_string'
BETA = 'measurement_wavelength_beta'
RAW_BETA = 'raw_signal_beta'
CHLOROPHYLL = 'measurement_wavelength_chl'
RAW_CHLOROPHYLL = 'raw_signal_chl'
CDOM = 'measurement_wavelength_cdom'
RAW_CDOM = 'raw_signal_cdom'
TEMP = 'raw_internal_temp'
# A group of instrument data particle encoding rules used to simplify encoding using a loop
INSTRUMENT_PARTICLE_ENCODING_RULES = [
(FlortDjCsppParserDataParticleKey.PROFILER_TIMESTAMP, DataMatchesGroupNumber.PROFILER_TIMESTAMP, numpy.float),
(FlortDjCsppParserDataParticleKey.PRESSURE, DataMatchesGroupNumber.PRESSURE, float),
(FlortDjCsppParserDataParticleKey.SUSPECT_TIMESTAMP, DataMatchesGroupNumber.SUSPECT_TIMESTAMP, encode_y_or_n),
(FlortDjCsppParserDataParticleKey.DATE, DataMatchesGroupNumber.DATE, str),
(FlortDjCsppParserDataParticleKey.TIME, DataMatchesGroupNumber.TIME, str),
(FlortDjCsppParserDataParticleKey.BETA, DataMatchesGroupNumber.BETA, int),
(FlortDjCsppParserDataParticleKey.RAW_BETA, DataMatchesGroupNumber.RAW_BETA, int),
(FlortDjCsppParserDataParticleKey.CHLOROPHYLL, DataMatchesGroupNumber.CHLOROPHYLL, int),
(FlortDjCsppParserDataParticleKey.RAW_CHLOROPHYLL, DataMatchesGroupNumber.RAW_CHLOROPHYLL, int),
(FlortDjCsppParserDataParticleKey.CDOM, DataMatchesGroupNumber.CDOM, int),
(FlortDjCsppParserDataParticleKey.RAW_CDOM, DataMatchesGroupNumber.RAW_CDOM, int),
    (FlortDjCsppParserDataParticleKey.TEMP, DataMatchesGroupNumber.TEMP, int)
]
class FlortDjCsppMetadataDataParticle(CsppMetadataDataParticle):
"""
Class for building a flort_dj_cspp metadata particle
"""
def _build_parsed_values(self):
"""
Take something in the data format and turn it into
an array of dictionaries defining the data in the particle
with the appropriate tag.
@throws SampleException If there is a problem with sample creation
"""
results = []
# Append the base metadata parsed values to the results to return
results += self._build_metadata_parsed_values()
data_match = self.raw_data[MetadataRawDataKey.DATA_MATCH]
internal_timestamp_unix = numpy.float(data_match.group(
DataMatchesGroupNumber.PROFILER_TIMESTAMP))
self.set_internal_timestamp(unix_time=internal_timestamp_unix)
return results
class FlortDjCsppMetadataRecoveredDataParticle(FlortDjCsppMetadataDataParticle):
"""
Class for building a flort_dj_cspp recovered metadata particle
"""
_data_particle_type = DataParticleType.METADATA_RECOVERED
class FlortDjCsppMetadataTelemeteredDataParticle(FlortDjCsppMetadataDataParticle):
"""
Class for building a flort_dj_cspp telemetered metadata particle
"""
_data_particle_type = DataParticleType.METADATA_TELEMETERED
class FlortDjCsppInstrumentDataParticle(DataParticle):
"""
Class for building a flort_dj_cspp instrument data particle
"""
def _build_parsed_values(self):
"""
Take something in the data format and turn it into
an array of dictionaries defining the data in the particle
with the appropriate tag.
@throws SampleException If there is a problem with sample creation
"""
results = []
# Process each of the instrument particle parameters
for (name, index, encoding) in INSTRUMENT_PARTICLE_ENCODING_RULES:
results.append(self._encode_value(name, self.raw_data.group(index), encoding))
        # Set the internal timestamp
internal_timestamp_unix = numpy.float(self.raw_data.group(
DataMatchesGroupNumber.PROFILER_TIMESTAMP))
self.set_internal_timestamp(unix_time=internal_timestamp_unix)
return results
class FlortDjCsppInstrumentRecoveredDataParticle(FlortDjCsppInstrumentDataParticle):
"""
Class for building a flort_dj_cspp recovered instrument data particle
"""
_data_particle_type = DataParticleType.INSTRUMENT_RECOVERED
class FlortDjCsppInstrumentTelemeteredDataParticle(FlortDjCsppInstrumentDataParticle):
"""
Class for building a flort_dj_cspp telemetered instrument data particle
"""
_data_particle_type = DataParticleType.INSTRUMENT_TELEMETERED
class FlortDjCsppParser(CsppParser):
def __init__(self,
config,
stream_handle,
exception_callback):
"""
This method is a constructor that will instantiate an FlortDjCsppParser object.
@param config The configuration for this FlortDjCsppParser parser
@param stream_handle The handle to the data stream containing the flort_dj_cspp data
@param exception_callback The function to call to report exceptions
"""
# Call the superclass constructor
super(FlortDjCsppParser, self).__init__(config,
stream_handle,
exception_callback,
DATA_REGEX,
ignore_matcher=IGNORE_MATCHER)
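# A minimal usage sketch (hypothetical file name and callback; the config
# dict is supplied by the dataset driver framework, and get_records() is
# assumed to come from the CsppParser base class as in other mi-dataset
# parsers):
#
#     with open('flort_dj_cspp_sample.txt', 'rU') as stream_handle:
#         parser = FlortDjCsppParser(config, stream_handle,
#                                    lambda exception: log.warn(exception))
#         particles = parser.get_records(10)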
|
SimVascular/VTK
|
refs/heads/master
|
Accelerators/Piston/Testing/Python/TestSlice.py
|
21
|
#!/usr/bin/env python
"""
This tests VTK's use of Piston's slice operator.
"""
import sys
import vtk
from vtk.test import Testing
from PistonTestCommon import *
def widgetCallBack(obj,event):
global plane, filter
obj.GetPlane(plane)
filter.Update() #TODO: Why is this necessary?
class TestSlice(Testing.vtkTest):
def testSlice(self):
global plane, filter, args
writefiles = "SaveData" in args
renderer = vtk.vtkRenderer()
renwin = vtk.vtkRenderWindow()
renwin.AddRenderer(renderer)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renwin)
renwin.Render()
if "GPURender" in args:
vtk.vtkPistonMapper.InitCUDAGL(renwin)
src = vtk.vtkImageMandelbrotSource()
src.SetWholeExtent(0,20,0,20,0,20)
#scale and bias until piston understands origin and spacing
src.Update()
inputdata = src.GetOutput()
if "Normalize" in args:
testdata1 = inputdata.NewInstance()
testdata1.ShallowCopy(inputdata)
testdata1.SetSpacing(1,1,1)
testdata1.SetOrigin(0,0,0)
inputdata = testdata1
bounds = inputdata.GetBounds()
center = [(bounds[1]-bounds[0])/2+bounds[0],
(bounds[3]-bounds[2])/2+bounds[2],
(bounds[5]-bounds[4])/2+bounds[4]]
d2p = vtk.vtkDataSetToPiston()
d2p.SetInputData(inputdata)
#d2p.SetInputConnection(src.GetOutputPort())
plane = vtk.vtkPlane()
plane.SetOrigin(center)
plane.SetNormal(0,0,1)
filter = vtk.vtkPistonSlice()
filter.SetInputConnection(d2p.GetOutputPort())
filter.SetClippingPlane(plane)
filter.SetOffset(0.0)
p2d = vtk.vtkPistonToDataSet()
p2d.SetOutputDataSetType(vtk.VTK_POLY_DATA)
p2d.SetInputConnection(filter.GetOutputPort())
if writefiles:
writeFile(p2d, "piston_slice.vtk")
mapper = vtk.vtkPistonMapper()
mapper.SetInputConnection(filter.GetOutputPort())
mapper.Update() #TODO why is this necessary
actor = vtk.vtkActor()
actor.SetMapper(mapper)
renderer.AddActor(actor)
widget = vtk.vtkImplicitPlaneWidget()
widget.PlaceWidget(bounds)
widget.SetOrigin([plane.GetOrigin()[x] for x in 0,1,2])
widget.SetNormal([plane.GetNormal()[x] for x in 0,1,2])
widget.SetInteractor(iren)
widget.AddObserver("InteractionEvent", widgetCallBack)
widget.SetEnabled(1)
widget.DrawPlaneOff()
renderer.ResetCamera()
renwin.Render()
img_file = "TestSlice.png"
Testing.compareImage(renwin, Testing.getAbsImagePath(img_file))
if Testing.isInteractive():
widget.DrawPlaneOn()
iren.Start()
if __name__ == "__main__":
global args
args = parseArgs()
Testing.main([(TestSlice, 'test')])
|
ZhaoCJ/django
|
refs/heads/master
|
tests/sites_framework/models.py
|
115
|
from django.contrib.sites.managers import CurrentSiteManager
from django.contrib.sites.models import Site
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AbstractArticle(models.Model):
title = models.CharField(max_length=50)
objects = models.Manager()
on_site = CurrentSiteManager()
class Meta:
abstract = True
def __str__(self):
return self.title
class SyndicatedArticle(AbstractArticle):
sites = models.ManyToManyField(Site)
class ExclusiveArticle(AbstractArticle):
site = models.ForeignKey(Site)
class CustomArticle(AbstractArticle):
places_this_article_should_appear = models.ForeignKey(Site)
objects = models.Manager()
on_site = CurrentSiteManager("places_this_article_should_appear")
class InvalidArticle(AbstractArticle):
site = models.ForeignKey(Site)
objects = models.Manager()
on_site = CurrentSiteManager("places_this_article_should_appear")
class ConfusedArticle(AbstractArticle):
site = models.IntegerField()
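# A minimal usage sketch for the site-aware managers above (illustrative;
# assumes SITE_ID is configured in settings):
#
#     ExclusiveArticle.on_site.all()   # only articles whose site matches SITE_ID
#     CustomArticle.on_site.all()      # filters on places_this_article_should_appear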
|
kanagasabapathi/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Lib/logging/config.py
|
45
|
# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Configuration functions for the logging package for Python. The core package
is based on PEP 282 and comments thereto in comp.lang.python, and influenced
by Apache's log4j system.
Copyright (C) 2001-2010 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
import sys, logging, logging.handlers, socket, struct, os, traceback, re
import types, io
try:
import _thread as thread
import threading
except ImportError:
thread = None
from socketserver import ThreadingTCPServer, StreamRequestHandler
DEFAULT_LOGGING_CONFIG_PORT = 9030
if sys.platform == "win32":
RESET_ERROR = 10054 #WSAECONNRESET
else:
RESET_ERROR = 104 #ECONNRESET
#
# The following code implements a socket listener for on-the-fly
# reconfiguration of logging.
#
# _listener holds the server object doing the listening
_listener = None
def fileConfig(fname, defaults=None, disable_existing_loggers=True):
"""
Read the logging configuration from a ConfigParser-format file.
This can be called several times from an application, allowing an end user
the ability to select from various pre-canned configurations (if the
developer provides a mechanism to present the choices and load the chosen
configuration).
"""
import configparser
cp = configparser.ConfigParser(defaults)
if hasattr(fname, 'readline'):
cp.read_file(fname)
else:
cp.read(fname)
formatters = _create_formatters(cp)
# critical section
logging._acquireLock()
try:
logging._handlers.clear()
del logging._handlerList[:]
# Handlers add themselves to logging._handlers
handlers = _install_handlers(cp, formatters)
_install_loggers(cp, handlers, disable_existing_loggers)
finally:
logging._releaseLock()
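# A minimal sketch of an INI file accepted by fileConfig() (illustrative
# only; the section and key names follow the format parsed by
# _create_formatters/_install_handlers/_install_loggers below):
#
#     [loggers]
#     keys=root
#
#     [handlers]
#     keys=console
#
#     [formatters]
#     keys=simple
#
#     [logger_root]
#     level=DEBUG
#     handlers=console
#
#     [handler_console]
#     class=StreamHandler
#     level=DEBUG
#     formatter=simple
#     args=(sys.stdout,)
#
#     [formatter_simple]
#     format=%(asctime)s %(levelname)s %(message)s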
def _resolve(name):
"""Resolve a dotted name to a global object."""
name = name.split('.')
used = name.pop(0)
found = __import__(used)
for n in name:
used = used + '.' + n
try:
found = getattr(found, n)
except AttributeError:
__import__(used)
found = getattr(found, n)
return found
def _strip_spaces(alist):
return map(lambda x: x.strip(), alist)
def _encoded(s):
return s if isinstance(s, str) else s.encode('utf-8')
def _create_formatters(cp):
"""Create and return formatters"""
flist = cp["formatters"]["keys"]
if not len(flist):
return {}
flist = flist.split(",")
flist = _strip_spaces(flist)
formatters = {}
for form in flist:
sectname = "formatter_%s" % form
fs = cp.get(sectname, "format", raw=True, fallback=None)
dfs = cp.get(sectname, "datefmt", raw=True, fallback=None)
c = logging.Formatter
class_name = cp[sectname].get("class")
if class_name:
c = _resolve(class_name)
f = c(fs, dfs)
formatters[form] = f
return formatters
def _install_handlers(cp, formatters):
"""Install and return handlers"""
hlist = cp["handlers"]["keys"]
if not len(hlist):
return {}
hlist = hlist.split(",")
hlist = _strip_spaces(hlist)
handlers = {}
fixups = [] #for inter-handler references
for hand in hlist:
section = cp["handler_%s" % hand]
klass = section["class"]
fmt = section.get("formatter", "")
try:
klass = eval(klass, vars(logging))
except (AttributeError, NameError):
klass = _resolve(klass)
args = section["args"]
args = eval(args, vars(logging))
h = klass(*args)
if "level" in section:
level = section["level"]
h.setLevel(logging._levelNames[level])
if len(fmt):
h.setFormatter(formatters[fmt])
if issubclass(klass, logging.handlers.MemoryHandler):
target = section.get("target", "")
if len(target): #the target handler may not be loaded yet, so keep for later...
fixups.append((h, target))
handlers[hand] = h
#now all handlers are loaded, fixup inter-handler references...
for h, t in fixups:
h.setTarget(handlers[t])
return handlers
def _handle_existing_loggers(existing, child_loggers, disable_existing):
"""
When (re)configuring logging, handle loggers which were in the previous
configuration but are not in the new configuration. There's no point
deleting them as other threads may continue to hold references to them;
and by disabling them, you stop them doing any logging.
However, don't disable children of named loggers, as that's probably not
what was intended by the user. Also, allow existing loggers to NOT be
disabled if disable_existing is false.
"""
root = logging.root
for log in existing:
logger = root.manager.loggerDict[log]
if log in child_loggers:
logger.level = logging.NOTSET
logger.handlers = []
logger.propagate = True
elif disable_existing:
logger.disabled = True
def _install_loggers(cp, handlers, disable_existing):
"""Create and install loggers"""
# configure the root first
llist = cp["loggers"]["keys"]
llist = llist.split(",")
llist = list(map(lambda x: x.strip(), llist))
llist.remove("root")
section = cp["logger_root"]
root = logging.root
log = root
if "level" in section:
level = section["level"]
log.setLevel(logging._levelNames[level])
for h in root.handlers[:]:
root.removeHandler(h)
hlist = section["handlers"]
if len(hlist):
hlist = hlist.split(",")
hlist = _strip_spaces(hlist)
for hand in hlist:
log.addHandler(handlers[hand])
#and now the others...
#we don't want to lose the existing loggers,
#since other threads may have pointers to them.
#existing is set to contain all existing loggers,
#and as we go through the new configuration we
#remove any which are configured. At the end,
#what's left in existing is the set of loggers
#which were in the previous configuration but
#which are not in the new configuration.
existing = list(root.manager.loggerDict.keys())
#The list needs to be sorted so that we can
#avoid disabling child loggers of explicitly
#named loggers. With a sorted list it is easier
#to find the child loggers.
existing.sort(key=_encoded)
#We'll keep the list of existing loggers
#which are children of named loggers here...
child_loggers = []
#now set up the new ones...
for log in llist:
section = cp["logger_%s" % log]
qn = section["qualname"]
propagate = section.getint("propagate", fallback=1)
logger = logging.getLogger(qn)
if qn in existing:
i = existing.index(qn) + 1 # start with the entry after qn
prefixed = qn + "."
pflen = len(prefixed)
num_existing = len(existing)
while i < num_existing:
if existing[i][:pflen] == prefixed:
child_loggers.append(existing[i])
i += 1
existing.remove(qn)
if "level" in section:
level = section["level"]
logger.setLevel(logging._levelNames[level])
for h in logger.handlers[:]:
logger.removeHandler(h)
logger.propagate = propagate
logger.disabled = 0
hlist = section["handlers"]
if len(hlist):
hlist = hlist.split(",")
hlist = _strip_spaces(hlist)
for hand in hlist:
logger.addHandler(handlers[hand])
#Disable any old loggers. There's no point deleting
#them as other threads may continue to hold references
#and by disabling them, you stop them doing any logging.
#However, don't disable children of named loggers, as that's
#probably not what was intended by the user.
#for log in existing:
# logger = root.manager.loggerDict[log]
# if log in child_loggers:
# logger.level = logging.NOTSET
# logger.handlers = []
# logger.propagate = 1
# elif disable_existing_loggers:
# logger.disabled = 1
_handle_existing_loggers(existing, child_loggers, disable_existing)
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
def valid_ident(s):
m = IDENTIFIER.match(s)
if not m:
raise ValueError('Not a valid Python identifier: %r' % s)
return True
# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.
class ConvertingDict(dict):
"""A converting dictionary wrapper."""
def __getitem__(self, key):
value = dict.__getitem__(self, key)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def get(self, key, default=None):
value = dict.get(self, key, default)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def pop(self, key, default=None):
value = dict.pop(self, key, default)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
class ConvertingList(list):
"""A converting list wrapper."""
def __getitem__(self, key):
value = list.__getitem__(self, key)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def pop(self, idx=-1):
value = list.pop(self, idx)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
return result
class ConvertingTuple(tuple):
"""A converting tuple wrapper."""
def __getitem__(self, key):
value = tuple.__getitem__(self, key)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
class BaseConfigurator(object):
"""
The configurator base class which defines some useful defaults.
"""
CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
DIGIT_PATTERN = re.compile(r'^\d+$')
value_converters = {
'ext' : 'ext_convert',
'cfg' : 'cfg_convert',
}
# We might want to use a different one, e.g. importlib
importer = staticmethod(__import__)
def __init__(self, config):
self.config = ConvertingDict(config)
self.config.configurator = self
def resolve(self, s):
"""
Resolve strings to objects using standard import and attribute
syntax.
"""
name = s.split('.')
used = name.pop(0)
try:
found = self.importer(used)
for frag in name:
used += '.' + frag
try:
found = getattr(found, frag)
except AttributeError:
self.importer(used)
found = getattr(found, frag)
return found
except ImportError:
e, tb = sys.exc_info()[1:]
v = ValueError('Cannot resolve %r: %s' % (s, e))
v.__cause__, v.__traceback__ = e, tb
raise v
def ext_convert(self, value):
"""Default converter for the ext:// protocol."""
return self.resolve(value)
def cfg_convert(self, value):
"""Default converter for the cfg:// protocol."""
rest = value
m = self.WORD_PATTERN.match(rest)
if m is None:
raise ValueError("Unable to convert %r" % value)
else:
rest = rest[m.end():]
d = self.config[m.groups()[0]]
#print d, rest
while rest:
m = self.DOT_PATTERN.match(rest)
if m:
d = d[m.groups()[0]]
else:
m = self.INDEX_PATTERN.match(rest)
if m:
idx = m.groups()[0]
if not self.DIGIT_PATTERN.match(idx):
d = d[idx]
else:
try:
n = int(idx) # try as number first (most likely)
d = d[n]
except TypeError:
d = d[idx]
if m:
rest = rest[m.end():]
else:
raise ValueError('Unable to convert '
'%r at %r' % (value, rest))
#rest should be empty
return d
def convert(self, value):
"""
Convert values to an appropriate type. dicts, lists and tuples are
replaced by their converting alternatives. Strings are checked to
see if they have a conversion format and are converted if they do.
"""
if not isinstance(value, ConvertingDict) and isinstance(value, dict):
value = ConvertingDict(value)
value.configurator = self
elif not isinstance(value, ConvertingList) and isinstance(value, list):
value = ConvertingList(value)
value.configurator = self
elif not isinstance(value, ConvertingTuple) and\
isinstance(value, tuple):
value = ConvertingTuple(value)
value.configurator = self
elif isinstance(value, str): # str for py3k
m = self.CONVERT_PATTERN.match(value)
if m:
d = m.groupdict()
prefix = d['prefix']
converter = self.value_converters.get(prefix, None)
if converter:
suffix = d['suffix']
converter = getattr(self, converter)
value = converter(suffix)
return value
def configure_custom(self, config):
"""Configure an object with a user-supplied factory."""
c = config.pop('()')
if not hasattr(c, '__call__'):
c = self.resolve(c)
props = config.pop('.', None)
# Check for valid identifiers
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
result = c(**kwargs)
if props:
for name, value in props.items():
setattr(result, name, value)
return result
def as_tuple(self, value):
"""Utility function which converts lists to tuples."""
if isinstance(value, list):
value = tuple(value)
return value
class DictConfigurator(BaseConfigurator):
"""
Configure logging using a dictionary-like object to describe the
configuration.
"""
def configure(self):
"""Do the configuration."""
config = self.config
if 'version' not in config:
raise ValueError("dictionary doesn't specify a version")
if config['version'] != 1:
raise ValueError("Unsupported version: %s" % config['version'])
incremental = config.pop('incremental', False)
EMPTY_DICT = {}
logging._acquireLock()
try:
if incremental:
handlers = config.get('handlers', EMPTY_DICT)
for name in handlers:
if name not in logging._handlers:
raise ValueError('No handler found with '
'name %r' % name)
else:
try:
handler = logging._handlers[name]
handler_config = handlers[name]
level = handler_config.get('level', None)
if level:
handler.setLevel(logging._checkLevel(level))
except Exception as e:
raise ValueError('Unable to configure handler '
'%r: %s' % (name, e))
loggers = config.get('loggers', EMPTY_DICT)
for name in loggers:
try:
self.configure_logger(name, loggers[name], True)
except Exception as e:
raise ValueError('Unable to configure logger '
'%r: %s' % (name, e))
root = config.get('root', None)
if root:
try:
self.configure_root(root, True)
except Exception as e:
raise ValueError('Unable to configure root '
'logger: %s' % e)
else:
disable_existing = config.pop('disable_existing_loggers', True)
logging._handlers.clear()
del logging._handlerList[:]
# Do formatters first - they don't refer to anything else
formatters = config.get('formatters', EMPTY_DICT)
for name in formatters:
try:
formatters[name] = self.configure_formatter(
formatters[name])
except Exception as e:
raise ValueError('Unable to configure '
'formatter %r: %s' % (name, e))
# Next, do filters - they don't refer to anything else, either
filters = config.get('filters', EMPTY_DICT)
for name in filters:
try:
filters[name] = self.configure_filter(filters[name])
except Exception as e:
raise ValueError('Unable to configure '
'filter %r: %s' % (name, e))
# Next, do handlers - they refer to formatters and filters
# As handlers can refer to other handlers, sort the keys
# to allow a deterministic order of configuration
handlers = config.get('handlers', EMPTY_DICT)
for name in sorted(handlers):
try:
handler = self.configure_handler(handlers[name])
handler.name = name
handlers[name] = handler
except Exception as e:
raise ValueError('Unable to configure handler '
'%r: %s' % (name, e))
# Next, do loggers - they refer to handlers and filters
#we don't want to lose the existing loggers,
#since other threads may have pointers to them.
#existing is set to contain all existing loggers,
#and as we go through the new configuration we
#remove any which are configured. At the end,
#what's left in existing is the set of loggers
#which were in the previous configuration but
#which are not in the new configuration.
root = logging.root
existing = list(root.manager.loggerDict.keys())
#The list needs to be sorted so that we can
#avoid disabling child loggers of explicitly
#named loggers. With a sorted list it is easier
#to find the child loggers.
existing.sort(key=_encoded)
#We'll keep the list of existing loggers
#which are children of named loggers here...
child_loggers = []
#now set up the new ones...
loggers = config.get('loggers', EMPTY_DICT)
for name in loggers:
if name in existing:
i = existing.index(name) + 1 # look after name
prefixed = name + "."
pflen = len(prefixed)
num_existing = len(existing)
while i < num_existing:
if existing[i][:pflen] == prefixed:
child_loggers.append(existing[i])
i += 1
existing.remove(name)
try:
self.configure_logger(name, loggers[name])
except Exception as e:
raise ValueError('Unable to configure logger '
'%r: %s' % (name, e))
#Disable any old loggers. There's no point deleting
#them as other threads may continue to hold references
#and by disabling them, you stop them doing any logging.
#However, don't disable children of named loggers, as that's
#probably not what was intended by the user.
#for log in existing:
# logger = root.manager.loggerDict[log]
# if log in child_loggers:
# logger.level = logging.NOTSET
# logger.handlers = []
# logger.propagate = True
# elif disable_existing:
# logger.disabled = True
_handle_existing_loggers(existing, child_loggers,
disable_existing)
# And finally, do the root logger
root = config.get('root', None)
if root:
try:
self.configure_root(root)
except Exception as e:
raise ValueError('Unable to configure root '
'logger: %s' % e)
finally:
logging._releaseLock()
def configure_formatter(self, config):
"""Configure a formatter from a dictionary."""
if '()' in config:
factory = config['()'] # for use in exception handler
try:
result = self.configure_custom(config)
except TypeError as te:
if "'format'" not in str(te):
raise
#Name of parameter changed from fmt to format.
#Retry with old name.
#This is so that code can be used with older Python versions
#(e.g. by Django)
config['fmt'] = config.pop('format')
config['()'] = factory
result = self.configure_custom(config)
else:
fmt = config.get('format', None)
dfmt = config.get('datefmt', None)
result = logging.Formatter(fmt, dfmt)
return result
def configure_filter(self, config):
"""Configure a filter from a dictionary."""
if '()' in config:
result = self.configure_custom(config)
else:
name = config.get('name', '')
result = logging.Filter(name)
return result
def add_filters(self, filterer, filters):
"""Add filters to a filterer from a list of names."""
for f in filters:
try:
filterer.addFilter(self.config['filters'][f])
except Exception as e:
raise ValueError('Unable to add filter %r: %s' % (f, e))
def configure_handler(self, config):
"""Configure a handler from a dictionary."""
formatter = config.pop('formatter', None)
if formatter:
try:
formatter = self.config['formatters'][formatter]
except Exception as e:
raise ValueError('Unable to set formatter '
'%r: %s' % (formatter, e))
level = config.pop('level', None)
filters = config.pop('filters', None)
if '()' in config:
c = config.pop('()')
if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
c = self.resolve(c)
factory = c
else:
klass = self.resolve(config.pop('class'))
#Special case for handler which refers to another handler
if issubclass(klass, logging.handlers.MemoryHandler) and\
'target' in config:
try:
config['target'] = self.config['handlers'][config['target']]
except Exception as e:
raise ValueError('Unable to set target handler '
'%r: %s' % (config['target'], e))
elif issubclass(klass, logging.handlers.SMTPHandler) and\
'mailhost' in config:
config['mailhost'] = self.as_tuple(config['mailhost'])
elif issubclass(klass, logging.handlers.SysLogHandler) and\
'address' in config:
config['address'] = self.as_tuple(config['address'])
factory = klass
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
try:
result = factory(**kwargs)
except TypeError as te:
if "'stream'" not in str(te):
raise
#The argument name changed from strm to stream
#Retry with old name.
#This is so that code can be used with older Python versions
#(e.g. by Django)
kwargs['strm'] = kwargs.pop('stream')
result = factory(**kwargs)
if formatter:
result.setFormatter(formatter)
if level is not None:
result.setLevel(logging._checkLevel(level))
if filters:
self.add_filters(result, filters)
return result
def add_handlers(self, logger, handlers):
"""Add handlers to a logger from a list of names."""
for h in handlers:
try:
logger.addHandler(self.config['handlers'][h])
except Exception as e:
raise ValueError('Unable to add handler %r: %s' % (h, e))
def common_logger_config(self, logger, config, incremental=False):
"""
Perform configuration which is common to root and non-root loggers.
"""
level = config.get('level', None)
if level is not None:
logger.setLevel(logging._checkLevel(level))
if not incremental:
#Remove any existing handlers
for h in logger.handlers[:]:
logger.removeHandler(h)
handlers = config.get('handlers', None)
if handlers:
self.add_handlers(logger, handlers)
filters = config.get('filters', None)
if filters:
self.add_filters(logger, filters)
def configure_logger(self, name, config, incremental=False):
"""Configure a non-root logger from a dictionary."""
logger = logging.getLogger(name)
self.common_logger_config(logger, config, incremental)
propagate = config.get('propagate', None)
if propagate is not None:
logger.propagate = propagate
def configure_root(self, config, incremental=False):
"""Configure a root logger from a dictionary."""
root = logging.getLogger()
self.common_logger_config(root, config, incremental)
dictConfigClass = DictConfigurator
def dictConfig(config):
"""Configure logging using a dictionary."""
dictConfigClass(config).configure()
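# A minimal sketch of a dictConfig() schema (illustrative only):
#
#     dictConfig({
#         'version': 1,
#         'formatters': {'simple': {'format': '%(levelname)s %(message)s'}},
#         'handlers': {'console': {'class': 'logging.StreamHandler',
#                                  'formatter': 'simple',
#                                  'level': 'DEBUG'}},
#         'root': {'handlers': ['console'], 'level': 'DEBUG'},
#     })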
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
"""
Start up a socket server on the specified port, and listen for new
configurations.
These will be sent as a file suitable for processing by fileConfig().
Returns a Thread object on which you can call start() to start the server,
and which you can join() when appropriate. To stop the server, call
stopListening().
"""
if not thread:
raise NotImplementedError("listen() needs threading to work")
class ConfigStreamHandler(StreamRequestHandler):
"""
Handler for a logging configuration request.
It expects a completely new logging configuration and uses fileConfig
to install it.
"""
def handle(self):
"""
Handle a request.
Each request is expected to be a 4-byte length, packed using
struct.pack(">L", n), followed by the config file.
Uses fileConfig() to do the grunt work.
"""
import tempfile
try:
conn = self.connection
chunk = conn.recv(4)
if len(chunk) == 4:
slen = struct.unpack(">L", chunk)[0]
chunk = self.connection.recv(slen)
while len(chunk) < slen:
chunk = chunk + conn.recv(slen - len(chunk))
chunk = chunk.decode("utf-8")
try:
import json
                        d = json.loads(chunk)
assert isinstance(d, dict)
dictConfig(d)
except:
#Apply new configuration.
file = io.StringIO(chunk)
try:
fileConfig(file)
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
if self.server.ready:
self.server.ready.set()
except socket.error as e:
if not isinstance(e.args, tuple):
raise
else:
errcode = e.args[0]
if errcode != RESET_ERROR:
raise
class ConfigSocketReceiver(ThreadingTCPServer):
"""
A simple TCP socket-based logging config receiver.
"""
allow_reuse_address = 1
def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
handler=None, ready=None):
ThreadingTCPServer.__init__(self, (host, port), handler)
logging._acquireLock()
self.abort = 0
logging._releaseLock()
self.timeout = 1
self.ready = ready
def serve_until_stopped(self):
import select
abort = 0
while not abort:
rd, wr, ex = select.select([self.socket.fileno()],
[], [],
self.timeout)
if rd:
self.handle_request()
logging._acquireLock()
abort = self.abort
logging._releaseLock()
self.socket.close()
class Server(threading.Thread):
def __init__(self, rcvr, hdlr, port):
super(Server, self).__init__()
self.rcvr = rcvr
self.hdlr = hdlr
self.port = port
self.ready = threading.Event()
def run(self):
server = self.rcvr(port=self.port, handler=self.hdlr,
ready=self.ready)
if self.port == 0:
self.port = server.server_address[1]
self.ready.set()
global _listener
logging._acquireLock()
_listener = server
logging._releaseLock()
server.serve_until_stopped()
return Server(ConfigSocketReceiver, ConfigStreamHandler, port)
def stopListening():
"""
Stop the listening server which was created with a call to listen().
"""
global _listener
logging._acquireLock()
try:
if _listener:
_listener.abort = 1
_listener = None
finally:
logging._releaseLock()
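# A minimal usage sketch for the listener above (illustrative only; the
# client must send a 4-byte big-endian length followed by the config text,
# which is what ConfigStreamHandler.handle() expects):
#
#     t = listen(DEFAULT_LOGGING_CONFIG_PORT)
#     t.start()
#     ...                    # send struct.pack('>L', len(cfg)) + cfg to the port
#     stopListening()
#     t.join()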
|
apixandru/intellij-community
|
refs/heads/master
|
python/testData/addImport/ignoreImportedAsModule.after.py
|
35
|
import numpy
import numpy as np
np.ones(3)
numpy.asarray([1,2,3]) # <-- invoke qfix here
|
tgsmith61591/pyramid
|
refs/heads/master
|
pmdarima/preprocessing/__init__.py
|
1
|
# -*- coding: utf-8 -*-
from .base import *
from .endog import *
from .exog import *
__all__ = [s for s in dir() if not s.startswith("_")]
|
crosswalk-project/chromium-crosswalk-efl
|
refs/heads/efl/crosswalk-10/39.0.2171.19
|
tools/telemetry/telemetry/core/backends/webdriver/webdriver_tab_backend.py
|
33
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from telemetry.core import bitmap
class WebDriverTabBackend(object):
def __init__(self, browser_backend, window_handle):
self._browser_backend = browser_backend
self._window_handle = window_handle
@property
def browser(self):
return self._browser_backend.browser
@property
def window_handle(self):
return self._window_handle
@property
def url(self):
self._browser_backend.driver.switch_to_window(self._window_handle)
return self._browser_backend.driver.current_url
def Activate(self):
# Webdriver doesn't support tab control.
raise NotImplementedError()
def Close(self):
self._browser_backend.driver.switch_to_window(self._window_handle)
self._browser_backend.driver.close()
def WaitForDocumentReadyStateToBeComplete(self, timeout=None):
# TODO(chrisgao): Double check of document state.
pass
def WaitForDocumentReadyStateToBeInteractiveOrBetter(self, timeout=None):
# TODO(chrisgao): Double check of document state.
pass
@property
def screenshot_supported(self):
return True
def Screenshot(self, timeout=None): # pylint: disable=W0613
if timeout:
logging.warning('timeout is not supported')
self._browser_backend.driver.switch_to_window(self._window_handle)
snap = self._browser_backend.driver.get_screenshot_as_base64()
if snap:
return bitmap.Bitmap.FromPng(snap)
return None
@property
def message_output_stream(self):
# Webdriver has no API for grabbing console messages.
raise NotImplementedError()
@message_output_stream.setter
def message_output_stream(self, stream):
raise NotImplementedError()
def GetDOMStats(self, timeout=None):
# Webdriver has no API for DOM status.
raise NotImplementedError()
def WaitForNavigate(self):
raise NotImplementedError()
def Navigate(self, url, script_to_evaluate_on_commit=None, timeout=None):
if script_to_evaluate_on_commit:
raise NotImplementedError('script_to_evaluate_on_commit is NOT supported')
self._browser_backend.driver.switch_to_window(self._window_handle)
if timeout:
self._browser_backend.driver.set_page_load_timeout(timeout * 1000)
self._browser_backend.driver.get(url)
def GetCookieByName(self, name, timeout=None):
if timeout:
logging.warning('timeout is not supported')
self._browser_backend.driver.switch_to_window(self._window_handle)
cookie = self._browser_backend.driver.get_cookie(name)
if cookie:
return cookie['value']
return None
def ExecuteJavaScript(self, expr, timeout=None):
self._browser_backend.driver.switch_to_window(self._window_handle)
if timeout:
logging.warning('timeout is not supported')
self._browser_backend.driver.execute_script(expr)
def EvaluateJavaScript(self, expr, timeout=None):
self._browser_backend.driver.switch_to_window(self._window_handle)
if timeout:
logging.warning('timeout is not supported')
return self._browser_backend.driver.execute_script(
'return eval(\'%s\')' % expr.replace('\'', '\\\'').replace('\n', ' '))
@property
def timeline_model(self):
# IE/Firefox has no timeline.
raise NotImplementedError()
@property
def id(self):
raise NotImplementedError()
def StartTimelineRecording(self):
raise NotImplementedError()
def StopTimelineRecording(self):
raise NotImplementedError()
def ClearCache(self):
# Can't find a way to clear cache of a tab in IE/Firefox.
raise NotImplementedError()
|
laborautonomo/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/vimeo.py
|
4
|
# encoding: utf-8
from __future__ import unicode_literals
import json
import re
import itertools
from .common import InfoExtractor
from .subtitles import SubtitlesInfoExtractor
from ..utils import (
compat_HTTPError,
compat_urllib_parse,
compat_urllib_request,
clean_html,
get_element_by_attribute,
ExtractorError,
RegexNotFoundError,
std_headers,
unsmuggle_url,
)
class VimeoIE(SubtitlesInfoExtractor):
"""Information extractor for vimeo.com."""
# _VALID_URL matches Vimeo URLs
_VALID_URL = r'''(?x)
(?P<proto>(?:https?:)?//)?
(?:(?:www|(?P<player>player))\.)?
vimeo(?P<pro>pro)?\.com/
(?:.*?/)?
(?:(?:play_redirect_hls|moogaloop\.swf)\?clip_id=)?
(?:videos?/)?
(?P<id>[0-9]+)
/?(?:[?&].*)?(?:[#].*)?$'''
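    # Example URLs accepted by _VALID_URL (drawn from the _TESTS below):
    #   http://vimeo.com/56015672#at=0
    #   http://vimeopro.com/openstreetmapus/state-of-the-map-us-2013/video/68093876
    #   http://player.vimeo.com/video/54469442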
_NETRC_MACHINE = 'vimeo'
IE_NAME = 'vimeo'
_TESTS = [
{
'url': 'http://vimeo.com/56015672#at=0',
'md5': '8879b6cc097e987f02484baf890129e5',
'info_dict': {
'id': '56015672',
'ext': 'mp4',
"upload_date": "20121220",
"description": "This is a test case for youtube-dl.\nFor more information, see github.com/rg3/youtube-dl\nTest chars: \u2605 \" ' \u5e78 / \\ \u00e4 \u21ad \U0001d550",
"uploader_id": "user7108434",
"uploader": "Filippo Valsorda",
"title": "youtube-dl test video - \u2605 \" ' \u5e78 / \\ \u00e4 \u21ad \U0001d550",
},
},
{
'url': 'http://vimeopro.com/openstreetmapus/state-of-the-map-us-2013/video/68093876',
'file': '68093876.mp4',
'md5': '3b5ca6aa22b60dfeeadf50b72e44ed82',
'note': 'Vimeo Pro video (#1197)',
'info_dict': {
'uploader_id': 'openstreetmapus',
'uploader': 'OpenStreetMap US',
'title': 'Andy Allan - Putting the Carto into OpenStreetMap Cartography',
},
},
{
'url': 'http://player.vimeo.com/video/54469442',
'file': '54469442.mp4',
'md5': '619b811a4417aa4abe78dc653becf511',
'note': 'Videos that embed the url in the player page',
'info_dict': {
'title': 'Kathy Sierra: Building the minimum Badass User, Business of Software',
'uploader': 'The BLN & Business of Software',
'uploader_id': 'theblnbusinessofsoftware',
},
},
{
'url': 'http://vimeo.com/68375962',
'file': '68375962.mp4',
'md5': 'aaf896bdb7ddd6476df50007a0ac0ae7',
'note': 'Video protected with password',
'info_dict': {
'title': 'youtube-dl password protected test video',
'upload_date': '20130614',
'uploader_id': 'user18948128',
'uploader': 'Jaime Marquínez Ferrándiz',
},
'params': {
'videopassword': 'youtube-dl',
},
},
{
'url': 'http://vimeo.com/76979871',
'md5': '3363dd6ffebe3784d56f4132317fd446',
'note': 'Video with subtitles',
'info_dict': {
'id': '76979871',
'ext': 'mp4',
'title': 'The New Vimeo Player (You Know, For Videos)',
'description': 'md5:2ec900bf97c3f389378a96aee11260ea',
'upload_date': '20131015',
'uploader_id': 'staff',
'uploader': 'Vimeo Staff',
}
},
]
@classmethod
def suitable(cls, url):
if VimeoChannelIE.suitable(url):
# Otherwise channel urls like http://vimeo.com/channels/31259 would
# match
return False
else:
return super(VimeoIE, cls).suitable(url)
def _login(self):
(username, password) = self._get_login_info()
if username is None:
return
self.report_login()
login_url = 'https://vimeo.com/log_in'
webpage = self._download_webpage(login_url, None, False)
token = self._search_regex(r'xsrft: \'(.*?)\'', webpage, 'login token')
data = compat_urllib_parse.urlencode({'email': username,
'password': password,
'action': 'login',
'service': 'vimeo',
'token': token,
})
login_request = compat_urllib_request.Request(login_url, data)
login_request.add_header('Content-Type', 'application/x-www-form-urlencoded')
login_request.add_header('Cookie', 'xsrft=%s' % token)
self._download_webpage(login_request, None, False, 'Wrong login info')
def _verify_video_password(self, url, video_id, webpage):
password = self._downloader.params.get('videopassword', None)
if password is None:
raise ExtractorError('This video is protected by a password, use the --video-password option')
token = self._search_regex(r'xsrft: \'(.*?)\'', webpage, 'login token')
data = compat_urllib_parse.urlencode({'password': password,
'token': token})
# I didn't manage to use the password with https
if url.startswith('https'):
pass_url = url.replace('https','http')
else:
pass_url = url
password_request = compat_urllib_request.Request(pass_url+'/password', data)
password_request.add_header('Content-Type', 'application/x-www-form-urlencoded')
password_request.add_header('Cookie', 'xsrft=%s' % token)
self._download_webpage(password_request, video_id,
'Verifying the password',
'Wrong password')
def _verify_player_video_password(self, url, video_id):
password = self._downloader.params.get('videopassword', None)
if password is None:
raise ExtractorError('This video is protected by a password, use the --video-password option')
data = compat_urllib_parse.urlencode({'password': password})
pass_url = url + '/check-password'
password_request = compat_urllib_request.Request(pass_url, data)
password_request.add_header('Content-Type', 'application/x-www-form-urlencoded')
return self._download_json(
password_request, video_id,
'Verifying the password',
'Wrong password')
def _real_initialize(self):
self._login()
def _real_extract(self, url):
url, data = unsmuggle_url(url)
headers = std_headers
if data is not None:
headers = headers.copy()
headers.update(data)
# Extract ID from URL
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
if mobj.group('pro') or mobj.group('player'):
url = 'http://player.vimeo.com/video/' + video_id
else:
url = 'https://vimeo.com/' + video_id
# Retrieve video webpage to extract further information
request = compat_urllib_request.Request(url, None, headers)
try:
webpage = self._download_webpage(request, video_id)
except ExtractorError as ee:
if isinstance(ee.cause, compat_HTTPError) and ee.cause.code == 403:
errmsg = ee.cause.read()
if b'Because of its privacy settings, this video cannot be played here' in errmsg:
raise ExtractorError(
'Cannot download embed-only video without embedding '
'URL. Please call youtube-dl with the URL of the page '
'that embeds this video.',
expected=True)
raise
# Now we begin extracting as much information as we can from what we
# retrieved. First we extract the information common to all extractors,
        # and later we extract those that are Vimeo specific.
self.report_extraction(video_id)
# Extract the config JSON
try:
try:
config_url = self._html_search_regex(
r' data-config-url="(.+?)"', webpage, 'config URL')
config_json = self._download_webpage(config_url, video_id)
config = json.loads(config_json)
except RegexNotFoundError:
# For pro videos or player.vimeo.com urls
                # We try to find out which variable the config dict is assigned to
m_variable_name = re.search('(\w)\.video\.id', webpage)
if m_variable_name is not None:
config_re = r'%s=({.+?});' % re.escape(m_variable_name.group(1))
else:
config_re = [r' = {config:({.+?}),assets:', r'(?:[abc])=({.+?});']
config = self._search_regex(config_re, webpage, 'info section',
flags=re.DOTALL)
config = json.loads(config)
except Exception as e:
if re.search('The creator of this video has not given you permission to embed it on this domain.', webpage):
raise ExtractorError('The author has restricted the access to this video, try with the "--referer" option')
if re.search('<form[^>]+?id="pw_form"', webpage) is not None:
self._verify_video_password(url, video_id, webpage)
return self._real_extract(url)
else:
raise ExtractorError('Unable to extract info section',
cause=e)
else:
if config.get('view') == 4:
config = self._verify_player_video_password(url, video_id)
# Extract title
video_title = config["video"]["title"]
# Extract uploader and uploader_id
video_uploader = config["video"]["owner"]["name"]
video_uploader_id = config["video"]["owner"]["url"].split('/')[-1] if config["video"]["owner"]["url"] else None
# Extract video thumbnail
video_thumbnail = config["video"].get("thumbnail")
if video_thumbnail is None:
video_thumbs = config["video"].get("thumbs")
if video_thumbs and isinstance(video_thumbs, dict):
_, video_thumbnail = sorted((int(width), t_url) for (width, t_url) in video_thumbs.items())[-1]
# Extract video description
video_description = None
try:
video_description = get_element_by_attribute("itemprop", "description", webpage)
if video_description: video_description = clean_html(video_description)
except AssertionError as err:
# On some pages like (http://player.vimeo.com/video/54469442) the
# html tags are not closed, python 2.6 cannot handle it
if err.args[0] == 'we should not get here!':
pass
else:
raise
# Extract upload date
video_upload_date = None
mobj = re.search(r'<meta itemprop="dateCreated" content="(\d{4})-(\d{2})-(\d{2})T', webpage)
if mobj is not None:
video_upload_date = mobj.group(1) + mobj.group(2) + mobj.group(3)
try:
view_count = int(self._search_regex(r'UserPlays:(\d+)', webpage, 'view count'))
like_count = int(self._search_regex(r'UserLikes:(\d+)', webpage, 'like count'))
comment_count = int(self._search_regex(r'UserComments:(\d+)', webpage, 'comment count'))
except RegexNotFoundError:
# This info is only available in vimeo.com/{id} urls
view_count = None
like_count = None
comment_count = None
# Vimeo specific: extract request signature and timestamp
sig = config['request']['signature']
timestamp = config['request']['timestamp']
# Vimeo specific: extract video codec and quality information
# First consider quality, then codecs, then take everything
codecs = [('vp6', 'flv'), ('vp8', 'flv'), ('h264', 'mp4')]
files = {'hd': [], 'sd': [], 'other': []}
config_files = config["video"].get("files") or config["request"].get("files")
for codec_name, codec_extension in codecs:
for quality in config_files.get(codec_name, []):
format_id = '-'.join((codec_name, quality)).lower()
key = quality if quality in files else 'other'
video_url = None
if isinstance(config_files[codec_name], dict):
file_info = config_files[codec_name][quality]
video_url = file_info.get('url')
else:
file_info = {}
if video_url is None:
video_url = "http://player.vimeo.com/play_redirect?clip_id=%s&sig=%s&time=%s&quality=%s&codecs=%s&type=moogaloop_local&embed_location=" \
%(video_id, sig, timestamp, quality, codec_name.upper())
files[key].append({
'ext': codec_extension,
'url': video_url,
'format_id': format_id,
'width': file_info.get('width'),
'height': file_info.get('height'),
})
formats = []
for key in ('other', 'sd', 'hd'):
formats += files[key]
if len(formats) == 0:
raise ExtractorError('No known codec found')
subtitles = {}
text_tracks = config['request'].get('text_tracks')
if text_tracks:
for tt in text_tracks:
subtitles[tt['lang']] = 'http://vimeo.com' + tt['url']
video_subtitles = self.extract_subtitles(video_id, subtitles)
if self._downloader.params.get('listsubtitles', False):
self._list_available_subtitles(video_id, subtitles)
return
return {
'id': video_id,
'uploader': video_uploader,
'uploader_id': video_uploader_id,
'upload_date': video_upload_date,
'title': video_title,
'thumbnail': video_thumbnail,
'description': video_description,
'formats': formats,
'webpage_url': url,
'view_count': view_count,
'like_count': like_count,
'comment_count': comment_count,
'subtitles': video_subtitles,
}
class VimeoChannelIE(InfoExtractor):
IE_NAME = 'vimeo:channel'
_VALID_URL = r'(?:https?://)?vimeo\.com/channels/(?P<id>[^/]+)/?(\?.*)?$'
_MORE_PAGES_INDICATOR = r'<a.+?rel="next"'
_TITLE_RE = r'<link rel="alternate"[^>]+?title="(.*?)"'
def _page_url(self, base_url, pagenum):
return '%s/videos/page:%d/' % (base_url, pagenum)
def _extract_list_title(self, webpage):
return self._html_search_regex(self._TITLE_RE, webpage, 'list title')
def _extract_videos(self, list_id, base_url):
video_ids = []
for pagenum in itertools.count(1):
webpage = self._download_webpage(
self._page_url(base_url, pagenum) ,list_id,
'Downloading page %s' % pagenum)
video_ids.extend(re.findall(r'id="clip_(\d+?)"', webpage))
if re.search(self._MORE_PAGES_INDICATOR, webpage, re.DOTALL) is None:
break
entries = [self.url_result('http://vimeo.com/%s' % video_id, 'Vimeo')
for video_id in video_ids]
return {'_type': 'playlist',
'id': list_id,
'title': self._extract_list_title(webpage),
'entries': entries,
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
channel_id = mobj.group('id')
return self._extract_videos(channel_id, 'http://vimeo.com/channels/%s' % channel_id)
class VimeoUserIE(VimeoChannelIE):
IE_NAME = 'vimeo:user'
_VALID_URL = r'(?:https?://)?vimeo\.com/(?P<name>[^/]+)(?:/videos|[#?]|$)'
_TITLE_RE = r'<a[^>]+?class="user">([^<>]+?)</a>'
@classmethod
def suitable(cls, url):
if VimeoChannelIE.suitable(url) or VimeoIE.suitable(url) or VimeoAlbumIE.suitable(url) or VimeoGroupsIE.suitable(url):
return False
return super(VimeoUserIE, cls).suitable(url)
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
name = mobj.group('name')
return self._extract_videos(name, 'http://vimeo.com/%s' % name)
class VimeoAlbumIE(VimeoChannelIE):
IE_NAME = 'vimeo:album'
_VALID_URL = r'(?:https?://)?vimeo\.com/album/(?P<id>\d+)'
_TITLE_RE = r'<header id="page_header">\n\s*<h1>(.*?)</h1>'
def _page_url(self, base_url, pagenum):
return '%s/page:%d/' % (base_url, pagenum)
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
album_id = mobj.group('id')
return self._extract_videos(album_id, 'http://vimeo.com/album/%s' % album_id)
class VimeoGroupsIE(VimeoAlbumIE):
IE_NAME = 'vimeo:group'
_VALID_URL = r'(?:https?://)?vimeo\.com/groups/(?P<name>[^/]+)'
def _extract_list_title(self, webpage):
return self._og_search_title(webpage)
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
name = mobj.group('name')
return self._extract_videos(name, 'http://vimeo.com/groups/%s' % name)
class VimeoReviewIE(InfoExtractor):
IE_NAME = 'vimeo:review'
IE_DESC = 'Review pages on vimeo'
_VALID_URL = r'(?:https?://)?vimeo\.com/[^/]+/review/(?P<id>[^/]+)'
_TEST = {
'url': 'https://vimeo.com/user21297594/review/75524534/3c257a1b5d',
'file': '75524534.mp4',
'md5': 'c507a72f780cacc12b2248bb4006d253',
'info_dict': {
'title': "DICK HARDWICK 'Comedian'",
'uploader': 'Richard Hardwick',
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
player_url = 'https://player.vimeo.com/player/' + video_id
return self.url_result(player_url, 'Vimeo', video_id)
|
liikGit/MissionPlanner
|
refs/heads/master
|
Lib/io.py
|
53
|
"""The io module provides the Python interfaces to stream handling. The
builtin open function is defined in this module.
At the top of the I/O hierarchy is the abstract base class IOBase. It
defines the basic interface to a stream. Note, however, that there is no
separation between reading and writing to streams; implementations are
allowed to throw an IOError if they do not support a given operation.
Extending IOBase is RawIOBase which deals simply with the reading and
writing of raw bytes to a stream. FileIO subclasses RawIOBase to provide
an interface to OS files.
BufferedIOBase deals with buffering on a raw byte stream (RawIOBase). Its
subclasses, BufferedWriter, BufferedReader, and BufferedRWPair buffer
streams that are readable, writable, and both respectively.
BufferedRandom provides a buffered interface to random access
streams. BytesIO is a simple stream of in-memory bytes.
Another IOBase subclass, TextIOBase, deals with the encoding and decoding
of streams into text. TextIOWrapper, which extends it, is a buffered text
interface to a buffered raw stream (`BufferedIOBase`). Finally, StringIO
is an in-memory stream for text.
Argument names are not part of the specification, and only the arguments
of open() are intended to be used as keyword arguments.
data:
DEFAULT_BUFFER_SIZE
An int containing the default buffer size used by the module's buffered
I/O classes. open() uses the file's blksize (as obtained by os.stat) if
possible.
"""
# New I/O library conforming to PEP 3116.
# XXX edge cases when switching between reading/writing
# XXX need to support 1 meaning line-buffered
# XXX whenever an argument is None, use the default value
# XXX read/write ops should check readable/writable
# XXX buffered readinto should work with arbitrary buffer objects
# XXX use incremental encoder for text output, at least for UTF-16 and UTF-8-SIG
# XXX check writable, readable and seekable in appropriate places
__author__ = ("Guido van Rossum <guido@python.org>, "
"Mike Verdone <mike.verdone@gmail.com>, "
"Mark Russell <mark.russell@zen.co.uk>, "
"Antoine Pitrou <solipsis@pitrou.net>, "
"Amaury Forgeot d'Arc <amauryfa@gmail.com>, "
"Benjamin Peterson <benjamin@python.org>")
__all__ = ["BlockingIOError", "open", "IOBase", "RawIOBase", "FileIO",
"BytesIO", "StringIO", "BufferedIOBase",
"BufferedReader", "BufferedWriter", "BufferedRWPair",
"BufferedRandom", "TextIOBase", "TextIOWrapper",
"UnsupportedOperation", "SEEK_SET", "SEEK_CUR", "SEEK_END"]
import _io
import abc
from _io import (DEFAULT_BUFFER_SIZE, BlockingIOError, UnsupportedOperation,
open, FileIO, BytesIO, StringIO, BufferedReader,
BufferedWriter, BufferedRWPair, BufferedRandom,
IncrementalNewlineDecoder, TextIOWrapper)
OpenWrapper = _io.open # for compatibility with _pyio
# for seek()
SEEK_SET = 0
SEEK_CUR = 1
SEEK_END = 2
# Declaring ABCs in C is tricky so we do it here.
# Method descriptions and default implementations are inherited from the C
# version however.
class IOBase(_io._IOBase):
__metaclass__ = abc.ABCMeta
class RawIOBase(_io._RawIOBase, IOBase):
pass
class BufferedIOBase(_io._BufferedIOBase, IOBase):
pass
class TextIOBase(_io._TextIOBase, IOBase):
pass
RawIOBase.register(FileIO)
for klass in (BytesIO, BufferedReader, BufferedWriter, BufferedRandom,
BufferedRWPair):
BufferedIOBase.register(klass)
for klass in (StringIO, TextIOWrapper):
TextIOBase.register(klass)
del klass
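# Illustrative sketch (not part of the original module): how the layers described
# in the docstring stack -- an in-memory byte stream standing in for a raw file,
# a buffered reader on top of it, and a text (decoding) layer on top of that.
# Uses only names re-exported by this module.
def _demo_stream_layering():
    raw = BytesIO(b"hello\nworld\n")                  # in-memory byte stream
    buffered = BufferedReader(raw)                    # buffering layer
    text = TextIOWrapper(buffered, encoding="utf-8")  # text layer (bytes -> unicode)
    return text.readline()                            # -> u"hello\n"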
|
astronaut1712/taiga-back
|
refs/heads/master
|
taiga/userstorage/models.py
|
20
|
# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django_pgjson.fields import JsonField
class StorageEntry(models.Model):
owner = models.ForeignKey(settings.AUTH_USER_MODEL, blank=False, null=False,
related_name="storage_entries", verbose_name=_("owner"))
created_date = models.DateTimeField(auto_now_add=True, null=False, blank=False,
verbose_name=_("created date"))
modified_date = models.DateTimeField(auto_now=True, null=False, blank=False,
verbose_name=_("modified date"))
key = models.CharField(max_length=255, null=False, blank=False, verbose_name=_("key"))
value = JsonField(blank=True, default=None, null=True, verbose_name=_("value"))
class Meta:
verbose_name = "storage entry"
verbose_name_plural = "storages entries"
unique_together = ("owner", "key")
ordering = ["owner", "key"]
|
dongritengfei/phantomjs
|
refs/heads/master
|
src/qt/qtwebkit/Tools/QueueStatusServer/model/attachment.py
|
119
|
# Copyright (C) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
from model.queues import Queue
from model.queuestatus import QueueStatus
from model.workitems import WorkItems
class Attachment(object):
@classmethod
def recent(cls, limit=1):
statuses = QueueStatus.all().order("-date")
# Notice that we use both a set and a list here to keep the -date ordering.
ids = []
visited_ids = set()
for status in statuses:
attachment_id = status.active_patch_id
if not attachment_id:
continue
if attachment_id in visited_ids:
continue
visited_ids.add(attachment_id)
ids.append(attachment_id)
if len(visited_ids) >= limit:
break
return map(cls, ids)
def __init__(self, attachment_id):
self.id = attachment_id
self._summary = None
self._cached_queue_positions = None
def summary(self):
if self._summary:
return self._summary
self._summary = self._fetch_summary()
return self._summary
def state_from_queue_status(self, status):
table = {
"Pass" : "pass",
"Fail" : "fail",
}
state = table.get(status.message)
if state:
return state
if status.message.startswith("Error:"):
return "error"
if status:
return "pending"
return None
def position_in_queue(self, queue):
return self._queue_positions().get(queue.name())
def status_for_queue(self, queue):
# summary() is a horrible API and should be killed.
queue_summary = self.summary().get(queue.name_with_underscores())
if not queue_summary:
return None
return queue_summary.get("status")
def bug_id(self):
return self.summary().get("bug_id")
def _queue_positions(self):
if self._cached_queue_positions:
return self._cached_queue_positions
# FIXME: Should we be mem-caching this?
self._cached_queue_positions = self._calculate_queue_positions()
return self._cached_queue_positions
def _calculate_queue_positions(self):
all_work_items = WorkItems.all().fetch(limit=len(Queue.all()))
return dict([(items.queue.name(), items.display_position_for_attachment(self.id)) for items in all_work_items])
# FIXME: This is controller/view code and does not belong in a model.
def _fetch_summary(self):
summary = { "attachment_id" : self.id }
first_status = QueueStatus.all().filter('active_patch_id =', self.id).get()
if not first_status:
# We don't have any record of this attachment.
return summary
summary["bug_id"] = first_status.active_bug_id
for queue in Queue.all():
summary[queue.name_with_underscores()] = None
status = QueueStatus.all().filter('queue_name =', queue.name()).filter('active_patch_id =', self.id).order('-date').get()
if status:
# summary() is a horrible API and should be killed.
summary[queue.name_with_underscores()] = {
"state": self.state_from_queue_status(status),
"status": status,
}
return summary
|
hyowon/servo
|
refs/heads/master
|
tests/wpt/harness/wptrunner/config.py
|
196
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import ConfigParser
import os
import sys
from collections import OrderedDict
here = os.path.split(__file__)[0]
class ConfigDict(dict):
def __init__(self, base_path, *args, **kwargs):
self.base_path = base_path
dict.__init__(self, *args, **kwargs)
def get_path(self, key, default=None):
if key not in self:
return default
path = self[key]
        path = os.path.expanduser(path)
return os.path.abspath(os.path.join(self.base_path, path))
def read(config_path):
config_path = os.path.abspath(config_path)
config_root = os.path.split(config_path)[0]
parser = ConfigParser.SafeConfigParser()
success = parser.read(config_path)
assert config_path in success, success
subns = {"pwd": os.path.abspath(os.path.curdir)}
rv = OrderedDict()
for section in parser.sections():
rv[section] = ConfigDict(config_root)
for key in parser.options(section):
rv[section][key] = parser.get(section, key, False, subns)
return rv
def path(argv=None):
if argv is None:
argv = []
path = None
for i, arg in enumerate(argv):
if arg == "--config":
if i + 1 < len(argv):
path = argv[i + 1]
elif arg.startswith("--config="):
path = arg.split("=", 1)[1]
if path is not None:
break
if path is None:
if os.path.exists("wptrunner.ini"):
path = os.path.abspath("wptrunner.ini")
else:
path = os.path.join(here, "..", "wptrunner.default.ini")
return os.path.abspath(path)
def load():
return read(path(sys.argv))
|
kevalds51/sympy
|
refs/heads/master
|
sympy/physics/quantum/circuitutils.py
|
99
|
"""Primitive circuit operations on quantum circuits."""
from __future__ import print_function, division
from sympy import Symbol, Tuple, Mul, sympify, default_sort_key
from sympy.utilities import numbered_symbols
from sympy.core.compatibility import reduce
from sympy.physics.quantum.gate import Gate
__all__ = [
'kmp_table',
'find_subcircuit',
'replace_subcircuit',
'convert_to_symbolic_indices',
'convert_to_real_indices',
'random_reduce',
'random_insert'
]
def kmp_table(word):
"""Build the 'partial match' table of the Knuth-Morris-Pratt algorithm.
Note: This is applicable to strings or
quantum circuits represented as tuples.
"""
# Current position in subcircuit
pos = 2
# Beginning position of candidate substring that
# may reappear later in word
cnd = 0
# The 'partial match' table that helps one determine
# the next location to start substring search
table = list()
table.append(-1)
table.append(0)
while pos < len(word):
if word[pos - 1] == word[cnd]:
cnd = cnd + 1
table.append(cnd)
pos = pos + 1
elif cnd > 0:
cnd = table[cnd]
else:
table.append(0)
pos = pos + 1
return table
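# Illustrative sketch (not part of the original API): the partial-match table the
# function above builds for the classic pattern 'abcdabd'. For i >= 1, table[i] is
# the length of the longest proper prefix of word[:i] that is also its suffix; the
# leading -1 is the conventional sentinel.
def _kmp_table_example():
    table = kmp_table('abcdabd')
    assert table == [-1, 0, 0, 0, 0, 1, 2]
    return table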
def find_subcircuit(circuit, subcircuit, start=0, end=0):
"""Finds the subcircuit in circuit, if it exists.
If the subcircuit exists, the index of the start of
the subcircuit in circuit is returned; otherwise,
-1 is returned. The algorithm that is implemented
is the Knuth-Morris-Pratt algorithm.
Parameters
==========
circuit : tuple, Gate or Mul
A tuple of Gates or Mul representing a quantum circuit
subcircuit : tuple, Gate or Mul
A tuple of Gates or Mul to find in circuit
start : int
The location to start looking for subcircuit.
If start is the same or past end, -1 is returned.
end : int
The last place to look for a subcircuit. If end
is less than 1 (one), then the length of circuit
is taken to be end.
Examples
========
Find the first instance of a subcircuit:
>>> from sympy.physics.quantum.circuitutils import find_subcircuit
>>> from sympy.physics.quantum.gate import X, Y, Z, H
>>> circuit = X(0)*Z(0)*Y(0)*H(0)
>>> subcircuit = Z(0)*Y(0)
>>> find_subcircuit(circuit, subcircuit)
1
Find the first instance starting at a specific position:
>>> find_subcircuit(circuit, subcircuit, start=1)
1
>>> find_subcircuit(circuit, subcircuit, start=2)
-1
>>> circuit = circuit*subcircuit
>>> find_subcircuit(circuit, subcircuit, start=2)
4
Find the subcircuit within some interval:
>>> find_subcircuit(circuit, subcircuit, start=2, end=2)
-1
"""
if isinstance(circuit, Mul):
circuit = circuit.args
if isinstance(subcircuit, Mul):
subcircuit = subcircuit.args
if len(subcircuit) == 0 or len(subcircuit) > len(circuit):
return -1
if end < 1:
end = len(circuit)
# Location in circuit
pos = start
# Location in the subcircuit
index = 0
# 'Partial match' table
table = kmp_table(subcircuit)
while (pos + index) < end:
if subcircuit[index] == circuit[pos + index]:
index = index + 1
else:
pos = pos + index - table[index]
index = table[index] if table[index] > -1 else 0
if index == len(subcircuit):
return pos
return -1
def replace_subcircuit(circuit, subcircuit, replace=None, pos=0):
"""Replaces a subcircuit with another subcircuit in circuit,
if it exists.
    If multiple instances of subcircuit exist, the first instance is
    replaced. The position to begin searching from (if different from
0) may be optionally given. If subcircuit can't be found, circuit
is returned.
Parameters
==========
circuit : tuple, Gate or Mul
A quantum circuit
subcircuit : tuple, Gate or Mul
The circuit to be replaced
replace : tuple, Gate or Mul
The replacement circuit
pos : int
The location to start search and replace
subcircuit, if it exists. This may be used
if it is known beforehand that multiple
instances exist, and it is desirable to
replace a specific instance. If a negative number
is given, pos will be defaulted to 0.
Examples
========
Find and remove the subcircuit:
>>> from sympy.physics.quantum.circuitutils import replace_subcircuit
>>> from sympy.physics.quantum.gate import X, Y, Z, H
>>> circuit = X(0)*Z(0)*Y(0)*H(0)*X(0)*H(0)*Y(0)
>>> subcircuit = Z(0)*Y(0)
>>> replace_subcircuit(circuit, subcircuit)
(X(0), H(0), X(0), H(0), Y(0))
Remove the subcircuit given a starting search point:
>>> replace_subcircuit(circuit, subcircuit, pos=1)
(X(0), H(0), X(0), H(0), Y(0))
>>> replace_subcircuit(circuit, subcircuit, pos=2)
(X(0), Z(0), Y(0), H(0), X(0), H(0), Y(0))
Replace the subcircuit:
>>> replacement = H(0)*Z(0)
>>> replace_subcircuit(circuit, subcircuit, replace=replacement)
(X(0), H(0), Z(0), H(0), X(0), H(0), Y(0))
"""
if pos < 0:
pos = 0
if isinstance(circuit, Mul):
circuit = circuit.args
if isinstance(subcircuit, Mul):
subcircuit = subcircuit.args
if isinstance(replace, Mul):
replace = replace.args
elif replace is None:
replace = ()
# Look for the subcircuit starting at pos
loc = find_subcircuit(circuit, subcircuit, start=pos)
# If subcircuit was found
if loc > -1:
# Get the gates to the left of subcircuit
left = circuit[0:loc]
# Get the gates to the right of subcircuit
right = circuit[loc + len(subcircuit):len(circuit)]
# Recombine the left and right side gates into a circuit
circuit = left + replace + right
return circuit
def _sympify_qubit_map(mapping):
new_map = {}
for key in mapping:
new_map[key] = sympify(mapping[key])
return new_map
def convert_to_symbolic_indices(seq, start=None, gen=None, qubit_map=None):
"""Returns the circuit with symbolic indices and the
dictionary mapping symbolic indices to real indices.
The mapping is 1 to 1 and onto (bijective).
Parameters
==========
seq : tuple, Gate/Integer/tuple or Mul
A tuple of Gate, Integer, or tuple objects, or a Mul
start : Symbol
An optional starting symbolic index
gen : object
An optional numbered symbol generator
qubit_map : dict
An existing mapping of symbolic indices to real indices
All symbolic indices have the format 'i#', where # is
some number >= 0.
"""
if isinstance(seq, Mul):
seq = seq.args
# A numbered symbol generator
index_gen = numbered_symbols(prefix='i', start=-1)
cur_ndx = next(index_gen)
# keys are symbolic indices; values are real indices
ndx_map = {}
def create_inverse_map(symb_to_real_map):
rev_items = lambda item: tuple([item[1], item[0]])
return dict(map(rev_items, symb_to_real_map.items()))
if start is not None:
if not isinstance(start, Symbol):
msg = 'Expected Symbol for starting index, got %r.' % start
raise TypeError(msg)
cur_ndx = start
if gen is not None:
if not isinstance(gen, numbered_symbols().__class__):
msg = 'Expected a generator, got %r.' % gen
raise TypeError(msg)
index_gen = gen
if qubit_map is not None:
if not isinstance(qubit_map, dict):
msg = ('Expected dict for existing map, got ' +
'%r.' % qubit_map)
raise TypeError(msg)
ndx_map = qubit_map
ndx_map = _sympify_qubit_map(ndx_map)
    # keys are real indices; values are symbolic indices
inv_map = create_inverse_map(ndx_map)
sym_seq = ()
for item in seq:
# Nested items, so recurse
if isinstance(item, Gate):
result = convert_to_symbolic_indices(item.args,
qubit_map=ndx_map,
start=cur_ndx,
gen=index_gen)
sym_item, new_map, cur_ndx, index_gen = result
ndx_map.update(new_map)
inv_map = create_inverse_map(ndx_map)
elif isinstance(item, tuple) or isinstance(item, Tuple):
result = convert_to_symbolic_indices(item,
qubit_map=ndx_map,
start=cur_ndx,
gen=index_gen)
sym_item, new_map, cur_ndx, index_gen = result
ndx_map.update(new_map)
inv_map = create_inverse_map(ndx_map)
elif item in inv_map:
sym_item = inv_map[item]
else:
            cur_ndx = next(index_gen)
ndx_map[cur_ndx] = item
inv_map[item] = cur_ndx
sym_item = cur_ndx
if isinstance(item, Gate):
sym_item = item.__class__(*sym_item)
sym_seq = sym_seq + (sym_item,)
return sym_seq, ndx_map, cur_ndx, index_gen
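# Illustrative sketch (not part of the original API; assumes X and Y from
# sympy.physics.quantum.gate, which this module does not import):
def _convert_to_symbolic_indices_example():
    from sympy.physics.quantum.gate import X, Y
    sym_circuit, ndx_map, _, _ = convert_to_symbolic_indices((X(0), Y(1), X(0)))
    # Both X gates end up sharing the same symbolic index, and ndx_map sends each
    # symbolic index (i0, i1, ...) back to its real index (0, 1).
    return sym_circuit, ndx_map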
def convert_to_real_indices(seq, qubit_map):
"""Returns the circuit with real indices.
Parameters
==========
seq : tuple, Gate/Integer/tuple or Mul
A tuple of Gate, Integer, or tuple objects or a Mul
qubit_map : dict
A dictionary mapping symbolic indices to real indices.
Examples
========
Change the symbolic indices to real integers:
>>> from sympy import symbols
>>> from sympy.physics.quantum.circuitutils import convert_to_real_indices
>>> from sympy.physics.quantum.gate import X, Y, Z, H
>>> i0, i1 = symbols('i:2')
>>> index_map = {i0 : 0, i1 : 1}
>>> convert_to_real_indices(X(i0)*Y(i1)*H(i0)*X(i1), index_map)
(X(0), Y(1), H(0), X(1))
"""
if isinstance(seq, Mul):
seq = seq.args
if not isinstance(qubit_map, dict):
msg = 'Expected dict for qubit_map, got %r.' % qubit_map
raise TypeError(msg)
qubit_map = _sympify_qubit_map(qubit_map)
real_seq = ()
for item in seq:
# Nested items, so recurse
if isinstance(item, Gate):
real_item = convert_to_real_indices(item.args, qubit_map)
elif isinstance(item, tuple) or isinstance(item, Tuple):
real_item = convert_to_real_indices(item, qubit_map)
else:
real_item = qubit_map[item]
if isinstance(item, Gate):
real_item = item.__class__(*real_item)
real_seq = real_seq + (real_item,)
return real_seq
def random_reduce(circuit, gate_ids, seed=None):
"""Shorten the length of a quantum circuit.
random_reduce looks for circuit identities in circuit, randomly chooses
one to remove, and returns a shorter yet equivalent circuit. If no
identities are found, the same circuit is returned.
Parameters
==========
circuit : Gate tuple of Mul
A tuple of Gates representing a quantum circuit
gate_ids : list, GateIdentity
List of gate identities to find in circuit
seed : int or list
seed used for _randrange; to override the random selection, provide a
list of integers: the elements of gate_ids will be tested in the order
given by the list
"""
from sympy.utilities.randtest import _randrange
if not gate_ids:
return circuit
if isinstance(circuit, Mul):
circuit = circuit.args
ids = flatten_ids(gate_ids)
# Create the random integer generator with the seed
randrange = _randrange(seed)
# Look for an identity in the circuit
while ids:
i = randrange(len(ids))
id = ids.pop(i)
if find_subcircuit(circuit, id) != -1:
break
else:
# no identity was found
return circuit
# return circuit with the identity removed
return replace_subcircuit(circuit, id)
def random_insert(circuit, choices, seed=None):
"""Insert a circuit into another quantum circuit.
random_insert randomly chooses a location in the circuit to insert
a randomly selected circuit from amongst the given choices.
Parameters
==========
circuit : Gate tuple or Mul
A tuple or Mul of Gates representing a quantum circuit
choices : list
Set of circuit choices
seed : int or list
seed used for _randrange; to override the random selections, give
        a list of two integers, [i, j] where i is the circuit location where
choice[j] will be inserted.
Notes
=====
Indices for insertion should be [0, n] if n is the length of the
circuit.
"""
from sympy.utilities.randtest import _randrange
if not choices:
return circuit
if isinstance(circuit, Mul):
circuit = circuit.args
# get the location in the circuit and the element to insert from choices
randrange = _randrange(seed)
loc = randrange(len(circuit) + 1)
choice = choices[randrange(len(choices))]
circuit = list(circuit)
circuit[loc: loc] = choice
return tuple(circuit)
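# Illustrative sketch (not part of the original API; assumes X, Y, Z from
# sympy.physics.quantum.gate and that _randrange consumes an explicit seed list in
# the order documented in the docstring above):
def _random_insert_example():
    from sympy.physics.quantum.gate import X, Y, Z
    circuit = (X(0), Y(0))
    # seed=[1, 0]: insert choices[0] at position 1, giving (X(0), Z(0), Y(0)).
    return random_insert(circuit, [(Z(0),)], seed=[1, 0])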
# Flatten the GateIdentity objects (with gate rules) into one single list
def flatten_ids(ids):
collapse = lambda acc, an_id: acc + sorted(an_id.equivalent_ids,
key=default_sort_key)
ids = reduce(collapse, ids, [])
ids.sort(key=default_sort_key)
return ids
|
tmenjo/cinder-2015.1.0
|
refs/heads/master
|
cinder/api/contrib/volume_image_metadata.py
|
5
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The Volume Image Metadata API extension."""
import logging
import six
from cinder.api import extensions
from cinder.api.openstack import wsgi
from cinder.api import xmlutil
from cinder import volume
LOG = logging.getLogger(__name__)
authorize = extensions.soft_extension_authorizer('volume',
'volume_image_metadata')
class VolumeImageMetadataController(wsgi.Controller):
def __init__(self, *args, **kwargs):
super(VolumeImageMetadataController, self).__init__(*args, **kwargs)
self.volume_api = volume.API()
def _get_all_images_metadata(self, context):
"""Returns the image metadata for all volumes."""
try:
all_metadata = self.volume_api.get_volumes_image_metadata(context)
except Exception as e:
LOG.debug('Problem retrieving volume image metadata. '
'It will be skipped. Error: %s', six.text_type(e))
all_metadata = {}
return all_metadata
def _add_image_metadata(self, context, resp_volume, image_meta=None):
"""Appends the image metadata to the given volume.
:param context: the request context
:param resp_volume: the response volume
        :param image_meta: The image metadata to append. If None is provided it
will be retrieved from the database. An empty dict
means there is no metadata and it should not be
retrieved from the db.
"""
if image_meta is None:
try:
image_meta = self.volume_api.get_volume_image_metadata(
context, resp_volume)
except Exception:
return
if image_meta:
resp_volume['volume_image_metadata'] = dict(
image_meta.iteritems())
@wsgi.extends
def show(self, req, resp_obj, id):
context = req.environ['cinder.context']
if authorize(context):
resp_obj.attach(xml=VolumeImageMetadataTemplate())
self._add_image_metadata(context, resp_obj.obj['volume'])
@wsgi.extends
def detail(self, req, resp_obj):
context = req.environ['cinder.context']
if authorize(context):
resp_obj.attach(xml=VolumesImageMetadataTemplate())
all_meta = self._get_all_images_metadata(context)
for vol in list(resp_obj.obj.get('volumes', [])):
image_meta = all_meta.get(vol['id'], {})
self._add_image_metadata(context, vol, image_meta)
class Volume_image_metadata(extensions.ExtensionDescriptor):
"""Show image metadata associated with the volume."""
name = "VolumeImageMetadata"
alias = "os-vol-image-meta"
namespace = ("http://docs.openstack.org/volume/ext/"
"volume_image_metadata/api/v1")
updated = "2012-12-07T00:00:00+00:00"
def get_controller_extensions(self):
controller = VolumeImageMetadataController()
extension = extensions.ControllerExtension(self, 'volumes', controller)
return [extension]
class VolumeImageMetadataMetadataTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('volume_image_metadata',
selector='volume_image_metadata')
elem = xmlutil.SubTemplateElement(root, 'meta',
selector=xmlutil.get_items)
elem.set('key', 0)
elem.text = 1
return xmlutil.MasterTemplate(root, 1)
class VolumeImageMetadataTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('volume', selector='volume')
root.append(VolumeImageMetadataMetadataTemplate())
alias = Volume_image_metadata.alias
namespace = Volume_image_metadata.namespace
return xmlutil.SlaveTemplate(root, 1, nsmap={alias: namespace})
class VolumesImageMetadataTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('volumes')
elem = xmlutil.SubTemplateElement(root, 'volume', selector='volume')
elem.append(VolumeImageMetadataMetadataTemplate())
alias = Volume_image_metadata.alias
namespace = Volume_image_metadata.namespace
return xmlutil.SlaveTemplate(root, 1, nsmap={alias: namespace})
|
rhndg/openedx
|
refs/heads/master
|
common/djangoapps/edxmako/templatetag_helpers.py
|
250
|
from django.template import loader
from django.template.base import Template, Context
from django.template.loader import get_template, select_template
from django.utils.itercompat import is_iterable
def django_template_include(file_name, mako_context):
"""
This can be used within a mako template to include a django template
in the way that a django-style {% include %} does. Pass it context
which can be the mako context ('context') or a dictionary.
"""
dictionary = dict(mako_context)
return loader.render_to_string(file_name, dictionary=dictionary)
def render_inclusion(func, file_name, takes_context, django_context, *args, **kwargs):
"""
This allows a mako template to call a template tag function (written
for django templates) that is an "inclusion tag". These functions are
decorated with @register.inclusion_tag.
-func: This is the function that is registered as an inclusion tag.
You must import it directly using a python import statement.
-file_name: This is the filename of the template, passed into the
@register.inclusion_tag statement.
-takes_context: This is a parameter of the @register.inclusion_tag.
-django_context: This is an instance of the django context. If this
is a mako template rendered through the regular django rendering calls,
a copy of the django context is available as 'django_context'.
-*args and **kwargs are the arguments to func.
"""
if takes_context:
args = [django_context] + list(args)
_dict = func(*args, **kwargs)
if isinstance(file_name, Template):
t = file_name
elif not isinstance(file_name, basestring) and is_iterable(file_name):
t = select_template(file_name)
else:
t = get_template(file_name)
nodelist = t.nodelist
new_context = Context(_dict)
csrf_token = django_context.get('csrf_token', None)
if csrf_token is not None:
new_context['csrf_token'] = csrf_token
return nodelist.render(new_context)
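# Illustrative usage sketch (hypothetical app, tag and template names -- not part
# of this module): from a mako template, a django inclusion tag registered as
# ``render_widget`` with template 'widgets/widget.html' could be invoked roughly as
#
#   <%!
#   from myapp.templatetags.widgets import render_widget
#   from edxmako.templatetag_helpers import render_inclusion
#   %>
#   ${render_inclusion(render_widget, 'widgets/widget.html', False, django_context, widget_id=42)}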
|
farhaanbukhsh/junction
|
refs/heads/master
|
settings/test_settings.py
|
4
|
# -*- coding: utf-8 -*-# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
from .common import * # noqa
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(ROOT_DIR, 'test.sqlite3'),
}
}
TEMPLATE_CONTEXT_PROCESSORS += (
"django.core.context_processors.debug",
)
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += ('django_extensions',)
DEVICE_VERIFICATION_CODE = 11111
|
marrow/mongo
|
refs/heads/develop
|
marrow/mongo/core/__init__.py
|
1
|
# encoding: utf-8
from __future__ import unicode_literals
from .release import version as __version__
from .field import Field
from .index import Index
from .document import Document
__all__ = ['__version__', 'Field', 'Index', 'Document']
|
demarle/VTK
|
refs/heads/master
|
ThirdParty/Twisted/twisted/conch/error.py
|
67
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
An error to represent bad things happening in Conch.
Maintainer: Paul Swartz
"""
from twisted.cred.error import UnauthorizedLogin
class ConchError(Exception):
def __init__(self, value, data = None):
Exception.__init__(self, value, data)
self.value = value
self.data = data
class NotEnoughAuthentication(Exception):
"""
This is thrown if the authentication is valid, but is not enough to
successfully verify the user. i.e. don't retry this type of
authentication, try another one.
"""
class ValidPublicKey(UnauthorizedLogin):
"""
Raised by public key checkers when they receive public key credentials
that don't contain a signature at all, but are valid in every other way.
(e.g. the public key matches one in the user's authorized_keys file).
Protocol code (eg
L{SSHUserAuthServer<twisted.conch.ssh.userauth.SSHUserAuthServer>}) which
attempts to log in using
L{ISSHPrivateKey<twisted.cred.credentials.ISSHPrivateKey>} credentials
should be prepared to handle a failure of this type by telling the user to
re-authenticate using the same key and to include a signature with the new
attempt.
See U{http://www.ietf.org/rfc/rfc4252.txt} section 7 for more details.
"""
class IgnoreAuthentication(Exception):
"""
This is thrown to let the UserAuthServer know it doesn't need to handle the
authentication anymore.
"""
class MissingKeyStoreError(Exception):
"""
Raised if an SSHAgentServer starts receiving data without its factory
providing a keys dict on which to read/write key data.
"""
class UserRejectedKey(Exception):
"""
The user interactively rejected a key.
"""
class InvalidEntry(Exception):
"""
An entry in a known_hosts file could not be interpreted as a valid entry.
"""
class HostKeyChanged(Exception):
"""
The host key of a remote host has changed.
@ivar offendingEntry: The entry which contains the persistent host key that
disagrees with the given host key.
@type offendingEntry: L{twisted.conch.interfaces.IKnownHostEntry}
@ivar path: a reference to the known_hosts file that the offending entry
was loaded from
@type path: L{twisted.python.filepath.FilePath}
@ivar lineno: The line number of the offending entry in the given path.
@type lineno: L{int}
"""
def __init__(self, offendingEntry, path, lineno):
Exception.__init__(self)
self.offendingEntry = offendingEntry
self.path = path
self.lineno = lineno
|
shubhdev/edx-platform
|
refs/heads/master
|
common/__init__.py
|
12133432
| |
mammique/django
|
refs/heads/tp_alpha
|
tests/regressiontests/test_runner/__init__.py
|
12133432
| |
jamesls/boto
|
refs/heads/develop
|
tests/integration/elasticache/__init__.py
|
12133432
| |
wikiteams/github-gender-studies
|
refs/heads/master
|
sources/gender_api/__init__.py
|
12133432
| |
kaedroho/wagtail
|
refs/heads/master
|
wagtail/admin/signal_handlers.py
|
6
|
from wagtail.admin.mail import (
GroupApprovalTaskStateSubmissionEmailNotifier, WorkflowStateApprovalEmailNotifier,
WorkflowStateRejectionEmailNotifier, WorkflowStateSubmissionEmailNotifier)
from wagtail.core.models import TaskState, WorkflowState
from wagtail.core.signals import (
task_submitted, workflow_approved, workflow_rejected, workflow_submitted)
task_submission_email_notifier = GroupApprovalTaskStateSubmissionEmailNotifier()
workflow_submission_email_notifier = WorkflowStateSubmissionEmailNotifier()
workflow_approval_email_notifier = WorkflowStateApprovalEmailNotifier()
workflow_rejection_email_notifier = WorkflowStateRejectionEmailNotifier()
def register_signal_handlers():
task_submitted.connect(task_submission_email_notifier, sender=TaskState, dispatch_uid='group_approval_task_submitted_email_notification')
workflow_submitted.connect(workflow_submission_email_notifier, sender=WorkflowState, dispatch_uid='workflow_state_submitted_email_notification')
workflow_rejected.connect(workflow_rejection_email_notifier, sender=WorkflowState, dispatch_uid='workflow_state_rejected_email_notification')
workflow_approved.connect(workflow_approval_email_notifier, sender=WorkflowState, dispatch_uid='workflow_state_approved_email_notification')
|
commshare/APE_Server
|
refs/heads/master
|
deps/js/src/tests/manifest.py
|
7
|
# Library for JSTest manifests.
#
# This includes classes for representing and parsing JS manifests.
import os, re, sys
from subprocess import *
from tests import TestCase
class XULInfo:
def __init__(self, abi, os, isdebug):
self.abi = abi
self.os = os
self.isdebug = isdebug
def as_js(self):
"""Return JS that when executed sets up variables so that JS expression
predicates on XUL build info evaluate properly."""
return 'var xulRuntime = { OS: "%s", XPCOMABI: "%s", shell: true }; var isDebugBuild=%s;' % (
self.os,
self.abi,
str(self.isdebug).lower())
@classmethod
def create(cls, jsdir):
"""Create a XULInfo based on the current platform's characteristics."""
# Our strategy is to find the autoconf.mk generated for the build and
# read the values from there.
# Find config/autoconf.mk.
dir = jsdir
while True:
path = os.path.join(dir, 'config/autoconf.mk')
if os.path.isfile(path):
break
if os.path.dirname(dir) == dir:
print "Can't find config/autoconf.mk on a directory containing the JS shell (searched from %s)"%jsdir
sys.exit(1)
dir = os.path.dirname(dir)
# Read the values.
val_re = re.compile(r'(TARGET_XPCOM_ABI|OS_TARGET|MOZ_DEBUG)\s*=\s*(.*)')
kw = {}
for line in open(path):
m = val_re.match(line)
if m:
key, val = m.groups()
val = val.rstrip()
if key == 'TARGET_XPCOM_ABI':
kw['abi'] = val
if key == 'OS_TARGET':
kw['os'] = val
if key == 'MOZ_DEBUG':
kw['isdebug'] = (val == '1')
return cls(**kw)
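# Illustrative sketch (hypothetical ABI/OS values, not read from a real build):
# what as_js() hands to the JS shell so manifest predicates can be evaluated.
def _xulinfo_example():
    info = XULInfo('x86_64-gcc3', 'Linux', False)
    # -> 'var xulRuntime = { OS: "Linux", XPCOMABI: "x86_64-gcc3", shell: true }; var isDebugBuild=false;'
    return info.as_js()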
class XULInfoTester:
def __init__(self, xulinfo, js_bin):
self.js_prolog = xulinfo.as_js()
self.js_bin = js_bin
# Maps JS expr to evaluation result.
self.cache = {}
def test(self, cond):
"""Test a XUL predicate condition against this local info."""
ans = self.cache.get(cond, None)
if ans is None:
cmd = [ self.js_bin, '-e', self.js_prolog, '-e', 'print(!!(%s))'%cond ]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
out, err = p.communicate()
if out in ('true\n', 'true\r\n'):
ans = True
elif out in ('false\n', 'false\r\n'):
ans = False
else:
raise Exception("Failed to test XUL condition '%s'"%cond)
self.cache[cond] = ans
return ans
class NullXULInfoTester:
"""Can be used to parse manifests without a JS shell."""
def test(self, cond):
return False
def parse(filename, xul_tester, reldir = ''):
ans = []
comment_re = re.compile(r'#.*')
dir = os.path.dirname(filename)
try:
f = open(filename)
except IOError:
print "warning: include file not found: '%s'"%filename
return ans
for line in f:
sline = comment_re.sub('', line)
parts = sline.split()
if len(parts) == 0:
# line is empty or just a comment, skip
pass
elif parts[0] == 'include':
include_file = parts[1]
include_reldir = os.path.join(reldir, os.path.dirname(include_file))
ans += parse(os.path.join(dir, include_file), xul_tester, include_reldir)
elif parts[0] == 'url-prefix':
# Doesn't apply to shell tests
pass
else:
script = None
enable = True
expect = True
random = False
slow = False
pos = 0
while pos < len(parts):
if parts[pos] == 'fails':
expect = False
pos += 1
elif parts[pos] == 'skip':
expect = enable = False
pos += 1
elif parts[pos] == 'random':
random = True
pos += 1
elif parts[pos].startswith('fails-if'):
cond = parts[pos][len('fails-if('):-1]
if xul_tester.test(cond):
expect = False
pos += 1
elif parts[pos].startswith('asserts-if'):
# This directive means we may flunk some number of
# NS_ASSERTIONs in the browser. For the shell, ignore it.
pos += 1
elif parts[pos].startswith('skip-if'):
cond = parts[pos][len('skip-if('):-1]
if xul_tester.test(cond):
expect = enable = False
pos += 1
elif parts[pos].startswith('random-if'):
cond = parts[pos][len('random-if('):-1]
if xul_tester.test(cond):
random = True
pos += 1
elif parts[pos] == 'script':
script = parts[pos+1]
pos += 2
elif parts[pos] == 'slow':
slow = True
pos += 1
elif parts[pos] == 'silentfail':
# silentfails use tons of memory, and Darwin doesn't support ulimit.
if xul_tester.test("xulRuntime.OS == 'Darwin'"):
expect = enable = False
pos += 1
else:
print 'warning: invalid manifest line element "%s"'%parts[pos]
pos += 1
assert script is not None
ans.append(TestCase(os.path.join(reldir, script),
enable, expect, random, slow))
return ans
|
libvirt/autotest
|
refs/heads/master
|
client/virt/virt_video_maker.py
|
9
|
"""
Video Maker transforms screenshots taken during a test into an HTML 5
compatible video, so that one can watch the screen activity of the
whole test from inside your own browser.
This relies on generally available multimedia libraries, frameworks
and tools.
"""
import os, time, glob, logging
__all__ = ['GstPythonVideoMaker', 'video_maker']
#
# Check what kind of video libraries tools we have available
#
# Gstreamer python bindings are our first choice
try:
import gst
GST_PYTHON_INSTALLED = True
except ImportError:
GST_PYTHON_INSTALLED = False
#
# PIL is also required to normalize images
#
try:
import PIL.Image
PIL_INSTALLED = True
except ImportError:
PIL_INSTALLED = False
#
# We only do video
#
CONTAINER_PREFERENCE = ['ogg', 'webm']
ENCODER_PREFERENCE = ['theora', 'vp8']
class GstPythonVideoMaker(object):
'''
Makes a movie out of screendump images using gstreamer-python
'''
CONTAINER_MAPPING = {'ogg' : 'oggmux',
'webm' : 'webmmux'}
ENCODER_MAPPING = {'theora' : 'theoraenc',
'vp8' : 'vp8enc'}
CONTAINER_ENCODER_MAPPING = {'ogg' : 'theora',
'webm' : 'vp8'}
def __init__(self, verbose=False):
if not GST_PYTHON_INSTALLED:
raise ValueError('gstreamer-python library was not found')
if not PIL_INSTALLED:
raise ValueError('python-imaging library was not found')
self.verbose = verbose
def get_most_common_image_size(self, input_dir):
'''
Find the most common image size
'''
image_sizes = {}
image_files = glob.glob(os.path.join(input_dir, '*.jpg'))
for f in image_files:
i = PIL.Image.open(f)
if not image_sizes.has_key(i.size):
image_sizes[i.size] = 1
else:
image_sizes[i.size] += 1
most_common_size_counter = 0
most_common_size = None
for image_size, image_counter in image_sizes.items():
if image_counter > most_common_size_counter:
most_common_size_counter = image_counter
most_common_size = image_size
return most_common_size
def normalize_images(self, input_dir):
'''
GStreamer requires all images to be the same size, so we do it here
'''
image_size = self.get_most_common_image_size(input_dir)
if image_size is None:
image_size = (800, 600)
if self.verbose:
logging.debug('Normalizing image files to size: %s', image_size)
image_files = glob.glob(os.path.join(input_dir, '*.jpg'))
for f in image_files:
i = PIL.Image.open(f)
if i.size != image_size:
i.resize(image_size).save(f)
def has_element(self, kind):
'''
Returns True if a gstreamer element is available
'''
return gst.element_factory_find(kind) is not None
def get_container_name(self):
'''
        Gets the best available video container, based on the preference order
'''
for c in CONTAINER_PREFERENCE:
element_kind = self.CONTAINER_MAPPING.get(c, c)
if self.has_element(element_kind):
return element_kind
raise ValueError('No suitable container format was found')
def get_encoder_name(self):
'''
        Gets the best available video encoder, based on the preference order
'''
for c in ENCODER_PREFERENCE:
element_kind = self.ENCODER_MAPPING.get(c, c)
if self.has_element(element_kind):
return element_kind
raise ValueError('No suitable encoder format was found')
def get_element(self, name):
'''
        Makes and returns an element from the gst factory interface
'''
if self.verbose:
logging.debug('GStreamer element requested: %s', name)
return gst.element_factory_make(name, name)
def start(self, input_dir, output_file):
'''
Process the input files and output the video file
'''
self.normalize_images(input_dir)
no_files = len(glob.glob(os.path.join(input_dir, '*.jpg')))
if self.verbose:
logging.debug('Number of files to encode as video: %s', no_files)
pipeline = gst.Pipeline("pipeline")
source = self.get_element("multifilesrc")
source_location = os.path.join(input_dir, "%04d.jpg")
if self.verbose:
logging.debug("Source location: %s", source_location)
source.set_property('location', source_location)
source.set_property('index', 1)
source_caps = gst.Caps()
source_caps.append('image/jpeg,framerate=(fraction)4/1')
source.set_property('caps', source_caps)
decoder = self.get_element("jpegdec")
# Attempt to auto detect the chosen encoder/mux based on output_file
encoder = None
container = None
for container_name in self.CONTAINER_ENCODER_MAPPING:
if output_file.endswith('.%s' % container_name):
enc_name = self.CONTAINER_ENCODER_MAPPING[container_name]
enc_name_gst = self.ENCODER_MAPPING[enc_name]
encoder = self.get_element(enc_name_gst)
cont_name_gst = self.CONTAINER_MAPPING[container_name]
container = self.get_element(cont_name_gst)
# If auto detection fails, choose from the list of preferred codec/mux
if encoder is None:
encoder = self.get_element(self.get_encoder_name())
if container is None:
container = self.get_element(self.get_container_name())
output = self.get_element("filesink")
output.set_property('location', output_file)
pipeline.add_many(source, decoder, encoder, container, output)
gst.element_link_many(source, decoder, encoder, container, output)
pipeline.set_state(gst.STATE_PLAYING)
while True:
if source.get_property('index') <= no_files:
if self.verbose:
logging.debug("Currently processing image number: %s",
source.get_property('index'))
time.sleep(1)
else:
break
time.sleep(3)
pipeline.set_state(gst.STATE_NULL)
def video_maker(input_dir, output_file):
'''
Instantiates and runs a video maker
'''
v = GstPythonVideoMaker()
v.start(input_dir, output_file)
if __name__ == '__main__':
import sys
if len(sys.argv) < 3:
print 'Usage: %s <input_dir> <output_file>' % sys.argv[0]
else:
video_maker(sys.argv[1], sys.argv[2])
|
sachinpro/sachinpro.github.io
|
refs/heads/master
|
tensorflow/python/ops/histogram_ops_test.py
|
10
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.histogram_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
class HistogramFixedWidthTest(tf.test.TestCase):
def setUp(self):
self.rng = np.random.RandomState(0)
def test_empty_input_gives_all_zero_counts(self):
# Bins will be:
# (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
value_range = [0.0, 5.0]
values = []
expected_bin_counts = [0, 0, 0, 0, 0]
with self.test_session():
hist = tf.histogram_fixed_width(values, value_range, nbins=5)
# Hist should start "fresh" with every eval.
self.assertAllClose(expected_bin_counts, hist.eval())
self.assertAllClose(expected_bin_counts, hist.eval())
def test_one_update_on_constant_input(self):
# Bins will be:
# (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
value_range = [0.0, 5.0]
values = [-1.0, 0.0, 1.5, 2.0, 5.0, 15]
expected_bin_counts = [2, 1, 1, 0, 2]
with self.test_session():
hist = tf.histogram_fixed_width(values, value_range, nbins=5)
# Hist should start "fresh" with every eval.
self.assertAllClose(expected_bin_counts, hist.eval())
self.assertAllClose(expected_bin_counts, hist.eval())
def test_one_update_on_constant_2d_input(self):
# Bins will be:
# (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
value_range = [0.0, 5.0]
values = [[-1.0, 0.0, 1.5], [2.0, 5.0, 15]]
expected_bin_counts = [2, 1, 1, 0, 2]
with self.test_session():
hist = tf.histogram_fixed_width(values, value_range, nbins=5)
# Hist should start "fresh" with every eval.
self.assertAllClose(expected_bin_counts, hist.eval())
self.assertAllClose(expected_bin_counts, hist.eval())
def test_two_updates_on_constant_input(self):
# Bins will be:
# (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
value_range = [0.0, 5.0]
values_1 = [-1.0, 0.0, 1.5, 2.0, 5.0, 15]
values_2 = [1.5, 4.5, 4.5, 4.5, 0.0, 0.0]
expected_bin_counts_1 = [2, 1, 1, 0, 2]
expected_bin_counts_2 = [2, 1, 0, 0, 3]
with self.test_session():
values = tf.placeholder(tf.float32, shape=[6])
hist = tf.histogram_fixed_width(values, value_range, nbins=5)
# The values in hist should depend on the current feed and nothing else.
self.assertAllClose(expected_bin_counts_1,
hist.eval(feed_dict={values: values_1}))
self.assertAllClose(expected_bin_counts_2,
hist.eval(feed_dict={values: values_2}))
self.assertAllClose(expected_bin_counts_1,
hist.eval(feed_dict={values: values_1}))
self.assertAllClose(expected_bin_counts_1,
hist.eval(feed_dict={values: values_1}))
def test_two_updates_on_scalar_input(self):
# Bins will be:
# (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
value_range = [0.0, 5.0]
values_1 = 1.5
values_2 = 2.5
expected_bin_counts_1 = [0, 1, 0, 0, 0]
expected_bin_counts_2 = [0, 0, 1, 0, 0]
with self.test_session():
values = tf.placeholder(tf.float32, shape=[])
hist = tf.histogram_fixed_width(values, value_range, nbins=5)
# The values in hist should depend on the current feed and nothing else.
self.assertAllClose(expected_bin_counts_2,
hist.eval(feed_dict={values: values_2}))
self.assertAllClose(expected_bin_counts_1,
hist.eval(feed_dict={values: values_1}))
self.assertAllClose(expected_bin_counts_1,
hist.eval(feed_dict={values: values_1}))
self.assertAllClose(expected_bin_counts_2,
hist.eval(feed_dict={values: values_2}))
def test_multiple_random_accumulating_updates_results_in_right_dist(self):
# Accumulate the updates in a new variable. Resultant
# histogram should be uniform. Use only 3 bins because with many bins it
# would be unlikely that all would be close to 1/n. If someone ever wants
# to test that, it would be better to check that the cdf was linear.
value_range = [1.0, 4.14159]
with self.test_session() as sess:
values = tf.placeholder(tf.float32, shape=[4, 4, 4])
hist = tf.histogram_fixed_width(values,
value_range,
nbins=3,
dtype=tf.int64)
hist_accum = tf.Variable(tf.zeros_initializer([3], dtype=tf.int64))
hist_accum = hist_accum.assign_add(hist)
tf.initialize_all_variables().run()
for _ in range(100):
# Map the rv: U[0, 1] --> U[value_range[0], value_range[1]].
values_arr = (
value_range[0] +
(value_range[1] - value_range[0]) * self.rng.rand(4, 4, 4))
hist_accum_arr = sess.run(hist_accum, feed_dict={values: values_arr})
pmf = hist_accum_arr / float(hist_accum_arr.sum())
np.testing.assert_allclose(1 / 3, pmf, atol=0.02)
if __name__ == '__main__':
tf.test.main()
|
ThiagoGarciaAlves/intellij-community
|
refs/heads/master
|
python/testData/testRunner/env/createConfigurationTest/test_with_src/foo/src/test_test.py
|
30
|
def test_test():
pass
|
yuruofeifei/mxnet
|
refs/heads/master
|
example/neural-style/nstyle.py
|
52
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import find_mxnet
import mxnet as mx
import numpy as np
import importlib
import logging
logging.basicConfig(level=logging.DEBUG)
import argparse
from collections import namedtuple
from skimage import io, transform
from skimage.restoration import denoise_tv_chambolle
CallbackData = namedtuple('CallbackData', field_names=['eps','epoch','img','filename'])
def get_args(arglist=None):
parser = argparse.ArgumentParser(description='neural style')
parser.add_argument('--model', type=str, default='vgg19',
                        choices = ['vgg19'],
help = 'the pretrained model to use')
parser.add_argument('--content-image', type=str, default='input/IMG_4343.jpg',
help='the content image')
parser.add_argument('--style-image', type=str, default='input/starry_night.jpg',
help='the style image')
parser.add_argument('--stop-eps', type=float, default=.005,
                        help='stop if the relative change is less than eps')
parser.add_argument('--content-weight', type=float, default=10,
help='the weight for the content image')
parser.add_argument('--style-weight', type=float, default=1,
help='the weight for the style image')
parser.add_argument('--tv-weight', type=float, default=1e-2,
                        help='the magnitude of the TV loss')
parser.add_argument('--max-num-epochs', type=int, default=1000,
help='the maximal number of training epochs')
parser.add_argument('--max-long-edge', type=int, default=600,
help='resize the content image')
parser.add_argument('--lr', type=float, default=.001,
help='the initial learning rate')
parser.add_argument('--gpu', type=int, default=0,
help='which gpu card to use, -1 means using cpu')
parser.add_argument('--output_dir', type=str, default='output/',
help='the output image')
parser.add_argument('--save-epochs', type=int, default=50,
help='save the output every n epochs')
parser.add_argument('--remove-noise', type=float, default=.02,
                        help='the magnitude of noise removal')
parser.add_argument('--lr-sched-delay', type=int, default=75,
help='how many epochs between decreasing learning rate')
    parser.add_argument('--lr-sched-factor', type=float, default=0.9,
help='factor to decrease learning rate on schedule')
if arglist is None:
return parser.parse_args()
else:
return parser.parse_args(arglist)
def PreprocessContentImage(path, long_edge):
img = io.imread(path)
logging.info("load the content image, size = %s", img.shape[:2])
factor = float(long_edge) / max(img.shape[:2])
new_size = (int(img.shape[0] * factor), int(img.shape[1] * factor))
resized_img = transform.resize(img, new_size)
sample = np.asarray(resized_img) * 256
# swap axes to make image from (224, 224, 3) to (3, 224, 224)
sample = np.swapaxes(sample, 0, 2)
sample = np.swapaxes(sample, 1, 2)
# sub mean
sample[0, :] -= 123.68
sample[1, :] -= 116.779
sample[2, :] -= 103.939
logging.info("resize the content image to %s", new_size)
return np.resize(sample, (1, 3, sample.shape[1], sample.shape[2]))
def PreprocessStyleImage(path, shape):
img = io.imread(path)
resized_img = transform.resize(img, (shape[2], shape[3]))
sample = np.asarray(resized_img) * 256
sample = np.swapaxes(sample, 0, 2)
sample = np.swapaxes(sample, 1, 2)
sample[0, :] -= 123.68
sample[1, :] -= 116.779
sample[2, :] -= 103.939
return np.resize(sample, (1, 3, sample.shape[1], sample.shape[2]))
def PostprocessImage(img):
img = np.resize(img, (3, img.shape[2], img.shape[3]))
img[0, :] += 123.68
img[1, :] += 116.779
img[2, :] += 103.939
img = np.swapaxes(img, 1, 2)
img = np.swapaxes(img, 0, 2)
img = np.clip(img, 0, 255)
return img.astype('uint8')
def SaveImage(img, filename, remove_noise=0.):
logging.info('save output to %s', filename)
out = PostprocessImage(img)
if remove_noise != 0.0:
out = denoise_tv_chambolle(out, weight=remove_noise, multichannel=True)
io.imsave(filename, out)
def style_gram_symbol(input_size, style):
_, output_shapes, _ = style.infer_shape(data=(1, 3, input_size[0], input_size[1]))
gram_list = []
grad_scale = []
for i in range(len(style.list_outputs())):
shape = output_shapes[i]
x = mx.sym.Reshape(style[i], target_shape=(int(shape[1]), int(np.prod(shape[2:]))))
# use fully connected to quickly do dot(x, x^T)
gram = mx.sym.FullyConnected(x, x, no_bias=True, num_hidden=shape[1])
gram_list.append(gram)
grad_scale.append(np.prod(shape[1:]) * shape[1])
return mx.sym.Group(gram_list), grad_scale
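# A rough NumPy sketch (illustrative only, not used by this script) of what the
# FullyConnected trick above computes: for a feature map of shape (C, H, W) the
# Gram matrix is dot(X, X^T) over the flattened spatial axes. The helper name
# below is hypothetical.
def _gram_numpy_sketch(feature_map):
    """Return the (C, C) Gram matrix of a (C, H, W) numpy feature map."""
    c = feature_map.shape[0]
    x = feature_map.reshape(c, -1)   # flatten spatial dims to (C, H*W)
    return np.dot(x, x.T)            # the dot(x, x^T) the symbol above encodes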
def get_loss(gram, content):
gram_loss = []
for i in range(len(gram.list_outputs())):
gvar = mx.sym.Variable("target_gram_%d" % i)
gram_loss.append(mx.sym.sum(mx.sym.square(gvar - gram[i])))
cvar = mx.sym.Variable("target_content")
content_loss = mx.sym.sum(mx.sym.square(cvar - content))
return mx.sym.Group(gram_loss), content_loss
def get_tv_grad_executor(img, ctx, tv_weight):
"""create TV gradient executor with input binded on img
"""
if tv_weight <= 0.0:
return None
nchannel = img.shape[1]
simg = mx.sym.Variable("img")
skernel = mx.sym.Variable("kernel")
channels = mx.sym.SliceChannel(simg, num_outputs=nchannel)
out = mx.sym.Concat(*[
mx.sym.Convolution(data=channels[i], weight=skernel,
num_filter=1,
kernel=(3, 3), pad=(1,1),
no_bias=True, stride=(1,1))
for i in range(nchannel)])
kernel = mx.nd.array(np.array([[0, -1, 0],
[-1, 4, -1],
[0, -1, 0]])
.reshape((1, 1, 3, 3)),
ctx) / 8.0
out = out * tv_weight
return out.bind(ctx, args={"img": img,
"kernel": kernel})
def train_nstyle(args, callback=None):
"""Train a neural style network.
Args are from argparse and control input, output, hyper-parameters.
callback allows for display of training progress.
"""
# input
dev = mx.gpu(args.gpu) if args.gpu >= 0 else mx.cpu()
content_np = PreprocessContentImage(args.content_image, args.max_long_edge)
style_np = PreprocessStyleImage(args.style_image, shape=content_np.shape)
size = content_np.shape[2:]
# model
Executor = namedtuple('Executor', ['executor', 'data', 'data_grad'])
model_module = importlib.import_module('model_' + args.model)
style, content = model_module.get_symbol()
gram, gscale = style_gram_symbol(size, style)
model_executor = model_module.get_executor(gram, content, size, dev)
model_executor.data[:] = style_np
model_executor.executor.forward()
style_array = []
for i in range(len(model_executor.style)):
style_array.append(model_executor.style[i].copyto(mx.cpu()))
model_executor.data[:] = content_np
model_executor.executor.forward()
content_array = model_executor.content.copyto(mx.cpu())
# delete the executor
del model_executor
style_loss, content_loss = get_loss(gram, content)
model_executor = model_module.get_executor(
style_loss, content_loss, size, dev)
grad_array = []
for i in range(len(style_array)):
style_array[i].copyto(model_executor.arg_dict["target_gram_%d" % i])
grad_array.append(mx.nd.ones((1,), dev) * (float(args.style_weight) / gscale[i]))
grad_array.append(mx.nd.ones((1,), dev) * (float(args.content_weight)))
print([x.asscalar() for x in grad_array])
content_array.copyto(model_executor.arg_dict["target_content"])
# train
# initialize img with random noise
img = mx.nd.zeros(content_np.shape, ctx=dev)
img[:] = mx.rnd.uniform(-0.1, 0.1, img.shape)
lr = mx.lr_scheduler.FactorScheduler(step=args.lr_sched_delay,
factor=args.lr_sched_factor)
optimizer = mx.optimizer.NAG(
learning_rate = args.lr,
wd = 0.0001,
momentum=0.95,
lr_scheduler = lr)
optim_state = optimizer.create_state(0, img)
logging.info('start training arguments %s', args)
old_img = img.copyto(dev)
clip_norm = 1 * np.prod(img.shape)
tv_grad_executor = get_tv_grad_executor(img, dev, args.tv_weight)
for e in range(args.max_num_epochs):
img.copyto(model_executor.data)
model_executor.executor.forward()
model_executor.executor.backward(grad_array)
gnorm = mx.nd.norm(model_executor.data_grad).asscalar()
if gnorm > clip_norm:
model_executor.data_grad[:] *= clip_norm / gnorm
if tv_grad_executor is not None:
tv_grad_executor.forward()
optimizer.update(0, img,
model_executor.data_grad + tv_grad_executor.outputs[0],
optim_state)
else:
optimizer.update(0, img, model_executor.data_grad, optim_state)
new_img = img
eps = (mx.nd.norm(old_img - new_img) / mx.nd.norm(new_img)).asscalar()
old_img = new_img.copyto(dev)
logging.info('epoch %d, relative change %f', e, eps)
if eps < args.stop_eps:
logging.info('eps < args.stop_eps, training finished')
break
if callback:
cbdata = {
'eps': eps,
'epoch': e+1,
}
if (e+1) % args.save_epochs == 0:
outfn = args.output_dir + 'e_'+str(e+1)+'.jpg'
npimg = new_img.asnumpy()
SaveImage(npimg, outfn, args.remove_noise)
if callback:
cbdata['filename'] = outfn
cbdata['img'] = npimg
if callback:
callback(cbdata)
final_fn = args.output_dir + '/final.jpg'
SaveImage(new_img.asnumpy(), final_fn)
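# A hedged example (not part of the original script) of the `callback` hook
# accepted by train_nstyle(): it receives a dict with 'eps' and 'epoch', plus
# 'filename' and 'img' on epochs where an intermediate image is saved. The
# function name below is illustrative.
#
#     def log_progress(cbdata):
#         logging.info('epoch %d, eps %f', cbdata['epoch'], cbdata['eps'])
#         if 'filename' in cbdata:
#             logging.info('saved intermediate image to %s', cbdata['filename'])
#
#     train_nstyle(get_args([]), callback=log_progress)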
if __name__ == "__main__":
args = get_args()
train_nstyle(args)
|
caot/intellij-community
|
refs/heads/master
|
python/testData/mover/insideDocComment.py
|
80
|
def fcn(self, foo, bar):
"""
:type <caret>foo: int
:type bar: str
"""
self.foo = foo
self.bar = bar
|
bak1an/django
|
refs/heads/master
|
tests/i18n/utils.py
|
75
|
import os
import re
import shutil
import tempfile
source_code_dir = os.path.dirname(__file__)
def copytree(src, dst):
shutil.copytree(src, dst, ignore=shutil.ignore_patterns('__pycache__'))
class POFileAssertionMixin:
def _assertPoKeyword(self, keyword, expected_value, haystack, use_quotes=True):
q = '"'
if use_quotes:
expected_value = '"%s"' % expected_value
q = "'"
needle = '%s %s' % (keyword, expected_value)
expected_value = re.escape(expected_value)
return self.assertTrue(
re.search('^%s %s' % (keyword, expected_value), haystack, re.MULTILINE),
'Could not find %(q)s%(n)s%(q)s in generated PO file' % {'n': needle, 'q': q}
)
def assertMsgId(self, msgid, haystack, use_quotes=True):
return self._assertPoKeyword('msgid', msgid, haystack, use_quotes=use_quotes)
class RunInTmpDirMixin:
"""
Allow i18n tests that need to generate .po/.mo files to run in an isolated
temporary filesystem tree created by tempfile.mkdtemp() that contains a
clean copy of the relevant test code.
Test classes using this mixin need to define a `work_subdir` attribute
which designates the subdir under `tests/i18n/` that will be copied to the
temporary tree from which its test cases will run.
The setUp() method sets the current working dir to the temporary tree.
It'll be removed when cleaning up.
"""
def setUp(self):
self._cwd = os.getcwd()
self.work_dir = tempfile.mkdtemp(prefix='i18n_')
# Resolve symlinks, if any, in test directory paths.
self.test_dir = os.path.realpath(os.path.join(self.work_dir, self.work_subdir))
copytree(os.path.join(source_code_dir, self.work_subdir), self.test_dir)
# Step out of the temporary working tree before removing it to avoid
# deletion problems on Windows. Cleanup actions registered with
# addCleanup() are called in reverse so preserve this ordering.
self.addCleanup(self._rmrf, self.test_dir)
self.addCleanup(os.chdir, self._cwd)
os.chdir(self.test_dir)
def _rmrf(self, dname):
if os.path.commonprefix([self.test_dir, os.path.abspath(dname)]) != self.test_dir:
return
shutil.rmtree(dname)
def rmfile(self, filepath):
if os.path.exists(filepath):
os.remove(filepath)
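# A minimal sketch (an assumption, not part of Django's test suite) of how the
# mixins above are combined: a test class sets `work_subdir` so that setUp()
# copies tests/i18n/<work_subdir> into a temporary tree and chdirs into it
# before each test. The class name and subdir below are hypothetical.
#
#     from django.test import SimpleTestCase
#
#     class ExtractorSmokeTests(RunInTmpDirMixin, POFileAssertionMixin, SimpleTestCase):
#         work_subdir = 'commands'
#
#         def test_po_keyword_lookup(self):
#             self.assertMsgId('some literal', 'msgid "some literal"\n')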
|
CVML/pybrain
|
refs/heads/master
|
pybrain/rl/environments/mazes/polarmaze.py
|
25
|
__author__ = 'Tom Schaul, tom@idsia.ch'
from scipy import zeros
from random import choice, random
from .maze import Maze
class PolarMaze(Maze):
""" Mazes with the emphasis on Perseus: allow him to turn, go forward or backward.
Thus there are 4 states per position.
"""
actions = 5
Stay = 0
Forward = 1
TurnAround = 2
TurnLeft = 3
TurnRight = 4
allActions = [Stay, Forward, TurnAround, TurnLeft, TurnRight]
def reset(self):
Maze.reset(self)
self.perseusDir = choice(list(range(4)))
def performAction(self, action):
if self.stochAction > 0:
if random() < self.stochAction:
action = choice(list(range(len(PolarMaze.allActions))))
act = PolarMaze.allActions[action]
self.bang = False
if act == self.Forward:
tmp = self._moveInDir(self.perseus, Maze.allActions[self.perseusDir])
if self.mazeTable[tmp] == False:
self.perseus = tmp
else:
self.bang = True
elif act == self.TurnLeft:
self.perseusDir = (self.perseusDir + 1) % 4
elif act == self.TurnRight:
self.perseusDir = (self.perseusDir - 1) % 4
elif act == self.TurnAround:
self.perseusDir = (self.perseusDir + 2) % 4
def getSensors(self):
obs = Maze.getSensors(self)
res = zeros(4)
res[:4 - self.perseusDir] = obs[self.perseusDir:]
res[4 - self.perseusDir:] = obs[:self.perseusDir]
return res
def __str__(self):
return Maze.__str__(self) + '(dir:' + str(self.perseusDir) + ')'
|
evaautomation/findutils
|
refs/heads/master
|
build-aux/src-sniff.py
|
5
|
#! /usr/bin/env python
# src-sniff.py: checks source code for patterns that look like common errors.
# Copyright (C) 2007, 2010, 2011 Free Software Foundation, Inc.
#
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Many of these would probably be better as gnulib syntax checks, because
# gnulib provides a way of disabling checks for particular files, and
# has a wider range of checks. Indeed, many of these checks do in fact
# check the same thing as "make syntax-check".
import os.path
import re
import sys
C_ISH_FILENAME = "\.(c|cc|h|cpp|cxx|hxx)$"
C_ISH_FILENAME_RE = re.compile(C_ISH_FILENAME)
C_MODULE_FILENAME_RE = re.compile("\.(c|cc|cpp|cxx)$")
FIRST_INCLUDE = 'config.h'
problems = 0
def Problem(**kwargs):
global problems
problems += 1
msg = kwargs['message']
if kwargs['line']:
location = "%(filename)s:%(line)d" % kwargs
else:
location = "%(filename)s" % kwargs
detail = msg % kwargs
print >>sys.stderr, "error: %s: %s" % (location, detail)
class RegexSniffer(object):
def __init__(self, source, message, regexflags=0):
super(RegexSniffer, self).__init__()
self._regex = re.compile(source, regexflags)
self._msg = message
def Sniff(self, text, filename, line):
#print >>sys.stderr, ("Matching %s against %s"
# % (text, self._regex.pattern))
m = self._regex.search(text)
if m:
if line is None:
line = 1 + m.string.count('\n', 1, m.start(0))
args = {
'filename' : filename,
'line' : line,
'fulltext' : text,
'matchtext': m.group(0),
'message' : self._msg
}
Problem(**args)
class RegexChecker(object):
def __init__(self, regex, line_smells, file_smells):
super(RegexChecker, self).__init__()
self._regex = re.compile(regex)
self._line_sniffers = [RegexSniffer(s[0],s[1]) for s in line_smells]
self._file_sniffers = [RegexSniffer(s[0],s[1],re.S|re.M) for s in file_smells]
def Check(self, filename, lines, fulltext):
if self._regex.search(filename):
# We recognise this type of file.
for line_number, line_text in lines:
for sniffer in self._line_sniffers:
sniffer.Sniff(line_text, filename, line_number)
for sniffer in self._file_sniffers:
sniffer.Sniff(fulltext, filename, None)
else:
# We don't know how to check this file. Skip it.
pass
class MakefileRegexChecker(object):
MAKEFILE_PRIORITY_LIST = ['Makefile.am', 'Makefile.in', 'Makefile']
MAKEFILE_REGEX = ''.join(
'|'.join(['(%s)' % pattern for pattern in MAKEFILE_PRIORITY_LIST]))
def __init__(self, line_smells, file_smells):
self._file_regex = re.compile(self.MAKEFILE_REGEX)
self._rxc = RegexChecker(self.MAKEFILE_REGEX, line_smells, file_smells)
def WantToCheck(self, filename):
if not self._file_regex.search(filename):
return False
makefile_base = os.path.basename(filename)
makefile_dir = os.path.dirname(filename)
for base in self.MAKEFILE_PRIORITY_LIST:
path = os.path.join(makefile_dir, base)
if os.path.exists(path):
if path == filename:
# The first existing name in MAKEFILE_PRIORITY_LIST
# is actually this file, so we want to check it.
return True
else:
                    # There is another (source) Makefile we want to check
# instead.
return False
# If we get to here we were asked about a file which either
# doesn't exist or which doesn't look like anything in
# MAKEFILE_PRIORITY_LIST. So give the go-ahead to check it.
return True
def Check(self, filename, lines, fulltext):
if self.WantToCheck(filename):
self._rxc.Check(filename, lines, fulltext)
checkers = [
# Check C-like languages for C code smells.
RegexChecker(C_ISH_FILENAME_RE,
# line smells
[
[r'^\s*#\s*define\s+(_[A-Z_]+)', "Don't use reserved macro names"],
[r'(?<!\w)free \(\(', "don't cast the argument to free()"],
[r'\*\) *x(m|c|re)alloc(?!\w)',"don't cast the result of x*alloc"],
[r'\*\) *alloca(?!\w)',"don't cast the result of alloca"],
        [r'[ ]\t', "found SPACE-TAB; remove the space"],
[r'(?<!\w)([fs]?scanf|ato([filq]|ll))(?!\w)', 'do not use %(matchtext)s'],
[r'error \(EXIT_SUCCESS',"passing EXIT_SUCCESS to error is confusing"],
[r'file[s]ystem', "prefer writing 'file system' to 'filesystem'"],
[r'HAVE''_CONFIG_H', "Avoid checking HAVE_CONFIG_H"],
[r'HAVE_FCNTL_H', "Avoid checking HAVE_FCNTL_H"],
[r'O_NDELAY', "Avoid using O_NDELAY"],
[r'the\s*the', "'the"+" the' is probably not deliberate"],
[r'(?<!\w)error \([^_"]*[^_]"[^"]*[a-z]{3}', "untranslated error message"],
[r'^# *if\s+defined *\(', "useless parentheses in '#if defined'"],
],
[
[r'# *include <assert.h>(?!.*assert \()',
"If you include <assert.h>, use assert()."],
[r'# *include "quotearg.h"(?!.*(?<!\w)quotearg(_[^ ]+)? \()',
"If you include \"quotearg.h\", use one of its functions."],
[r'# *include "quote.h"(?!.*(?<!\w)quote(_[^ ]+)? \()',
"If you include \"quote.h\", use one of its functions."],
]),
# Check Makefiles for Makefile code smells.
MakefileRegexChecker([ [r'^ ', "Spaces at start of makefile line"], ],
[]),
# Check everything for whitespace problems.
    RegexChecker('', [], [[r'[ \t]$',
                           "trailing whitespace '%(matchtext)s'"],]),
# Check everything for out of date addresses.
RegexChecker('', [], [
[r'675\s*Mass\s*Ave,\s*02139[^a-zA-Z]*USA',
"out of date FSF address"],
[r'59 Temple Place.*02111-?1307\s*USA',
"out of date FSF address %(matchtext)s"],
]),
# Check everything for GPL version regression
RegexChecker('',
[],
[[r'G(nu |eneral )?P(ublic )?L(icense)?.{1,200}version [12]',
"Out of date GPL version: %(matchtext)s"],
]),
# Bourne shell code smells
RegexChecker('\.sh$',
[
['for\s*\w+\s*in.*;\s*do',
# Solaris 10 /bin/sh rejects this, see Autoconf manual
"for loops should not contain a 'do' on the same line."],
], []),
]
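# A hedged illustration (not part of the original checker set): additional file
# types follow the same shape -- a filename regex, a list of [regex, message]
# line smells and a list of file smells. The instance below is an example only
# and is deliberately not appended to `checkers`.
_example_python_checker = RegexChecker(
    r'\.py$',
    [
        [r'\t', "tab character found; prefer spaces in Python sources"],
    ],
    [])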
# missing check: ChangeLog prefixes
# missing: sc_always_defined_macros from coreutils
# missing: sc_tight_scope
def Warning(filename, desc):
print >> sys.stderr, "warning: %s: %s" % (filename, desc)
def BuildIncludeList(text):
"""Build a list of included files, with line numbers.
Args:
text: the full text of the source file
Returns:
[ ('config.h',32), ('assert.h',33), ... ]
"""
include_re = re.compile(r'# *include +[<"](.*)[>"]')
includes = []
last_include_pos = 1
line = 1
for m in include_re.finditer(text):
header = m.group(1)
# Count only the number of lines between the last include and
# this one. Counting them from the beginning would be quadratic.
line += m.string.count('\n', last_include_pos, m.start(0))
last_include_pos = m.end()
includes.append( (header,line) )
return includes
def CheckStatHeader(filename, lines, fulltext):
stat_hdr_re = re.compile(r'# *include .*<sys/stat.h>')
# It's OK to have a pointer though.
stat_use_re = re.compile(r'struct stat\W *[^*]')
for line in lines:
m = stat_use_re.search(line[1])
if m:
msg = "If you use struct stat, you must #include <sys/stat.h> first"
Problem(filename = filename, line = line[0], message = msg)
# Diagnose only once
break
m = stat_hdr_re.search(line[1])
if m:
break
def CheckFirstInclude(filename, lines, fulltext):
includes = BuildIncludeList(fulltext)
#print "Include map:"
#for name, line in includes:
# print "%s:%d: %s" % (filename, line, name)
if includes:
actual_first_include = includes[0][0]
else:
actual_first_include = None
if actual_first_include and actual_first_include != FIRST_INCLUDE:
if FIRST_INCLUDE in [inc[0] for inc in includes]:
msg = ("%(actual_first_include)s is the first included file, "
"but %(required_first_include)s should be included first")
Problem(filename=filename, line=includes[0][1], message=msg,
actual_first_include=actual_first_include,
required_first_include = FIRST_INCLUDE)
if FIRST_INCLUDE not in [inc[0] for inc in includes]:
Warning(filename,
"%s should be included by most files" % FIRST_INCLUDE)
def SniffSourceFile(filename, lines, fulltext):
if C_MODULE_FILENAME_RE.search(filename):
CheckFirstInclude(filename, lines, fulltext)
CheckStatHeader (filename, lines, fulltext)
for checker in checkers:
checker.Check(filename, lines, fulltext)
def main(args):
"main program"
for srcfile in args[1:]:
f = open(srcfile)
line_number = 1
lines = []
for line in f.readlines():
lines.append( (line_number, line) )
line_number += 1
fulltext = ''.join([line[1] for line in lines])
SniffSourceFile(srcfile, lines, fulltext)
f.close()
if problems:
return 1
else:
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
tysonclugg/django
|
refs/heads/master
|
django/contrib/gis/db/models/__init__.py
|
123
|
from django.db.models import * # NOQA isort:skip
from django.db.models import __all__ as models_all # isort:skip
import django.contrib.gis.db.models.functions # NOQA
import django.contrib.gis.db.models.lookups # NOQA
from django.contrib.gis.db.models.aggregates import * # NOQA
from django.contrib.gis.db.models.aggregates import __all__ as aggregates_all
from django.contrib.gis.db.models.fields import (
GeometryCollectionField, GeometryField, LineStringField,
MultiLineStringField, MultiPointField, MultiPolygonField, PointField,
PolygonField, RasterField,
)
__all__ = models_all + aggregates_all
__all__ += [
'GeometryCollectionField', 'GeometryField', 'LineStringField',
'MultiLineStringField', 'MultiPointField', 'MultiPolygonField', 'PointField',
'PolygonField', 'RasterField',
]
|
linaro-technologies/jobserv
|
refs/heads/master
|
jobserv/storage/local_storage.py
|
1
|
# Copyright (C) 2017 Linaro Limited
# Author: Andy Doan <andy.doan@linaro.org>
import hmac
import os
import mimetypes
import shutil
from flask import Blueprint, request, send_file, url_for
from jobserv.jsend import get_or_404
from jobserv.models import Build, Project, Run
from jobserv.settings import INTERNAL_API_KEY, LOCAL_ARTIFACTS_DIR
from jobserv.storage.base import BaseStorage
blueprint = Blueprint('local_storage', __name__, url_prefix='/local-storage')
class Storage(BaseStorage):
blueprint = blueprint
def __init__(self):
super().__init__()
self.artifacts = LOCAL_ARTIFACTS_DIR
def _get_local(self, storage_path):
assert storage_path[0] != '/'
path = os.path.join(self.artifacts, storage_path)
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
os.makedirs(dirname)
return path
def _create_from_string(self, storage_path, contents):
path = self._get_local(storage_path)
with open(path, 'w') as f:
f.write(contents)
def _create_from_file(self, storage_path, filename, content_type):
path = self._get_local(storage_path)
with open(filename, 'rb') as fin, open(path, 'wb') as fout:
shutil.copyfileobj(fin, fout)
def _get_as_string(self, storage_path):
assert storage_path[0] != '/'
path = os.path.join(self.artifacts, storage_path)
with open(path, 'r') as f:
return f.read()
def list_artifacts(self, run):
path = '%s/%s/%s/' % (
run.build.project.name, run.build.build_id, run.name)
path = os.path.join(self.artifacts, path)
for base, _, names in os.walk(path):
for name in names:
if name != '.rundef.json':
yield os.path.join(base, name)[len(path):]
def get_download_response(self, request, run, path):
try:
p = os.path.join(self.artifacts, self._get_run_path(run), path)
mt = mimetypes.guess_type(p)[0]
return send_file(open(p, 'rb'), mimetype=mt)
except FileNotFoundError:
return 'File not found', 404
def _generate_put_url(self, run, path, expiration, content_type):
p = os.path.join(self.artifacts, self._get_run_path(run), path)
msg = '%s,%s,%s' % ('PUT', p, content_type)
sig = hmac.new(INTERNAL_API_KEY, msg.encode(), 'sha1').hexdigest()
return url_for(
'local_storage.run_upload_artifact', sig=sig,
proj=run.build.project.name, build_id=run.build.build_id,
run=run.name, path=path, _external=True)
def _get_run(proj, build_id, run):
p = get_or_404(Project.query.filter_by(name=proj))
b = get_or_404(Build.query.filter_by(project=p, build_id=build_id))
return Run.query.filter_by(
name=run
).filter(
Run.build.has(Build.id == b.id)
).first_or_404()
@blueprint.route('/<sig>/<proj>/builds/<int:build_id>/runs/<run>/<path:path>',
methods=('PUT',))
def run_upload_artifact(sig, proj, build_id, run, path):
run = _get_run(proj, build_id, run)
# validate the signature
ls = Storage()
p = os.path.join(ls.artifacts, ls._get_run_path(run), path)
msg = '%s,%s,%s' % (request.method, p, request.headers.get('Content-Type'))
computed = hmac.new(INTERNAL_API_KEY, msg.encode(), 'sha1').hexdigest()
if not hmac.compare_digest(sig, computed):
return 'Invalid signature', 401
dirname = os.path.dirname(p)
try:
# we could have 2 uploads trying this, so just do it this way to avoid
# race conditions
os.makedirs(dirname)
except FileExistsError:
pass
# stream the contents to disk
with open(p, 'wb') as f:
chunk_size = 4096
while True:
chunk = request.stream.read(chunk_size)
if len(chunk) == 0:
break
f.write(chunk)
return 'ok'
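# A hedged sketch (not part of this module) of how a run worker would consume
# the signed URL produced by Storage()._generate_put_url(): it PUTs the
# artifact body to that URL using the same Content-Type that was signed. The
# `requests` dependency and the file name below are assumptions.
#
#     url = Storage()._generate_put_url(run, 'console.log', None, 'text/plain')
#     with open('console.log', 'rb') as f:
#         requests.put(url, data=f, headers={'Content-Type': 'text/plain'})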
|
gjtorikian/readthedocs.org
|
refs/heads/master
|
readthedocs/redirects/views.py
|
12133432
| |
popoyz/charts
|
refs/heads/master
|
hillinsight/www/__init__.py
|
12133432
| |
sarvex/django
|
refs/heads/master
|
tests/m2m_through/__init__.py
|
12133432
| |
weigj/django-multidb
|
refs/heads/master
|
tests/modeltests/field_subclassing/__init__.py
|
12133432
| |
SerCeMan/intellij-community
|
refs/heads/master
|
python/testData/resolve/multiFile/nestedPackageElement/foo/__init__.py
|
12133432
| |
openaps/openaps
|
refs/heads/master
|
openaps/controllers/__init__.py
|
12133432
| |
mwieler/django-registration-1.5
|
refs/heads/master
|
registration/tests/__init__.py
|
59
|
from django.test import TestCase
import registration
from registration.tests.backends import *
from registration.tests.forms import *
from registration.tests.models import *
from registration.tests.views import *
class RegistrationVersionInfoTests(TestCase):
"""
Test django-registration's internal version-reporting
infrastructure.
"""
def setUp(self):
self.version = registration.VERSION
def tearDown(self):
registration.VERSION = self.version
def test_get_version(self):
"""
Test the version-info reporting.
"""
versions = [
{'version': (1, 0, 0, 'alpha', 0),
'expected': "1.0 pre-alpha"},
{'version': (1, 0, 1, 'alpha', 1),
'expected': "1.0.1 alpha 1"},
{'version': (1, 1, 0, 'beta', 2),
'expected': "1.1 beta 2"},
{'version': (1, 2, 1, 'rc', 3),
'expected': "1.2.1 rc 3"},
{'version': (1, 3, 0, 'final', 0),
'expected': "1.3"},
{'version': (1, 4, 1, 'beta', 0),
'expected': "1.4.1 beta"},
]
for version_dict in versions:
registration.VERSION = version_dict['version']
self.assertEqual(registration.get_version(), version_dict['expected'])
|
cfrs2005/flask_blog
|
refs/heads/master
|
lib/__init__.py
|
1
|
__author__ = 'aj'
|
MinerKasch/dd-agent
|
refs/heads/master
|
checks.d/varnish.py
|
5
|
# (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
# stdlib
from collections import defaultdict
import re
import xml.parsers.expat # python 2.4 compatible
# project
from checks import AgentCheck
from utils.subprocess_output import get_subprocess_output
class BackendStatus(object):
HEALTHY = 'healthy'
SICK = 'sick'
ALL = (HEALTHY, SICK)
@classmethod
def to_check_status(cls, status):
if status == cls.HEALTHY:
return AgentCheck.OK
elif status == cls.SICK:
return AgentCheck.CRITICAL
return AgentCheck.UNKNOWN
class Varnish(AgentCheck):
SERVICE_CHECK_NAME = 'varnish.backend_healthy'
# XML parsing bits, a.k.a. Kafka in Code
def _reset(self):
self._current_element = ""
self._current_metric = "varnish"
self._current_value = 0
self._current_str = ""
self._current_type = ""
def _start_element(self, name, attrs):
self._current_element = name
def _end_element(self, name, tags):
if name == "stat":
m_name = self.normalize(self._current_metric)
if self._current_type in ("a", "c"):
self.rate(m_name, long(self._current_value), tags=tags)
elif self._current_type in ("i", "g"):
self.gauge(m_name, long(self._current_value), tags=tags)
else:
# Unsupported data type, ignore
self._reset()
return # don't save
# reset for next stat element
self._reset()
elif name in ("ident", "name") or (name == "type" and self._current_str != "MAIN"):
self._current_metric += "." + self._current_str
def _char_data(self, data):
self.log.debug("Data %s [%s]" % (data, self._current_element))
data = data.strip()
if len(data) > 0 and self._current_element != "":
if self._current_element == "value":
self._current_value = long(data)
elif self._current_element == "flag":
self._current_type = data
else:
self._current_str = data
def check(self, instance):
        # varnishstat is required; fail the check if it is not configured.
if instance.get("varnishstat", None) is None:
raise Exception("varnishstat is not configured")
tags = instance.get('tags', [])
if tags is None:
tags = []
else:
tags = list(set(tags))
varnishstat_path = instance.get("varnishstat")
name = instance.get('name')
# Get version and version-specific args from varnishstat -V.
version, use_xml = self._get_version_info(varnishstat_path)
# Parse metrics from varnishstat.
arg = '-x' if use_xml else '-1'
cmd = [varnishstat_path, arg]
if name is not None:
cmd.extend(['-n', name])
tags += [u'varnish_name:%s' % name]
else:
tags += [u'varnish_name:default']
output, _, _ = get_subprocess_output(cmd, self.log)
self._parse_varnishstat(output, use_xml, tags)
# Parse service checks from varnishadm.
varnishadm_path = instance.get('varnishadm')
if varnishadm_path:
secretfile_path = instance.get('secretfile', '/etc/varnish/secret')
cmd = ['sudo', varnishadm_path, '-S', secretfile_path, 'debug.health']
output, _, _ = get_subprocess_output(cmd, self.log)
if output:
self._parse_varnishadm(output)
def _get_version_info(self, varnishstat_path):
# Get the varnish version from varnishstat
output, error, _ = get_subprocess_output([varnishstat_path, "-V"], self.log)
# Assumptions regarding varnish's version
use_xml = True
version = 3
m1 = re.search(r"varnish-(\d+)", output, re.MULTILINE)
# v2 prints the version on stderr, v3 on stdout
m2 = re.search(r"varnish-(\d+)", error, re.MULTILINE)
if m1 is None and m2 is None:
self.log.warn("Cannot determine the version of varnishstat, assuming 3 or greater")
self.warning("Cannot determine the version of varnishstat, assuming 3 or greater")
else:
if m1 is not None:
version = int(m1.group(1))
elif m2 is not None:
version = int(m2.group(1))
self.log.debug("Varnish version: %d" % version)
# Location of varnishstat
if version <= 2:
use_xml = False
return version, use_xml
def _parse_varnishstat(self, output, use_xml, tags=None):
"""Extract stats from varnishstat -x
The text option (-1) is not reliable enough when counters get large.
VBE.media_video_prd_services_01(10.93.67.16,,8080).happy18446744073709551615
2 types of data, "a" for counter ("c" in newer versions of varnish), "i" for gauge ("g")
https://github.com/varnish/Varnish-Cache/blob/master/include/tbl/vsc_fields.h
Bitmaps are not supported.
Example XML output (with `use_xml=True`)
<varnishstat>
<stat>
<name>fetch_304</name>
<value>0</value>
<flag>a</flag>
<description>Fetch no body (304)</description>
</stat>
<stat>
<name>n_sess_mem</name>
<value>334</value>
<flag>i</flag>
<description>N struct sess_mem</description>
</stat>
<stat>
<type>LCK</type>
<ident>vcl</ident>
<name>creat</name>
<value>1</value>
<flag>a</flag>
<description>Created locks</description>
</stat>
</varnishstat>
"""
tags = tags or []
# FIXME: this check is processing an unbounded amount of data
# we should explicitly list the metrics we want to get from the check
if use_xml:
p = xml.parsers.expat.ParserCreate()
p.StartElementHandler = self._start_element
end_handler = lambda name: self._end_element(name, tags)
p.EndElementHandler = end_handler
p.CharacterDataHandler = self._char_data
self._reset()
p.Parse(output, True)
else:
for line in output.split("\n"):
self.log.debug("Parsing varnish results: %s" % line)
fields = line.split()
if len(fields) < 3:
break
name, gauge_val, rate_val = fields[0], fields[1], fields[2]
metric_name = self.normalize(name, prefix="varnish")
# Now figure out which value to pick
if rate_val.lower() in ("nan", "."):
# col 2 matters
self.log.debug("Varnish (gauge) %s %d" % (metric_name, int(gauge_val)))
self.gauge(metric_name, int(gauge_val), tags=tags)
else:
# col 3 has a rate (since restart)
self.log.debug("Varnish (rate) %s %d" % (metric_name, int(gauge_val)))
self.rate(metric_name, float(gauge_val), tags=tags)
def _parse_varnishadm(self, output):
""" Parse out service checks from varnishadm.
Example output:
Backend b0 is Sick
Current states good: 2 threshold: 3 window: 5
Average responsetime of good probes: 0.000000
Oldest Newest
================================================================
-------------------------------------------------------------444 Good IPv4
-------------------------------------------------------------XXX Good Xmit
-------------------------------------------------------------RRR Good Recv
----------------------------------------------------------HHH--- Happy
Backend b1 is Sick
Current states good: 2 threshold: 3 window: 5
Average responsetime of good probes: 0.000000
Oldest Newest
================================================================
----------------------------------------------------------HHH--- Happy
"""
# Process status by backend.
backends_by_status = defaultdict(list)
backend, status, message = None, None, None
for line in output.split("\n"):
tokens = line.strip().split(' ')
if len(tokens) > 0:
if tokens[0] == 'Backend':
backend = tokens[1]
                    status = tokens[-1].lower()
elif tokens[0] == 'Current' and backend is not None:
try:
message = ' '.join(tokens[2:]).strip()
except Exception:
# If we can't parse a message still send a status.
self.log.exception('Error when parsing message from varnishadm')
message = ''
backends_by_status[status].append((backend, message))
for status, backends in backends_by_status.iteritems():
check_status = BackendStatus.to_check_status(status)
for backend, message in backends:
tags = ['backend:%s' % backend]
self.service_check(self.SERVICE_CHECK_NAME, check_status,
tags=tags, message=message)
|
ktaneishi/deepchem
|
refs/heads/master
|
deepchem/models/tensorgraph/robust_multitask.py
|
1
|
from __future__ import division
from __future__ import unicode_literals
import numpy as np
import tensorflow as tf
import collections
from deepchem.metrics import to_one_hot
from deepchem.models.tensorgraph.tensor_graph import TensorGraph, TFWrapper
from deepchem.models.tensorgraph.layers import Feature, Label, Weights, \
WeightedError, Dense, Dropout, WeightDecay, Reshape, SoftMax, SoftMaxCrossEntropy, \
L2Loss, ReduceSum, Concat, Stack
class RobustMultitaskClassifier(TensorGraph):
"""Implements a neural network for robust multitasking.
Key idea is to have bypass layers that feed directly from features to task
output. Hopefully will allow tasks to route around bad multitasking.
"""
def __init__(self,
n_tasks,
n_features,
layer_sizes=[1000],
weight_init_stddevs=0.02,
bias_init_consts=1.0,
weight_decay_penalty=0.0,
weight_decay_penalty_type="l2",
dropouts=0.5,
activation_fns=tf.nn.relu,
n_classes=2,
bypass_layer_sizes=[100],
bypass_weight_init_stddevs=[.02],
bypass_bias_init_consts=[1.],
bypass_dropouts=[.5],
**kwargs):
""" Create a RobustMultitaskClassifier.
Parameters
----------
n_tasks: int
number of tasks
n_features: int
number of features
layer_sizes: list
the size of each dense layer in the network. The length of this list determines the number of layers.
weight_init_stddevs: list or float
the standard deviation of the distribution to use for weight initialization of each layer. The length
of this list should equal len(layer_sizes). Alternatively this may be a single value instead of a list,
in which case the same value is used for every layer.
    bias_init_consts: list or float
the value to initialize the biases in each layer to. The length of this list should equal len(layer_sizes).
Alternatively this may be a single value instead of a list, in which case the same value is used for every layer.
weight_decay_penalty: float
the magnitude of the weight decay penalty to use
weight_decay_penalty_type: str
the type of penalty to use for weight decay, either 'l1' or 'l2'
dropouts: list or float
      the dropout probability to use for each layer. The length of this list should equal len(layer_sizes).
Alternatively this may be a single value instead of a list, in which case the same value is used for every layer.
activation_fns: list or object
the Tensorflow activation function to apply to each layer. The length of this list should equal
len(layer_sizes). Alternatively this may be a single value instead of a list, in which case the
same value is used for every layer.
n_classes: int
the number of classes
bypass_layer_sizes: list
the size of each dense layer in the bypass network. The length of this list determines the number of bypass layers.
bypass_weight_init_stddevs: list or float
the standard deviation of the distribution to use for weight initialization of bypass layers.
same requirements as weight_init_stddevs
bypass_bias_init_consts: list or float
the value to initialize the biases in bypass layers
same requirements as bias_init_consts
bypass_dropouts: list or float
      the dropout probability to use for bypass layers.
same requirements as dropouts
"""
super(RobustMultitaskClassifier, self).__init__(**kwargs)
self.n_tasks = n_tasks
self.n_features = n_features
self.n_classes = n_classes
n_layers = len(layer_sizes)
if not isinstance(weight_init_stddevs, collections.Sequence):
weight_init_stddevs = [weight_init_stddevs] * n_layers
if not isinstance(bias_init_consts, collections.Sequence):
bias_init_consts = [bias_init_consts] * n_layers
if not isinstance(dropouts, collections.Sequence):
dropouts = [dropouts] * n_layers
if not isinstance(activation_fns, collections.Sequence):
activation_fns = [activation_fns] * n_layers
n_bypass_layers = len(bypass_layer_sizes)
if not isinstance(bypass_weight_init_stddevs, collections.Sequence):
bypass_weight_init_stddevs = [bypass_weight_init_stddevs
] * n_bypass_layers
if not isinstance(bypass_bias_init_consts, collections.Sequence):
bypass_bias_init_consts = [bypass_bias_init_consts] * n_bypass_layers
if not isinstance(bypass_dropouts, collections.Sequence):
bypass_dropouts = [bypass_dropouts] * n_bypass_layers
bypass_activation_fns = [activation_fns[0]] * n_bypass_layers
# Add the input features.
mol_features = Feature(shape=(None, n_features))
prev_layer = mol_features
# Add the shared dense layers
for size, weight_stddev, bias_const, dropout, activation_fn in zip(
layer_sizes, weight_init_stddevs, bias_init_consts, dropouts,
activation_fns):
layer = Dense(
in_layers=[prev_layer],
out_channels=size,
activation_fn=activation_fn,
weights_initializer=TFWrapper(
tf.truncated_normal_initializer, stddev=weight_stddev),
biases_initializer=TFWrapper(
tf.constant_initializer, value=bias_const))
if dropout > 0.0:
layer = Dropout(dropout, in_layers=[layer])
prev_layer = layer
top_multitask_layer = prev_layer
task_outputs = []
for i in range(self.n_tasks):
prev_layer = mol_features
# Add task-specific bypass layers
for size, weight_stddev, bias_const, dropout, activation_fn in zip(
bypass_layer_sizes, bypass_weight_init_stddevs,
bypass_bias_init_consts, bypass_dropouts, bypass_activation_fns):
layer = Dense(
in_layers=[prev_layer],
out_channels=size,
activation_fn=activation_fn,
weights_initializer=TFWrapper(
tf.truncated_normal_initializer, stddev=weight_stddev),
biases_initializer=TFWrapper(
tf.constant_initializer, value=bias_const))
if dropout > 0.0:
layer = Dropout(dropout, in_layers=[layer])
prev_layer = layer
top_bypass_layer = prev_layer
if n_bypass_layers > 0:
task_layer = Concat(
axis=1, in_layers=[top_multitask_layer, top_bypass_layer])
else:
task_layer = top_multitask_layer
task_out = Dense(in_layers=[task_layer], out_channels=n_classes)
task_outputs.append(task_out)
logits = Stack(axis=1, in_layers=task_outputs)
output = SoftMax(logits)
self.add_output(output)
labels = Label(shape=(None, n_tasks, n_classes))
weights = Weights(shape=(None, n_tasks))
loss = SoftMaxCrossEntropy(in_layers=[labels, logits])
weighted_loss = WeightedError(in_layers=[loss, weights])
if weight_decay_penalty != 0.0:
weighted_loss = WeightDecay(
weight_decay_penalty,
weight_decay_penalty_type,
in_layers=[weighted_loss])
self.set_loss(weighted_loss)
def default_generator(self,
dataset,
epochs=1,
predict=False,
deterministic=True,
pad_batches=True):
for epoch in range(epochs):
for (X_b, y_b, w_b, ids_b) in dataset.iterbatches(
batch_size=self.batch_size,
deterministic=deterministic,
pad_batches=pad_batches):
feed_dict = dict()
if y_b is not None and not predict:
feed_dict[self.labels[0]] = to_one_hot(y_b.flatten(),
self.n_classes).reshape(
-1, self.n_tasks,
self.n_classes)
if X_b is not None:
feed_dict[self.features[0]] = X_b
if w_b is not None and not predict:
feed_dict[self.task_weights[0]] = w_b
yield feed_dict
def create_estimator_inputs(self, feature_columns, weight_column, features,
labels, mode):
tensors = {}
for layer, column in zip(self.features, feature_columns):
tensors[layer] = tf.feature_column.input_layer(features, [column])
if weight_column is not None:
tensors[self.task_weights[0]] = tf.feature_column.input_layer(
features, [weight_column])
if labels is not None:
tensors[self.labels[0]] = tf.one_hot(
tf.cast(labels, tf.int32), self.n_classes)
return tensors
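# A minimal construction sketch (an assumption, not taken from DeepChem docs):
# the classifier is built like any other TensorGraph model and fit on a dataset
# whose labels have shape (n_samples, n_tasks). All values below are
# illustrative.
#
#     model = RobustMultitaskClassifier(
#         n_tasks=12, n_features=1024,
#         layer_sizes=[1000, 500], bypass_layer_sizes=[100],
#         dropouts=0.25, batch_size=50, learning_rate=0.001)
#     model.fit(train_dataset, nb_epoch=10)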
class RobustMultitaskRegressor(TensorGraph):
"""Implements a neural network for robust multitasking.
Key idea is to have bypass layers that feed directly from features to task
output. Hopefully will allow tasks to route around bad multitasking.
"""
def __init__(self,
n_tasks,
n_features,
layer_sizes=[1000],
weight_init_stddevs=0.02,
bias_init_consts=1.0,
weight_decay_penalty=0.0,
weight_decay_penalty_type="l2",
dropouts=0.5,
activation_fns=tf.nn.relu,
bypass_layer_sizes=[100],
bypass_weight_init_stddevs=[.02],
bypass_bias_init_consts=[1.],
bypass_dropouts=[.5],
**kwargs):
""" Create a RobustMultitaskRegressor.
Parameters
----------
n_tasks: int
number of tasks
n_features: int
number of features
layer_sizes: list
the size of each dense layer in the network. The length of this list determines the number of layers.
weight_init_stddevs: list or float
the standard deviation of the distribution to use for weight initialization of each layer. The length
of this list should equal len(layer_sizes). Alternatively this may be a single value instead of a list,
in which case the same value is used for every layer.
    bias_init_consts: list or float
the value to initialize the biases in each layer to. The length of this list should equal len(layer_sizes).
Alternatively this may be a single value instead of a list, in which case the same value is used for every layer.
weight_decay_penalty: float
the magnitude of the weight decay penalty to use
weight_decay_penalty_type: str
the type of penalty to use for weight decay, either 'l1' or 'l2'
dropouts: list or float
      the dropout probability to use for each layer. The length of this list should equal len(layer_sizes).
Alternatively this may be a single value instead of a list, in which case the same value is used for every layer.
activation_fns: list or object
the Tensorflow activation function to apply to each layer. The length of this list should equal
len(layer_sizes). Alternatively this may be a single value instead of a list, in which case the
same value is used for every layer.
bypass_layer_sizes: list
the size of each dense layer in the bypass network. The length of this list determines the number of bypass layers.
bypass_weight_init_stddevs: list or float
the standard deviation of the distribution to use for weight initialization of bypass layers.
same requirements as weight_init_stddevs
bypass_bias_init_consts: list or float
the value to initialize the biases in bypass layers
same requirements as bias_init_consts
bypass_dropouts: list or float
      the dropout probability to use for bypass layers.
same requirements as dropouts
"""
super(RobustMultitaskRegressor, self).__init__(**kwargs)
self.n_tasks = n_tasks
self.n_features = n_features
n_layers = len(layer_sizes)
if not isinstance(weight_init_stddevs, collections.Sequence):
weight_init_stddevs = [weight_init_stddevs] * n_layers
if not isinstance(bias_init_consts, collections.Sequence):
bias_init_consts = [bias_init_consts] * n_layers
if not isinstance(dropouts, collections.Sequence):
dropouts = [dropouts] * n_layers
if not isinstance(activation_fns, collections.Sequence):
activation_fns = [activation_fns] * n_layers
n_bypass_layers = len(bypass_layer_sizes)
if not isinstance(bypass_weight_init_stddevs, collections.Sequence):
bypass_weight_init_stddevs = [bypass_weight_init_stddevs
] * n_bypass_layers
if not isinstance(bypass_bias_init_consts, collections.Sequence):
bypass_bias_init_consts = [bypass_bias_init_consts] * n_bypass_layers
if not isinstance(bypass_dropouts, collections.Sequence):
bypass_dropouts = [bypass_dropouts] * n_bypass_layers
bypass_activation_fns = [activation_fns[0]] * n_bypass_layers
# Add the input features.
mol_features = Feature(shape=(None, n_features))
prev_layer = mol_features
# Add the shared dense layers
for size, weight_stddev, bias_const, dropout, activation_fn in zip(
layer_sizes, weight_init_stddevs, bias_init_consts, dropouts,
activation_fns):
layer = Dense(
in_layers=[prev_layer],
out_channels=size,
activation_fn=activation_fn,
weights_initializer=TFWrapper(
tf.truncated_normal_initializer, stddev=weight_stddev),
biases_initializer=TFWrapper(
tf.constant_initializer, value=bias_const))
if dropout > 0.0:
layer = Dropout(dropout, in_layers=[layer])
prev_layer = layer
top_multitask_layer = prev_layer
task_outputs = []
for i in range(self.n_tasks):
prev_layer = mol_features
# Add task-specific bypass layers
for size, weight_stddev, bias_const, dropout, activation_fn in zip(
bypass_layer_sizes, bypass_weight_init_stddevs,
bypass_bias_init_consts, bypass_dropouts, bypass_activation_fns):
layer = Dense(
in_layers=[prev_layer],
out_channels=size,
activation_fn=activation_fn,
weights_initializer=TFWrapper(
tf.truncated_normal_initializer, stddev=weight_stddev),
biases_initializer=TFWrapper(
tf.constant_initializer, value=bias_const))
if dropout > 0.0:
layer = Dropout(dropout, in_layers=[layer])
prev_layer = layer
top_bypass_layer = prev_layer
if n_bypass_layers > 0:
task_layer = Concat(
axis=1, in_layers=[top_multitask_layer, top_bypass_layer])
else:
task_layer = top_multitask_layer
task_out = Dense(in_layers=[task_layer], out_channels=1)
task_outputs.append(task_out)
output = Concat(axis=1, in_layers=task_outputs)
self.add_output(output)
labels = Label(shape=(None, n_tasks))
weights = Weights(shape=(None, n_tasks))
weighted_loss = ReduceSum(L2Loss(in_layers=[labels, output, weights]))
if weight_decay_penalty != 0.0:
weighted_loss = WeightDecay(
weight_decay_penalty,
weight_decay_penalty_type,
in_layers=[weighted_loss])
self.set_loss(weighted_loss)
|
a0c/odoo
|
refs/heads/master
|
addons/website_event_sale/controllers/main.py
|
54
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.addons.web import http
from openerp.addons.web.http import request
from openerp.addons.website_event.controllers.main import website_event
from openerp.tools.translate import _
class website_event(website_event):
@http.route(['/event/cart/update'], type='http', auth="public", methods=['POST'], website=True)
def cart_update(self, event_id, **post):
cr, uid, context = request.cr, request.uid, request.context
ticket_obj = request.registry.get('event.event.ticket')
sale = False
for key, value in post.items():
quantity = int(value or "0")
if not quantity:
continue
sale = True
ticket_id = key.split("-")[0] == 'ticket' and int(key.split("-")[1]) or None
ticket = ticket_obj.browse(cr, SUPERUSER_ID, ticket_id, context=context)
request.website.sale_get_order(force_create=1)._cart_update(
product_id=ticket.product_id.id, add_qty=quantity, context=dict(context, event_ticket_id=ticket.id))
if not sale:
return request.redirect("/event/%s" % event_id)
return request.redirect("/shop/checkout")
def _add_event(self, event_name="New Event", context={}, **kwargs):
try:
dummy, res_id = request.registry.get('ir.model.data').get_object_reference(request.cr, request.uid, 'event_sale', 'product_product_event')
context['default_event_ticket_ids'] = [[0,0,{
'name': _('Subscription'),
'product_id': res_id,
'deadline' : False,
'seats_max': 1000,
'price': 0,
}]]
except ValueError:
pass
return super(website_event, self)._add_event(event_name, context, **kwargs)
|
nabsboss/CouchPotatoServer
|
refs/heads/develop
|
libs/oauthlib/oauth1/rfc5849/signature.py
|
112
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
"""
oauthlib.oauth1.rfc5849.signature
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module represents a direct implementation of `section 3.4`_ of the spec.
Terminology:
* Client: software interfacing with an OAuth API
* Server: the API provider
* Resource Owner: the user who is granting authorization to the client
Steps for signing a request:
1. Collect parameters from the uri query, auth header, & body
2. Normalize those parameters
3. Normalize the uri
4. Pass the normalized uri, normalized parameters, and http method to
construct the base string
5. Pass the base string and any keys needed to a signing function
.. _`section 3.4`: http://tools.ietf.org/html/rfc5849#section-3.4
"""
import binascii
import hashlib
import hmac
import urlparse
from . import utils
from oauthlib.common import extract_params
def construct_base_string(http_method, base_string_uri,
normalized_encoded_request_parameters):
"""**String Construction**
Per `section 3.4.1.1`_ of the spec.
For example, the HTTP request::
POST /request?b5=%3D%253D&a3=a&c%40=&a2=r%20b HTTP/1.1
Host: example.com
Content-Type: application/x-www-form-urlencoded
Authorization: OAuth realm="Example",
oauth_consumer_key="9djdj82h48djs9d2",
oauth_token="kkk9d7dh3k39sjv7",
oauth_signature_method="HMAC-SHA1",
oauth_timestamp="137131201",
oauth_nonce="7d8f3e4a",
oauth_signature="bYT5CMsGcbgUdFHObYMEfcx6bsw%3D"
c2&a3=2+q
is represented by the following signature base string (line breaks
are for display purposes only)::
POST&http%3A%2F%2Fexample.com%2Frequest&a2%3Dr%2520b%26a3%3D2%2520q
%26a3%3Da%26b5%3D%253D%25253D%26c%2540%3D%26c2%3D%26oauth_consumer_
key%3D9djdj82h48djs9d2%26oauth_nonce%3D7d8f3e4a%26oauth_signature_m
ethod%3DHMAC-SHA1%26oauth_timestamp%3D137131201%26oauth_token%3Dkkk
9d7dh3k39sjv7
.. _`section 3.4.1.1`: http://tools.ietf.org/html/rfc5849#section-3.4.1.1
"""
# The signature base string is constructed by concatenating together,
# in order, the following HTTP request elements:
# 1. The HTTP request method in uppercase. For example: "HEAD",
# "GET", "POST", etc. If the request uses a custom HTTP method, it
# MUST be encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: http://tools.ietf.org/html/rfc5849#section-3.6
base_string = utils.escape(http_method.upper())
# 2. An "&" character (ASCII code 38).
base_string += u'&'
# 3. The base string URI from `Section 3.4.1.2`_, after being encoded
# (`Section 3.6`_).
#
# .. _`Section 3.4.1.2`: http://tools.ietf.org/html/rfc5849#section-3.4.1.2
# .. _`Section 3.4.6`: http://tools.ietf.org/html/rfc5849#section-3.4.6
base_string += utils.escape(base_string_uri)
# 4. An "&" character (ASCII code 38).
base_string += u'&'
# 5. The request parameters as normalized in `Section 3.4.1.3.2`_, after
# being encoded (`Section 3.6`).
#
# .. _`Section 3.4.1.3.2`: http://tools.ietf.org/html/rfc5849#section-3.4.1.3.2
# .. _`Section 3.4.6`: http://tools.ietf.org/html/rfc5849#section-3.4.6
base_string += utils.escape(normalized_encoded_request_parameters)
return base_string
def normalize_base_string_uri(uri):
"""**Base String URI**
Per `section 3.4.1.2`_ of the spec.
For example, the HTTP request::
GET /r%20v/X?id=123 HTTP/1.1
Host: EXAMPLE.COM:80
is represented by the base string URI: "http://example.com/r%20v/X".
In another example, the HTTPS request::
GET /?q=1 HTTP/1.1
Host: www.example.net:8080
is represented by the base string URI: "https://www.example.net:8080/".
.. _`section 3.4.1.2`: http://tools.ietf.org/html/rfc5849#section-3.4.1.2
"""
if not isinstance(uri, unicode):
raise ValueError('uri must be a unicode object.')
# FIXME: urlparse does not support unicode
scheme, netloc, path, params, query, fragment = urlparse.urlparse(uri)
# The scheme, authority, and path of the request resource URI `RFC3986`
# are included by constructing an "http" or "https" URI representing
# the request resource (without the query or fragment) as follows:
#
    # .. _`RFC3986`: http://tools.ietf.org/html/rfc3986
# 1. The scheme and host MUST be in lowercase.
scheme = scheme.lower()
netloc = netloc.lower()
# 2. The host and port values MUST match the content of the HTTP
# request "Host" header field.
# TODO: enforce this constraint
# 3. The port MUST be included if it is not the default port for the
# scheme, and MUST be excluded if it is the default. Specifically,
# the port MUST be excluded when making an HTTP request `RFC2616`_
# to port 80 or when making an HTTPS request `RFC2818`_ to port 443.
# All other non-default port numbers MUST be included.
#
# .. _`RFC2616`: http://tools.ietf.org/html/rfc2616
# .. _`RFC2818`: http://tools.ietf.org/html/rfc2818
default_ports = (
(u'http', u'80'),
(u'https', u'443'),
)
if u':' in netloc:
host, port = netloc.split(u':', 1)
if (scheme, port) in default_ports:
netloc = host
return urlparse.urlunparse((scheme, netloc, path, u'', u'', u''))
# ** Request Parameters **
#
# Per `section 3.4.1.3`_ of the spec.
#
# In order to guarantee a consistent and reproducible representation of
# the request parameters, the parameters are collected and decoded to
# their original decoded form. They are then sorted and encoded in a
# particular manner that is often different from their original
# encoding scheme, and concatenated into a single string.
#
# .. _`section 3.4.1.3`: http://tools.ietf.org/html/rfc5849#section-3.4.1.3
def collect_parameters(uri_query='', body=[], headers=None,
exclude_oauth_signature=True):
"""**Parameter Sources**
Parameters starting with `oauth_` will be unescaped.
Body parameters must be supplied as a dict, a list of 2-tuples, or a
formencoded query string.
Headers must be supplied as a dict.
Per `section 3.4.1.3.1`_ of the spec.
For example, the HTTP request::
POST /request?b5=%3D%253D&a3=a&c%40=&a2=r%20b HTTP/1.1
Host: example.com
Content-Type: application/x-www-form-urlencoded
Authorization: OAuth realm="Example",
oauth_consumer_key="9djdj82h48djs9d2",
oauth_token="kkk9d7dh3k39sjv7",
oauth_signature_method="HMAC-SHA1",
oauth_timestamp="137131201",
oauth_nonce="7d8f3e4a",
oauth_signature="djosJKDKJSD8743243%2Fjdk33klY%3D"
c2&a3=2+q
contains the following (fully decoded) parameters used in the
signature base string::
+------------------------+------------------+
| Name | Value |
+------------------------+------------------+
| b5 | =%3D |
| a3 | a |
| c@ | |
| a2 | r b |
| oauth_consumer_key | 9djdj82h48djs9d2 |
| oauth_token | kkk9d7dh3k39sjv7 |
| oauth_signature_method | HMAC-SHA1 |
| oauth_timestamp | 137131201 |
| oauth_nonce | 7d8f3e4a |
| c2 | |
| a3 | 2 q |
+------------------------+------------------+
Note that the value of "b5" is "=%3D" and not "==". Both "c@" and
"c2" have empty values. While the encoding rules specified in this
specification for the purpose of constructing the signature base
string exclude the use of a "+" character (ASCII code 43) to
represent an encoded space character (ASCII code 32), this practice
is widely used in "application/x-www-form-urlencoded" encoded values,
and MUST be properly decoded, as demonstrated by one of the "a3"
parameter instances (the "a3" parameter is used twice in this
request).
.. _`section 3.4.1.3.1`: http://tools.ietf.org/html/rfc5849#section-3.4.1.3.1
"""
headers = headers or {}
params = []
# The parameters from the following sources are collected into a single
# list of name/value pairs:
# * The query component of the HTTP request URI as defined by
# `RFC3986, Section 3.4`_. The query component is parsed into a list
# of name/value pairs by treating it as an
# "application/x-www-form-urlencoded" string, separating the names
# and values and decoding them as defined by
# `W3C.REC-html40-19980424`_, Section 17.13.4.
#
# .. _`RFC3986, Section 3.4`: http://tools.ietf.org/html/rfc3986#section-3.4
# .. _`W3C.REC-html40-19980424`: http://tools.ietf.org/html/rfc5849#ref-W3C.REC-html40-19980424
if uri_query:
params.extend(urlparse.parse_qsl(uri_query, keep_blank_values=True))
# * The OAuth HTTP "Authorization" header field (`Section 3.5.1`_) if
# present. The header's content is parsed into a list of name/value
# pairs excluding the "realm" parameter if present. The parameter
# values are decoded as defined by `Section 3.5.1`_.
#
# .. _`Section 3.5.1`: http://tools.ietf.org/html/rfc5849#section-3.5.1
if headers:
headers_lower = dict((k.lower(), v) for k, v in headers.items())
authorization_header = headers_lower.get(u'authorization')
if authorization_header is not None:
params.extend([i for i in utils.parse_authorization_header(
authorization_header) if i[0] != u'realm'])
# * The HTTP request entity-body, but only if all of the following
# conditions are met:
# * The entity-body is single-part.
#
# * The entity-body follows the encoding requirements of the
# "application/x-www-form-urlencoded" content-type as defined by
# `W3C.REC-html40-19980424`_.
# * The HTTP request entity-header includes the "Content-Type"
# header field set to "application/x-www-form-urlencoded".
#
# .. _`W3C.REC-html40-19980424`: http://tools.ietf.org/html/rfc5849#ref-W3C.REC-html40-19980424
# TODO: enforce header param inclusion conditions
bodyparams = extract_params(body) or []
params.extend(bodyparams)
# ensure all oauth params are unescaped
unescaped_params = []
for k, v in params:
if k.startswith(u'oauth_'):
v = utils.unescape(v)
unescaped_params.append((k, v))
# The "oauth_signature" parameter MUST be excluded from the signature
# base string if present.
if exclude_oauth_signature:
unescaped_params = filter(lambda i: i[0] != u'oauth_signature',
unescaped_params)
return unescaped_params
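# Minimal usage sketch (ours): the query string and form-encoded body from
# the docstring example above.  An OAuth "Authorization" header would be
# supplied through the ``headers`` dict and its "realm" parameter dropped,
# as described above; "oauth_signature" is excluded by default.
def _example_collect_parameters():
    return collect_parameters(uri_query=u'b5=%3D%253D&a3=a&c%40=&a2=r%20b',
                              body=u'c2&a3=2+q')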
def normalize_parameters(params):
"""**Parameters Normalization**
Per `section 3.4.1.3.2`_ of the spec.
For example, the list of parameters from the previous section would
be normalized as follows:
Encoded::
+------------------------+------------------+
| Name | Value |
+------------------------+------------------+
| b5 | %3D%253D |
| a3 | a |
| c%40 | |
| a2 | r%20b |
| oauth_consumer_key | 9djdj82h48djs9d2 |
| oauth_token | kkk9d7dh3k39sjv7 |
| oauth_signature_method | HMAC-SHA1 |
| oauth_timestamp | 137131201 |
| oauth_nonce | 7d8f3e4a |
| c2 | |
| a3 | 2%20q |
+------------------------+------------------+
Sorted::
+------------------------+------------------+
| Name | Value |
+------------------------+------------------+
| a2 | r%20b |
| a3 | 2%20q |
| a3 | a |
| b5 | %3D%253D |
| c%40 | |
| c2 | |
| oauth_consumer_key | 9djdj82h48djs9d2 |
| oauth_nonce | 7d8f3e4a |
| oauth_signature_method | HMAC-SHA1 |
| oauth_timestamp | 137131201 |
| oauth_token | kkk9d7dh3k39sjv7 |
+------------------------+------------------+
Concatenated Pairs::
+-------------------------------------+
| Name=Value |
+-------------------------------------+
| a2=r%20b |
| a3=2%20q |
| a3=a |
| b5=%3D%253D |
| c%40= |
| c2= |
| oauth_consumer_key=9djdj82h48djs9d2 |
| oauth_nonce=7d8f3e4a |
| oauth_signature_method=HMAC-SHA1 |
| oauth_timestamp=137131201 |
| oauth_token=kkk9d7dh3k39sjv7 |
+-------------------------------------+
and concatenated together into a single string (line breaks are for
display purposes only)::
a2=r%20b&a3=2%20q&a3=a&b5=%3D%253D&c%40=&c2=&oauth_consumer_key=9dj
dj82h48djs9d2&oauth_nonce=7d8f3e4a&oauth_signature_method=HMAC-SHA1
&oauth_timestamp=137131201&oauth_token=kkk9d7dh3k39sjv7
.. _`section 3.4.1.3.2`: http://tools.ietf.org/html/rfc5849#section-3.4.1.3.2
"""
# The parameters collected in `Section 3.4.1.3`_ are normalized into a
# single string as follows:
#
# .. _`Section 3.4.1.3`: http://tools.ietf.org/html/rfc5849#section-3.4.1.3
# 1. First, the name and value of each parameter are encoded
# (`Section 3.6`_).
#
# .. _`Section 3.6`: http://tools.ietf.org/html/rfc5849#section-3.6
key_values = [(utils.escape(k), utils.escape(v)) for k, v in params]
# 2. The parameters are sorted by name, using ascending byte value
# ordering. If two or more parameters share the same name, they
# are sorted by their value.
key_values.sort()
# 3. The name of each parameter is concatenated to its corresponding
# value using an "=" character (ASCII code 61) as a separator, even
# if the value is empty.
parameter_parts = [u'{0}={1}'.format(k, v) for k, v in key_values]
# 4. The sorted name/value pairs are concatenated together into a
# single string by using an "&" character (ASCII code 38) as
# separator.
return u'&'.join(parameter_parts)
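# Usage sketch (ours): a few of the decoded pairs from the tables above.
# The result is escaped, sorted by name (then value) and joined with '&',
# e.g. u'a2=r%20b&a3=2%20q&a3=a&c%40=' for this input.
def _example_normalize_parameters():
    return normalize_parameters([(u'a3', u'2 q'), (u'c@', u''),
                                 (u'a2', u'r b'), (u'a3', u'a')])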
def sign_hmac_sha1(base_string, client_secret, resource_owner_secret):
"""**HMAC-SHA1**
The "HMAC-SHA1" signature method uses the HMAC-SHA1 signature
algorithm as defined in `RFC2104`_::
digest = HMAC-SHA1 (key, text)
Per `section 3.4.2`_ of the spec.
.. _`RFC2104`: http://tools.ietf.org/html/rfc2104
.. _`section 3.4.2`: http://tools.ietf.org/html/rfc5849#section-3.4.2
"""
# The HMAC-SHA1 function variables are used in following way:
# text is set to the value of the signature base string from
# `Section 3.4.1.1`_.
#
# .. _`Section 3.4.1.1`: http://tools.ietf.org/html/rfc5849#section-3.4.1.1
text = base_string
# key is set to the concatenated values of:
# 1. The client shared-secret, after being encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: http://tools.ietf.org/html/rfc5849#section-3.6
key = utils.escape(client_secret or u'')
# 2. An "&" character (ASCII code 38), which MUST be included
# even when either secret is empty.
key += u'&'
# 3. The token shared-secret, after being encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: http://tools.ietf.org/html/rfc5849#section-3.6
key += utils.escape(resource_owner_secret or u'')
# FIXME: HMAC does not support unicode!
key_utf8 = key.encode('utf-8')
text_utf8 = text.encode('utf-8')
signature = hmac.new(key_utf8, text_utf8, hashlib.sha1)
# digest is used to set the value of the "oauth_signature" protocol
# parameter, after the result octet string is base64-encoded
# per `RFC2045, Section 6.8`_.
#
# .. _`RFC2045, Section 6.8`: http://tools.ietf.org/html/rfc2045#section-6.8
return binascii.b2a_base64(signature.digest())[:-1].decode('utf-8')
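# Usage sketch (ours): sign a previously constructed base string.  The
# secret is a placeholder; a missing token secret is fine because the '&'
# separator is always inserted, and the return value is the base64-encoded
# digest that becomes the "oauth_signature" parameter.
def _example_sign_hmac_sha1(base_string):
    return sign_hmac_sha1(base_string, u'client-shared-secret', None)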
def sign_rsa_sha1(base_string, rsa_private_key):
"""**RSA-SHA1**
Per `section 3.4.3`_ of the spec.
The "RSA-SHA1" signature method uses the RSASSA-PKCS1-v1_5 signature
algorithm as defined in `RFC3447, Section 8.2`_ (also known as
PKCS#1), using SHA-1 as the hash function for EMSA-PKCS1-v1_5. To
use this method, the client MUST have established client credentials
with the server that included its RSA public key (in a manner that is
beyond the scope of this specification).
NOTE: this method requires the python-rsa library.
.. _`section 3.4.3`: http://tools.ietf.org/html/rfc5849#section-3.4.3
.. _`RFC3447, Section 8.2`: http://tools.ietf.org/html/rfc3447#section-8.2
"""
# TODO: finish RSA documentation
import rsa
key = rsa.PrivateKey.load_pkcs1(rsa_private_key)
sig = rsa.sign(base_string, key, 'SHA-1')
return binascii.b2a_base64(sig)[:-1]
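# Sketch (ours): RSA-SHA1 expects a PKCS#1 PEM private key and, as noted in
# the docstring, the third-party python-rsa package.  Generating a throwaway
# key like this is for illustration only.
def _example_sign_rsa_sha1(base_string):
    import rsa
    _, private_key = rsa.newkeys(2048)
    return sign_rsa_sha1(base_string, private_key.save_pkcs1())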
def sign_plaintext(client_secret, resource_owner_secret):
"""Sign a request using plaintext.
Per `section 3.4.4`_ of the spec.
The "PLAINTEXT" method does not employ a signature algorithm. It
MUST be used with a transport-layer mechanism such as TLS or SSL (or
sent over a secure channel with equivalent protections). It does not
utilize the signature base string or the "oauth_timestamp" and
"oauth_nonce" parameters.
.. _`section 3.4.4`: http://tools.ietf.org/html/rfc5849#section-3.4.4
"""
# The "oauth_signature" protocol parameter is set to the concatenated
# value of:
# 1. The client shared-secret, after being encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: http://tools.ietf.org/html/rfc5849#section-3.6
signature = utils.escape(client_secret or u'')
# 2. An "&" character (ASCII code 38), which MUST be included even
# when either secret is empty.
signature += u'&'
# 3. The token shared-secret, after being encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: http://tools.ietf.org/html/rfc5849#section-3.6
signature += utils.escape(resource_owner_secret or u'')
return signature
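# Sketch (ours): PLAINTEXT simply joins the two escaped secrets with '&',
# so these placeholder secrets yield u'client-secret&token-secret'.  Per the
# docstring it must only be used over TLS/SSL or an equivalent channel.
def _example_sign_plaintext():
    return sign_plaintext(u'client-secret', u'token-secret')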
|
tuxfux-hlp-notes/python-batches
|
refs/heads/master
|
batch-67/12-modules/myenv/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.py
|
327
|
from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import with_metaclass, viewkeys, PY3
import types
try:
from collections import OrderedDict
except ImportError:
from pip._vendor.ordereddict import OrderedDict
from . import _inputstream
from . import _tokenizer
from . import treebuilders
from .treebuilders.base import Marker
from . import _utils
from .constants import (
spaceCharacters, asciiUpper2Lower,
specialElements, headingElements, cdataElements, rcdataElements,
tokenTypes, tagTokenTypes,
namespaces,
htmlIntegrationPointElements, mathmlTextIntegrationPointElements,
adjustForeignAttributes as adjustForeignAttributesMap,
adjustMathMLAttributes, adjustSVGAttributes,
E,
ReparseException
)
def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs):
"""Parse a string or file-like object into a tree"""
tb = treebuilders.getTreeBuilder(treebuilder)
p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
return p.parse(doc, **kwargs)
def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs):
tb = treebuilders.getTreeBuilder(treebuilder)
p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
return p.parseFragment(doc, container=container, **kwargs)
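# Usage sketch (ours, not part of html5lib): the two convenience wrappers
# above with the default "etree" tree builder.
def _example_parse():
    document = parse("<p>Hello <b>world")           # full document tree
    fragment = parseFragment("<b>bold</b> text")    # children of an implied <div>
    return document, fragment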
def method_decorator_metaclass(function):
class Decorated(type):
def __new__(meta, classname, bases, classDict):
for attributeName, attribute in classDict.items():
if isinstance(attribute, types.FunctionType):
attribute = function(attribute)
classDict[attributeName] = attribute
return type.__new__(meta, classname, bases, classDict)
return Decorated
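# Sketch (ours): method_decorator_metaclass wraps every plain function on a
# class with the supplied decorator -- getPhases() below uses it to wrap the
# phase handlers with its debug logger.  ``tracer`` and ``Traced`` are
# made-up names for illustration.
def _example_method_decorator_metaclass():
    def tracer(function):
        def wrapped(self, *args, **kwargs):
            print("calling %s" % function.__name__)
            return function(self, *args, **kwargs)
        return wrapped
    class Traced(with_metaclass(method_decorator_metaclass(tracer))):
        def greet(self):
            return "hi"
    return Traced().greet()  # prints "calling greet" and returns "hi"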
class HTMLParser(object):
"""HTML parser. Generates a tree structure from a stream of (possibly
malformed) HTML"""
def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False):
"""
strict - raise an exception when a parse error is encountered
tree - a treebuilder class controlling the type of tree that will be
returned. Built in treebuilders can be accessed through
html5lib.treebuilders.getTreeBuilder(treeType)
"""
# Raise an exception on the first error encountered
self.strict = strict
if tree is None:
tree = treebuilders.getTreeBuilder("etree")
self.tree = tree(namespaceHTMLElements)
self.errors = []
self.phases = dict([(name, cls(self, self.tree)) for name, cls in
getPhases(debug).items()])
def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs):
self.innerHTMLMode = innerHTML
self.container = container
self.scripting = scripting
self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs)
self.reset()
try:
self.mainLoop()
except ReparseException:
self.reset()
self.mainLoop()
def reset(self):
self.tree.reset()
self.firstStartTag = False
self.errors = []
self.log = [] # only used with debug mode
# "quirks" / "limited quirks" / "no quirks"
self.compatMode = "no quirks"
if self.innerHTMLMode:
self.innerHTML = self.container.lower()
if self.innerHTML in cdataElements:
self.tokenizer.state = self.tokenizer.rcdataState
elif self.innerHTML in rcdataElements:
self.tokenizer.state = self.tokenizer.rawtextState
elif self.innerHTML == 'plaintext':
self.tokenizer.state = self.tokenizer.plaintextState
else:
# state already is data state
# self.tokenizer.state = self.tokenizer.dataState
pass
self.phase = self.phases["beforeHtml"]
self.phase.insertHtmlElement()
self.resetInsertionMode()
else:
self.innerHTML = False # pylint:disable=redefined-variable-type
self.phase = self.phases["initial"]
self.lastPhase = None
self.beforeRCDataPhase = None
self.framesetOK = True
@property
def documentEncoding(self):
"""The name of the character encoding
that was used to decode the input stream,
or :obj:`None` if that is not determined yet.
"""
if not hasattr(self, 'tokenizer'):
return None
return self.tokenizer.stream.charEncoding[0].name
def isHTMLIntegrationPoint(self, element):
if (element.name == "annotation-xml" and
element.namespace == namespaces["mathml"]):
return ("encoding" in element.attributes and
element.attributes["encoding"].translate(
asciiUpper2Lower) in
("text/html", "application/xhtml+xml"))
else:
return (element.namespace, element.name) in htmlIntegrationPointElements
def isMathMLTextIntegrationPoint(self, element):
return (element.namespace, element.name) in mathmlTextIntegrationPointElements
def mainLoop(self):
CharactersToken = tokenTypes["Characters"]
SpaceCharactersToken = tokenTypes["SpaceCharacters"]
StartTagToken = tokenTypes["StartTag"]
EndTagToken = tokenTypes["EndTag"]
CommentToken = tokenTypes["Comment"]
DoctypeToken = tokenTypes["Doctype"]
ParseErrorToken = tokenTypes["ParseError"]
for token in self.normalizedTokens():
prev_token = None
new_token = token
while new_token is not None:
prev_token = new_token
currentNode = self.tree.openElements[-1] if self.tree.openElements else None
currentNodeNamespace = currentNode.namespace if currentNode else None
currentNodeName = currentNode.name if currentNode else None
type = new_token["type"]
if type == ParseErrorToken:
self.parseError(new_token["data"], new_token.get("datavars", {}))
new_token = None
else:
if (len(self.tree.openElements) == 0 or
currentNodeNamespace == self.tree.defaultNamespace or
(self.isMathMLTextIntegrationPoint(currentNode) and
((type == StartTagToken and
token["name"] not in frozenset(["mglyph", "malignmark"])) or
type in (CharactersToken, SpaceCharactersToken))) or
(currentNodeNamespace == namespaces["mathml"] and
currentNodeName == "annotation-xml" and
type == StartTagToken and
token["name"] == "svg") or
(self.isHTMLIntegrationPoint(currentNode) and
type in (StartTagToken, CharactersToken, SpaceCharactersToken))):
phase = self.phase
else:
phase = self.phases["inForeignContent"]
if type == CharactersToken:
new_token = phase.processCharacters(new_token)
elif type == SpaceCharactersToken:
new_token = phase.processSpaceCharacters(new_token)
elif type == StartTagToken:
new_token = phase.processStartTag(new_token)
elif type == EndTagToken:
new_token = phase.processEndTag(new_token)
elif type == CommentToken:
new_token = phase.processComment(new_token)
elif type == DoctypeToken:
new_token = phase.processDoctype(new_token)
if (type == StartTagToken and prev_token["selfClosing"] and
not prev_token["selfClosingAcknowledged"]):
self.parseError("non-void-element-with-trailing-solidus",
{"name": prev_token["name"]})
# When the loop finishes it's EOF
reprocess = True
phases = []
while reprocess:
phases.append(self.phase)
reprocess = self.phase.processEOF()
if reprocess:
assert self.phase not in phases
def normalizedTokens(self):
for token in self.tokenizer:
yield self.normalizeToken(token)
def parse(self, stream, *args, **kwargs):
"""Parse a HTML document into a well-formed tree
stream - a filelike object or string containing the HTML to be parsed
The optional encoding parameter must be a string that indicates
the encoding. If specified, that encoding will be used,
regardless of any BOM or later declaration (such as in a meta
element)
scripting - treat noscript elements as if JavaScript were turned on
"""
self._parse(stream, False, None, *args, **kwargs)
return self.tree.getDocument()
def parseFragment(self, stream, *args, **kwargs):
"""Parse a HTML fragment into a well-formed tree fragment
container - name of the element we're setting the innerHTML property
if set to None, default to 'div'
stream - a filelike object or string containing the HTML to be parsed
The optional encoding parameter must be a string that indicates
the encoding. If specified, that encoding will be used,
regardless of any BOM or later declaration (such as in a meta
element)
scripting - treat noscript elements as if JavaScript were turned on
"""
self._parse(stream, True, *args, **kwargs)
return self.tree.getFragment()
def parseError(self, errorcode="XXX-undefined-error", datavars=None):
# XXX The idea is to make errorcode mandatory.
if datavars is None:
datavars = {}
self.errors.append((self.tokenizer.stream.position(), errorcode, datavars))
if self.strict:
raise ParseError(E[errorcode] % datavars)
def normalizeToken(self, token):
""" HTML5 specific normalizations to the token stream """
if token["type"] == tokenTypes["StartTag"]:
raw = token["data"]
token["data"] = OrderedDict(raw)
if len(raw) > len(token["data"]):
# we had some duplicated attribute, fix so first wins
token["data"].update(raw[::-1])
return token
def adjustMathMLAttributes(self, token):
adjust_attributes(token, adjustMathMLAttributes)
def adjustSVGAttributes(self, token):
adjust_attributes(token, adjustSVGAttributes)
def adjustForeignAttributes(self, token):
adjust_attributes(token, adjustForeignAttributesMap)
def reparseTokenNormal(self, token):
# pylint:disable=unused-argument
self.parser.phase()
def resetInsertionMode(self):
# The name of this method is mostly historical. (It's also used in the
# specification.)
last = False
newModes = {
"select": "inSelect",
"td": "inCell",
"th": "inCell",
"tr": "inRow",
"tbody": "inTableBody",
"thead": "inTableBody",
"tfoot": "inTableBody",
"caption": "inCaption",
"colgroup": "inColumnGroup",
"table": "inTable",
"head": "inBody",
"body": "inBody",
"frameset": "inFrameset",
"html": "beforeHead"
}
for node in self.tree.openElements[::-1]:
nodeName = node.name
new_phase = None
if node == self.tree.openElements[0]:
assert self.innerHTML
last = True
nodeName = self.innerHTML
# Check for conditions that should only happen in the innerHTML
# case
if nodeName in ("select", "colgroup", "head", "html"):
assert self.innerHTML
if not last and node.namespace != self.tree.defaultNamespace:
continue
if nodeName in newModes:
new_phase = self.phases[newModes[nodeName]]
break
elif last:
new_phase = self.phases["inBody"]
break
self.phase = new_phase
def parseRCDataRawtext(self, token, contentType):
"""Generic RCDATA/RAWTEXT Parsing algorithm
contentType - RCDATA or RAWTEXT
"""
assert contentType in ("RAWTEXT", "RCDATA")
self.tree.insertElement(token)
if contentType == "RAWTEXT":
self.tokenizer.state = self.tokenizer.rawtextState
else:
self.tokenizer.state = self.tokenizer.rcdataState
self.originalPhase = self.phase
self.phase = self.phases["text"]
@_utils.memoize
def getPhases(debug):
def log(function):
"""Logger that records which phase processes each token"""
type_names = dict((value, key) for key, value in
tokenTypes.items())
def wrapped(self, *args, **kwargs):
if function.__name__.startswith("process") and len(args) > 0:
token = args[0]
try:
info = {"type": type_names[token['type']]}
except:
raise
if token['type'] in tagTokenTypes:
info["name"] = token['name']
self.parser.log.append((self.parser.tokenizer.state.__name__,
self.parser.phase.__class__.__name__,
self.__class__.__name__,
function.__name__,
info))
return function(self, *args, **kwargs)
else:
return function(self, *args, **kwargs)
return wrapped
def getMetaclass(use_metaclass, metaclass_func):
if use_metaclass:
return method_decorator_metaclass(metaclass_func)
else:
return type
# pylint:disable=unused-argument
class Phase(with_metaclass(getMetaclass(debug, log))):
"""Base class for helper object that implements each phase of processing
"""
def __init__(self, parser, tree):
self.parser = parser
self.tree = tree
def processEOF(self):
raise NotImplementedError
def processComment(self, token):
# For most phases the following is correct. Where it's not it will be
# overridden.
self.tree.insertComment(token, self.tree.openElements[-1])
def processDoctype(self, token):
self.parser.parseError("unexpected-doctype")
def processCharacters(self, token):
self.tree.insertText(token["data"])
def processSpaceCharacters(self, token):
self.tree.insertText(token["data"])
def processStartTag(self, token):
return self.startTagHandler[token["name"]](token)
def startTagHtml(self, token):
if not self.parser.firstStartTag and token["name"] == "html":
self.parser.parseError("non-html-root")
# XXX Need a check here to see if the first start tag token emitted is
# this token... If it's not, invoke self.parser.parseError().
for attr, value in token["data"].items():
if attr not in self.tree.openElements[0].attributes:
self.tree.openElements[0].attributes[attr] = value
self.parser.firstStartTag = False
def processEndTag(self, token):
return self.endTagHandler[token["name"]](token)
class InitialPhase(Phase):
def processSpaceCharacters(self, token):
pass
def processComment(self, token):
self.tree.insertComment(token, self.tree.document)
def processDoctype(self, token):
name = token["name"]
publicId = token["publicId"]
systemId = token["systemId"]
correct = token["correct"]
if (name != "html" or publicId is not None or
systemId is not None and systemId != "about:legacy-compat"):
self.parser.parseError("unknown-doctype")
if publicId is None:
publicId = ""
self.tree.insertDoctype(token)
if publicId != "":
publicId = publicId.translate(asciiUpper2Lower)
if (not correct or token["name"] != "html" or
publicId.startswith(
("+//silmaril//dtd html pro v0r11 19970101//",
"-//advasoft ltd//dtd html 3.0 aswedit + extensions//",
"-//as//dtd html 3.0 aswedit + extensions//",
"-//ietf//dtd html 2.0 level 1//",
"-//ietf//dtd html 2.0 level 2//",
"-//ietf//dtd html 2.0 strict level 1//",
"-//ietf//dtd html 2.0 strict level 2//",
"-//ietf//dtd html 2.0 strict//",
"-//ietf//dtd html 2.0//",
"-//ietf//dtd html 2.1e//",
"-//ietf//dtd html 3.0//",
"-//ietf//dtd html 3.2 final//",
"-//ietf//dtd html 3.2//",
"-//ietf//dtd html 3//",
"-//ietf//dtd html level 0//",
"-//ietf//dtd html level 1//",
"-//ietf//dtd html level 2//",
"-//ietf//dtd html level 3//",
"-//ietf//dtd html strict level 0//",
"-//ietf//dtd html strict level 1//",
"-//ietf//dtd html strict level 2//",
"-//ietf//dtd html strict level 3//",
"-//ietf//dtd html strict//",
"-//ietf//dtd html//",
"-//metrius//dtd metrius presentational//",
"-//microsoft//dtd internet explorer 2.0 html strict//",
"-//microsoft//dtd internet explorer 2.0 html//",
"-//microsoft//dtd internet explorer 2.0 tables//",
"-//microsoft//dtd internet explorer 3.0 html strict//",
"-//microsoft//dtd internet explorer 3.0 html//",
"-//microsoft//dtd internet explorer 3.0 tables//",
"-//netscape comm. corp.//dtd html//",
"-//netscape comm. corp.//dtd strict html//",
"-//o'reilly and associates//dtd html 2.0//",
"-//o'reilly and associates//dtd html extended 1.0//",
"-//o'reilly and associates//dtd html extended relaxed 1.0//",
"-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//",
"-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//",
"-//spyglass//dtd html 2.0 extended//",
"-//sq//dtd html 2.0 hotmetal + extensions//",
"-//sun microsystems corp.//dtd hotjava html//",
"-//sun microsystems corp.//dtd hotjava strict html//",
"-//w3c//dtd html 3 1995-03-24//",
"-//w3c//dtd html 3.2 draft//",
"-//w3c//dtd html 3.2 final//",
"-//w3c//dtd html 3.2//",
"-//w3c//dtd html 3.2s draft//",
"-//w3c//dtd html 4.0 frameset//",
"-//w3c//dtd html 4.0 transitional//",
"-//w3c//dtd html experimental 19960712//",
"-//w3c//dtd html experimental 970421//",
"-//w3c//dtd w3 html//",
"-//w3o//dtd w3 html 3.0//",
"-//webtechs//dtd mozilla html 2.0//",
"-//webtechs//dtd mozilla html//")) or
publicId in ("-//w3o//dtd w3 html strict 3.0//en//",
"-/w3c/dtd html 4.0 transitional/en",
"html") or
publicId.startswith(
("-//w3c//dtd html 4.01 frameset//",
"-//w3c//dtd html 4.01 transitional//")) and
systemId is None or
systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"):
self.parser.compatMode = "quirks"
elif (publicId.startswith(
("-//w3c//dtd xhtml 1.0 frameset//",
"-//w3c//dtd xhtml 1.0 transitional//")) or
publicId.startswith(
("-//w3c//dtd html 4.01 frameset//",
"-//w3c//dtd html 4.01 transitional//")) and
systemId is not None):
self.parser.compatMode = "limited quirks"
self.parser.phase = self.parser.phases["beforeHtml"]
def anythingElse(self):
self.parser.compatMode = "quirks"
self.parser.phase = self.parser.phases["beforeHtml"]
def processCharacters(self, token):
self.parser.parseError("expected-doctype-but-got-chars")
self.anythingElse()
return token
def processStartTag(self, token):
self.parser.parseError("expected-doctype-but-got-start-tag",
{"name": token["name"]})
self.anythingElse()
return token
def processEndTag(self, token):
self.parser.parseError("expected-doctype-but-got-end-tag",
{"name": token["name"]})
self.anythingElse()
return token
def processEOF(self):
self.parser.parseError("expected-doctype-but-got-eof")
self.anythingElse()
return True
class BeforeHtmlPhase(Phase):
# helper methods
def insertHtmlElement(self):
self.tree.insertRoot(impliedTagToken("html", "StartTag"))
self.parser.phase = self.parser.phases["beforeHead"]
# other
def processEOF(self):
self.insertHtmlElement()
return True
def processComment(self, token):
self.tree.insertComment(token, self.tree.document)
def processSpaceCharacters(self, token):
pass
def processCharacters(self, token):
self.insertHtmlElement()
return token
def processStartTag(self, token):
if token["name"] == "html":
self.parser.firstStartTag = True
self.insertHtmlElement()
return token
def processEndTag(self, token):
if token["name"] not in ("head", "body", "html", "br"):
self.parser.parseError("unexpected-end-tag-before-html",
{"name": token["name"]})
else:
self.insertHtmlElement()
return token
class BeforeHeadPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("head", self.startTagHead)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
(("head", "body", "html", "br"), self.endTagImplyHead)
])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
self.startTagHead(impliedTagToken("head", "StartTag"))
return True
def processSpaceCharacters(self, token):
pass
def processCharacters(self, token):
self.startTagHead(impliedTagToken("head", "StartTag"))
return token
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagHead(self, token):
self.tree.insertElement(token)
self.tree.headPointer = self.tree.openElements[-1]
self.parser.phase = self.parser.phases["inHead"]
def startTagOther(self, token):
self.startTagHead(impliedTagToken("head", "StartTag"))
return token
def endTagImplyHead(self, token):
self.startTagHead(impliedTagToken("head", "StartTag"))
return token
def endTagOther(self, token):
self.parser.parseError("end-tag-after-implied-root",
{"name": token["name"]})
class InHeadPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("title", self.startTagTitle),
(("noframes", "style"), self.startTagNoFramesStyle),
("noscript", self.startTagNoscript),
("script", self.startTagScript),
(("base", "basefont", "bgsound", "command", "link"),
self.startTagBaseLinkCommand),
("meta", self.startTagMeta),
("head", self.startTagHead)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("head", self.endTagHead),
(("br", "html", "body"), self.endTagHtmlBodyBr)
])
self.endTagHandler.default = self.endTagOther
# the real thing
def processEOF(self):
self.anythingElse()
return True
def processCharacters(self, token):
self.anythingElse()
return token
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagHead(self, token):
self.parser.parseError("two-heads-are-not-better-than-one")
def startTagBaseLinkCommand(self, token):
self.tree.insertElement(token)
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
def startTagMeta(self, token):
self.tree.insertElement(token)
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
attributes = token["data"]
if self.parser.tokenizer.stream.charEncoding[1] == "tentative":
if "charset" in attributes:
self.parser.tokenizer.stream.changeEncoding(attributes["charset"])
elif ("content" in attributes and
"http-equiv" in attributes and
attributes["http-equiv"].lower() == "content-type"):
# Encoding it as UTF-8 here is a hack, as really we should pass
# the abstract Unicode string, and just use the
# ContentAttrParser on that, but using UTF-8 allows all chars
# to be encoded and, as an ASCII superset, works.
data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8"))
parser = _inputstream.ContentAttrParser(data)
codec = parser.parse()
self.parser.tokenizer.stream.changeEncoding(codec)
def startTagTitle(self, token):
self.parser.parseRCDataRawtext(token, "RCDATA")
def startTagNoFramesStyle(self, token):
# Need to decide whether to implement the scripting-disabled case
self.parser.parseRCDataRawtext(token, "RAWTEXT")
def startTagNoscript(self, token):
if self.parser.scripting:
self.parser.parseRCDataRawtext(token, "RAWTEXT")
else:
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inHeadNoscript"]
def startTagScript(self, token):
self.tree.insertElement(token)
self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState
self.parser.originalPhase = self.parser.phase
self.parser.phase = self.parser.phases["text"]
def startTagOther(self, token):
self.anythingElse()
return token
def endTagHead(self, token):
node = self.parser.tree.openElements.pop()
assert node.name == "head", "Expected head got %s" % node.name
self.parser.phase = self.parser.phases["afterHead"]
def endTagHtmlBodyBr(self, token):
self.anythingElse()
return token
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def anythingElse(self):
self.endTagHead(impliedTagToken("head"))
class InHeadNoscriptPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
(("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand),
(("head", "noscript"), self.startTagHeadNoscript),
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("noscript", self.endTagNoscript),
("br", self.endTagBr),
])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
self.parser.parseError("eof-in-head-noscript")
self.anythingElse()
return True
def processComment(self, token):
return self.parser.phases["inHead"].processComment(token)
def processCharacters(self, token):
self.parser.parseError("char-in-head-noscript")
self.anythingElse()
return token
def processSpaceCharacters(self, token):
return self.parser.phases["inHead"].processSpaceCharacters(token)
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagBaseLinkCommand(self, token):
return self.parser.phases["inHead"].processStartTag(token)
def startTagHeadNoscript(self, token):
self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
def startTagOther(self, token):
self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]})
self.anythingElse()
return token
def endTagNoscript(self, token):
node = self.parser.tree.openElements.pop()
assert node.name == "noscript", "Expected noscript got %s" % node.name
self.parser.phase = self.parser.phases["inHead"]
def endTagBr(self, token):
self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]})
self.anythingElse()
return token
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def anythingElse(self):
# Caller must raise parse error first!
self.endTagNoscript(impliedTagToken("noscript"))
class AfterHeadPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("body", self.startTagBody),
("frameset", self.startTagFrameset),
(("base", "basefont", "bgsound", "link", "meta", "noframes", "script",
"style", "title"),
self.startTagFromHead),
("head", self.startTagHead)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"),
self.endTagHtmlBodyBr)])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
self.anythingElse()
return True
def processCharacters(self, token):
self.anythingElse()
return token
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagBody(self, token):
self.parser.framesetOK = False
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inBody"]
def startTagFrameset(self, token):
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inFrameset"]
def startTagFromHead(self, token):
self.parser.parseError("unexpected-start-tag-out-of-my-head",
{"name": token["name"]})
self.tree.openElements.append(self.tree.headPointer)
self.parser.phases["inHead"].processStartTag(token)
for node in self.tree.openElements[::-1]:
if node.name == "head":
self.tree.openElements.remove(node)
break
def startTagHead(self, token):
self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
def startTagOther(self, token):
self.anythingElse()
return token
def endTagHtmlBodyBr(self, token):
self.anythingElse()
return token
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def anythingElse(self):
self.tree.insertElement(impliedTagToken("body", "StartTag"))
self.parser.phase = self.parser.phases["inBody"]
self.parser.framesetOK = True
class InBodyPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody
# the really-really-really-very crazy mode
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
# Set this to the default handler
self.processSpaceCharacters = self.processSpaceCharactersNonPre
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
(("base", "basefont", "bgsound", "command", "link", "meta",
"script", "style", "title"),
self.startTagProcessInHead),
("body", self.startTagBody),
("frameset", self.startTagFrameset),
(("address", "article", "aside", "blockquote", "center", "details",
"dir", "div", "dl", "fieldset", "figcaption", "figure",
"footer", "header", "hgroup", "main", "menu", "nav", "ol", "p",
"section", "summary", "ul"),
self.startTagCloseP),
(headingElements, self.startTagHeading),
(("pre", "listing"), self.startTagPreListing),
("form", self.startTagForm),
(("li", "dd", "dt"), self.startTagListItem),
("plaintext", self.startTagPlaintext),
("a", self.startTagA),
(("b", "big", "code", "em", "font", "i", "s", "small", "strike",
"strong", "tt", "u"), self.startTagFormatting),
("nobr", self.startTagNobr),
("button", self.startTagButton),
(("applet", "marquee", "object"), self.startTagAppletMarqueeObject),
("xmp", self.startTagXmp),
("table", self.startTagTable),
(("area", "br", "embed", "img", "keygen", "wbr"),
self.startTagVoidFormatting),
(("param", "source", "track"), self.startTagParamSource),
("input", self.startTagInput),
("hr", self.startTagHr),
("image", self.startTagImage),
("isindex", self.startTagIsIndex),
("textarea", self.startTagTextarea),
("iframe", self.startTagIFrame),
("noscript", self.startTagNoscript),
(("noembed", "noframes"), self.startTagRawtext),
("select", self.startTagSelect),
(("rp", "rt"), self.startTagRpRt),
(("option", "optgroup"), self.startTagOpt),
(("math"), self.startTagMath),
(("svg"), self.startTagSvg),
(("caption", "col", "colgroup", "frame", "head",
"tbody", "td", "tfoot", "th", "thead",
"tr"), self.startTagMisplaced)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("body", self.endTagBody),
("html", self.endTagHtml),
(("address", "article", "aside", "blockquote", "button", "center",
"details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure",
"footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre",
"section", "summary", "ul"), self.endTagBlock),
("form", self.endTagForm),
("p", self.endTagP),
(("dd", "dt", "li"), self.endTagListItem),
(headingElements, self.endTagHeading),
(("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small",
"strike", "strong", "tt", "u"), self.endTagFormatting),
(("applet", "marquee", "object"), self.endTagAppletMarqueeObject),
("br", self.endTagBr),
])
self.endTagHandler.default = self.endTagOther
def isMatchingFormattingElement(self, node1, node2):
return (node1.name == node2.name and
node1.namespace == node2.namespace and
node1.attributes == node2.attributes)
# helper
def addFormattingElement(self, token):
self.tree.insertElement(token)
element = self.tree.openElements[-1]
matchingElements = []
for node in self.tree.activeFormattingElements[::-1]:
if node is Marker:
break
elif self.isMatchingFormattingElement(node, element):
matchingElements.append(node)
assert len(matchingElements) <= 3
if len(matchingElements) == 3:
self.tree.activeFormattingElements.remove(matchingElements[-1])
self.tree.activeFormattingElements.append(element)
# the real deal
def processEOF(self):
allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td",
"tfoot", "th", "thead", "tr", "body",
"html"))
for node in self.tree.openElements[::-1]:
if node.name not in allowed_elements:
self.parser.parseError("expected-closing-tag-but-got-eof")
break
# Stop parsing
def processSpaceCharactersDropNewline(self, token):
# Sometimes (start of <pre>, <listing>, and <textarea> blocks) we
# want to drop leading newlines
data = token["data"]
self.processSpaceCharacters = self.processSpaceCharactersNonPre
if (data.startswith("\n") and
self.tree.openElements[-1].name in ("pre", "listing", "textarea") and
not self.tree.openElements[-1].hasContent()):
data = data[1:]
if data:
self.tree.reconstructActiveFormattingElements()
self.tree.insertText(data)
def processCharacters(self, token):
if token["data"] == "\u0000":
# The tokenizer should always emit null on its own
return
self.tree.reconstructActiveFormattingElements()
self.tree.insertText(token["data"])
# This must be bad for performance
if (self.parser.framesetOK and
any([char not in spaceCharacters
for char in token["data"]])):
self.parser.framesetOK = False
def processSpaceCharactersNonPre(self, token):
self.tree.reconstructActiveFormattingElements()
self.tree.insertText(token["data"])
def startTagProcessInHead(self, token):
return self.parser.phases["inHead"].processStartTag(token)
def startTagBody(self, token):
self.parser.parseError("unexpected-start-tag", {"name": "body"})
if (len(self.tree.openElements) == 1 or
self.tree.openElements[1].name != "body"):
assert self.parser.innerHTML
else:
self.parser.framesetOK = False
for attr, value in token["data"].items():
if attr not in self.tree.openElements[1].attributes:
self.tree.openElements[1].attributes[attr] = value
def startTagFrameset(self, token):
self.parser.parseError("unexpected-start-tag", {"name": "frameset"})
if (len(self.tree.openElements) == 1 or self.tree.openElements[1].name != "body"):
assert self.parser.innerHTML
elif not self.parser.framesetOK:
pass
else:
if self.tree.openElements[1].parent:
self.tree.openElements[1].parent.removeChild(self.tree.openElements[1])
while self.tree.openElements[-1].name != "html":
self.tree.openElements.pop()
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inFrameset"]
def startTagCloseP(self, token):
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
self.tree.insertElement(token)
def startTagPreListing(self, token):
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
self.tree.insertElement(token)
self.parser.framesetOK = False
self.processSpaceCharacters = self.processSpaceCharactersDropNewline
def startTagForm(self, token):
if self.tree.formPointer:
self.parser.parseError("unexpected-start-tag", {"name": "form"})
else:
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
self.tree.insertElement(token)
self.tree.formPointer = self.tree.openElements[-1]
def startTagListItem(self, token):
self.parser.framesetOK = False
stopNamesMap = {"li": ["li"],
"dt": ["dt", "dd"],
"dd": ["dt", "dd"]}
stopNames = stopNamesMap[token["name"]]
for node in reversed(self.tree.openElements):
if node.name in stopNames:
self.parser.phase.processEndTag(
impliedTagToken(node.name, "EndTag"))
break
if (node.nameTuple in specialElements and
node.name not in ("address", "div", "p")):
break
if self.tree.elementInScope("p", variant="button"):
self.parser.phase.processEndTag(
impliedTagToken("p", "EndTag"))
self.tree.insertElement(token)
def startTagPlaintext(self, token):
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
self.tree.insertElement(token)
self.parser.tokenizer.state = self.parser.tokenizer.plaintextState
def startTagHeading(self, token):
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
if self.tree.openElements[-1].name in headingElements:
self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
self.tree.openElements.pop()
self.tree.insertElement(token)
def startTagA(self, token):
afeAElement = self.tree.elementInActiveFormattingElements("a")
if afeAElement:
self.parser.parseError("unexpected-start-tag-implies-end-tag",
{"startName": "a", "endName": "a"})
self.endTagFormatting(impliedTagToken("a"))
if afeAElement in self.tree.openElements:
self.tree.openElements.remove(afeAElement)
if afeAElement in self.tree.activeFormattingElements:
self.tree.activeFormattingElements.remove(afeAElement)
self.tree.reconstructActiveFormattingElements()
self.addFormattingElement(token)
def startTagFormatting(self, token):
self.tree.reconstructActiveFormattingElements()
self.addFormattingElement(token)
def startTagNobr(self, token):
self.tree.reconstructActiveFormattingElements()
if self.tree.elementInScope("nobr"):
self.parser.parseError("unexpected-start-tag-implies-end-tag",
{"startName": "nobr", "endName": "nobr"})
self.processEndTag(impliedTagToken("nobr"))
# XXX Need tests that trigger the following
self.tree.reconstructActiveFormattingElements()
self.addFormattingElement(token)
def startTagButton(self, token):
if self.tree.elementInScope("button"):
self.parser.parseError("unexpected-start-tag-implies-end-tag",
{"startName": "button", "endName": "button"})
self.processEndTag(impliedTagToken("button"))
return token
else:
self.tree.reconstructActiveFormattingElements()
self.tree.insertElement(token)
self.parser.framesetOK = False
def startTagAppletMarqueeObject(self, token):
self.tree.reconstructActiveFormattingElements()
self.tree.insertElement(token)
self.tree.activeFormattingElements.append(Marker)
self.parser.framesetOK = False
def startTagXmp(self, token):
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
self.tree.reconstructActiveFormattingElements()
self.parser.framesetOK = False
self.parser.parseRCDataRawtext(token, "RAWTEXT")
def startTagTable(self, token):
if self.parser.compatMode != "quirks":
if self.tree.elementInScope("p", variant="button"):
self.processEndTag(impliedTagToken("p"))
self.tree.insertElement(token)
self.parser.framesetOK = False
self.parser.phase = self.parser.phases["inTable"]
def startTagVoidFormatting(self, token):
self.tree.reconstructActiveFormattingElements()
self.tree.insertElement(token)
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
self.parser.framesetOK = False
def startTagInput(self, token):
framesetOK = self.parser.framesetOK
self.startTagVoidFormatting(token)
if ("type" in token["data"] and
token["data"]["type"].translate(asciiUpper2Lower) == "hidden"):
# input type=hidden doesn't change framesetOK
self.parser.framesetOK = framesetOK
def startTagParamSource(self, token):
self.tree.insertElement(token)
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
def startTagHr(self, token):
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
self.tree.insertElement(token)
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
self.parser.framesetOK = False
def startTagImage(self, token):
# No really...
self.parser.parseError("unexpected-start-tag-treated-as",
{"originalName": "image", "newName": "img"})
self.processStartTag(impliedTagToken("img", "StartTag",
attributes=token["data"],
selfClosing=token["selfClosing"]))
def startTagIsIndex(self, token):
self.parser.parseError("deprecated-tag", {"name": "isindex"})
if self.tree.formPointer:
return
form_attrs = {}
if "action" in token["data"]:
form_attrs["action"] = token["data"]["action"]
self.processStartTag(impliedTagToken("form", "StartTag",
attributes=form_attrs))
self.processStartTag(impliedTagToken("hr", "StartTag"))
self.processStartTag(impliedTagToken("label", "StartTag"))
# XXX Localization ...
if "prompt" in token["data"]:
prompt = token["data"]["prompt"]
else:
prompt = "This is a searchable index. Enter search keywords: "
self.processCharacters(
{"type": tokenTypes["Characters"], "data": prompt})
attributes = token["data"].copy()
if "action" in attributes:
del attributes["action"]
if "prompt" in attributes:
del attributes["prompt"]
attributes["name"] = "isindex"
self.processStartTag(impliedTagToken("input", "StartTag",
attributes=attributes,
selfClosing=token["selfClosing"]))
self.processEndTag(impliedTagToken("label"))
self.processStartTag(impliedTagToken("hr", "StartTag"))
self.processEndTag(impliedTagToken("form"))
def startTagTextarea(self, token):
self.tree.insertElement(token)
self.parser.tokenizer.state = self.parser.tokenizer.rcdataState
self.processSpaceCharacters = self.processSpaceCharactersDropNewline
self.parser.framesetOK = False
def startTagIFrame(self, token):
self.parser.framesetOK = False
self.startTagRawtext(token)
def startTagNoscript(self, token):
if self.parser.scripting:
self.startTagRawtext(token)
else:
self.startTagOther(token)
def startTagRawtext(self, token):
"""iframe, noembed noframes, noscript(if scripting enabled)"""
self.parser.parseRCDataRawtext(token, "RAWTEXT")
def startTagOpt(self, token):
if self.tree.openElements[-1].name == "option":
self.parser.phase.processEndTag(impliedTagToken("option"))
self.tree.reconstructActiveFormattingElements()
self.parser.tree.insertElement(token)
def startTagSelect(self, token):
self.tree.reconstructActiveFormattingElements()
self.tree.insertElement(token)
self.parser.framesetOK = False
if self.parser.phase in (self.parser.phases["inTable"],
self.parser.phases["inCaption"],
self.parser.phases["inColumnGroup"],
self.parser.phases["inTableBody"],
self.parser.phases["inRow"],
self.parser.phases["inCell"]):
self.parser.phase = self.parser.phases["inSelectInTable"]
else:
self.parser.phase = self.parser.phases["inSelect"]
def startTagRpRt(self, token):
if self.tree.elementInScope("ruby"):
self.tree.generateImpliedEndTags()
if self.tree.openElements[-1].name != "ruby":
self.parser.parseError()
self.tree.insertElement(token)
def startTagMath(self, token):
self.tree.reconstructActiveFormattingElements()
self.parser.adjustMathMLAttributes(token)
self.parser.adjustForeignAttributes(token)
token["namespace"] = namespaces["mathml"]
self.tree.insertElement(token)
# Need to get the parse error right for the case where the token
# has a namespace not equal to the xmlns attribute
if token["selfClosing"]:
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
def startTagSvg(self, token):
self.tree.reconstructActiveFormattingElements()
self.parser.adjustSVGAttributes(token)
self.parser.adjustForeignAttributes(token)
token["namespace"] = namespaces["svg"]
self.tree.insertElement(token)
# Need to get the parse error right for the case where the token
# has a namespace not equal to the xmlns attribute
if token["selfClosing"]:
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
def startTagMisplaced(self, token):
""" Elements that should be children of other elements that have a
different insertion mode; here they are ignored
"caption", "col", "colgroup", "frame", "frameset", "head",
"option", "optgroup", "tbody", "td", "tfoot", "th", "thead",
"tr", "noscript"
"""
self.parser.parseError("unexpected-start-tag-ignored", {"name": token["name"]})
def startTagOther(self, token):
self.tree.reconstructActiveFormattingElements()
self.tree.insertElement(token)
def endTagP(self, token):
if not self.tree.elementInScope("p", variant="button"):
self.startTagCloseP(impliedTagToken("p", "StartTag"))
self.parser.parseError("unexpected-end-tag", {"name": "p"})
self.endTagP(impliedTagToken("p", "EndTag"))
else:
self.tree.generateImpliedEndTags("p")
if self.tree.openElements[-1].name != "p":
self.parser.parseError("unexpected-end-tag", {"name": "p"})
node = self.tree.openElements.pop()
while node.name != "p":
node = self.tree.openElements.pop()
def endTagBody(self, token):
if not self.tree.elementInScope("body"):
self.parser.parseError()
return
elif self.tree.openElements[-1].name != "body":
for node in self.tree.openElements[2:]:
if node.name not in frozenset(("dd", "dt", "li", "optgroup",
"option", "p", "rp", "rt",
"tbody", "td", "tfoot",
"th", "thead", "tr", "body",
"html")):
# Not sure this is the correct name for the parse error
self.parser.parseError(
"expected-one-end-tag-but-got-another",
{"gotName": "body", "expectedName": node.name})
break
self.parser.phase = self.parser.phases["afterBody"]
def endTagHtml(self, token):
# We repeat the test for the body end tag token being ignored here
if self.tree.elementInScope("body"):
self.endTagBody(impliedTagToken("body"))
return token
def endTagBlock(self, token):
# Put us back in the right whitespace handling mode
if token["name"] == "pre":
self.processSpaceCharacters = self.processSpaceCharactersNonPre
inScope = self.tree.elementInScope(token["name"])
if inScope:
self.tree.generateImpliedEndTags()
if self.tree.openElements[-1].name != token["name"]:
self.parser.parseError("end-tag-too-early", {"name": token["name"]})
if inScope:
node = self.tree.openElements.pop()
while node.name != token["name"]:
node = self.tree.openElements.pop()
def endTagForm(self, token):
node = self.tree.formPointer
self.tree.formPointer = None
if node is None or not self.tree.elementInScope(node):
self.parser.parseError("unexpected-end-tag",
{"name": "form"})
else:
self.tree.generateImpliedEndTags()
if self.tree.openElements[-1] != node:
self.parser.parseError("end-tag-too-early-ignored",
{"name": "form"})
self.tree.openElements.remove(node)
def endTagListItem(self, token):
if token["name"] == "li":
variant = "list"
else:
variant = None
if not self.tree.elementInScope(token["name"], variant=variant):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
else:
self.tree.generateImpliedEndTags(exclude=token["name"])
if self.tree.openElements[-1].name != token["name"]:
self.parser.parseError(
"end-tag-too-early",
{"name": token["name"]})
node = self.tree.openElements.pop()
while node.name != token["name"]:
node = self.tree.openElements.pop()
def endTagHeading(self, token):
for item in headingElements:
if self.tree.elementInScope(item):
self.tree.generateImpliedEndTags()
break
if self.tree.openElements[-1].name != token["name"]:
self.parser.parseError("end-tag-too-early", {"name": token["name"]})
for item in headingElements:
if self.tree.elementInScope(item):
item = self.tree.openElements.pop()
while item.name not in headingElements:
item = self.tree.openElements.pop()
break
def endTagFormatting(self, token):
"""The much-feared adoption agency algorithm"""
# http://svn.whatwg.org/webapps/complete.html#adoptionAgency revision 7867
# XXX Better parseError messages appreciated.
# Step 1
outerLoopCounter = 0
# Step 2
while outerLoopCounter < 8:
# Step 3
outerLoopCounter += 1
# Step 4:
# Let the formatting element be the last element in
# the list of active formatting elements that:
# - is between the end of the list and the last scope
# marker in the list, if any, or the start of the list
# otherwise, and
# - has the same tag name as the token.
formattingElement = self.tree.elementInActiveFormattingElements(
token["name"])
if (not formattingElement or
(formattingElement in self.tree.openElements and
not self.tree.elementInScope(formattingElement.name))):
# If there is no such node, then abort these steps
# and instead act as described in the "any other
# end tag" entry below.
self.endTagOther(token)
return
# Otherwise, if there is such a node, but that node is
# not in the stack of open elements, then this is a
# parse error; remove the element from the list, and
# abort these steps.
elif formattingElement not in self.tree.openElements:
self.parser.parseError("adoption-agency-1.2", {"name": token["name"]})
self.tree.activeFormattingElements.remove(formattingElement)
return
# Otherwise, if there is such a node, and that node is
# also in the stack of open elements, but the element
# is not in scope, then this is a parse error; ignore
# the token, and abort these steps.
elif not self.tree.elementInScope(formattingElement.name):
self.parser.parseError("adoption-agency-4.4", {"name": token["name"]})
return
# Otherwise, there is a formatting element and that
# element is in the stack and is in scope. If the
# element is not the current node, this is a parse
# error. In any case, proceed with the algorithm as
# written in the following steps.
else:
if formattingElement != self.tree.openElements[-1]:
self.parser.parseError("adoption-agency-1.3", {"name": token["name"]})
# Step 5:
# Let the furthest block be the topmost node in the
# stack of open elements that is lower in the stack
# than the formatting element, and is an element in
# the special category. There might not be one.
afeIndex = self.tree.openElements.index(formattingElement)
furthestBlock = None
for element in self.tree.openElements[afeIndex:]:
if element.nameTuple in specialElements:
furthestBlock = element
break
# Step 6:
# If there is no furthest block, then the UA must
# first pop all the nodes from the bottom of the stack
# of open elements, from the current node up to and
# including the formatting element, then remove the
# formatting element from the list of active
# formatting elements, and finally abort these steps.
if furthestBlock is None:
element = self.tree.openElements.pop()
while element != formattingElement:
element = self.tree.openElements.pop()
self.tree.activeFormattingElements.remove(element)
return
# Step 7
commonAncestor = self.tree.openElements[afeIndex - 1]
# Step 8:
# The bookmark is supposed to help us identify where to reinsert
# nodes in step 15. We have to ensure that we reinsert nodes after
# the node before the active formatting element. Note the bookmark
# can move in step 9.7
bookmark = self.tree.activeFormattingElements.index(formattingElement)
# Step 9
lastNode = node = furthestBlock
innerLoopCounter = 0
index = self.tree.openElements.index(node)
while innerLoopCounter < 3:
innerLoopCounter += 1
# Node is element before node in open elements
index -= 1
node = self.tree.openElements[index]
if node not in self.tree.activeFormattingElements:
self.tree.openElements.remove(node)
continue
# Step 9.6
if node == formattingElement:
break
# Step 9.7
if lastNode == furthestBlock:
bookmark = self.tree.activeFormattingElements.index(node) + 1
# Step 9.8
clone = node.cloneNode()
# Replace node with clone
self.tree.activeFormattingElements[
self.tree.activeFormattingElements.index(node)] = clone
self.tree.openElements[
self.tree.openElements.index(node)] = clone
node = clone
# Step 9.9
# Remove lastNode from its parents, if any
if lastNode.parent:
lastNode.parent.removeChild(lastNode)
node.appendChild(lastNode)
# Step 9.10
lastNode = node
# Step 10
                # If commonAncestor is a table, tbody, tfoot, thead or tr,
                # lastNode has to be foster parented rather than appended.
if lastNode.parent:
lastNode.parent.removeChild(lastNode)
if commonAncestor.name in frozenset(("table", "tbody", "tfoot", "thead", "tr")):
parent, insertBefore = self.tree.getTableMisnestedNodePosition()
parent.insertBefore(lastNode, insertBefore)
else:
commonAncestor.appendChild(lastNode)
# Step 11
clone = formattingElement.cloneNode()
# Step 12
furthestBlock.reparentChildren(clone)
# Step 13
furthestBlock.appendChild(clone)
# Step 14
self.tree.activeFormattingElements.remove(formattingElement)
self.tree.activeFormattingElements.insert(bookmark, clone)
# Step 15
self.tree.openElements.remove(formattingElement)
self.tree.openElements.insert(
self.tree.openElements.index(furthestBlock) + 1, clone)
def endTagAppletMarqueeObject(self, token):
if self.tree.elementInScope(token["name"]):
self.tree.generateImpliedEndTags()
if self.tree.openElements[-1].name != token["name"]:
self.parser.parseError("end-tag-too-early", {"name": token["name"]})
if self.tree.elementInScope(token["name"]):
element = self.tree.openElements.pop()
while element.name != token["name"]:
element = self.tree.openElements.pop()
self.tree.clearActiveFormattingElements()
def endTagBr(self, token):
self.parser.parseError("unexpected-end-tag-treated-as",
{"originalName": "br", "newName": "br element"})
self.tree.reconstructActiveFormattingElements()
self.tree.insertElement(impliedTagToken("br", "StartTag"))
self.tree.openElements.pop()
def endTagOther(self, token):
for node in self.tree.openElements[::-1]:
if node.name == token["name"]:
self.tree.generateImpliedEndTags(exclude=token["name"])
if self.tree.openElements[-1].name != token["name"]:
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
while self.tree.openElements.pop() != node:
pass
break
else:
if node.nameTuple in specialElements:
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
break
class TextPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("script", self.endTagScript)])
self.endTagHandler.default = self.endTagOther
def processCharacters(self, token):
self.tree.insertText(token["data"])
def processEOF(self):
self.parser.parseError("expected-named-closing-tag-but-got-eof",
{"name": self.tree.openElements[-1].name})
self.tree.openElements.pop()
self.parser.phase = self.parser.originalPhase
return True
def startTagOther(self, token):
assert False, "Tried to process start tag %s in RCDATA/RAWTEXT mode" % token['name']
def endTagScript(self, token):
node = self.tree.openElements.pop()
assert node.name == "script"
self.parser.phase = self.parser.originalPhase
# The rest of this method is all stuff that only happens if
# document.write works
def endTagOther(self, token):
self.tree.openElements.pop()
self.parser.phase = self.parser.originalPhase
class InTablePhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-table
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("caption", self.startTagCaption),
("colgroup", self.startTagColgroup),
("col", self.startTagCol),
(("tbody", "tfoot", "thead"), self.startTagRowGroup),
(("td", "th", "tr"), self.startTagImplyTbody),
("table", self.startTagTable),
(("style", "script"), self.startTagStyleScript),
("input", self.startTagInput),
("form", self.startTagForm)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("table", self.endTagTable),
(("body", "caption", "col", "colgroup", "html", "tbody", "td",
"tfoot", "th", "thead", "tr"), self.endTagIgnore)
])
self.endTagHandler.default = self.endTagOther
# helper methods
def clearStackToTableContext(self):
# "clear the stack back to a table context"
while self.tree.openElements[-1].name not in ("table", "html"):
# self.parser.parseError("unexpected-implied-end-tag-in-table",
# {"name": self.tree.openElements[-1].name})
self.tree.openElements.pop()
# When the current node is <html> it's an innerHTML case
# processing methods
def processEOF(self):
if self.tree.openElements[-1].name != "html":
self.parser.parseError("eof-in-table")
else:
assert self.parser.innerHTML
# Stop parsing
def processSpaceCharacters(self, token):
originalPhase = self.parser.phase
self.parser.phase = self.parser.phases["inTableText"]
self.parser.phase.originalPhase = originalPhase
self.parser.phase.processSpaceCharacters(token)
def processCharacters(self, token):
originalPhase = self.parser.phase
self.parser.phase = self.parser.phases["inTableText"]
self.parser.phase.originalPhase = originalPhase
self.parser.phase.processCharacters(token)
def insertText(self, token):
# If we get here there must be at least one non-whitespace character
# Do the table magic!
self.tree.insertFromTable = True
self.parser.phases["inBody"].processCharacters(token)
self.tree.insertFromTable = False
def startTagCaption(self, token):
self.clearStackToTableContext()
self.tree.activeFormattingElements.append(Marker)
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inCaption"]
def startTagColgroup(self, token):
self.clearStackToTableContext()
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inColumnGroup"]
def startTagCol(self, token):
self.startTagColgroup(impliedTagToken("colgroup", "StartTag"))
return token
def startTagRowGroup(self, token):
self.clearStackToTableContext()
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inTableBody"]
def startTagImplyTbody(self, token):
self.startTagRowGroup(impliedTagToken("tbody", "StartTag"))
return token
def startTagTable(self, token):
self.parser.parseError("unexpected-start-tag-implies-end-tag",
{"startName": "table", "endName": "table"})
self.parser.phase.processEndTag(impliedTagToken("table"))
if not self.parser.innerHTML:
return token
def startTagStyleScript(self, token):
return self.parser.phases["inHead"].processStartTag(token)
def startTagInput(self, token):
if ("type" in token["data"] and
token["data"]["type"].translate(asciiUpper2Lower) == "hidden"):
self.parser.parseError("unexpected-hidden-input-in-table")
self.tree.insertElement(token)
# XXX associate with form
self.tree.openElements.pop()
else:
self.startTagOther(token)
def startTagForm(self, token):
self.parser.parseError("unexpected-form-in-table")
if self.tree.formPointer is None:
self.tree.insertElement(token)
self.tree.formPointer = self.tree.openElements[-1]
self.tree.openElements.pop()
def startTagOther(self, token):
self.parser.parseError("unexpected-start-tag-implies-table-voodoo", {"name": token["name"]})
# Do the table magic!
self.tree.insertFromTable = True
self.parser.phases["inBody"].processStartTag(token)
self.tree.insertFromTable = False
def endTagTable(self, token):
if self.tree.elementInScope("table", variant="table"):
self.tree.generateImpliedEndTags()
if self.tree.openElements[-1].name != "table":
self.parser.parseError("end-tag-too-early-named",
{"gotName": "table",
"expectedName": self.tree.openElements[-1].name})
while self.tree.openElements[-1].name != "table":
self.tree.openElements.pop()
self.tree.openElements.pop()
self.parser.resetInsertionMode()
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def endTagIgnore(self, token):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag-implies-table-voodoo", {"name": token["name"]})
# Do the table magic!
self.tree.insertFromTable = True
self.parser.phases["inBody"].processEndTag(token)
self.tree.insertFromTable = False
class InTableTextPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.originalPhase = None
self.characterTokens = []
def flushCharacters(self):
data = "".join([item["data"] for item in self.characterTokens])
if any([item not in spaceCharacters for item in data]):
token = {"type": tokenTypes["Characters"], "data": data}
self.parser.phases["inTable"].insertText(token)
elif data:
self.tree.insertText(data)
self.characterTokens = []
def processComment(self, token):
self.flushCharacters()
self.parser.phase = self.originalPhase
return token
def processEOF(self):
self.flushCharacters()
self.parser.phase = self.originalPhase
return True
def processCharacters(self, token):
if token["data"] == "\u0000":
return
self.characterTokens.append(token)
def processSpaceCharacters(self, token):
# pretty sure we should never reach here
self.characterTokens.append(token)
# assert False
def processStartTag(self, token):
self.flushCharacters()
self.parser.phase = self.originalPhase
return token
def processEndTag(self, token):
self.flushCharacters()
self.parser.phase = self.originalPhase
return token
class InCaptionPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-caption
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
(("caption", "col", "colgroup", "tbody", "td", "tfoot", "th",
"thead", "tr"), self.startTagTableElement)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("caption", self.endTagCaption),
("table", self.endTagTable),
(("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th",
"thead", "tr"), self.endTagIgnore)
])
self.endTagHandler.default = self.endTagOther
def ignoreEndTagCaption(self):
return not self.tree.elementInScope("caption", variant="table")
def processEOF(self):
self.parser.phases["inBody"].processEOF()
def processCharacters(self, token):
return self.parser.phases["inBody"].processCharacters(token)
def startTagTableElement(self, token):
self.parser.parseError()
# XXX Have to duplicate logic here to find out if the tag is ignored
ignoreEndTag = self.ignoreEndTagCaption()
self.parser.phase.processEndTag(impliedTagToken("caption"))
if not ignoreEndTag:
return token
def startTagOther(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def endTagCaption(self, token):
if not self.ignoreEndTagCaption():
# AT this code is quite similar to endTagTable in "InTable"
self.tree.generateImpliedEndTags()
if self.tree.openElements[-1].name != "caption":
self.parser.parseError("expected-one-end-tag-but-got-another",
{"gotName": "caption",
"expectedName": self.tree.openElements[-1].name})
while self.tree.openElements[-1].name != "caption":
self.tree.openElements.pop()
self.tree.openElements.pop()
self.tree.clearActiveFormattingElements()
self.parser.phase = self.parser.phases["inTable"]
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def endTagTable(self, token):
self.parser.parseError()
ignoreEndTag = self.ignoreEndTagCaption()
self.parser.phase.processEndTag(impliedTagToken("caption"))
if not ignoreEndTag:
return token
def endTagIgnore(self, token):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def endTagOther(self, token):
return self.parser.phases["inBody"].processEndTag(token)
class InColumnGroupPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-column
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("col", self.startTagCol)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("colgroup", self.endTagColgroup),
("col", self.endTagCol)
])
self.endTagHandler.default = self.endTagOther
def ignoreEndTagColgroup(self):
return self.tree.openElements[-1].name == "html"
def processEOF(self):
if self.tree.openElements[-1].name == "html":
assert self.parser.innerHTML
return
else:
ignoreEndTag = self.ignoreEndTagColgroup()
self.endTagColgroup(impliedTagToken("colgroup"))
if not ignoreEndTag:
return True
def processCharacters(self, token):
ignoreEndTag = self.ignoreEndTagColgroup()
self.endTagColgroup(impliedTagToken("colgroup"))
if not ignoreEndTag:
return token
def startTagCol(self, token):
self.tree.insertElement(token)
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
def startTagOther(self, token):
ignoreEndTag = self.ignoreEndTagColgroup()
self.endTagColgroup(impliedTagToken("colgroup"))
if not ignoreEndTag:
return token
def endTagColgroup(self, token):
if self.ignoreEndTagColgroup():
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
else:
self.tree.openElements.pop()
self.parser.phase = self.parser.phases["inTable"]
def endTagCol(self, token):
self.parser.parseError("no-end-tag", {"name": "col"})
def endTagOther(self, token):
ignoreEndTag = self.ignoreEndTagColgroup()
self.endTagColgroup(impliedTagToken("colgroup"))
if not ignoreEndTag:
return token
class InTableBodyPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-table0
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("tr", self.startTagTr),
(("td", "th"), self.startTagTableCell),
(("caption", "col", "colgroup", "tbody", "tfoot", "thead"),
self.startTagTableOther)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
(("tbody", "tfoot", "thead"), self.endTagTableRowGroup),
("table", self.endTagTable),
(("body", "caption", "col", "colgroup", "html", "td", "th",
"tr"), self.endTagIgnore)
])
self.endTagHandler.default = self.endTagOther
# helper methods
def clearStackToTableBodyContext(self):
while self.tree.openElements[-1].name not in ("tbody", "tfoot",
"thead", "html"):
# self.parser.parseError("unexpected-implied-end-tag-in-table",
# {"name": self.tree.openElements[-1].name})
self.tree.openElements.pop()
if self.tree.openElements[-1].name == "html":
assert self.parser.innerHTML
# the rest
def processEOF(self):
self.parser.phases["inTable"].processEOF()
def processSpaceCharacters(self, token):
return self.parser.phases["inTable"].processSpaceCharacters(token)
def processCharacters(self, token):
return self.parser.phases["inTable"].processCharacters(token)
def startTagTr(self, token):
self.clearStackToTableBodyContext()
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inRow"]
def startTagTableCell(self, token):
self.parser.parseError("unexpected-cell-in-table-body",
{"name": token["name"]})
self.startTagTr(impliedTagToken("tr", "StartTag"))
return token
def startTagTableOther(self, token):
# XXX AT Any ideas on how to share this with endTagTable?
if (self.tree.elementInScope("tbody", variant="table") or
self.tree.elementInScope("thead", variant="table") or
self.tree.elementInScope("tfoot", variant="table")):
self.clearStackToTableBodyContext()
self.endTagTableRowGroup(
impliedTagToken(self.tree.openElements[-1].name))
return token
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def startTagOther(self, token):
return self.parser.phases["inTable"].processStartTag(token)
def endTagTableRowGroup(self, token):
if self.tree.elementInScope(token["name"], variant="table"):
self.clearStackToTableBodyContext()
self.tree.openElements.pop()
self.parser.phase = self.parser.phases["inTable"]
else:
self.parser.parseError("unexpected-end-tag-in-table-body",
{"name": token["name"]})
def endTagTable(self, token):
if (self.tree.elementInScope("tbody", variant="table") or
self.tree.elementInScope("thead", variant="table") or
self.tree.elementInScope("tfoot", variant="table")):
self.clearStackToTableBodyContext()
self.endTagTableRowGroup(
impliedTagToken(self.tree.openElements[-1].name))
return token
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def endTagIgnore(self, token):
self.parser.parseError("unexpected-end-tag-in-table-body",
{"name": token["name"]})
def endTagOther(self, token):
return self.parser.phases["inTable"].processEndTag(token)
class InRowPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-row
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
(("td", "th"), self.startTagTableCell),
(("caption", "col", "colgroup", "tbody", "tfoot", "thead",
"tr"), self.startTagTableOther)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("tr", self.endTagTr),
("table", self.endTagTable),
(("tbody", "tfoot", "thead"), self.endTagTableRowGroup),
(("body", "caption", "col", "colgroup", "html", "td", "th"),
self.endTagIgnore)
])
self.endTagHandler.default = self.endTagOther
# helper methods (XXX unify this with other table helper methods)
def clearStackToTableRowContext(self):
while self.tree.openElements[-1].name not in ("tr", "html"):
self.parser.parseError("unexpected-implied-end-tag-in-table-row",
{"name": self.tree.openElements[-1].name})
self.tree.openElements.pop()
def ignoreEndTagTr(self):
return not self.tree.elementInScope("tr", variant="table")
# the rest
def processEOF(self):
self.parser.phases["inTable"].processEOF()
def processSpaceCharacters(self, token):
return self.parser.phases["inTable"].processSpaceCharacters(token)
def processCharacters(self, token):
return self.parser.phases["inTable"].processCharacters(token)
def startTagTableCell(self, token):
self.clearStackToTableRowContext()
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inCell"]
self.tree.activeFormattingElements.append(Marker)
def startTagTableOther(self, token):
ignoreEndTag = self.ignoreEndTagTr()
self.endTagTr(impliedTagToken("tr"))
# XXX how are we sure it's always ignored in the innerHTML case?
if not ignoreEndTag:
return token
def startTagOther(self, token):
return self.parser.phases["inTable"].processStartTag(token)
def endTagTr(self, token):
if not self.ignoreEndTagTr():
self.clearStackToTableRowContext()
self.tree.openElements.pop()
self.parser.phase = self.parser.phases["inTableBody"]
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def endTagTable(self, token):
ignoreEndTag = self.ignoreEndTagTr()
self.endTagTr(impliedTagToken("tr"))
# Reprocess the current tag if the tr end tag was not ignored
# XXX how are we sure it's always ignored in the innerHTML case?
if not ignoreEndTag:
return token
def endTagTableRowGroup(self, token):
if self.tree.elementInScope(token["name"], variant="table"):
self.endTagTr(impliedTagToken("tr"))
return token
else:
self.parser.parseError()
def endTagIgnore(self, token):
self.parser.parseError("unexpected-end-tag-in-table-row",
{"name": token["name"]})
def endTagOther(self, token):
return self.parser.phases["inTable"].processEndTag(token)
class InCellPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-cell
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
(("caption", "col", "colgroup", "tbody", "td", "tfoot", "th",
"thead", "tr"), self.startTagTableOther)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
(("td", "th"), self.endTagTableCell),
(("body", "caption", "col", "colgroup", "html"), self.endTagIgnore),
(("table", "tbody", "tfoot", "thead", "tr"), self.endTagImply)
])
self.endTagHandler.default = self.endTagOther
# helper
def closeCell(self):
if self.tree.elementInScope("td", variant="table"):
self.endTagTableCell(impliedTagToken("td"))
elif self.tree.elementInScope("th", variant="table"):
self.endTagTableCell(impliedTagToken("th"))
# the rest
def processEOF(self):
self.parser.phases["inBody"].processEOF()
def processCharacters(self, token):
return self.parser.phases["inBody"].processCharacters(token)
def startTagTableOther(self, token):
if (self.tree.elementInScope("td", variant="table") or
self.tree.elementInScope("th", variant="table")):
self.closeCell()
return token
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def startTagOther(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def endTagTableCell(self, token):
if self.tree.elementInScope(token["name"], variant="table"):
self.tree.generateImpliedEndTags(token["name"])
if self.tree.openElements[-1].name != token["name"]:
self.parser.parseError("unexpected-cell-end-tag",
{"name": token["name"]})
while True:
node = self.tree.openElements.pop()
if node.name == token["name"]:
break
else:
self.tree.openElements.pop()
self.tree.clearActiveFormattingElements()
self.parser.phase = self.parser.phases["inRow"]
else:
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def endTagIgnore(self, token):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def endTagImply(self, token):
if self.tree.elementInScope(token["name"], variant="table"):
self.closeCell()
return token
else:
# sometimes innerHTML case
self.parser.parseError()
def endTagOther(self, token):
return self.parser.phases["inBody"].processEndTag(token)
class InSelectPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("option", self.startTagOption),
("optgroup", self.startTagOptgroup),
("select", self.startTagSelect),
(("input", "keygen", "textarea"), self.startTagInput),
("script", self.startTagScript)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("option", self.endTagOption),
("optgroup", self.endTagOptgroup),
("select", self.endTagSelect)
])
self.endTagHandler.default = self.endTagOther
# http://www.whatwg.org/specs/web-apps/current-work/#in-select
def processEOF(self):
if self.tree.openElements[-1].name != "html":
self.parser.parseError("eof-in-select")
else:
assert self.parser.innerHTML
def processCharacters(self, token):
if token["data"] == "\u0000":
return
self.tree.insertText(token["data"])
def startTagOption(self, token):
# We need to imply </option> if <option> is the current node.
if self.tree.openElements[-1].name == "option":
self.tree.openElements.pop()
self.tree.insertElement(token)
def startTagOptgroup(self, token):
if self.tree.openElements[-1].name == "option":
self.tree.openElements.pop()
if self.tree.openElements[-1].name == "optgroup":
self.tree.openElements.pop()
self.tree.insertElement(token)
def startTagSelect(self, token):
self.parser.parseError("unexpected-select-in-select")
self.endTagSelect(impliedTagToken("select"))
def startTagInput(self, token):
self.parser.parseError("unexpected-input-in-select")
if self.tree.elementInScope("select", variant="select"):
self.endTagSelect(impliedTagToken("select"))
return token
else:
assert self.parser.innerHTML
def startTagScript(self, token):
return self.parser.phases["inHead"].processStartTag(token)
def startTagOther(self, token):
self.parser.parseError("unexpected-start-tag-in-select",
{"name": token["name"]})
def endTagOption(self, token):
if self.tree.openElements[-1].name == "option":
self.tree.openElements.pop()
else:
self.parser.parseError("unexpected-end-tag-in-select",
{"name": "option"})
def endTagOptgroup(self, token):
# </optgroup> implicitly closes <option>
if (self.tree.openElements[-1].name == "option" and
self.tree.openElements[-2].name == "optgroup"):
self.tree.openElements.pop()
# It also closes </optgroup>
if self.tree.openElements[-1].name == "optgroup":
self.tree.openElements.pop()
# But nothing else
else:
self.parser.parseError("unexpected-end-tag-in-select",
{"name": "optgroup"})
def endTagSelect(self, token):
if self.tree.elementInScope("select", variant="select"):
node = self.tree.openElements.pop()
while node.name != "select":
node = self.tree.openElements.pop()
self.parser.resetInsertionMode()
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag-in-select",
{"name": token["name"]})
class InSelectInTablePhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
(("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"),
self.startTagTable)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
(("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"),
self.endTagTable)
])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
self.parser.phases["inSelect"].processEOF()
def processCharacters(self, token):
return self.parser.phases["inSelect"].processCharacters(token)
def startTagTable(self, token):
self.parser.parseError("unexpected-table-element-start-tag-in-select-in-table", {"name": token["name"]})
self.endTagOther(impliedTagToken("select"))
return token
def startTagOther(self, token):
return self.parser.phases["inSelect"].processStartTag(token)
def endTagTable(self, token):
self.parser.parseError("unexpected-table-element-end-tag-in-select-in-table", {"name": token["name"]})
if self.tree.elementInScope(token["name"], variant="table"):
self.endTagOther(impliedTagToken("select"))
return token
def endTagOther(self, token):
return self.parser.phases["inSelect"].processEndTag(token)
class InForeignContentPhase(Phase):
breakoutElements = frozenset(["b", "big", "blockquote", "body", "br",
"center", "code", "dd", "div", "dl", "dt",
"em", "embed", "h1", "h2", "h3",
"h4", "h5", "h6", "head", "hr", "i", "img",
"li", "listing", "menu", "meta", "nobr",
"ol", "p", "pre", "ruby", "s", "small",
"span", "strong", "strike", "sub", "sup",
"table", "tt", "u", "ul", "var"])
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
def adjustSVGTagNames(self, token):
replacements = {"altglyph": "altGlyph",
"altglyphdef": "altGlyphDef",
"altglyphitem": "altGlyphItem",
"animatecolor": "animateColor",
"animatemotion": "animateMotion",
"animatetransform": "animateTransform",
"clippath": "clipPath",
"feblend": "feBlend",
"fecolormatrix": "feColorMatrix",
"fecomponenttransfer": "feComponentTransfer",
"fecomposite": "feComposite",
"feconvolvematrix": "feConvolveMatrix",
"fediffuselighting": "feDiffuseLighting",
"fedisplacementmap": "feDisplacementMap",
"fedistantlight": "feDistantLight",
"feflood": "feFlood",
"fefunca": "feFuncA",
"fefuncb": "feFuncB",
"fefuncg": "feFuncG",
"fefuncr": "feFuncR",
"fegaussianblur": "feGaussianBlur",
"feimage": "feImage",
"femerge": "feMerge",
"femergenode": "feMergeNode",
"femorphology": "feMorphology",
"feoffset": "feOffset",
"fepointlight": "fePointLight",
"fespecularlighting": "feSpecularLighting",
"fespotlight": "feSpotLight",
"fetile": "feTile",
"feturbulence": "feTurbulence",
"foreignobject": "foreignObject",
"glyphref": "glyphRef",
"lineargradient": "linearGradient",
"radialgradient": "radialGradient",
"textpath": "textPath"}
if token["name"] in replacements:
token["name"] = replacements[token["name"]]
def processCharacters(self, token):
if token["data"] == "\u0000":
token["data"] = "\uFFFD"
elif (self.parser.framesetOK and
any(char not in spaceCharacters for char in token["data"])):
self.parser.framesetOK = False
Phase.processCharacters(self, token)
def processStartTag(self, token):
currentNode = self.tree.openElements[-1]
if (token["name"] in self.breakoutElements or
(token["name"] == "font" and
set(token["data"].keys()) & set(["color", "face", "size"]))):
self.parser.parseError("unexpected-html-element-in-foreign-content",
{"name": token["name"]})
while (self.tree.openElements[-1].namespace !=
self.tree.defaultNamespace and
not self.parser.isHTMLIntegrationPoint(self.tree.openElements[-1]) and
not self.parser.isMathMLTextIntegrationPoint(self.tree.openElements[-1])):
self.tree.openElements.pop()
return token
else:
if currentNode.namespace == namespaces["mathml"]:
self.parser.adjustMathMLAttributes(token)
elif currentNode.namespace == namespaces["svg"]:
self.adjustSVGTagNames(token)
self.parser.adjustSVGAttributes(token)
self.parser.adjustForeignAttributes(token)
token["namespace"] = currentNode.namespace
self.tree.insertElement(token)
if token["selfClosing"]:
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
def processEndTag(self, token):
nodeIndex = len(self.tree.openElements) - 1
node = self.tree.openElements[-1]
if node.name.translate(asciiUpper2Lower) != token["name"]:
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
while True:
if node.name.translate(asciiUpper2Lower) == token["name"]:
# XXX this isn't in the spec but it seems necessary
if self.parser.phase == self.parser.phases["inTableText"]:
self.parser.phase.flushCharacters()
self.parser.phase = self.parser.phase.originalPhase
while self.tree.openElements.pop() != node:
assert self.tree.openElements
new_token = None
break
nodeIndex -= 1
node = self.tree.openElements[nodeIndex]
if node.namespace != self.tree.defaultNamespace:
continue
else:
new_token = self.parser.phase.processEndTag(token)
break
return new_token
class AfterBodyPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([("html", self.endTagHtml)])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
# Stop parsing
pass
def processComment(self, token):
# This is needed because data is to be appended to the <html> element
# here and not to whatever is currently open.
self.tree.insertComment(token, self.tree.openElements[0])
def processCharacters(self, token):
self.parser.parseError("unexpected-char-after-body")
self.parser.phase = self.parser.phases["inBody"]
return token
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagOther(self, token):
self.parser.parseError("unexpected-start-tag-after-body",
{"name": token["name"]})
self.parser.phase = self.parser.phases["inBody"]
return token
def endTagHtml(self, name):
if self.parser.innerHTML:
self.parser.parseError("unexpected-end-tag-after-body-innerhtml")
else:
self.parser.phase = self.parser.phases["afterAfterBody"]
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag-after-body",
{"name": token["name"]})
self.parser.phase = self.parser.phases["inBody"]
return token
class InFramesetPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-frameset
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("frameset", self.startTagFrameset),
("frame", self.startTagFrame),
("noframes", self.startTagNoframes)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("frameset", self.endTagFrameset)
])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
if self.tree.openElements[-1].name != "html":
self.parser.parseError("eof-in-frameset")
else:
assert self.parser.innerHTML
def processCharacters(self, token):
self.parser.parseError("unexpected-char-in-frameset")
def startTagFrameset(self, token):
self.tree.insertElement(token)
def startTagFrame(self, token):
self.tree.insertElement(token)
self.tree.openElements.pop()
def startTagNoframes(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagOther(self, token):
self.parser.parseError("unexpected-start-tag-in-frameset",
{"name": token["name"]})
def endTagFrameset(self, token):
if self.tree.openElements[-1].name == "html":
# innerHTML case
self.parser.parseError("unexpected-frameset-in-frameset-innerhtml")
else:
self.tree.openElements.pop()
if (not self.parser.innerHTML and
self.tree.openElements[-1].name != "frameset"):
# If we're not in innerHTML mode and the current node is not a
# "frameset" element (anymore) then switch.
self.parser.phase = self.parser.phases["afterFrameset"]
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag-in-frameset",
{"name": token["name"]})
class AfterFramesetPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#after3
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("noframes", self.startTagNoframes)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("html", self.endTagHtml)
])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
# Stop parsing
pass
def processCharacters(self, token):
self.parser.parseError("unexpected-char-after-frameset")
def startTagNoframes(self, token):
return self.parser.phases["inHead"].processStartTag(token)
def startTagOther(self, token):
self.parser.parseError("unexpected-start-tag-after-frameset",
{"name": token["name"]})
def endTagHtml(self, token):
self.parser.phase = self.parser.phases["afterAfterFrameset"]
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag-after-frameset",
{"name": token["name"]})
class AfterAfterBodyPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml)
])
self.startTagHandler.default = self.startTagOther
def processEOF(self):
pass
def processComment(self, token):
self.tree.insertComment(token, self.tree.document)
def processSpaceCharacters(self, token):
return self.parser.phases["inBody"].processSpaceCharacters(token)
def processCharacters(self, token):
self.parser.parseError("expected-eof-but-got-char")
self.parser.phase = self.parser.phases["inBody"]
return token
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagOther(self, token):
self.parser.parseError("expected-eof-but-got-start-tag",
{"name": token["name"]})
self.parser.phase = self.parser.phases["inBody"]
return token
def processEndTag(self, token):
self.parser.parseError("expected-eof-but-got-end-tag",
{"name": token["name"]})
self.parser.phase = self.parser.phases["inBody"]
return token
class AfterAfterFramesetPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("noframes", self.startTagNoFrames)
])
self.startTagHandler.default = self.startTagOther
def processEOF(self):
pass
def processComment(self, token):
self.tree.insertComment(token, self.tree.document)
def processSpaceCharacters(self, token):
return self.parser.phases["inBody"].processSpaceCharacters(token)
def processCharacters(self, token):
self.parser.parseError("expected-eof-but-got-char")
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagNoFrames(self, token):
return self.parser.phases["inHead"].processStartTag(token)
def startTagOther(self, token):
self.parser.parseError("expected-eof-but-got-start-tag",
{"name": token["name"]})
def processEndTag(self, token):
self.parser.parseError("expected-eof-but-got-end-tag",
{"name": token["name"]})
# pylint:enable=unused-argument
return {
"initial": InitialPhase,
"beforeHtml": BeforeHtmlPhase,
"beforeHead": BeforeHeadPhase,
"inHead": InHeadPhase,
"inHeadNoscript": InHeadNoscriptPhase,
"afterHead": AfterHeadPhase,
"inBody": InBodyPhase,
"text": TextPhase,
"inTable": InTablePhase,
"inTableText": InTableTextPhase,
"inCaption": InCaptionPhase,
"inColumnGroup": InColumnGroupPhase,
"inTableBody": InTableBodyPhase,
"inRow": InRowPhase,
"inCell": InCellPhase,
"inSelect": InSelectPhase,
"inSelectInTable": InSelectInTablePhase,
"inForeignContent": InForeignContentPhase,
"afterBody": AfterBodyPhase,
"inFrameset": InFramesetPhase,
"afterFrameset": AfterFramesetPhase,
"afterAfterBody": AfterAfterBodyPhase,
"afterAfterFrameset": AfterAfterFramesetPhase,
# XXX after after frameset
}
def adjust_attributes(token, replacements):
if PY3 or _utils.PY27:
needs_adjustment = viewkeys(token['data']) & viewkeys(replacements)
else:
needs_adjustment = frozenset(token['data']) & frozenset(replacements)
if needs_adjustment:
token['data'] = OrderedDict((replacements.get(k, k), v)
for k, v in token['data'].items())
def impliedTagToken(name, type="EndTag", attributes=None,
selfClosing=False):
if attributes is None:
attributes = {}
return {"type": tokenTypes[type], "name": name, "data": attributes,
"selfClosing": selfClosing}
class ParseError(Exception):
"""Error in parsed document"""
pass
|
BeATz-UnKNoWN/python-for-android
|
refs/heads/master
|
python-build/python-libs/gdata/tests/all_tests_coverage.py
|
87
|
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import coverage
import all_tests
import atom.core
import atom.http_core
import atom.mock_http_core
import atom.auth
import atom.client
import gdata.gauth
import gdata.client
import gdata.data
import gdata.blogger.data
import gdata.blogger.client
# Needed by suite() and the coverage.report() call below.
import atom.data
import atom_tests.core_test
from gdata.test_config import settings
# Ensure that coverage tests execute the live requests to the servers, but
# allow use of cached server responses to speed up repeated runs.
settings.RUN_LIVE_TESTS = True
settings.CLEAR_CACHE = False
def suite():
return unittest.TestSuite((atom_tests.core_test.suite(),))
if __name__ == '__main__':
coverage.erase()
coverage.start()
unittest.TextTestRunner().run(all_tests.suite())
coverage.stop()
coverage.report([atom.core, atom.http_core, atom.auth, atom.data,
atom.mock_http_core, atom.client, gdata.gauth, gdata.client,
gdata.data, gdata.blogger.data, gdata.blogger.client])
|
bluevoda/BloggyBlog
|
refs/heads/master
|
lib/python3.4/site-packages/django/utils/ipv6.py
|
71
|
# This code was mostly based on ipaddr-py
# Copyright 2007 Google Inc. https://github.com/google/ipaddr-py
# Licensed under the Apache License, Version 2.0 (the "License").
import re
from django.core.exceptions import ValidationError
from django.utils.six.moves import range
from django.utils.translation import ugettext_lazy as _
def clean_ipv6_address(ip_str, unpack_ipv4=False,
error_message=_("This is not a valid IPv6 address.")):
"""
Cleans an IPv6 address string.
Validity is checked by calling is_valid_ipv6_address() - if an
invalid address is passed, ValidationError is raised.
Replaces the longest continuous zero-sequence with "::" and
removes leading zeroes and makes sure all hextets are lowercase.
Args:
ip_str: A valid IPv6 address.
unpack_ipv4: if an IPv4-mapped address is found,
return the plain IPv4 address (default=False).
error_message: An error message used in the ValidationError.
    Returns:
        A compressed IPv6 address, or the unpacked IPv4 address if
        unpack_ipv4 is True and an IPv4-mapped address was found.
    """
best_doublecolon_start = -1
best_doublecolon_len = 0
doublecolon_start = -1
doublecolon_len = 0
if not is_valid_ipv6_address(ip_str):
raise ValidationError(error_message, code='invalid')
# This algorithm can only handle fully exploded
# IP strings
ip_str = _explode_shorthand_ip_string(ip_str)
ip_str = _sanitize_ipv4_mapping(ip_str)
# If needed, unpack the IPv4 and return straight away
# - no need in running the rest of the algorithm
if unpack_ipv4:
ipv4_unpacked = _unpack_ipv4(ip_str)
if ipv4_unpacked:
return ipv4_unpacked
hextets = ip_str.split(":")
for index in range(len(hextets)):
# Remove leading zeroes
if '.' not in hextets[index]:
hextets[index] = hextets[index].lstrip('0')
if not hextets[index]:
hextets[index] = '0'
# Determine best hextet to compress
if hextets[index] == '0':
doublecolon_len += 1
if doublecolon_start == -1:
# Start of a sequence of zeros.
doublecolon_start = index
if doublecolon_len > best_doublecolon_len:
# This is the longest sequence of zeros so far.
best_doublecolon_len = doublecolon_len
best_doublecolon_start = doublecolon_start
else:
doublecolon_len = 0
doublecolon_start = -1
# Compress the most suitable hextet
if best_doublecolon_len > 1:
best_doublecolon_end = (best_doublecolon_start +
best_doublecolon_len)
# For zeros at the end of the address.
if best_doublecolon_end == len(hextets):
hextets += ['']
hextets[best_doublecolon_start:best_doublecolon_end] = ['']
# For zeros at the beginning of the address.
if best_doublecolon_start == 0:
hextets = [''] + hextets
result = ":".join(hextets)
return result.lower()
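# Illustrative, doctest-style examples of the expected behaviour (a sketch,
# not an exhaustive specification):
#
#   >>> clean_ipv6_address('2001:0::0:01')
#   '2001::1'
#   >>> clean_ipv6_address('::ffff:0a0a:0a0a', unpack_ipv4=True)
#   '10.10.10.10'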
def _sanitize_ipv4_mapping(ip_str):
"""
Sanitize IPv4 mapping in an expanded IPv6 address.
This converts ::ffff:0a0a:0a0a to ::ffff:10.10.10.10.
If there is nothing to sanitize, returns an unchanged
string.
Args:
ip_str: A string, the expanded IPv6 address.
Returns:
The sanitized output string, if applicable.
"""
if not ip_str.lower().startswith('0000:0000:0000:0000:0000:ffff:'):
# not an ipv4 mapping
return ip_str
hextets = ip_str.split(':')
if '.' in hextets[-1]:
# already sanitized
return ip_str
ipv4_address = "%d.%d.%d.%d" % (
int(hextets[6][0:2], 16),
int(hextets[6][2:4], 16),
int(hextets[7][0:2], 16),
int(hextets[7][2:4], 16),
)
result = ':'.join(hextets[0:6])
result += ':' + ipv4_address
return result
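# Illustrative example (assumes a fully expanded address, as produced by
# _explode_shorthand_ip_string()):
#
#   >>> _sanitize_ipv4_mapping('0000:0000:0000:0000:0000:ffff:0a0a:0a0a')
#   '0000:0000:0000:0000:0000:ffff:10.10.10.10'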
def _unpack_ipv4(ip_str):
"""
Unpack an IPv4 address that was mapped in a compressed IPv6 address.
This converts 0000:0000:0000:0000:0000:ffff:10.10.10.10 to 10.10.10.10.
If there is nothing to sanitize, returns None.
Args:
ip_str: A string, the expanded IPv6 address.
Returns:
The unpacked IPv4 address, or None if there was nothing to unpack.
"""
if not ip_str.lower().startswith('0000:0000:0000:0000:0000:ffff:'):
return None
return ip_str.rsplit(':', 1)[1]
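# Illustrative example (only expanded, already-sanitized IPv4-mapped
# addresses are unpacked; anything else yields None):
#
#   >>> _unpack_ipv4('0000:0000:0000:0000:0000:ffff:10.10.10.10')
#   '10.10.10.10'
#   >>> _unpack_ipv4('2001:0000:0000:0000:0000:0000:0000:0001') is None
#   True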
def is_valid_ipv6_address(ip_str):
"""
Ensure we have a valid IPv6 address.
Args:
ip_str: A string, the IPv6 address.
Returns:
A boolean, True if this is a valid IPv6 address.
"""
from django.core.validators import validate_ipv4_address
symbols_re = re.compile(r'^[0-9a-fA-F:.]+$')
if not symbols_re.match(ip_str):
return False
# We need to have at least one ':'.
if ':' not in ip_str:
return False
# We can only have one '::' shortener.
if ip_str.count('::') > 1:
return False
# '::' should be encompassed by start, digits or end.
if ':::' in ip_str:
return False
# A single colon can neither start nor end an address.
if ((ip_str.startswith(':') and not ip_str.startswith('::')) or
(ip_str.endswith(':') and not ip_str.endswith('::'))):
return False
# We can never have more than 7 ':' (1::2:3:4:5:6:7:8 is invalid)
if ip_str.count(':') > 7:
return False
# If we have no concatenation, we need to have 8 fields with 7 ':'.
if '::' not in ip_str and ip_str.count(':') != 7:
# We might have an IPv4 mapped address.
if ip_str.count('.') != 3:
return False
ip_str = _explode_shorthand_ip_string(ip_str)
    # Now that we have that all squared away, let's check that each of the
    # hextets is between 0x0 and 0xFFFF.
for hextet in ip_str.split(':'):
if hextet.count('.') == 3:
# If we have an IPv4 mapped address, the IPv4 portion has to
# be at the end of the IPv6 portion.
if not ip_str.split(':')[-1] == hextet:
return False
try:
validate_ipv4_address(hextet)
except ValidationError:
return False
else:
try:
# a value error here means that we got a bad hextet,
# something like 0xzzzz
if int(hextet, 16) < 0x0 or int(hextet, 16) > 0xFFFF:
return False
except ValueError:
return False
return True
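# Illustrative examples of accepted and rejected forms (a sketch only):
#
#   >>> is_valid_ipv6_address('2001:db8::1')
#   True
#   >>> is_valid_ipv6_address('2001::db8::1')     # two '::' shorteners
#   False
#   >>> is_valid_ipv6_address('::ffff:10.10.10.10')   # IPv4-mapped form
#   True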
def _explode_shorthand_ip_string(ip_str):
"""
Expand a shortened IPv6 address.
Args:
ip_str: A string, the IPv6 address.
Returns:
A string, the expanded IPv6 address.
"""
if not _is_shorthand_ip(ip_str):
# We've already got a longhand ip_str.
return ip_str
new_ip = []
hextet = ip_str.split('::')
# If there is a ::, we need to expand it with zeroes
# to get to 8 hextets - unless there is a dot in the last hextet,
# meaning we're doing v4-mapping
if '.' in ip_str.split(':')[-1]:
fill_to = 7
else:
fill_to = 8
if len(hextet) > 1:
sep = len(hextet[0].split(':')) + len(hextet[1].split(':'))
new_ip = hextet[0].split(':')
for __ in range(fill_to - sep):
new_ip.append('0000')
new_ip += hextet[1].split(':')
else:
new_ip = ip_str.split(':')
# Now need to make sure every hextet is 4 lower case characters.
# If a hextet is < 4 characters, we've got missing leading 0's.
ret_ip = []
for hextet in new_ip:
ret_ip.append(('0' * (4 - len(hextet)) + hextet).lower())
return ':'.join(ret_ip)
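# Illustrative example:
#
#   >>> _explode_shorthand_ip_string('::1')
#   '0000:0000:0000:0000:0000:0000:0000:0001'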
def _is_shorthand_ip(ip_str):
"""Determine if the address is shortened.
Args:
ip_str: A string, the IPv6 address.
Returns:
A boolean, True if the address is shortened.
"""
if ip_str.count('::') == 1:
return True
if any(len(x) < 4 for x in ip_str.split(':')):
return True
return False
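# Illustrative examples:
#
#   >>> _is_shorthand_ip('fe80::1')
#   True
#   >>> _is_shorthand_ip('0000:0000:0000:0000:0000:0000:0000:0001')
#   False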
|
nkgilley/home-assistant
|
refs/heads/dev
|
tests/components/rflink/test_init.py
|
6
|
"""Common functions for RFLink component tests and generic platform tests."""
import pytest
from voluptuous.error import MultipleInvalid
from homeassistant.bootstrap import async_setup_component
from homeassistant.components.rflink import (
CONF_RECONNECT_INTERVAL,
DATA_ENTITY_LOOKUP,
EVENT_KEY_COMMAND,
EVENT_KEY_SENSOR,
SERVICE_SEND_COMMAND,
TMP_ENTITY,
RflinkCommand,
)
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_STOP_COVER, SERVICE_TURN_OFF
from tests.async_mock import Mock
async def mock_rflink(
hass, config, domain, monkeypatch, failures=None, failcommand=False
):
"""Create mock RFLink asyncio protocol, test component setup."""
transport, protocol = (Mock(), Mock())
async def send_command_ack(*command):
return not failcommand
protocol.send_command_ack = Mock(wraps=send_command_ack)
def send_command(*command):
return not failcommand
protocol.send_command = Mock(wraps=send_command)
async def create_rflink_connection(*args, **kwargs):
"""Return mocked transport and protocol."""
# failures can be a list of booleans indicating in which sequence
# creating a connection should success or fail
if failures:
fail = failures.pop()
else:
fail = False
if fail:
raise ConnectionRefusedError
else:
return transport, protocol
mock_create = Mock(wraps=create_rflink_connection)
monkeypatch.setattr(
"homeassistant.components.rflink.create_rflink_connection", mock_create
)
await async_setup_component(hass, "rflink", config)
await async_setup_component(hass, domain, config)
await hass.async_block_till_done()
# hook into mock config for injecting events
event_callback = mock_create.call_args_list[0][1]["event_callback"]
assert event_callback
disconnect_callback = mock_create.call_args_list[0][1]["disconnect_callback"]
return event_callback, mock_create, protocol, disconnect_callback
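# The helper above returns a 4-tuple; the tests below unpack it as
# (event_callback, mock_create, protocol, disconnect_callback), using
# event_callback to inject fake RFLink events, protocol to assert on the
# commands that were sent, and disconnect_callback to simulate a lost
# connection.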
async def test_version_banner(hass, monkeypatch):
"""Test sending unknown commands doesn't cause issues."""
# use sensor domain during testing main platform
domain = "sensor"
config = {
"rflink": {"port": "/dev/ttyABC0"},
domain: {
"platform": "rflink",
"devices": {"test": {"name": "test", "sensor_type": "temperature"}},
},
}
# setup mocking rflink module
event_callback, _, _, _ = await mock_rflink(hass, config, domain, monkeypatch)
event_callback(
{
"hardware": "Nodo RadioFrequencyLink",
"firmware": "RFLink Gateway",
"version": "1.1",
"revision": "45",
}
)
async def test_send_no_wait(hass, monkeypatch):
"""Test command sending without ack."""
domain = "switch"
config = {
"rflink": {"port": "/dev/ttyABC0", "wait_for_ack": False},
domain: {
"platform": "rflink",
"devices": {
"protocol_0_0": {"name": "test", "aliases": ["test_alias_0_0"]}
},
},
}
# setup mocking rflink module
_, _, protocol, _ = await mock_rflink(hass, config, domain, monkeypatch)
hass.async_create_task(
hass.services.async_call(
domain, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: "switch.test"}
)
)
await hass.async_block_till_done()
assert protocol.send_command.call_args_list[0][0][0] == "protocol_0_0"
assert protocol.send_command.call_args_list[0][0][1] == "off"
async def test_cover_send_no_wait(hass, monkeypatch):
"""Test command sending to a cover device without ack."""
domain = "cover"
config = {
"rflink": {"port": "/dev/ttyABC0", "wait_for_ack": False},
domain: {
"platform": "rflink",
"devices": {
"RTS_0100F2_0": {"name": "test", "aliases": ["test_alias_0_0"]}
},
},
}
# setup mocking rflink module
_, _, protocol, _ = await mock_rflink(hass, config, domain, monkeypatch)
hass.async_create_task(
hass.services.async_call(
domain, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: "cover.test"}
)
)
await hass.async_block_till_done()
assert protocol.send_command.call_args_list[0][0][0] == "RTS_0100F2_0"
assert protocol.send_command.call_args_list[0][0][1] == "STOP"
async def test_send_command(hass, monkeypatch):
"""Test send_command service."""
domain = "rflink"
config = {"rflink": {"port": "/dev/ttyABC0"}}
# setup mocking rflink module
_, _, protocol, _ = await mock_rflink(hass, config, domain, monkeypatch)
hass.async_create_task(
hass.services.async_call(
domain,
SERVICE_SEND_COMMAND,
{"device_id": "newkaku_0000c6c2_1", "command": "on"},
)
)
await hass.async_block_till_done()
assert protocol.send_command_ack.call_args_list[0][0][0] == "newkaku_0000c6c2_1"
assert protocol.send_command_ack.call_args_list[0][0][1] == "on"
async def test_send_command_invalid_arguments(hass, monkeypatch):
"""Test send_command service."""
domain = "rflink"
config = {"rflink": {"port": "/dev/ttyABC0"}}
# setup mocking rflink module
_, _, protocol, _ = await mock_rflink(hass, config, domain, monkeypatch)
# one argument missing
with pytest.raises(MultipleInvalid):
await hass.services.async_call(domain, SERVICE_SEND_COMMAND, {"command": "on"})
with pytest.raises(MultipleInvalid):
await hass.services.async_call(
domain, SERVICE_SEND_COMMAND, {"device_id": "newkaku_0000c6c2_1"}
)
# no arguments
with pytest.raises(MultipleInvalid):
await hass.services.async_call(domain, SERVICE_SEND_COMMAND, {})
await hass.async_block_till_done()
assert protocol.send_command_ack.call_args_list == []
# bad command (no_command)
success = await hass.services.async_call(
domain,
SERVICE_SEND_COMMAND,
{"device_id": "newkaku_0000c6c2_1", "command": "no_command"},
)
assert not success, "send command should not succeed for unknown command"
async def test_reconnecting_after_disconnect(hass, monkeypatch):
"""An unexpected disconnect should cause a reconnect."""
domain = "sensor"
config = {
"rflink": {"port": "/dev/ttyABC0", CONF_RECONNECT_INTERVAL: 0},
domain: {"platform": "rflink"},
}
# setup mocking rflink module
_, mock_create, _, disconnect_callback = await mock_rflink(
hass, config, domain, monkeypatch
)
assert disconnect_callback, "disconnect callback not passed to rflink"
# rflink initiated disconnect
disconnect_callback(None)
await hass.async_block_till_done()
    # we expect 2 calls, the initial connection and one reconnect
assert mock_create.call_count == 2
async def test_reconnecting_after_failure(hass, monkeypatch):
"""A failure to reconnect should be retried."""
domain = "sensor"
config = {
"rflink": {"port": "/dev/ttyABC0", CONF_RECONNECT_INTERVAL: 0},
domain: {"platform": "rflink"},
}
# success first time but fail second
failures = [False, True, False]
# setup mocking rflink module
_, mock_create, _, disconnect_callback = await mock_rflink(
hass, config, domain, monkeypatch, failures=failures
)
# rflink initiated disconnect
disconnect_callback(None)
# wait for reconnects to have happened
await hass.async_block_till_done()
await hass.async_block_till_done()
# we expect 3 calls, the initial and 2 reconnects
assert mock_create.call_count == 3
async def test_error_when_not_connected(hass, monkeypatch):
"""Sending command should error when not connected."""
domain = "switch"
config = {
"rflink": {"port": "/dev/ttyABC0", CONF_RECONNECT_INTERVAL: 0},
domain: {
"platform": "rflink",
"devices": {
"protocol_0_0": {"name": "test", "aliases": ["test_alias_0_0"]}
},
},
}
# success first time but fail second
failures = [False, True, False]
# setup mocking rflink module
_, _, _, disconnect_callback = await mock_rflink(
hass, config, domain, monkeypatch, failures=failures
)
# rflink initiated disconnect
disconnect_callback(None)
success = await hass.services.async_call(
domain, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: "switch.test"}
)
assert not success, "changing state should not succeed when disconnected"
async def test_async_send_command_error(hass, monkeypatch):
"""Sending command should error when protocol fails."""
domain = "rflink"
config = {"rflink": {"port": "/dev/ttyABC0"}}
# setup mocking rflink module
_, _, protocol, _ = await mock_rflink(
hass, config, domain, monkeypatch, failcommand=True
)
success = await hass.services.async_call(
domain,
SERVICE_SEND_COMMAND,
{"device_id": "newkaku_0000c6c2_1", "command": SERVICE_TURN_OFF},
)
await hass.async_block_till_done()
assert not success, "send command should not succeed if failcommand=True"
assert protocol.send_command_ack.call_args_list[0][0][0] == "newkaku_0000c6c2_1"
assert protocol.send_command_ack.call_args_list[0][0][1] == SERVICE_TURN_OFF
async def test_race_condition(hass, monkeypatch):
"""Test race condition for unknown components."""
domain = "light"
config = {"rflink": {"port": "/dev/ttyABC0"}, domain: {"platform": "rflink"}}
tmp_entity = TMP_ENTITY.format("test3")
# setup mocking rflink module
event_callback, _, _, _ = await mock_rflink(hass, config, domain, monkeypatch)
# test event for new unconfigured sensor
event_callback({"id": "test3", "command": "off"})
event_callback({"id": "test3", "command": "on"})
# tmp_entity added to EVENT_KEY_COMMAND
assert tmp_entity in hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND]["test3"]
    # tmp_entity must not be added to EVENT_KEY_SENSOR
assert tmp_entity not in hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_SENSOR]["test3"]
await hass.async_block_till_done()
# test state of new sensor
new_sensor = hass.states.get(f"{domain}.test3")
assert new_sensor
assert new_sensor.state == "off"
event_callback({"id": "test3", "command": "on"})
await hass.async_block_till_done()
# tmp_entity must be deleted from EVENT_KEY_COMMAND
assert tmp_entity not in hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND]["test3"]
# test state of new sensor
new_sensor = hass.states.get(f"{domain}.test3")
assert new_sensor
assert new_sensor.state == "on"
async def test_not_connected(hass, monkeypatch):
"""Test Error when sending commands to a disconnected device."""
from homeassistant.core import HomeAssistantError
test_device = RflinkCommand("DUMMY_DEVICE")
RflinkCommand.set_rflink_protocol(None)
with pytest.raises(HomeAssistantError):
await test_device._async_handle_command("turn_on")
|
CloudVLab/professional-services
|
refs/heads/master
|
tools/cloud-vision-utils/cloud_vision_utils/dataset.py
|
2
|
# python3
# Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-bad-import-order
"""Utilities for image dataset generation."""
import csv
import os
from tensorflow.io import gfile
from cloud_vision_utils import annotation
from cloud_vision_utils import constants
def basename(path):
return os.path.basename(os.path.normpath(path))
def gen_csv_from_images(
input_dir: str,
output_file=constants.DEFAULT_CSV_FILENAME,
add_label=False,
out_path_prefix='',
dataset_type=constants.DEFAULT_DATASET_TYPE):
"""Generate AutoML dataset CSV from directory of images.
Args:
input_dir: Directory of images.
output_file: Output CSV filename.
add_label: Whether to include image label based on
last directory on the image's filepath.
out_path_prefix: Output path prefix to prepend to each filename.
(e.g. gs://path/to/the/imagedir)
dataset_type: AutoML dataset type (TRAIN, VALIDATE, TEST, UNSPECIFIED)
to use for all the parsed images.
"""
get_label = basename if add_label else lambda _: ''
with gfile.GFile(os.path.expanduser(output_file), 'w') as f:
writer = csv.writer(f, delimiter=',')
for topdir, _, files in gfile.walk(os.path.expanduser(input_dir)):
for f in files:
if out_path_prefix:
filepath = os.path.join(out_path_prefix, f)
else:
filepath = os.path.join(topdir, f)
label = get_label(topdir)
row = ([dataset_type, filepath, label] +
['']*constants.NUM_BOUNDING_BOX_FIELDS)
writer.writerow(row)
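# Illustrative usage sketch for gen_csv_from_images above (not part of the
# original module); the directory and bucket paths are hypothetical. The call
# walks the image directory and emits one AutoML CSV row per file, optionally
# labelling each image with its parent directory name:
#
#   gen_csv_from_images(
#       input_dir='~/datasets/flowers',
#       output_file='flowers.csv',
#       add_label=True,
#       out_path_prefix='gs://my-bucket/flowers')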
def gen_csv_from_annotations(
input_dir: str,
output_file=constants.DEFAULT_CSV_FILENAME,
out_path_prefix='',
dataset_type=constants.DEFAULT_DATASET_TYPE):
"""Generates AutoML dataset CSV from annotation files.
Args:
input_dir: Directory of annotation files.
output_file: Output CSV filename.
out_path_prefix: Filepath prefix to prepend to the image files.
e.g.
src_image_filename = '/tmp/path/to/image.jpg'
out_path_prefix = 'gs://bucket/images'
output_image_filename = 'gs://bucket/images/image.jpg'
dataset_type: Dataset type (TRAIN, VAL, TEST, UNSPECIFIED)
to use for all the parsed images.
"""
if not gfile.exists(input_dir):
raise ValueError('Input directory not found.')
with gfile.GFile(os.path.expanduser(output_file), 'w') as outf:
writer = csv.writer(outf, delimiter=',')
for filename in gfile.listdir(os.path.expanduser(input_dir)):
filepath = os.path.join(input_dir, filename)
image_filename, boxes = annotation.read(filepath)
out_image_filename = os.path.join(out_path_prefix, image_filename)
for b in boxes:
row = [
dataset_type,
out_image_filename,
b.label,
b.xmin,
b.ymin,
'',
'',
b.xmax,
b.ymax,
'',
'',
]
writer.writerow(row)
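# Illustrative usage sketch for gen_csv_from_annotations (not part of the
# original module); the paths are hypothetical. The call reads each annotation
# file, rewrites the image path under the given prefix, and writes one CSV row
# per bounding box:
#
#   gen_csv_from_annotations(
#       input_dir='~/datasets/flowers/annotations',
#       output_file='flowers_boxes.csv',
#       out_path_prefix='gs://my-bucket/flowers')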
|
campagnola/neuroanalysis
|
refs/heads/master
|
neuroanalysis/neuronsim/components.py
|
1
|
# -*- coding: utf-8 -*-
"""
Simple neuron simulator for Python.
Also simulates voltage clamp and current clamp with access resistance.
Luke Campagnola 2015
"""
from collections import OrderedDict
from .sim import SimObject
from ..units import pF, mV, uF, cm
class Mechanism(SimObject):
"""Base class for simulation objects that interact with a section's
membrane--channels, electrodes, etc.
"""
def __init__(self, init_state, section=None, **kwds):
SimObject.__init__(self, init_state, **kwds)
self._name = kwds.pop('name', None) # overwrite auto-generated name
self._section = section
self.dep_state_vars['I'] = self.current
def current(self, state):
"""Return the membrane current being passed by this mechanism.
Must be implemented in subclasses.
"""
raise NotImplementedError()
@property
def name(self):
if self._name is None:
# pick a name that is unique to the section we live in
# first collect all names
names = []
if self._section is None:
return None
for o in self._section.mechanisms:
if isinstance(o, Mechanism) and o._name is None:
# skip to avoid recursion
continue
names.append(o.name)
# iterate until we find an unused name
pfx = self._section.name + '.'
name = pfx + self.type
i = 1
while name in names:
name = pfx + self.type + str(i)
i += 1
self._name = name
return self._name
@property
def section(self):
return self._section
@property
def sim(self):
return self.section.sim
class Channel(Mechanism):
"""Base class for simple ion channels.
"""
# precomputed rate constant tables
rates = None
# maximum open probability (to be redefined by subclasses)
max_op = 1.0
@classmethod
def compute_rates(cls):
return
def __init__(self, gmax=None, gbar=None, init_state=None, **kwds):
Mechanism.__init__(self, init_state, **kwds)
self._gmax = gmax
self._gbar = gbar
if self.rates is None:
type(self).compute_rates()
self.dep_state_vars['G'] = self.conductance
self.dep_state_vars['OP'] = self.open_probability
@property
def gmax(self):
if self._gmax is not None:
return self._gmax
else:
return self._gbar * self.section.area
@gmax.setter
def gmax(self, v):
self._gmax = v
self._gbar = None
@property
def gbar(self):
if self._gbar is not None:
return self._gbar
else:
return self._gmax / self.section.area
@gbar.setter
def gbar(self, v):
self._gbar = v
self._gmax = None
def conductance(self, state):
op = self.open_probability(state)
return self.gmax * op
def current(self, state):
vm = state[self.section, 'V']
g = self.conductance(state)
return -g * (vm - self.erev)
@staticmethod
def interpolate_rates(rates, val, minval, step):
"""Helper function for interpolating kinetic rates from precomputed
tables.
"""
i = (val - minval) / step
i1 = int(i)
i2 = i1 + 1
s = i2 - i
if i1 < 0:
return rates[0]
elif i2 >= len(rates):
return rates[-1]
else:
return rates[i1] * s + rates[i2] * (1-s)
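# Illustrative note (not part of the original source): interpolate_rates does
# plain linear interpolation into a precomputed lookup table. For example, a
# hypothetical table rates=[0.0, 1.0, 2.0] sampled with val=0.25, minval=0.0,
# step=0.5 gives a fractional index i=0.5, so the returned value is
# rates[0]*0.5 + rates[1]*0.5 == 0.5; out-of-range values clamp to the ends.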
class Section(SimObject):
type = 'section'
def __init__(self, radius=None, cap=10*pF, vm=-65*mV, **kwds):
self.cap_bar = 1 * uF/cm**2
if radius is None:
self.cap = cap
self.area = cap / self.cap_bar
else:
self.area = 4 * 3.1415926 * radius**2
self.cap = self.area * self.cap_bar
self.ek = -77*mV
self.ena = 50*mV
self.ecl = -70*mV
init_state = OrderedDict([('V', vm)])
SimObject.__init__(self, init_state, **kwds)
self.dep_state_vars['I'] = self.current
self.mechanisms = []
def add(self, mech):
assert mech._section is None
mech._section = self
self.mechanisms.append(mech)
self._sub_objs.append(mech)
return mech
def derivatives(self, state):
Im = 0
for mech in self.mechanisms:
if not mech.enabled:
continue
Im += mech.current(state)
dv = Im / self.cap
return [dv]
def current(self, state):
"""Return the current flowing across the membrane capacitance.
"""
dv = self.derivatives(state)[0]
return - self.cap * dv
def conductance(self, state):
"""Return the total conductance of all channels in this section.
This is for introspection; not used by the integrator.
"""
g = 0
for mech in self.mechanisms:
if not isinstance(mech, Channel) or not mech.enabled:
continue
g += mech.conductance(state)
return g
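# Illustrative note (not part of the original source): Section.derivatives
# implements the membrane equation dV/dt = sum(I_mech) / C_m, summing the
# current of every enabled mechanism attached with Section.add(). A rough
# usage sketch (the channel class name is hypothetical):
#
#   soma = Section(radius=10e-6)
#   soma.add(SomeLeakChannel(gbar=...))  # hypothetical Channel subclass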
|
drjeep/django
|
refs/heads/master
|
tests/gis_tests/test_measure.py
|
325
|
"""
Distance and Area objects to allow for sensible and convenient calculation
and conversions. Here are some tests.
"""
import unittest
from django.contrib.gis.measure import A, D, Area, Distance
class DistanceTest(unittest.TestCase):
"Testing the Distance object"
def testInit(self):
"Testing initialization from valid units"
d = Distance(m=100)
self.assertEqual(d.m, 100)
d1, d2, d3 = D(m=100), D(meter=100), D(metre=100)
for d in (d1, d2, d3):
self.assertEqual(d.m, 100)
d = D(nm=100)
self.assertEqual(d.m, 185200)
y1, y2, y3 = D(yd=100), D(yard=100), D(Yard=100)
for d in (y1, y2, y3):
self.assertEqual(d.yd, 100)
mm1, mm2 = D(millimeter=1000), D(MiLLiMeTeR=1000)
for d in (mm1, mm2):
self.assertEqual(d.m, 1.0)
self.assertEqual(d.mm, 1000.0)
def testInitInvalid(self):
"Testing initialization from invalid units"
self.assertRaises(AttributeError, D, banana=100)
def testAccess(self):
"Testing access in different units"
d = D(m=100)
self.assertEqual(d.km, 0.1)
self.assertAlmostEqual(d.ft, 328.084, 3)
def testAccessInvalid(self):
"Testing access in invalid units"
d = D(m=100)
self.assertFalse(hasattr(d, 'banana'))
def testAddition(self):
"Test addition & subtraction"
d1 = D(m=100)
d2 = D(m=200)
d3 = d1 + d2
self.assertEqual(d3.m, 300)
d3 += d1
self.assertEqual(d3.m, 400)
d4 = d1 - d2
self.assertEqual(d4.m, -100)
d4 -= d1
self.assertEqual(d4.m, -200)
with self.assertRaises(TypeError):
d1 + 1
with self.assertRaises(TypeError):
d1 - 1
with self.assertRaises(TypeError):
d1 += 1
with self.assertRaises(TypeError):
d1 -= 1
def testMultiplication(self):
"Test multiplication & division"
d1 = D(m=100)
d3 = d1 * 2
self.assertEqual(d3.m, 200)
d3 = 2 * d1
self.assertEqual(d3.m, 200)
d3 *= 5
self.assertEqual(d3.m, 1000)
d4 = d1 / 2
self.assertEqual(d4.m, 50)
d4 /= 5
self.assertEqual(d4.m, 10)
d5 = d1 / D(m=2)
self.assertEqual(d5, 50)
a5 = d1 * D(m=10)
self.assertIsInstance(a5, Area)
self.assertEqual(a5.sq_m, 100 * 10)
with self.assertRaises(TypeError):
d1 *= D(m=1)
with self.assertRaises(TypeError):
d1 /= D(m=1)
def testUnitConversions(self):
"Testing default units during maths"
d1 = D(m=100)
d2 = D(km=1)
d3 = d1 + d2
self.assertEqual(d3._default_unit, 'm')
d4 = d2 + d1
self.assertEqual(d4._default_unit, 'km')
d5 = d1 * 2
self.assertEqual(d5._default_unit, 'm')
d6 = d1 / 2
self.assertEqual(d6._default_unit, 'm')
def testComparisons(self):
"Testing comparisons"
d1 = D(m=100)
d2 = D(km=1)
d3 = D(km=0)
self.assertGreater(d2, d1)
self.assertEqual(d1, d1)
self.assertLess(d1, d2)
self.assertFalse(d3)
def testUnitsStr(self):
"Testing conversion to strings"
d1 = D(m=100)
d2 = D(km=3.5)
self.assertEqual(str(d1), '100.0 m')
self.assertEqual(str(d2), '3.5 km')
self.assertEqual(repr(d1), 'Distance(m=100.0)')
self.assertEqual(repr(d2), 'Distance(km=3.5)')
def testUnitAttName(self):
"Testing the `unit_attname` class method"
unit_tuple = [('Yard', 'yd'), ('Nautical Mile', 'nm'), ('German legal metre', 'german_m'),
('Indian yard', 'indian_yd'), ('Chain (Sears)', 'chain_sears'), ('Chain', 'chain')]
for nm, att in unit_tuple:
self.assertEqual(att, D.unit_attname(nm))
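# Illustrative note (not part of the original test module): unit conversion is
# attribute access on the measure objects, e.g. D(mi=1).km == 1.609344 and
# A(sq_km=1).sq_m == 1000000.0, while arithmetic keeps the left operand's
# default unit, as the testUnitConversions cases in both classes check.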
class AreaTest(unittest.TestCase):
"Testing the Area object"
def testInit(self):
"Testing initialization from valid units"
a = Area(sq_m=100)
self.assertEqual(a.sq_m, 100)
a = A(sq_m=100)
self.assertEqual(a.sq_m, 100)
a = A(sq_mi=100)
self.assertEqual(a.sq_m, 258998811.0336)
    def testInitInvalid(self):
"Testing initialization from invalid units"
self.assertRaises(AttributeError, A, banana=100)
def testAccess(self):
"Testing access in different units"
a = A(sq_m=100)
self.assertEqual(a.sq_km, 0.0001)
self.assertAlmostEqual(a.sq_ft, 1076.391, 3)
    def testAccessInvalid(self):
"Testing access in invalid units"
a = A(sq_m=100)
self.assertFalse(hasattr(a, 'banana'))
def testAddition(self):
"Test addition & subtraction"
a1 = A(sq_m=100)
a2 = A(sq_m=200)
a3 = a1 + a2
self.assertEqual(a3.sq_m, 300)
a3 += a1
self.assertEqual(a3.sq_m, 400)
a4 = a1 - a2
self.assertEqual(a4.sq_m, -100)
a4 -= a1
self.assertEqual(a4.sq_m, -200)
with self.assertRaises(TypeError):
a1 + 1
with self.assertRaises(TypeError):
a1 - 1
with self.assertRaises(TypeError):
a1 += 1
with self.assertRaises(TypeError):
a1 -= 1
def testMultiplication(self):
"Test multiplication & division"
a1 = A(sq_m=100)
a3 = a1 * 2
self.assertEqual(a3.sq_m, 200)
a3 = 2 * a1
self.assertEqual(a3.sq_m, 200)
a3 *= 5
self.assertEqual(a3.sq_m, 1000)
a4 = a1 / 2
self.assertEqual(a4.sq_m, 50)
a4 /= 5
self.assertEqual(a4.sq_m, 10)
with self.assertRaises(TypeError):
a1 * A(sq_m=1)
with self.assertRaises(TypeError):
a1 *= A(sq_m=1)
with self.assertRaises(TypeError):
a1 / A(sq_m=1)
with self.assertRaises(TypeError):
a1 /= A(sq_m=1)
def testUnitConversions(self):
"Testing default units during maths"
a1 = A(sq_m=100)
a2 = A(sq_km=1)
a3 = a1 + a2
self.assertEqual(a3._default_unit, 'sq_m')
a4 = a2 + a1
self.assertEqual(a4._default_unit, 'sq_km')
a5 = a1 * 2
self.assertEqual(a5._default_unit, 'sq_m')
a6 = a1 / 2
self.assertEqual(a6._default_unit, 'sq_m')
def testComparisons(self):
"Testing comparisons"
a1 = A(sq_m=100)
a2 = A(sq_km=1)
a3 = A(sq_km=0)
self.assertGreater(a2, a1)
self.assertEqual(a1, a1)
self.assertLess(a1, a2)
self.assertFalse(a3)
def testUnitsStr(self):
"Testing conversion to strings"
a1 = A(sq_m=100)
a2 = A(sq_km=3.5)
self.assertEqual(str(a1), '100.0 sq_m')
self.assertEqual(str(a2), '3.5 sq_km')
self.assertEqual(repr(a1), 'Area(sq_m=100.0)')
self.assertEqual(repr(a2), 'Area(sq_km=3.5)')
def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(DistanceTest))
s.addTest(unittest.makeSuite(AreaTest))
return s
def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())
if __name__ == "__main__":
run()
|
mapennell/ansible
|
refs/heads/devel
|
v1/ansible/runner/action_plugins/pause.py
|
143
|
# Copyright 2012, Tim Bielawa <tbielawa@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible.callbacks import vv
from ansible.errors import AnsibleError as ae
from ansible.runner.return_data import ReturnData
from ansible.utils import getch, parse_kv
import ansible.utils.template as template
from termios import tcflush, TCIFLUSH
import datetime
import sys
import time
class ActionModule(object):
    ''' pauses execution for a length of time, or until input is received '''
PAUSE_TYPES = ['seconds', 'minutes', 'prompt', '']
BYPASS_HOST_LOOP = True
def __init__(self, runner):
self.runner = runner
# Set defaults
self.duration_unit = 'minutes'
self.prompt = None
self.seconds = None
self.result = {'changed': False,
'rc': 0,
'stderr': '',
'stdout': '',
'start': None,
'stop': None,
'delta': None,
}
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
''' run the pause action module '''
# note: this module does not need to pay attention to the 'check'
# flag, it always runs
hosts = ', '.join(self.runner.host_set)
args = {}
if complex_args:
args.update(complex_args)
# extra template call unneeded?
args.update(parse_kv(template.template(self.runner.basedir, module_args, inject)))
        # Do the 'minutes' or 'seconds' keys exist in 'args'?
if 'minutes' in args or 'seconds' in args:
try:
if 'minutes' in args:
self.pause_type = 'minutes'
# The time() command operates in seconds so we need to
# recalculate for minutes=X values.
self.seconds = int(args['minutes']) * 60
else:
self.pause_type = 'seconds'
self.seconds = int(args['seconds'])
self.duration_unit = 'seconds'
except ValueError, e:
raise ae("non-integer value given for prompt duration:\n%s" % str(e))
# Is 'prompt' a key in 'args'?
elif 'prompt' in args:
self.pause_type = 'prompt'
self.prompt = "[%s]\n%s:\n" % (hosts, args['prompt'])
        # If 'args' is empty, this is the default prompted pause
elif len(args.keys()) == 0:
self.pause_type = 'prompt'
self.prompt = "[%s]\nPress enter to continue:\n" % hosts
# I have no idea what you're trying to do. But it's so wrong.
else:
raise ae("invalid pause type given. must be one of: %s" % \
", ".join(self.PAUSE_TYPES))
vv("created 'pause' ActionModule: pause_type=%s, duration_unit=%s, calculated_seconds=%s, prompt=%s" % \
(self.pause_type, self.duration_unit, self.seconds, self.prompt))
########################################################################
# Begin the hard work!
try:
self._start()
if not self.pause_type == 'prompt':
print "[%s]\nPausing for %s seconds" % (hosts, self.seconds)
time.sleep(self.seconds)
else:
# Clear out any unflushed buffered input which would
# otherwise be consumed by raw_input() prematurely.
tcflush(sys.stdin, TCIFLUSH)
self.result['user_input'] = raw_input(self.prompt.encode(sys.stdout.encoding))
except KeyboardInterrupt:
while True:
print '\nAction? (a)bort/(c)ontinue: '
c = getch()
if c == 'c':
# continue playbook evaluation
break
elif c == 'a':
# abort further playbook evaluation
raise ae('user requested abort!')
finally:
self._stop()
return ReturnData(conn=conn, result=self.result)
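    # Illustrative note (not part of the original plugin): in a playbook this
    # action is driven by the three accepted argument styles parsed above,
    # for example:
    #
    #   - pause: seconds=30
    #   - pause: minutes=5
    #   - pause: prompt="Press enter to continue"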
def _start(self):
''' mark the time of execution for duration calculations later '''
self.start = time.time()
self.result['start'] = str(datetime.datetime.now())
if not self.pause_type == 'prompt':
print "(^C-c = continue early, ^C-a = abort)"
def _stop(self):
''' calculate the duration we actually paused for and then
finish building the task result string '''
duration = time.time() - self.start
self.result['stop'] = str(datetime.datetime.now())
self.result['delta'] = int(duration)
if self.duration_unit == 'minutes':
duration = round(duration / 60.0, 2)
else:
duration = round(duration, 2)
self.result['stdout'] = "Paused for %s %s" % (duration, self.duration_unit)
|
menegon/geonode
|
refs/heads/master
|
geonode/contrib/geosites/templatetags/__init__.py
|
12133432
| |
llcao/keras
|
refs/heads/master
|
tests/auto/test_regularizers.py
|
75
|
import unittest
import numpy as np
np.random.seed(1337) # for reproducibility
from keras.models import Sequential
from keras.layers.core import Merge, Dense, Activation, Flatten, ActivityRegularization
from keras.layers.embeddings import Embedding
from keras.datasets import mnist
from keras.utils import np_utils
from keras import regularizers
nb_classes = 10
batch_size = 128
nb_epoch = 5
weighted_class = 9
standard_weight = 1
high_weight = 5
max_train_samples = 5000
max_test_samples = 1000
# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape(60000, 784)[:max_train_samples]
X_test = X_test.reshape(10000, 784)[:max_test_samples]
X_train = X_train.astype("float32") / 255
X_test = X_test.astype("float32") / 255
# convert class vectors to binary class matrices
y_train = y_train[:max_train_samples]
y_test = y_test[:max_test_samples]
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
test_ids = np.where(y_test == np.array(weighted_class))[0]
def create_model(weight_reg=None, activity_reg=None):
model = Sequential()
model.add(Dense(784, 50))
model.add(Activation('relu'))
model.add(Dense(50, 10, W_regularizer=weight_reg, activity_regularizer=activity_reg))
model.add(Activation('softmax'))
return model
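# Illustrative note (not part of the original test file): a W_regularizer adds
# a penalty on the layer's weight matrix to the training loss, while an
# activity_regularizer penalizes the layer's output activations; the tests
# below sweep the built-in penalties over the small MNIST subset defined above.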
class TestRegularizers(unittest.TestCase):
def test_W_reg(self):
for reg in [regularizers.identity(), regularizers.l1(), regularizers.l2(), regularizers.l1l2()]:
model = create_model(weight_reg=reg)
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.evaluate(X_test[test_ids, :], Y_test[test_ids, :], verbose=0)
def test_A_reg(self):
for reg in [regularizers.activity_l1(), regularizers.activity_l2()]:
model = create_model(activity_reg=reg)
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.evaluate(X_test[test_ids, :], Y_test[test_ids, :], verbose=0)
if __name__ == '__main__':
print('Test weight and activity regularizers')
unittest.main()
|
pyQode/pyqode.python
|
refs/heads/master
|
examples/pynotepad/pynotepad/forms/resources_rc.py
|
2
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.5.1)
#
# WARNING! All changes made in this file will be lost!
from pyqode.qt import QtCore
qt_resource_data = b"\
\x00\x00\x04\xbb\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x03\x00\x00\x00\x44\xa4\x8a\xc6\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x25\x00\x00\x80\x83\
\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x6f\x92\x5f\xc5\x46\x00\x00\x02\x7f\
\x50\x4c\x54\x45\xff\xff\xff\x00\x00\x00\x18\x18\x18\x34\x34\x34\
\x64\x64\x64\x58\x58\x58\x6d\x6d\x6d\x41\x41\x41\x7e\x7e\x7e\x9e\
\x9e\x9e\xb1\xb1\xb1\xc6\xc6\xc6\xd5\xd5\xd5\xf0\xf0\xf0\x16\x16\
\x16\x33\x33\x33\x31\x31\x31\x66\x66\x66\x5a\x5a\x5a\x89\x89\x89\
\x9d\x9d\x9d\xb4\xb4\xb4\xd7\xd7\xd8\x19\x19\x18\x16\x17\x15\x00\
\x00\x00\x2d\x2e\x2c\x81\x83\x7f\x3e\x40\x3d\x08\x08\x07\x29\x2a\
\x28\x7e\x7f\x7c\x2d\x2e\x2c\x19\x19\x18\x32\x33\x32\x64\x66\x62\
\x39\x3a\x38\x55\x55\x55\x3e\x3f\x3e\xa5\xa6\xa3\x57\x58\x55\x53\
\x53\x53\x6d\x6d\x6d\x4c\x4d\x4b\x8a\x8b\x88\x61\x61\x5f\x59\x59\
\x58\x57\x58\x57\x7e\x80\x7d\x75\x76\x75\x9f\x9e\x9f\x63\x64\x63\
\xab\xac\xa9\x78\x7a\x77\x9b\x9b\x9a\xcb\xcb\xcc\x6f\x70\x6e\x95\
\x96\x93\x88\x8a\x88\xca\xcb\xca\xf8\xf8\xf8\x7c\x7d\x7c\x82\x83\
\x80\xad\xae\xac\xf6\xf6\xf6\x89\x89\x88\x85\x86\x84\xb2\xb3\xb1\
\xf7\xf7\xf7\x95\x96\x95\x99\x9a\x98\x9f\xa0\x9e\xec\xec\xeb\xff\
\xff\xff\xa2\xa2\xa1\x91\x93\x90\xd6\xd6\xd6\xff\xff\xff\xaf\xb0\
\xae\x8f\x90\x8d\xbe\xbf\xbd\xfb\xfb\xfb\xb8\xb9\xb7\x95\x96\x94\
\xa7\xa8\xa6\xf1\xf1\xf0\xff\xff\xff\xa3\xa4\xa2\x96\x98\x95\xde\
\xde\xdd\xff\xff\xff\xb7\xb9\xb7\x8e\x8f\x8d\xc7\xc8\xc6\xfe\xfe\
\xfe\xb8\xb9\xb8\x91\x92\x8f\xaf\xb0\xae\xf6\xf7\xf6\xb1\xb1\xaf\
\xa0\xa1\xa0\xe5\xe6\xe5\xff\xff\xff\xd7\xd7\xd7\xce\xcf\xcd\xff\
\xff\xff\xd5\xd6\xd5\xde\xdf\xdd\xcd\xcd\xcc\xd8\xd8\xd6\xe8\xe8\
\xe7\xc2\xc3\xc1\xcd\xce\xcd\xc7\xc8\xc5\xc0\xc2\xbe\xde\xdf\xdc\
\xe8\xe9\xe7\xcc\xcd\xcc\xcc\xcd\xca\xc3\xc4\xc1\xc5\xc6\xc3\xd0\
\xd1\xce\xeb\xeb\xea\xd9\xda\xd9\xc6\xc7\xc4\xcb\xcc\xc9\xce\xce\
\xcb\xcf\xcf\xcc\xdb\xdc\xda\xec\xed\xeb\xc3\xc4\xc2\xcc\xcd\xcb\
\xd5\xd5\xd3\xca\xcb\xc8\xd1\xd2\xcf\xd4\xd5\xd2\xe2\xe3\xe1\xe9\
\xe9\xe8\xcc\xcc\xcb\xd9\xd9\xd7\xce\xcf\xcc\xd1\xd1\xce\xd7\xd7\
\xd5\xda\xda\xd8\xd8\xd9\xd7\xd2\xd2\xd0\xd5\xd6\xd3\xdc\xdd\xdb\
\xcb\xcc\xcb\xdd\xdd\xdb\xd3\xd4\xd1\xd5\xd5\xd2\xd6\xd7\xd5\xda\
\xdb\xd8\xde\xde\xdc\xdb\xdb\xd9\xd6\xd7\xd4\xd4\xd4\xd2\xdc\xdd\
\xda\xee\xee\xed\xd0\xd0\xd0\xcb\xcb\xca\xe0\xe0\xde\xd7\xd8\xd5\
\xd9\xd9\xd6\xdd\xde\xdb\xe1\xe2\xe0\xe1\xe1\xdf\xdc\xdc\xda\xd8\
\xd8\xd5\xdf\xdf\xdc\xee\xee\xec\xd0\xd0\xcf\xca\xcb\xc9\xe2\xe3\
\xe0\xdb\xdb\xd8\xe0\xe1\xde\xe3\xe3\xe1\xe4\xe5\xe2\xe4\xe4\xe2\
\xe5\xe5\xe2\xf1\xf2\xf0\xdb\xdb\xda\xc9\xc9\xc8\xde\xde\xdb\xe2\
\xe2\xe0\xe6\xe6\xe4\xe8\xe8\xe6\xf5\xf5\xf4\xed\xed\xed\xaa\xab\
\xa9\xc8\xc9\xc8\xe5\xe5\xe3\xe4\xe4\xe1\xe6\xe6\xe3\xf2\xf2\xf0\
\xf8\xf9\xf7\xc7\xc8\xc7\xe1\xe2\xdf\xea\xea\xe7\xf9\xf9\xf8\xd7\
\xd7\xd6\xc7\xc7\xc6\xe2\xe2\xdf\xdd\xde\xdc\xf2\xf2\xf1\xe7\xe8\
\xe6\xc6\xc6\xc5\xee\xef\xed\xb7\xb8\xb6\xd3\xd4\xd2\x74\x75\x72\
\xff\xff\xff\xae\x8f\xcf\x2a\x00\x00\x00\x6a\x74\x52\x4e\x53\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x3e\x43\x01\x78\xfe\x93\x22\x6b\xe0\x6f\
\x12\x6f\xc7\x53\x08\x73\xfb\xaa\x3a\x03\x77\xed\x8c\x27\x7a\xda\
\x6f\x18\x7e\xfe\xc1\x56\x0c\x81\xf6\xa7\x3e\x04\x84\xf0\x93\x26\
\x87\xf3\x96\x27\x8b\xf9\xaf\x45\x06\x8f\xcc\x63\x11\x92\xe4\x83\
\x23\x95\xf4\xa4\x3a\x03\xfe\xc2\x57\x0c\x95\xdc\x77\x1b\x94\xee\
\x97\x31\x9f\xb7\x4c\x07\x62\x70\x16\x97\x72\x60\x8b\x00\x00\x00\
\x01\x62\x4b\x47\x44\x00\x88\x05\x1d\x48\x00\x00\x00\x09\x70\x48\
\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\
\x00\x01\x33\x49\x44\x41\x54\x38\xcb\x63\x60\x18\x56\x40\x5c\x42\
\x92\x91\x91\x89\x99\x85\x95\x8d\x9d\x83\x93\x8b\x9b\x87\x17\x5d\
\x81\x94\xb4\x8c\x2c\x23\x1f\x3f\x6e\x05\x72\x59\xd9\xf2\x0a\x8a\
\x02\x82\x38\x15\x28\xe5\xe4\xe6\xe5\x2b\xab\xa8\x0a\xe1\x52\xa0\
\x56\x50\x58\x54\x5c\xa2\xae\xa1\xa9\x85\x43\x81\x76\x69\x59\x79\
\x45\x65\x55\xb5\x8e\xae\x9e\xb0\x08\x36\x05\xfa\x39\x95\x35\xb5\
\x75\xf5\x0d\x8d\x4d\x06\x86\x46\xa2\x58\x14\x18\x37\xb7\xb4\xd6\
\xb5\xb5\xb7\x57\x76\x74\x9a\x98\x9a\x99\x8b\x61\x28\xb0\xe8\xea\
\xee\xe9\x6d\xef\xeb\x9f\x30\x71\x52\xe3\x64\x4b\x2b\x6b\x1b\x74\
\x05\xb6\x53\xa6\x4e\x9b\x3e\x63\xe6\xd4\x59\xb3\xe7\xcc\x9d\x37\
\x7f\x81\x9d\xbd\x03\x9a\x02\xc7\x85\x8b\x16\x2f\xe9\x5f\xba\x68\
\xd9\xf2\x15\x2b\x57\xad\x5e\xe3\xe4\xec\x82\xa6\xc0\x75\xed\xba\
\xf5\x53\x8b\x37\x6c\xdc\xb4\x79\xcb\xd6\x6d\x6e\xee\x1e\x9e\xe8\
\x56\x78\x6d\xdf\xbc\x63\xd1\xce\xcd\xbb\x76\xef\xd9\xbb\xcf\xdb\
\xc7\x17\x33\xb2\xfc\xf6\x1f\x58\xb5\xee\xe0\xa1\xc3\x47\xf2\xfd\
\x03\x02\xb1\xc5\x66\xd0\xd1\xcd\xb3\x8e\x1d\x3f\x71\x32\x38\x24\
\x34\x0c\x6b\x74\x07\x9d\x3a\x7d\xe6\xec\xb9\xf0\x88\xc8\x28\x1c\
\xe9\x21\xfa\xfc\xea\x0b\x17\x63\x62\xe3\x18\x70\x81\xf8\x85\x97\
\x12\x12\x93\x18\x70\x83\xe4\xcb\x29\xa9\x69\x78\xe4\x19\xd2\x33\
\x32\x19\x86\x2f\x00\x00\x5e\x32\x65\xeb\x43\x41\x2e\x28\x00\x00\
\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x04\x5d\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x30\x00\x00\x00\x30\x08\x04\x00\x00\x00\xfd\x0b\x31\x0c\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x26\x00\x00\x80\x84\
\x00\x00\xfa\x00\x00\x00\x80\xe8\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x70\x9c\xba\x51\x3c\x00\x00\x00\x02\
\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x03\xea\x49\x44\
\x41\x54\x58\xc3\xb5\x97\xbf\x8f\x1d\x35\x10\xc7\x3f\x63\xfb\x6d\
\x0e\xc4\xa5\xe0\x40\x02\x24\x8a\x13\x48\x48\x04\x91\x5c\x81\xa8\
\x93\x50\x45\xb4\x34\x14\x88\x9e\x7f\x82\x92\x9a\x9a\x12\x21\x68\
\x82\x10\x15\x12\xd2\xa1\x14\xa1\x03\x51\x84\x48\x07\x22\x08\x08\
\x48\x1c\xbf\x0a\x24\xee\x72\x6f\x3d\x14\x9e\xf5\xda\xde\xbd\x07\
\xf7\xee\xb2\x4f\x6f\x3d\xbb\xb6\x67\xe6\x3b\xf3\x1d\xdb\x2b\xca\
\xfd\xbd\xdc\x7d\xd6\x7f\xff\x0d\x84\xd4\xbc\xf0\xf1\xf2\x65\xc1\
\xb1\xf5\xfb\x1f\x07\x20\xd6\xf9\xfa\xc3\xcf\x3d\xa0\xf4\x08\x81\
\x8e\x05\x11\x10\x3c\x00\x91\xc8\x21\x47\x1c\x01\x9e\x2f\xde\xbd\
\xfe\xb6\xff\x50\x00\x39\xcf\xa6\xee\xf9\xab\xbb\x3f\x25\x1d\x92\
\x72\xb0\xa3\x09\xce\x23\xec\x9b\x7a\x01\x5e\xe3\x12\x00\x1d\x8e\
\xc0\x82\x05\x01\x10\x14\xb8\xcd\x2d\x22\x91\x9e\xcb\xc0\x8d\xfe\
\x13\x25\xa4\x59\x02\x70\xe7\xde\x95\xcf\xbf\x2f\x10\x94\x97\x22\
\x76\x4f\xc6\x3d\x91\x73\x2c\x08\x74\x78\x3a\x40\x89\xec\xf1\x91\
\xb9\xf2\x12\x3d\xea\xd5\x4c\xa7\x3f\xdb\xdd\x2e\xdb\x85\x01\xcd\
\x7e\x0f\xac\x4a\x52\x44\x70\x04\xbc\x61\xf0\x08\x01\x25\xe2\xf2\
\x2c\xcf\x12\xcd\xaa\x33\x2b\x9f\x68\x92\xac\xe8\x00\x0f\xb5\xff\
\x20\x0b\x4b\x3c\x1e\xc1\xe1\x70\x78\x82\x4d\x54\xd4\x4c\x95\xb3\
\x4a\xea\x87\x52\xcd\xd8\x31\xf8\xe2\x70\x74\x2c\xe8\xe8\xe8\x08\
\x36\xdc\x65\xc4\x29\x84\x8e\x58\x79\x2f\x45\x7f\x41\xd3\x11\xc1\
\x88\x28\xe2\x2d\x50\x82\xe0\xc0\xc2\x25\x88\x8d\x00\x87\x6f\xbc\
\x2f\x23\xe0\x4a\x75\x9a\xa5\xe1\xad\xc7\xe1\x39\x97\xfd\x10\xa2\
\x85\x49\x8a\x19\x42\x2c\xe6\x9a\x24\x8d\x81\x32\x4c\xa3\x09\xc1\
\x13\x10\x1c\x1d\x82\xe6\xe8\xbb\x42\x55\xa0\x9f\x20\x18\x35\x84\
\xd1\xdb\x21\x72\x5a\xc4\x50\x8c\x31\x1d\x0b\xbc\x25\x58\x2c\x68\
\x9a\xdd\xf1\xc4\x2a\x93\x52\x38\x19\x4a\xf5\xb5\x34\xd2\x57\xe8\
\x89\x05\x8b\x14\xb8\x84\x12\xe9\x59\xe2\x08\x33\xf4\x90\x39\x04\
\x3d\xbe\x2a\xb4\xef\x78\xd1\x52\xea\x80\x1e\xa5\xa7\x63\x81\x72\
\xc4\x33\x3c\x4d\xcf\x01\x70\x80\xf0\x73\x43\xf2\x82\x51\x49\xb8\
\xa0\xa9\xfb\x51\x1e\x42\x2d\xa6\x91\xc8\x63\x3c\x68\x0c\xd2\x4c\
\x4f\x25\xe6\xa4\x46\x04\xf8\x93\x5f\x32\xd6\x9c\xbd\xe5\xcd\x45\
\x85\x20\xf9\xbc\xcf\x6f\x16\xfb\xf4\xdf\x6f\xa6\x8d\x3d\x52\x3d\
\x55\xcb\x44\xc6\xb1\x82\xa6\xad\x54\x57\x8b\xae\x98\xa1\xd5\xe8\
\x09\x4d\xff\xcf\xb4\x39\xe3\xff\x59\x68\xab\x07\xb7\x38\xe6\x8c\
\x97\x52\x39\xb6\xca\x01\xc0\x0e\x5b\x33\xb1\x2e\x29\x5b\x5e\x52\
\xdc\x01\x6e\xb3\x7f\x1c\x4d\xd3\xf5\x16\x57\x58\xe4\xa7\x68\x5e\
\xc5\xec\x5f\xcc\x5e\xce\xf7\x1d\xf2\x1e\x9f\x56\x26\x8b\x1c\x00\
\x5c\x24\xe4\xe1\xbd\xed\x58\xa9\x1d\xa5\x9e\x25\x4b\xfa\xa2\x1d\
\x25\xcf\x4e\xb3\x58\x54\x85\x86\xed\xb7\x34\xde\xb5\x08\x56\xf5\
\x49\xd6\x55\x19\xa8\xa9\x75\x32\x95\x53\xa9\x5c\x6c\x9a\x42\x4b\
\x91\x3f\xe4\xb3\x82\x17\x53\xa6\xa4\x3a\xa7\xe9\xbb\xc0\xf9\x2c\
\x4b\x4e\xc3\x64\x47\x8b\x28\x0b\xae\xae\xe1\x77\x6a\x99\x47\x50\
\x2e\xd6\xeb\x44\xbe\x64\x58\xbd\x37\x36\xfb\x41\xb4\x61\xeb\x44\
\xbe\x4d\xf4\x0c\x02\xce\x00\x41\x34\x3d\xb3\x08\x98\x20\x38\x89\
\xdf\x43\x0e\xcc\xdd\x69\x92\x39\x13\x04\xad\xbe\x86\xa6\x43\xc1\
\xdf\x98\x90\x70\x15\x41\xd3\x9b\x67\xd9\xcc\x01\xa2\x35\x30\xd0\
\x34\x19\x08\x5c\x3e\x71\xa2\x63\x21\x1f\x9b\x64\x59\x3b\xf2\xd3\
\x1c\x4c\x42\xa4\x05\x82\x75\x16\x89\xa1\xcd\x8c\x94\x99\x1c\x70\
\x06\x08\xe2\x71\x85\x76\xfc\x32\x77\x52\x23\xf5\xb1\xa5\xf9\x3e\
\x88\x6b\xfa\x5d\xe6\xa0\x3e\x5d\x34\x85\xc6\xa9\x11\xd4\xeb\x42\
\x81\xe0\x29\x36\xb8\x93\x07\x1f\xcc\x2a\x58\xbd\x53\x04\x73\x6f\
\x83\x27\x39\xe4\xc7\x3a\xc9\x8f\xf3\x0e\xf0\xbe\x0d\x3e\xe2\xeb\
\x22\xa2\x65\x89\xd5\xa5\x35\xb6\x8a\xb2\x4d\x87\x12\x79\x95\x6b\
\xc0\x1b\x35\x82\x60\xf7\x34\xd4\xf1\xfc\x09\x23\x5f\xb6\x69\xdb\
\xf5\xb5\x81\x1f\x78\x93\x0d\xbe\xe4\xda\x29\x57\x22\x05\xae\xb3\
\xc7\x01\x77\xdb\x1c\xec\xe6\xb3\xf1\xe9\x76\x03\xf8\x95\xdd\xe9\
\x5a\xc4\x57\x5c\x54\xe0\x2e\x7d\x55\x0f\x30\xa6\xb7\x8e\x78\xfb\
\x66\x90\xfe\x32\xfe\x84\x5b\x49\xb1\x28\xe2\x09\x5b\xdb\x1b\xaf\
\xe8\x26\x4e\x1d\x4e\xd2\x57\x86\x53\x27\x1e\xe7\xbc\x0f\xea\xc4\
\x21\x38\x6b\x95\x28\x91\xe8\x48\xad\x44\xb1\x96\xe8\x34\x3d\xfb\
\xbf\xff\xf9\xe0\x9b\x6f\xb9\xc7\x52\xc6\x4f\x4f\xfb\x7c\x49\xdf\
\x49\x0e\xdf\xfc\x1c\x01\x87\xb3\x5d\x29\xda\x89\x2b\x9d\xc3\xfa\
\xe6\x84\x16\x89\x6a\x8a\xff\x05\x00\x96\x90\x2f\x23\x14\x6d\x71\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x0c\x90\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x30\x00\x00\x00\x30\x08\x06\x00\x00\x00\x57\x02\xf9\x87\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x26\x00\x00\x80\x84\
\x00\x00\xfa\x00\x00\x00\x80\xe8\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x70\x9c\xba\x51\x3c\x00\x00\x00\x06\
\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\
\x0c\x19\x49\x44\x41\x54\x68\xde\xe5\x9a\x6b\x6c\x55\xd7\x95\xc7\
\x7f\x6b\xef\x73\x0c\x18\x9b\x84\x38\x09\xc9\x28\x75\x79\x19\x1b\
\xf3\x34\x24\xbe\xd7\x74\x08\x90\x04\x1a\x51\x4d\xa5\xcc\x4c\x68\
\x34\x55\x49\x8b\x48\x68\x49\x44\x0a\x01\x9c\x50\x18\x84\x52\x25\
\x18\xdb\x98\x47\x2a\x95\x6f\x4d\xfb\xa5\x8a\x46\xaa\x34\x15\x95\
\x46\x30\x31\x10\xc0\xbe\xee\x40\x79\x04\x42\x4c\xc2\xc3\xcd\x43\
\x30\x40\x20\x80\x7d\xaf\xcf\x39\x7b\xcf\x87\x73\x1f\xc7\xc6\xd7\
\x98\x84\x99\xd1\x68\xb6\xb4\x7d\xf6\xb5\xcf\xd9\xe7\xff\x5f\xeb\
\xbf\xd7\xda\x7b\x5d\x8b\xb5\x96\xff\xcb\x4d\xfd\x6f\x03\xf8\x7f\
\x4f\xc0\xb9\x1b\x93\x88\x88\x4c\x99\x1e\xfb\x8e\x55\x4c\x17\x6b\
\x2a\x51\xaa\x52\xa0\x02\xc4\x15\x91\x2e\x51\x74\x8a\x95\x4e\x51\
\x72\x42\x90\x7d\x88\xdd\xfb\xe7\x03\x07\x4e\xda\xbb\xa0\x5f\xf9\
\x26\x73\x4c\x89\xc5\x46\x61\x78\x5e\x44\x9e\x17\x18\x89\x08\x22\
\x82\x84\xac\x10\x01\x41\x7a\x8d\x49\xdf\x23\x97\x44\xec\x6f\x8c\
\x52\x5b\x5b\x9b\x9b\x3f\xfd\x1f\x25\x30\x25\x16\x1b\x25\x56\x6d\
\x13\xe1\x7b\x92\x46\x25\x22\x0c\x2d\x1c\xc2\xc8\x91\xa3\xb8\x6f\
\xf8\xbd\x0c\x2b\x2e\xa6\xa8\xa8\x08\x04\x6e\xde\xbc\xc9\x8d\x1b\
\x37\xf8\xf2\xea\x35\xce\x77\x74\x90\xec\xea\x0a\x49\x85\xde\xf3\
\xad\xc8\xef\x5d\xa3\xeb\x9a\x9b\xff\xed\x83\xff\x56\x02\x0b\x16\
\x2c\xd0\xa7\x3b\x3a\x96\x0b\x6a\x83\x08\x85\x82\x50\x3c\x6c\x18\
\x8f\x4e\xab\x62\xec\x98\x31\x3c\x34\x62\x04\x22\x92\x9e\x19\x24\
\xf7\x1a\x24\xf7\x81\x0b\x17\x2f\x72\xe6\xec\x39\x8e\x1e\x3d\xca\
\xf5\x9b\x37\x90\xf0\xef\x9e\x15\x59\x3f\x67\xc6\x8c\xba\xf5\xeb\
\xd7\x9b\xbb\x4e\xe0\xd1\x99\x33\x4b\xad\x6f\xfe\x20\xc2\x34\x11\
\xa1\x64\xf8\x7d\xc4\x62\x8f\x51\x59\x31\x1e\xad\x75\x16\xad\x44\
\x7f\x4a\xef\x71\x8e\x99\x00\x81\x31\xb4\xb7\xb7\x73\xe8\xf0\x61\
\xbe\xbc\x7a\x2d\x94\x99\xc8\x1e\x1f\x7e\xd4\xfc\xa7\x3f\x0d\x48\
\x56\x03\x22\x10\x8f\xcf\x2a\x0b\x74\xb0\x5b\x44\x4a\x95\x28\xaa\
\xaa\xa6\x32\x23\x16\x43\x3b\x3a\x63\xdf\xdb\x02\xce\xde\xd7\x83\
\x68\xb8\x1e\x82\x20\xe0\x3f\x0e\x1d\xe2\xf8\x07\x27\xb0\x58\x44\
\xe4\xb2\x36\x66\xce\xce\x9d\x3b\x8f\x7f\x63\x02\x33\x66\xcc\x9a\
\x14\x68\xb3\x4b\x44\x8d\x18\x5a\x58\xc8\x53\x4f\x3e\xc1\xc3\x0f\
\x3d\x4c\x04\x6b\x2f\x60\xfd\x00\x16\xe9\x77\x7c\xf1\xe2\x45\xf6\
\xed\x7f\x9f\xae\xae\x24\x22\x72\x41\x39\xc1\xcc\x3f\xfe\xcb\x1f\
\x4f\x7f\x6d\x02\xf1\x39\x73\x1e\x11\x63\x8f\x08\x94\x0c\x2d\x2a\
\xe2\xe9\x79\x73\x19\x56\x5c\x0c\x08\xc6\x04\x5c\xb9\x7c\x99\x92\
\x92\xfb\x23\x9e\xe8\x29\x91\x2c\xc8\xc8\x02\xe8\x3d\xee\xea\xec\
\x64\xf0\xe0\xc1\x28\xad\x11\xe0\xfa\x8d\x1b\xec\xd9\xb7\x97\xae\
\xce\x2e\x10\xe9\x40\xeb\x99\xff\xfa\xee\xbb\x1d\xf9\x30\xe6\x4d\
\x64\x22\x1b\x94\xb2\xfc\x4e\x2b\x55\x32\x6c\xd8\x30\xe6\x3d\xf9\
\x04\x43\x06\x0f\xc1\xf3\x7d\x52\xa9\x24\xad\x07\x0f\x32\xe2\x81\
\x07\x38\x78\x60\x3f\xc9\x64\x0a\xdf\xf7\xf1\x7c\x1f\xdf\xf7\x72\
\x63\x2f\x3d\xf6\xbc\x9e\x63\xdf\xc7\xf7\x7d\x0e\x1f\x3a\xc4\x7f\
\x5e\xb8\xc0\xbe\xbd\x7b\x49\x25\x93\x78\xbe\xc7\x90\xc1\x83\x99\
\x35\xf3\x71\x8a\x8a\x8b\xd0\x4a\x95\x3a\xf0\x87\x05\x0b\x16\xe8\
\x3b\x26\xf0\x9d\x39\x7b\x6b\x1d\xad\x67\x3b\xae\xc3\x8c\x78\x9c\
\x82\x82\x02\x3c\xcf\xa3\x3b\x95\xe2\xcf\x89\x04\x4b\x5e\x7c\x91\
\x45\x8b\x16\xf1\xd3\x25\x4b\x48\xb4\x1c\x24\x99\x4c\x66\x81\x47\
\x41\xfa\xd1\x71\xe4\x77\x47\x8f\x1c\x61\xd2\x84\x09\xac\x5d\xbb\
\x96\x17\x16\x2f\xe6\xc0\xfe\xfd\xa4\x92\x29\x7c\xdf\xa3\xa0\xc0\
\xe5\xb1\xe9\xd3\x71\x5d\x07\xa5\xd5\x34\xab\xf5\xf2\x3b\x92\xd0\
\x9c\xef\x7e\x77\xa2\x31\xe6\xb0\x20\xee\xa4\x89\x13\x19\xf9\xed\
\x52\x00\x8c\x31\x1c\x3b\x72\x84\xa5\x4b\x97\x52\x13\x8f\x67\xef\
\x4f\x24\x12\x6c\x7f\xfb\x6d\xaa\xa6\x4f\xc7\x71\x72\xc9\x3d\x9a\
\xd4\xa2\x7a\x3f\xf5\xe1\x87\x4c\x9d\x3c\x99\xc5\x8b\x17\x67\xef\
\x6d\x69\x6d\x65\xc7\x8e\x1d\x3c\x5a\xfd\x18\x8e\x76\x40\xe0\xb3\
\xcf\x3f\xa7\xbd\xbd\x1d\x11\xe9\x34\x4a\x4f\x7c\xf7\xb7\xbf\x3d\
\xdb\x1b\x6b\xdf\x5b\x09\x4b\xad\x56\xda\xbd\xbf\xe4\x3e\xfe\xe6\
\xe1\x87\xf1\x3c\x0f\x63\x2d\x27\x8e\x1d\xe3\xe5\x97\x5e\xea\x01\
\x1e\x20\x16\x8b\x01\xb0\x75\xdb\x36\xa6\x56\x55\xa1\x1d\xa7\x8f\
\x88\x14\x2e\x8c\xd3\xa7\x3e\x62\x5a\x55\x55\x0f\xf0\x00\x35\xf1\
\x38\x02\xfc\x7a\xc7\xaf\x99\x36\x7d\x3a\x5a\x3b\x3c\x34\x62\x04\
\x97\x2f\x5d\xe2\xea\xb5\x6b\x85\x5a\xd8\x06\xfc\xdd\x6d\x25\x34\
\x67\xfe\xfc\x47\xb4\x56\xcf\x69\xad\x18\x35\x6a\x14\x7e\x10\xba\
\xfd\xca\xa5\x4b\xcc\x99\x3d\x9b\x9a\x9a\x9a\x3e\x39\xc7\x62\x31\
\x96\x2d\x5b\xc6\x5f\x0e\x1f\x26\x95\x4a\xe1\xfb\x01\xbe\xef\x13\
\x64\x64\x13\xf8\x7c\xf4\xe1\x29\xa6\x4e\x9d\x7a\x0b\xf8\x4c\x8b\
\xc7\xe3\xcc\xfc\xdb\x99\x5c\xba\x74\x39\xfb\xde\xd2\xd2\x6f\xa1\
\xb5\x42\x29\xf5\xbd\x85\x2f\x2c\x1c\x75\x5b\x02\x05\xf0\x8a\x52\
\xda\x29\x29\x29\x61\xf0\xa0\x41\x59\x0d\x17\x16\x15\xb1\x6b\xf7\
\x6e\x5a\x5a\x5a\xf2\xc9\x91\x78\x2c\xc6\xcf\x5f\x79\x85\x63\x47\
\x8e\x90\x4a\xa5\x42\xf0\x81\x8f\xef\x07\xb4\x9f\x3a\x45\xd5\x94\
\x29\xbc\xf8\xc2\x0b\x79\x9f\x6f\x6d\x6d\xe5\xbd\x3d\xcd\x14\x0f\
\x2b\x26\xf0\x7d\x82\x20\x60\x50\xc1\x20\x86\x0f\x1f\x8e\xd6\x4a\
\x34\xee\xf3\xfd\x12\x10\x11\x11\xad\x7f\xac\xb5\xe2\xa1\x07\x1f\
\xc4\x0f\x02\x82\x20\x20\xf0\x7d\xac\x31\x8c\x2d\x1f\xc7\xe6\xa6\
\x26\x5a\x5b\x5b\xf3\x82\x88\xc5\x62\xac\x58\xbe\x9c\xe3\xc7\x8e\
\x92\xf2\xba\x09\x82\x80\x4f\x4e\x9f\x66\x5a\xd5\x34\x96\x2c\x59\
\x92\xf7\xb9\x44\x22\xc1\xb6\xed\xdb\x98\x34\x79\x32\xd6\x5a\x82\
\xc0\x84\xef\x36\x01\x0f\xdc\xff\x00\x4a\x29\x94\x92\xe7\x25\x1a\
\x87\xe9\xb5\x88\x9f\xfe\xfe\xf7\x27\x68\x25\x1f\xb8\x8e\x4b\x79\
\x79\x39\x3d\x92\x69\xb8\x83\xc4\x58\xc3\xc7\x1f\xb5\xb3\xf2\xd5\
\x57\x89\xf7\x5a\x0b\x3d\x00\xb5\xb5\xd1\xd8\xd8\x48\xe1\xd0\x42\
\xaa\x1f\x7d\x8c\xa5\x4b\x97\xf6\x6b\xf9\x2d\x5b\xb7\x32\x71\xf2\
\x24\xb4\x56\x91\x85\x9f\x0b\x02\x67\xcf\x9d\xc1\x04\x06\x51\xcc\
\xdc\xf1\xab\x1d\xfb\xfb\xf4\x40\x81\x52\xb3\x94\xd2\x14\x15\x17\
\xe5\xdc\x1f\x04\x04\x7e\x90\xfd\x6c\x8d\x61\xf4\xd8\x31\x6c\x6a\
\xa8\xa7\xa5\x3f\x4f\x54\x57\xb3\xf2\xd5\x57\xa9\x89\xc5\x6f\x0b\
\x7e\x73\x53\x13\x15\x95\x15\x60\x43\xab\xfb\x81\x21\xf0\x03\x4c\
\xda\x03\x26\xf0\x29\x1a\x5a\x84\xd2\x0a\x51\xce\xf4\xbc\x12\xc2\
\xd1\x8f\x87\x16\x10\x3c\xdf\x0f\x81\x07\x3e\xbe\xc9\x4d\x1a\x04\
\x01\xc6\x18\x46\x8d\x1e\x4d\x7d\x7d\x3d\xad\x89\xfc\x24\xaa\xab\
\xab\x6f\x2b\x9b\xc6\xa6\xcd\x94\x8f\x2f\x07\xc0\x37\x21\x81\x0c\
\xf0\x10\x7c\x80\x1f\x04\x00\x68\xa5\x70\x94\x54\xe6\x25\xe0\x28\
\x3d\x41\x2b\x45\x67\x57\x27\x17\x2f\x5e\x20\xd5\x9d\x0a\x27\xf4\
\xfd\x34\x91\xb4\x47\x8c\xc1\x58\x4b\xe9\xc8\x91\xd4\xd5\x6d\x22\
\x91\x48\x70\xa7\x2d\x91\x48\x50\xdf\x50\x4f\xd9\xb8\x32\x80\x2c\
\xd8\x9c\xd5\x0d\x26\x08\xe8\xf6\x3c\xae\x5c\xb9\x42\x2a\x95\x44\
\x69\x8d\x52\x2a\x3f\x01\xa5\x55\xa1\x52\x0a\xad\x14\xc6\x18\xae\
\x5c\xb9\x42\xe7\xcd\x9b\xf8\x41\xe8\x0d\xe3\x87\xee\x0c\x7c\x1f\
\x13\x04\x58\x0c\x8f\x7c\xbb\x94\x37\x37\xbe\x75\x47\x24\xda\xda\
\xda\xa8\xab\xdf\xc4\x98\xb2\xb1\x58\x6c\xda\xea\x26\x0d\x3e\x73\
\xf5\xe9\xec\xea\xe2\xda\xb5\xab\x18\x13\x90\xc1\xa5\xb5\xae\xc8\
\x4b\x40\x6b\x55\xa8\xb5\x46\x85\x71\x17\xa5\x14\x9d\xc9\x2e\xbe\
\xfa\xea\x3a\x5d\xc9\xce\x74\x3c\x37\xa1\x7b\xd3\x11\x0a\x2c\xdf\
\x2a\x4d\x93\x68\x6b\x1b\x10\xf8\x8d\x75\x1b\x19\x35\x7a\x24\x16\
\x1b\x02\x37\x39\xc9\x04\x81\x4f\x57\x2a\xc9\xf5\xeb\xd7\x49\xa5\
\x92\x88\x92\x10\xbc\xd6\x99\x7c\xe0\xf6\x50\x4d\x2f\x02\x43\xb0\
\xe9\x73\x6d\x3a\x7b\x86\x87\x0c\xf0\xbc\x30\xb1\x38\x5a\x13\x92\
\xd4\x28\x95\x8b\x18\x5a\x2b\x8e\x1e\x3d\x4a\xac\xba\xfa\xb6\x24\
\xac\x0d\x81\x8b\x04\x88\x08\x99\x48\x68\x8c\xc1\x18\x83\x48\x08\
\x3a\x9c\x3a\x8a\x45\x10\xa1\x2b\xbf\x84\x44\x7b\x5a\x87\xae\x0a\
\x59\x2b\xb4\xd2\x91\xb1\xc2\x12\x9e\xa4\x7c\xcf\xa3\xbb\xbb\x1b\
\xcf\xf7\xf8\xeb\xf9\x0e\xe6\x3f\x3d\xbf\xdf\x24\x95\x69\xd5\xd5\
\xd5\xbc\xfe\xda\xeb\x9c\xf9\xe4\x0c\xdd\x5e\x37\x9e\xe7\xa5\x03\
\x43\x7a\xa1\x6a\xdd\xf3\xfd\x5a\xa1\x94\x26\x82\xab\xb3\x1f\x09\
\xc9\x29\xa5\x14\x4a\xab\xdc\x44\x3a\x07\x5e\xe9\xc8\x44\x5a\xa1\
\x44\xf8\xe2\xb3\x2f\xf8\xc7\xbf\xff\x87\xbc\xdb\x83\x7c\x24\xd6\
\xbc\xbe\x86\x8e\x73\x1d\x88\x90\x96\x6b\x56\x22\x91\x77\xf6\x34\
\x9e\x0a\x7f\xd7\x0f\x01\xa5\x4f\x46\x2d\xa0\xb2\x63\x9d\x7e\x38\
\x3d\x69\x7a\xf2\x2f\x3e\xfb\x9c\x1f\xfe\xd3\x0f\x59\xb8\x70\xe1\
\x80\xc1\x67\x5a\x2c\x16\xe3\xb5\xd5\xaf\x71\xfe\x6c\x48\xa2\x2f\
\xaf\xab\x88\x1a\xb2\x58\x1c\x75\x22\xbf\x84\xb4\x3e\xa9\x23\x37\
\xeb\xc8\x24\x19\xe0\x4a\x85\x96\xff\xec\xaf\x9f\xb2\xe8\x27\x8b\
\x78\xee\x07\x3f\xc8\x0b\xb2\xad\xad\x8d\xb6\x7e\x16\x76\x3c\x1e\
\x67\xd5\xaa\x55\x9c\xf9\xf8\x2c\x08\x3d\xbd\xae\x22\x5e\x8f\x8c\
\x1d\xa5\xf6\xf5\x27\xa1\x43\x2a\x2a\x91\xb4\x35\xb4\xce\x4d\x2a\
\x02\x9f\x76\x7c\xca\x92\x17\x97\xf0\xcc\x33\xcf\xf4\x0b\xfe\xad\
\x8d\x6f\xb1\xb1\xee\xad\x7e\x49\xd4\xc4\x6b\x58\xbd\x6a\x35\x9f\
\x9c\xfe\x24\x63\xc4\x4c\xb8\xcc\x11\x89\x8c\x15\xec\x8d\x3e\xdf\
\x63\x2f\x24\x22\xf2\xb3\x97\x7e\x76\x06\x25\x23\x25\x53\xcb\xc9\
\x15\xa0\xb0\xd6\x72\xfe\xdc\x79\x5e\x5e\xfa\x32\xf3\xe6\xcd\xcb\
\x0b\x2a\x91\x48\x50\xb7\x69\x23\xa3\xc7\x8e\x06\x84\x33\x1f\x9f\
\xa1\x76\x75\x6d\xf6\xdc\xd0\x57\x6b\x6d\x6d\xa5\x71\x73\x23\xe3\
\x2b\xcb\xc3\x2d\x03\x7d\x44\x43\xe4\xd2\x2f\xd6\xfc\xf3\x83\x40\
\x16\x74\x0f\x0f\x58\x6b\xad\x76\xf5\x3b\x3a\xad\xc5\xa8\x35\x00\
\xce\x9f\x3b\xcf\x8a\x9f\xaf\xe8\x17\x7c\x6b\xa2\x95\x4d\xf5\x75\
\x94\x8d\x2b\xc3\x75\x5d\x0a\x0a\x5c\xca\x2b\xca\xa8\x6f\xa8\xef\
\x37\xd9\xc5\xe3\x71\x56\x2c\x5f\xc1\x47\xa7\xda\x11\x24\xeb\xf5\
\xcc\xba\x08\xd7\xa1\xfc\x26\x0a\xfe\x16\x02\x00\x38\xbc\xa3\xb5\
\xb6\xb9\xcc\x17\x4e\x92\x4c\x26\x99\xff\xf4\x7c\x66\xcf\x9e\xdd\
\x8f\x15\x5b\x68\xa8\xaf\x67\x5c\x79\x19\x8e\xeb\x64\xb5\xeb\xb8\
\x2e\x15\xe3\xc7\xd1\xd0\xd8\xd0\xef\x56\xbc\xa6\xa6\x86\xb9\x4f\
\xcd\xe3\xc6\x8d\x1b\xd1\xcc\x9b\x21\xe1\x6b\x19\xb4\xb5\xf7\x33\
\xb7\x10\xd8\x5e\xbf\xfd\xac\x12\xb5\x33\x1b\xd2\xd2\x6b\xa0\xb8\
\xb8\x98\x5d\xbb\x77\x71\xe0\xe0\x81\xbc\x12\x68\x68\x6c\x60\xdc\
\xf8\x71\x59\xf0\x2a\xa2\x5d\xd7\x75\x19\x3f\xa1\x82\xcd\x4d\x8d\
\x79\x49\xb4\xb4\xb4\xf0\x5e\xf3\xbf\x33\x7c\xf8\xbd\x59\xe0\x92\
\x2e\x18\x03\xbf\x5f\xbd\x7a\xf5\x2d\xd5\xba\x3e\xab\x12\x5a\xf4\
\x32\xa5\x74\x67\x34\x91\xb8\xae\xc3\xd8\xb2\x31\x34\x6d\x69\xe2\
\xe0\xc1\x83\x7d\xe8\xb7\x81\x8a\xf1\x15\xb8\x8e\x13\x09\xb5\x3d\
\xa3\x57\x81\xeb\x30\x61\x52\x25\x5b\xb6\xde\x7a\x28\x6a\x69\x69\
\xe1\xed\x5f\x6d\x67\xd2\xe4\x09\x59\xc9\x46\x9a\x17\x08\x75\x7d\
\x61\xed\x93\x40\x7d\x7d\xfd\x59\xad\xf4\xfa\xde\x89\xc4\x71\x1d\
\xc6\x95\x97\xb1\x65\x6b\x8e\x44\x4b\x4b\x0b\x9b\x9b\x1a\x19\x3f\
\xa1\x02\xd7\x75\x22\x11\x2b\x97\x98\x74\x44\xc7\x05\xae\xcb\xe4\
\x29\x13\xd9\xba\x6d\x0b\xad\xad\x2d\x3d\xc0\x4f\x9c\x54\x99\xdd\
\x42\x44\x9b\x58\xd6\xaf\x59\xb9\xa6\xcf\xca\x75\xde\xca\xdc\x82\
\x05\x0b\x74\x59\xf9\xd8\xb6\x4c\x31\x37\x1a\x8d\x8c\x35\xb4\x9f\
\x3a\xcd\xdc\xa7\xe6\xb2\x6b\xf7\x2e\xc6\x57\x56\xa0\xb5\x4a\xdf\
\x23\xa8\xf4\x06\x2c\xe3\xfe\xe8\x7e\x27\xba\xef\x39\x7e\xec\x04\
\x4f\x3c\xf1\x24\xcd\xcd\xef\xe5\x05\x0f\xb2\x27\xd9\xd9\xfd\x64\
\xbe\x8a\x75\xbf\xa5\xc5\x0d\x1b\x5e\x2b\x0d\x70\xdf\x17\xa1\xb4\
\xd7\x97\x13\x58\x6b\xf8\xea\xab\xeb\xdc\x73\xcf\x30\x1c\xc7\x41\
\x6b\x27\xdc\xe8\x39\x3a\xbb\x76\x94\x0a\x8f\x85\x60\xb1\x16\xac\
\x35\x98\xc0\x84\x5b\x66\x13\xe0\x79\x1e\x57\xbf\xbc\xca\x3d\xf7\
\xde\x93\x07\x3c\x97\xc5\x3a\x53\xfb\xd2\xfe\x80\x08\x00\xfc\xf2\
\x97\xeb\xca\x0c\xce\xfb\x88\x1d\xd1\x33\x37\x84\x63\xed\x38\x38\
\xda\xc1\x75\x9d\x2c\x91\x6c\xd8\x4b\xef\x56\x2d\x99\x9d\x66\x90\
\x3e\xac\x87\x15\x07\x3f\x5d\x79\x08\xd2\x27\xae\x5e\xed\x02\x26\
\x98\x5b\x5b\xbb\xae\xdf\x0a\xf5\x80\xca\xeb\x6f\xbc\xf1\xc6\x24\
\xe5\xd8\x66\x11\x29\xc9\x6d\xb1\x43\x69\xb8\xae\x8b\x93\x26\xa1\
\x9d\x90\x44\xf8\x59\xa3\xd3\x0b\x3a\x30\x99\x33\x75\x90\x2d\x2f\
\xe6\x4a\x2e\x61\x29\xb2\x57\xeb\x50\x98\xa7\x56\xad\x5a\x7b\xfa\
\x76\xd8\x06\xf4\x2d\xe5\xba\x75\xeb\x8e\xbb\xda\x4e\x55\x5a\xed\
\xc9\x2c\xcc\x4c\x75\xc3\x5a\x8b\x35\x16\x63\x0d\x36\x6b\xe5\xf0\
\x1c\xeb\x79\x1e\xa9\xee\x54\x58\x2b\x0d\x82\xec\xb6\xd9\x1a\x93\
\xbe\xdf\xf6\x65\xfd\xc3\x18\x3d\x73\x20\xe0\x07\xec\x81\x4c\xdb\
\xb0\x61\x83\x1a\x32\xc4\xad\xb5\xc2\x06\x20\x7b\x32\x0a\x23\x95\
\x83\xe3\xe8\x30\x6f\x28\x85\x64\x17\x31\x69\xfd\x5b\xac\x09\xf5\
\x1f\xca\x27\x2c\x98\x45\x5a\x27\xc8\xfa\xd1\x23\xc7\x36\x3d\xfb\
\xec\xb3\xc1\x40\x31\x7d\xad\x2f\xf9\xde\x6c\x78\x73\xa2\xb6\xd4\
\x02\xcf\xd1\xeb\x54\x17\x8d\x3c\x39\x2f\x41\xb8\x90\x2d\xc6\xdc\
\x12\x4c\xac\xc5\xee\x74\xc4\x2e\x5b\xb9\x72\xed\xd9\x01\xbc\xfe\
\x9b\x13\xc8\xb4\x4d\x9b\x36\x3d\x62\xc5\x7b\x05\xe4\xc7\xc0\xfd\
\x77\xf8\xf8\x39\x6b\x79\xc7\x51\xe6\x9d\xaf\x03\xfc\xae\x10\x88\
\xce\x53\x57\xf7\x46\xa5\x88\x9a\x85\xa8\xc7\xc1\x4e\xb0\x50\x08\
\xb6\x10\x64\x08\xe0\x21\x9c\x12\xcb\x49\x0b\x27\x31\xf6\x50\x6d\
\xed\x2f\x0e\xd0\x6b\x63\x76\x57\x08\xa4\xdd\xee\x00\xfa\x36\x5d\
\xa5\x7b\x66\x2c\x91\x6b\xa6\xf7\x6e\x36\xd2\x4d\xe4\x1a\xf4\x71\
\xcd\xd7\xfd\x1e\x47\x00\x6b\x2d\x22\xa2\xd3\xa0\xdd\xf4\xf5\x6e\
\x75\x0d\xd9\x54\xe0\x67\x00\xdc\x85\xee\x01\xdd\x80\x9f\xcf\x03\
\xaa\x8f\x1e\xb5\x7c\x5f\x1e\x71\xfa\xb8\x27\x13\xa6\xcd\x1d\x58\
\xb8\xb7\x37\x7a\x8c\x7b\xff\x7f\xc5\x7f\x01\x62\xb8\xee\x1f\x83\
\x08\xbe\x0d\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x06\x91\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x30\x00\x00\x00\x30\x08\x06\x00\x00\x00\x57\x02\xf9\x87\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x26\x00\x00\x80\x84\
\x00\x00\xfa\x00\x00\x00\x80\xe8\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x70\x9c\xba\x51\x3c\x00\x00\x00\x06\
\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\
\x06\x1a\x49\x44\x41\x54\x68\xde\xe5\x59\x3b\x8c\x1c\x45\x10\x7d\
\xaf\xba\x7b\x96\x33\xc7\x49\x18\x23\x40\x20\x10\x3f\x41\x40\x06\
\x96\x48\x2d\x24\x3e\xce\x40\x22\x86\xc8\x11\x01\x01\x82\x8c\x80\
\x90\x00\xf1\x11\x89\x13\x90\x90\x20\x85\x80\x00\x10\x21\x12\xc8\
\x29\x19\x60\x8b\x93\x41\x48\xb6\xc5\xed\xde\xf9\xce\xbb\x33\x5d\
\x45\xb0\x33\xbb\xb3\x33\x3d\xbb\xb3\xe6\xfc\x41\x94\x74\x37\x33\
\x35\xbd\xd3\xef\x55\xbd\xae\xee\xe9\xa1\x99\xe1\xbf\x6c\x72\xa3\
\x01\xfc\xef\x09\xf8\xbe\x0d\x49\xf2\x3a\x12\x56\xeb\xa9\xed\x5e\
\x04\x48\xca\xa9\x53\xa7\xee\x3b\x7e\xfc\xf8\x23\x79\x9e\x0b\x00\
\xa8\x2a\x0f\x1b\xb5\x88\x58\x08\x41\xcf\x9c\x39\xf3\x2b\xc9\xf3\
\x66\xa6\x87\x42\x00\xc0\xc6\x89\x13\x27\x5e\x39\x79\xf2\xe4\x3b\
\x25\xa1\x8a\xd8\xec\xbc\x46\x36\x79\x6c\x9e\xa7\xae\x2b\xdb\xda\
\xda\x7a\xfb\xf4\xe9\xd3\xef\x01\xb8\x7c\x58\x04\x98\xe7\xf9\x86\
\xaa\x76\x76\xda\xe5\xef\x7c\xe0\x92\xf6\xe3\xf1\x78\x03\x40\xaf\
\x07\xb6\x08\x7c\xf0\xcd\xce\x27\x47\x06\xf6\x4c\xfd\xde\x87\x5f\
\x6f\xb3\x98\x6c\x6f\xd6\x23\xbe\xec\x7c\xd9\xf1\x6a\x09\xf7\x22\
\xf0\xce\x57\x3b\x2f\x3c\x7e\x97\xbe\xea\xdc\x62\xa3\x03\x1a\x9c\
\x11\xd7\x8b\x80\x99\xf1\xaa\x32\x50\x98\xde\xab\x30\x68\x5c\x6c\
\x94\x47\x03\xcd\x5a\x80\xaf\x96\xc0\x61\x45\xbf\x45\x40\x15\x28\
\x62\xbb\x7a\xc5\x68\x50\x9d\x77\x9e\x02\x7f\x98\x04\xc6\xb7\x3e\
\xf8\xda\xbb\x5f\x9e\x7b\xf5\xa3\xef\x2e\x2d\x80\x19\xe7\x38\x7b\
\xee\xf2\x1d\xcf\x7f\xfc\x32\xf6\x3a\x09\xe4\x09\x02\x79\x34\xf8\
\x32\x03\xab\xa2\xbf\xae\x94\x9a\x46\x12\xc1\x73\x73\x73\x60\x9b\
\xb7\x0c\x16\xab\xe8\xe6\x00\xf7\xec\xec\x5f\x7c\x11\x38\xf6\x59\
\x9a\x00\x34\x99\x81\xa2\xcc\x40\x9f\xe8\xf7\x05\xde\x35\x4f\x99\
\xd9\x4c\x09\x49\x35\x18\xb3\xfa\xf5\xe2\x18\x58\x92\x01\xad\x65\
\xa0\x09\xbe\x0f\x81\x3a\xe8\xe6\xb1\x69\x6a\x86\x3c\x1a\x5c\x02\
\xcb\x44\x35\x74\x12\xe8\x92\x50\xa1\x06\xad\xb9\xd7\x91\x52\x05\
\xb4\xfa\xeb\x47\x60\xda\x67\x0a\x4b\x73\x01\xb0\x48\x20\x62\x89\
\x84\xda\x63\x60\xd5\x40\xae\x03\xaf\x83\xae\x03\x4f\x91\x50\x35\
\x14\x48\x67\x40\x1b\x8b\x8b\x05\x02\x11\xda\x2d\xa1\x15\x91\xef\
\x8a\xba\x96\x3d\xae\x93\x05\xb5\x69\x9f\x92\xc0\x62\xc6\x05\x67\
\x5b\x42\x45\x7b\xfd\x94\x17\x8a\x28\xd6\x1a\x88\x29\xf0\xcd\xa8\
\xa7\xfe\x56\x11\x88\x6a\xc8\x55\x21\x09\x2c\x31\x5a\x37\x01\x5b\
\x36\x0f\xc0\x92\xa0\x97\x81\x57\xd5\xab\x92\x91\xaa\x21\x76\x54\
\xa1\xa5\x12\x2a\xb4\x4b\x42\x80\x0a\x3a\x81\x37\xc1\xa7\x80\xaf\
\x93\x85\xa9\x84\x00\xa6\x82\xb9\x50\x4e\x7a\xce\xc4\xd3\x2a\x94\
\xae\x18\xcd\xe8\xd7\xc1\x2f\x23\xd2\x04\x5e\x9d\x93\x84\x9a\xa1\
\xd0\x8e\x31\xd0\x58\xe6\x34\x06\xf1\x92\x79\x40\xbb\x33\x50\x01\
\xed\x02\xdf\x24\x92\x92\x51\x9d\x48\x55\xce\x53\x19\x68\x8e\x8a\
\x05\x02\x79\xa1\x83\x03\x26\x08\x14\xc4\x65\xef\xf1\xe7\x4e\xd1\
\xaa\x3e\xab\xa4\x53\xf7\x35\xa5\x96\x92\x0f\x00\x5c\x8e\x1e\x57\
\x0a\x22\xe6\xed\x7b\xe3\xa8\x0b\x33\x31\x9f\x7c\xeb\xa7\xe7\x40\
\xbe\x44\x23\x9d\xc3\x43\xde\xf3\x99\x76\xda\x22\x8e\xb8\x09\x1e\
\xbc\xf7\xe8\xfc\x87\xd5\xbd\x29\x8a\xd9\xb1\xed\x9b\xfe\x33\x58\
\x79\x5e\x1d\x2a\x02\x2d\x8c\xf8\xe3\xc2\x1e\xf6\x63\x06\x36\xd7\
\xf5\x00\x8a\xc2\xbe\x8f\x11\x67\x8d\x66\x00\xbe\xe4\x9b\x9f\xff\
\x7e\xfe\xa9\x87\x6e\xdb\x2a\xbb\x71\xaa\x38\xd2\x22\x00\x83\xc0\
\x90\x05\xb7\x00\x7e\x0e\x23\x61\xb6\xda\xd9\xf5\xdb\xbc\x50\x28\
\x08\x26\x5e\x09\x44\xb0\x4f\x30\x02\xc0\x99\xdf\x76\x47\xfe\xec\
\x85\x38\x3e\xbf\xb3\x5b\x2d\x4f\x33\xc2\xda\x04\xcc\xe0\x45\xb1\
\xb9\x31\x48\x82\xb2\xf9\x69\x0b\x9e\xb5\xfc\x2b\xa9\x63\x7f\x5c\
\xa0\x30\x49\x12\x30\xf0\x00\xc0\x04\x00\x26\x79\x1c\xfb\xcc\x87\
\x51\xe6\xdd\xc6\xb2\x07\x9b\x19\x3c\x15\xc1\x27\x5e\xa1\x1b\xa0\
\x6d\x2d\x7f\xda\x82\x23\x68\xd2\xb1\xf4\xae\xf9\x4c\x46\x3e\xcb\
\xb2\x61\xe6\xdd\x5d\xb3\x8e\x3a\x2a\x83\x17\x45\xe6\xb3\xe6\x9d\
\x35\x01\x5b\x1b\xbf\xb5\x2f\x25\x07\x32\x3a\x20\x41\x60\x4a\xaa\
\xf4\x33\x0e\x7d\xe6\xfd\x28\x0b\xbe\x02\xca\xd4\x56\x8c\x99\x21\
\x88\x21\x04\xdf\xd1\xb9\xa5\xd4\xd3\xbe\x63\x35\x9f\xd5\xee\xd9\
\x1c\xd3\xfe\xb8\x80\x0f\x83\x25\xbb\x1f\xc2\xf9\x3d\x8e\x7c\x96\
\xf9\x61\xe6\xe7\x04\x54\x53\x7b\x49\x53\x09\x65\x35\x09\xd5\x2b\
\xc9\x22\xb8\xd6\x45\xad\xd5\xbc\x74\xc6\x18\x21\xe2\x40\x0a\xc8\
\xe9\x5c\x72\x30\x8e\x10\x97\xc1\xb1\x7b\x03\x50\xa4\x46\x80\x18\
\xfa\x41\x70\xa3\xe0\xca\x09\xc9\xc0\xe4\x86\x9b\x19\xbc\x10\xc1\
\x37\xea\x8f\x31\x35\x62\x12\xfe\x2a\xe2\xd5\xac\x0d\x78\x71\x08\
\x21\x40\x55\xa1\x1a\x71\x25\x57\x50\x04\x19\x25\x29\x9d\x39\x01\
\x52\x66\x73\x91\x1b\xf9\xcc\xcb\xd0\x97\xe5\x56\x15\x88\x89\x89\
\x0c\x06\x04\x01\x2a\x05\x25\x57\x15\x86\xa4\xbe\x53\x4d\xab\x77\
\x0b\x11\x05\x4c\x91\x6b\x84\x92\x08\x41\xe6\x3f\xec\x30\x27\x80\
\x94\xcd\x48\x19\xfa\xe0\x31\x9c\x65\x40\x8d\x45\x4c\xb3\x0f\x32\
\xad\x0e\xf3\x0e\x98\x28\x91\xa5\x3f\xe5\x9b\xb5\x35\x44\x00\xde\
\xcb\xf4\xc5\xc5\x22\x14\x9c\xcd\x31\xab\xcc\x3b\x50\xa4\xca\x80\
\x0d\xfd\x20\xc8\x6e\x95\x81\xa8\x24\x25\x25\xa1\x92\x40\x60\x32\
\x40\xf3\x59\x98\x09\x7f\xa3\xad\x01\xce\x11\x24\x50\xc4\x88\x08\
\x22\x0b\xcb\x65\xb3\x48\x80\x74\x25\x46\x11\xee\xfa\x81\xe7\x4e\
\x35\x63\x17\x31\x3d\xfb\xc1\x80\xe0\x80\x6c\x16\x24\x76\xd6\xf5\
\x45\xbf\xb5\xda\x1a\x00\x33\xc1\x78\x12\xa1\x10\x64\xa1\xf7\x26\
\x5c\x45\x00\xbe\x54\x82\x23\x76\xfc\x20\xd8\x5f\x65\x4e\x6c\x3a\
\xbc\xbb\x25\xe4\xeb\x83\xb8\x55\x2a\x99\xf0\x23\xe9\x3b\x18\x47\
\x44\x48\xf9\xbc\xf5\x76\xe9\x82\x27\x4b\x02\xf4\xa2\x7f\x79\x9f\
\xf9\x73\x0e\x26\x00\x22\xbb\xf2\x68\x04\x61\xc8\xf3\x62\xc9\xa3\
\xfb\x7d\x6b\xcb\x0b\x83\x42\xe6\x72\x5c\xd3\x82\x23\xcb\x6a\x28\
\x11\xfe\x9c\xdf\x7b\x8c\xdb\x77\xfe\x36\x0b\x04\xbb\x26\x10\x83\
\x43\xb1\xf4\xa3\x49\x3f\x40\xf4\x44\xb6\x66\xd4\xeb\xe6\x1d\x98\
\x05\x02\x06\x5c\x78\x98\xdb\xfe\x8d\xa3\x12\xbf\xd8\xd6\x3d\x03\
\x36\xd4\xba\x25\xb4\x0e\xc8\x6b\x69\xde\x4f\x33\x40\x60\xef\x8d\
\xa3\x12\x3d\x00\x64\x1e\x43\x03\x36\x62\x34\x61\xef\xaf\x66\x37\
\xc6\x9c\x98\x64\x1e\x20\x30\x04\xca\x37\xb2\x81\xc7\x48\xc1\xbb\
\xf3\x02\x4e\x6e\x7c\x90\x97\x9a\x08\x5c\xe6\x09\x81\x8d\x66\x04\
\x26\xca\xd1\x95\xc2\xb8\x77\x10\x5d\x8f\xef\x6a\x37\xd4\x48\x71\
\x26\x9e\xb7\x78\x4e\x09\x90\xe4\xa7\x3f\x5c\xbc\xb4\xab\x83\xad\
\xbd\x83\xc9\x7f\xe1\xbb\xb1\x4c\x62\xdc\xba\x4d\xc6\x97\xc8\x63\
\x24\x00\x07\xe0\xd1\x27\x9e\x7e\xf6\xf5\xcd\xdb\xef\xbc\xcf\x60\
\x02\xeb\x57\x12\xaf\xbb\x91\x20\xa8\x7b\x7f\x5f\x38\xff\xf3\x8f\
\xdf\xbe\x0f\xe0\x17\xda\x74\xdb\x3c\x03\xf0\x00\x80\xfb\x01\x1c\
\x03\x10\xfe\x4d\x3f\xd7\xd0\x72\x00\x17\x01\x6c\x03\xf8\xdd\xcc\
\x26\xac\x6d\x28\x09\xa6\x63\xe2\x66\x97\x91\x02\x28\xaa\x8f\xe0\
\xb4\x9b\x55\x2e\x3d\xed\x66\x8f\xf6\x4a\xfb\x07\x22\xa6\x3f\x63\
\xc5\x71\x10\x36\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\
\x00\x00\x40\xc2\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\xc0\x00\x00\x00\xc0\x08\x06\x00\x00\x00\x52\xdc\x6c\x07\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x26\x00\x00\x80\x84\
\x00\x00\xfa\x00\x00\x00\x80\xe8\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x70\x9c\xba\x51\x3c\x00\x00\x00\x06\
\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\
\x40\x4b\x49\x44\x41\x54\x78\xda\xed\x9d\x77\xb8\x24\x47\x79\xee\
\x7f\x55\xdd\x33\x27\xee\x59\x6d\xd4\x06\x49\x2b\xad\xa4\x55\x0e\
\x28\x60\x10\x22\x98\x1c\x24\x81\x08\x22\xc7\x07\x38\x18\x5f\x5f\
\x63\x0c\x18\x13\x2c\x21\x4c\x10\x19\x19\x5d\x7c\x64\x9b\x2b\xae\
\xb9\xb6\x89\x02\x0b\x8c\x81\x4b\x14\x92\x76\x41\x11\xa1\x1c\x56\
\x69\x57\xbb\xab\xcd\x67\x4f\x9a\xe9\xae\xef\xfe\xd1\xa9\xba\xa7\
\x7b\x66\x4e\x98\x99\xb3\xbb\xf3\x3e\x4f\x3f\xdd\x5d\xd3\xd3\xa1\
\xea\x7b\xeb\x0b\x95\x94\x88\xd0\x45\x17\x07\x2b\x74\xa7\x5f\xa0\
\x8b\x2e\x3a\x89\x2e\x01\xba\x38\xa8\xd1\x25\x40\x17\x07\x35\xba\
\x04\xe8\xe2\xa0\x46\x97\x00\x5d\x1c\xd4\xe8\x12\xa0\x8b\x83\x1a\
\x5d\x02\x74\x71\x50\xa3\x4b\x80\x2e\x0e\x6a\x74\x09\xd0\xc5\x41\
\x0d\xb7\xd3\x2f\x70\x20\x43\x0f\x97\x7b\x81\xa3\x80\x21\x60\x41\
\xce\x36\x08\x54\x81\x31\x60\x5f\xb8\x8f\xb6\xed\xc0\x03\x66\xa4\
\xb2\xbb\xd3\xdf\x71\x20\x43\x75\xbb\x42\xcc\x1e\x7a\xb8\x3c\x04\
\x9c\x10\x6e\x27\x5a\xfb\x23\x99\xbd\x96\xdd\x01\x3c\x00\xdc\x1f\
\xee\xef\x01\xd6\x9b\x91\xca\x03\x9d\xfe\xee\x03\x01\x5d\x02\xcc\
\x00\x7a\xb8\xbc\x10\x78\x26\xf0\x9c\x70\x3b\xa9\xe9\x3f\xab\x26\
\xaf\x6b\x5c\x2c\x5b\x81\xeb\x80\xeb\xc3\xfd\xcd\x66\xa4\x52\xe9\
\x74\xde\xec\x6f\xe8\x12\xa0\x09\xe8\xe1\xb2\x43\x5a\xe0\xcf\x02\
\x9c\xc2\x3f\xa8\x82\xe3\x7a\xd7\x45\x28\x2a\x0e\x69\x78\xdd\x38\
\xf0\x5f\xc0\x77\x80\x1f\x99\x91\xca\xbe\x0e\x67\xdb\x7e\x81\x2e\
\x01\xea\x40\x0f\x97\xd7\x01\x6f\x05\xde\x0c\xac\xcc\xbd\x28\x4f\
\xd8\x55\x83\xeb\xea\x63\x37\x25\xb6\xd2\x2b\x15\xca\x52\xc2\xc1\
\x45\x8b\x8b\x83\x8b\x43\x19\x80\x0a\x93\x54\xd4\x14\x15\x3c\x2a\
\x0a\x3c\xe5\xe0\xd1\x8f\x61\x11\x30\x00\x4c\x02\xff\x4d\x40\x86\
\x6b\xcc\x48\x65\x6f\xa7\xf3\x72\xbe\xa2\x4b\x80\x0c\xf4\x70\x79\
\x10\xb8\x08\x78\x1b\xf0\xb4\xdc\x8b\x54\xc1\xbe\xde\x71\x54\x65\
\xa7\x89\x50\xa1\x2c\x0f\xb1\x50\x94\x1a\x94\xa5\xba\x4f\x06\x55\
\x89\x92\x52\xa0\x74\xf0\x17\x95\x25\x4e\x78\x1b\xb1\xf6\x62\x82\
\x64\x31\x60\x7c\x7c\x26\xd4\x18\x3b\xf5\x56\xf6\x68\xf0\x58\x01\
\xfc\x3b\x70\xb9\x19\xa9\xdc\xd1\xe9\xfc\x9d\x6f\xe8\x12\x20\x84\
\x1e\x2e\x1f\x05\x7c\x10\x78\x3d\x41\x2d\x9a\x86\xa2\xbe\xc0\xc7\
\x69\x52\xdf\x04\x52\x6c\x65\x48\x76\xab\x45\x66\xb1\x33\x24\x4b\
\xb4\x83\x56\x2a\xf0\x94\x95\x0a\x05\x5e\x81\xb2\x84\x3f\x4b\x02\
\x91\xe0\x31\x22\x21\x1f\xa2\x63\x01\x13\xed\x4d\xb8\x55\xf1\xd8\
\xa9\xb7\xb0\x4b\xef\x65\x4a\x6d\xc4\x70\x25\xf0\x43\x33\x52\x31\
\x9d\xce\xf3\xf9\x80\x83\x9e\x00\x7a\xb8\x7c\x34\xf0\x61\xe0\x8d\
\x64\xc3\xc2\xb6\x70\x17\xd5\xfa\x4a\x6a\xaf\xcf\x1e\x43\x95\x3e\
\x79\x40\x1f\x6a\x16\x39\x0b\x65\xb9\xd6\x28\x47\x25\x02\xaf\xc3\
\x38\x91\x56\xd6\xa3\x74\xb8\x17\x6a\x49\x24\xd6\xce\x84\xc2\xaf\
\x02\xe1\x47\xc0\xf8\x09\x19\x4c\x48\x06\xdf\x0f\xf6\xec\xd5\xbb\
\x64\x54\x6d\x54\x63\xea\x2a\x26\xd4\x57\xcd\x48\xc5\xeb\x74\x19\
\x74\x12\x07\x2d\x01\xf4\x70\xf9\x58\xe0\x23\x04\x35\x7e\xda\xa1\
\xcd\x0a\x7c\x4a\xe8\xa5\x59\x13\x68\x8a\x12\xb7\xab\x45\x66\xc0\
\x59\x6a\x8e\x71\xca\x94\x1c\x1d\x08\xbc\x13\x0a\xb7\x0e\x4d\x9d\
\xf8\x38\x22\x80\x75\xbf\x58\x0b\x84\xe7\x62\x0b\x7f\x78\x10\x69\
\x02\x5b\x03\x88\x49\x13\x40\x0c\xf8\x16\x11\x8c\x87\x61\x87\xbe\
\x8b\xbd\xfa\xb3\x8c\xa9\xff\x63\x46\x2a\x07\xa5\x20\x1c\x74\x04\
\xd0\xc3\xe5\x95\xc0\x65\xc0\xeb\x28\x12\xfc\x5c\xe1\xcf\xd8\xf0\
\x79\x7b\xcd\xbd\x0c\xc8\x66\x96\x18\x57\x2d\x10\xc7\x85\x33\x1c\
\x87\x1e\x27\xac\xe5\x75\x24\xfc\x2a\x38\xd6\x96\x16\x88\x37\x49\
\x88\x10\xdd\x37\x57\x01\x58\x44\xb0\x09\x10\x0b\x3f\x09\x09\x44\
\x02\xe1\x8f\xcc\xa2\x88\x08\xbe\x0f\xa6\x8a\x61\xb7\xfe\x23\xa3\
\xea\x12\xf3\x59\xef\xea\x4e\x97\x4f\xbb\x71\xd0\x10\x40\x0f\x97\
\x15\xf0\x2e\xe0\x53\xc0\xc2\x74\x2e\x50\x20\xf8\x52\xdf\xfc\xd1\
\x6c\xa5\x57\xee\x67\x91\xc0\x62\x73\x2c\x25\x16\x23\xac\x77\x85\
\x53\x4a\x8a\x85\x5a\x07\x02\x6f\x13\x40\x47\x35\xbe\xb6\x4c\x1e\
\x95\x90\x22\x2b\xf8\x45\xc1\xa3\xd0\xda\x89\x0f\x22\x13\xc8\x76\
\x8a\xed\xda\x3f\x65\x0e\x85\x9b\x09\x49\xe0\x79\xa1\x46\x18\xd5\
\xb7\x52\xe5\x9d\xe6\x12\xff\xa6\x4e\x97\x57\xbb\x70\x50\x10\x40\
\x0f\x97\x4f\x06\xae\x04\x9e\x9a\xfe\x7a\xf2\x05\xdf\xae\xed\xf3\
\x84\xbf\xc4\x2d\x1c\xee\xc3\x42\x79\x92\x75\xb7\xeb\x1d\xc3\xe1\
\x65\xc5\xe1\x91\xe0\xbb\x3a\x30\x71\x1c\x95\x08\xbf\x26\x24\x81\
\x0a\xd2\x21\xa3\x09\xac\x67\xe5\x69\x81\x6c\xed\x9f\xd5\x04\xc6\
\xd4\x9a\x43\x91\xe0\x47\x24\xf0\xfd\x50\xf8\xc5\xd2\x06\x1e\x78\
\x3e\x88\xc1\x30\xaa\x37\x30\xaa\x5e\x61\x3e\xe9\x3d\xde\xe9\xb2\
\x6b\x35\x0e\x68\x02\xe8\xe1\x72\x1f\xf0\x51\xe0\x7d\x40\x29\xf9\
\x6a\x9a\x13\xfc\xb4\xf0\x0b\x3d\xfc\x9e\x23\xfc\x01\x06\xc5\x6e\
\xf9\x1d\x75\x84\x4d\xae\x70\x9c\xeb\xa0\x6c\xc1\x77\x23\xc1\x77\
\x02\x61\x8f\x85\x5e\x25\xe7\xb1\x06\xa0\xd6\xf4\x89\x0f\xf3\xa2\
\x40\xf1\x49\xb2\x4b\x99\x42\xe1\xde\x17\x20\xaa\xf1\x23\x0d\x90\
\x47\x04\x3f\x20\x80\x17\x92\x81\x8a\x1a\x65\x9c\x8b\xcc\x87\xcd\
\x7f\x77\xba\x1c\x5b\x89\x03\x96\x00\x7a\xb8\x7c\x3a\xf0\x6d\xe0\
\x98\xf4\x17\xe7\x6d\x75\x04\x5f\xe1\xd3\x2b\x1b\x38\xc2\x2c\xa3\
\x5f\x8e\x8d\xef\x1f\x84\x2e\x37\x2a\x9f\xd5\x25\x4d\x39\x12\xfc\
\x48\xd8\x1d\x05\x8e\x63\xd5\xfc\x96\x16\x88\xc3\x9e\x58\x61\x4f\
\x48\x85\x3d\x15\x81\x00\xd7\xb4\x03\x84\x90\xd0\x3a\xb3\x4d\x21\
\x43\x92\x10\x6b\x01\x12\xcd\x60\x42\x42\x88\x6d\x06\x49\x20\xf0\
\xb6\x5f\xe0\xf9\x50\xf5\x40\x7c\x8c\xec\xd1\xff\xa2\x1e\x75\xde\
\x75\xa0\x86\x4d\x0f\x48\x02\xe8\xe1\xf2\xdb\x80\x2b\x80\xde\xe4\
\x4b\x73\x36\xdb\xc6\xaf\x15\xfc\x0a\xfd\xb2\x81\x23\xcc\x11\xf4\
\xca\x9a\xe8\x36\x8e\x8e\x85\xff\x26\xa9\xf2\xa4\x92\x83\x76\x9d\
\xc4\xe4\x49\xd9\xfd\xa1\xc0\x3b\x59\xc1\xb7\xa2\x3f\x91\xc9\x93\
\xb7\x4f\xc9\x7e\x24\xed\x16\x24\x53\xfb\x47\x69\x71\xdb\x00\xd4\
\xb6\x0f\x48\x46\x1b\x58\x1a\xc0\x37\xa1\x06\x88\xb4\x81\x17\x1c\
\x33\xa6\xfe\xc8\x1e\xf5\x7c\xf3\x09\xff\x80\x33\x89\x0e\x28\x02\
\x84\xdd\x8f\xaf\x20\x68\xc5\xb5\xbe\x92\xfc\x5a\xbf\xc8\x14\x2a\
\x73\x23\xc7\x78\xab\x29\x07\xdd\x1f\x22\xa7\x35\x14\xec\x7d\xc6\
\xe3\x0e\x6d\xf8\x13\xd7\x09\x84\xde\x09\x09\x10\x0b\xbe\x13\x84\
\x97\x1c\x9d\xae\xfd\x23\x47\x17\xcb\xfc\xd1\xd6\x73\xb3\x21\x50\
\xdb\x17\x88\x8a\x49\xa9\x84\x07\x12\x57\xff\xc9\x79\x74\x9d\x91\
\x74\x5a\xdc\x46\x60\x6d\x7e\x64\x0e\xf9\x89\x46\xf0\x42\xb3\xa8\
\x1a\x6a\x85\xaa\x17\x10\x81\x8a\x1a\x65\x8c\xd7\x98\x8f\x98\xff\
\xea\x74\x39\xcf\x25\x0e\x18\x02\x84\x0d\x5a\xdf\x01\x4e\x4f\xbe\
\x0e\x9a\x32\x77\x92\x6d\x8a\xa5\x66\x3d\xab\xcd\x33\xc2\x94\x94\
\x60\x3b\x9a\xcd\x95\x49\x76\xbb\x8a\x13\x63\xe1\x0f\x09\xe0\xea\
\xc4\xec\x49\x69\x00\xab\xf6\x57\x04\x35\xbf\xdd\xea\x9b\xaa\xf1\
\x2d\x22\x66\x9d\xde\xbc\x52\x4a\xb5\x05\x90\xae\xfd\x53\x66\x50\
\x24\xfc\xd1\x35\x26\x4d\x02\x63\xc0\xb3\xf6\xb1\x29\x64\x02\xe1\
\xaf\x86\x1b\x3e\xbe\x8c\xab\xcb\xe5\x6f\xcc\x7b\x3b\x5d\xde\x73\
\x85\x03\x82\x00\x7a\xb8\xfc\x42\xe0\x3f\xb0\xc3\x9b\xcd\xd4\xfa\
\x76\xed\xaf\x79\x80\x63\x7c\x9f\x7e\x59\x07\x29\xa1\x8f\xb6\x2d\
\x53\x13\x68\x47\xb1\xdc\xae\xf9\xdd\xd0\xd9\x75\x2c\x42\x68\x1d\
\x68\x00\xed\xa4\x1b\xb9\x72\xe3\xfe\xa4\x49\x00\x89\x60\xe7\x58\
\x3d\x41\x0d\xaf\x72\x04\xde\xba\xb8\xa6\x5d\x20\x63\x06\x09\x61\
\xb4\xc8\xd2\x06\x9e\x9f\x38\xc6\xbe\x0f\x9e\x24\x66\x50\xe4\x1c\
\x57\xab\x01\x79\x98\x54\x1b\xd8\xa6\xce\x35\x97\xf9\xfb\x7d\x2b\
\xf2\x7e\x4f\x00\x3d\x5c\xbe\x08\xf8\x06\x45\x51\x9e\x7a\x82\x1f\
\x6d\x03\xf2\x1b\xd6\xfa\x67\xa3\xe9\x53\xca\xaa\xd9\x13\x12\x6c\
\x9f\x98\xa0\xcf\x51\x0c\x94\x42\xc1\x2f\x69\x70\x9d\x40\xf8\x5d\
\xcb\x04\x8a\x1a\xbb\x6c\xa1\x4f\x69\x01\x95\xd6\x00\xf6\x2b\xe7\
\x9f\x04\xb0\x8b\x29\x3a\xf4\x85\xa9\xaa\xc7\x58\xd5\x67\x4c\x40\
\x69\x4d\x8f\xa3\xe8\x55\x8a\x5e\xad\x82\xfc\x88\x05\x1f\xc0\xaa\
\xf9\xb3\xe6\x50\xd4\x3e\x10\x69\x81\x48\x13\x78\x96\x3f\x50\xb5\
\x49\x30\xa5\xee\x66\x4c\x9d\x61\x3e\xea\x4f\x74\x5a\x06\x66\x83\
\xfd\x9a\x00\x7a\xb8\xfc\x76\x60\x04\x7b\xd4\x55\x64\x58\x37\x27\
\xfc\x3b\x39\xcc\xdc\xcf\x62\xf3\x64\x48\x1c\xd9\x48\x98\xc3\xe3\
\x1d\x63\xfb\x58\xa8\x35\x6e\x39\xac\xe5\x4b\x51\xcd\x1f\x5d\xe7\
\x26\x91\x1f\x9d\xb1\xfb\x6b\x6a\x7f\x6a\xa3\x3d\xd1\x6b\x67\x5b\
\x7d\x25\xa8\xd5\xcd\x78\x85\x4d\x7b\x26\xd8\x39\x5e\x55\x8c\x4f\
\x51\x9e\xa8\x32\x38\x3e\xa5\x16\x1b\x9f\x81\x24\x0c\x14\xee\x45\
\x45\xbe\x7d\xb5\xb7\xc4\x96\x43\x86\xcc\xae\x05\x83\x94\xfb\xca\
\xb2\xac\xe4\xb0\x58\x40\xd9\x44\x48\x85\x4a\x25\xf1\x05\x22\x13\
\xc8\xf7\xa1\x1a\x9a\x42\x35\x24\xa8\xa8\x87\x98\x50\xa7\x99\x0f\
\xf9\xfb\x6d\x77\xeb\xfd\x96\x00\x7a\xb8\xfc\x5e\xe0\xf3\xe9\x44\
\x9a\xaf\xf9\x4b\xdc\xc2\xb1\xde\x2a\x4a\x1c\x1a\xd5\xfa\x39\xc2\
\xbf\x6d\x74\x8c\x3e\x07\x16\x94\xac\x5a\xbf\x14\x0a\x7d\x5c\xfb\
\xab\xc4\x49\xb6\x6b\x7c\xbb\xf1\x4b\xe5\x11\x00\x8a\x7a\x7c\x7a\
\x63\x15\x1e\xd9\xb2\x47\x8d\x3e\xba\x87\x23\x2b\x3e\x0b\x73\x07\
\xc4\xc4\x82\xaf\x92\xf3\x78\x0b\xd3\x4c\x78\x6e\x14\x5a\x31\xb6\
\x78\x91\xd9\xb8\x74\xb1\x0c\xf4\xf7\xca\x11\x08\x4e\xaa\xf7\xa8\
\xb5\xaf\xfa\x16\x11\x42\x12\xf8\x91\x3f\x60\x93\xa0\xaa\x36\x33\
\xc6\x19\xe6\xc3\x66\x6b\xa7\x65\x62\x26\xd8\x2f\x09\xa0\x87\xcb\
\x97\x12\x34\x70\x85\x5f\x41\x73\x66\x4f\xa4\x27\x7a\x64\x03\xc7\
\xf9\x67\xa2\x70\x75\x28\xc4\xa9\x1a\x3d\xd8\xef\xdd\x3b\xca\x76\
\xad\x58\x6b\xd7\xfc\x25\x37\x3c\x8e\xa2\x3f\xca\xb2\xfb\x6d\xe1\
\x8f\xfc\x80\x90\x94\xa9\xb8\x3f\x39\x61\x4f\xc1\x1f\x9d\xe4\xc1\
\xcd\xa3\x6a\xe2\xe1\x5d\x1c\xed\x4b\x4e\x97\xec\x08\x45\x64\x28\
\x22\x42\x4c\x02\xc0\x04\xe7\xae\x52\x7b\x97\xad\xf4\x1e\x5a\xbc\
\x50\x0e\x2d\xbb\x1c\x6a\x9b\x42\xd1\xde\x0b\x7d\x81\x98\x04\x7e\
\x5a\x13\x54\xaa\xe1\x3d\x3d\xb5\x9d\x7d\xea\x2c\xf3\x61\xff\xe1\
\x4e\xcb\xc6\x74\xb1\xdf\x11\x40\x0f\x97\x3f\x45\xd0\x6f\x3f\xfc\
\x02\xf2\x05\x5f\x93\x4f\x8c\x32\xbf\xe7\x38\xef\x74\x54\xd0\x3b\
\x33\x12\x7c\x3b\xaa\xe3\x6a\x18\x9f\x64\xbd\x5f\xe1\x29\xae\x0b\
\x65\x27\xac\xf9\x6d\xdb\x3f\x13\xf7\xd7\x24\xda\x23\x6a\xf1\xcd\
\x9a\x3e\x44\xe7\xd6\xbb\x2b\x81\xb1\x0a\xf7\xdd\xb4\x99\xfe\xd1\
\x29\x56\x17\x7e\xb8\x14\xb5\x88\x65\x8e\xf3\x88\x60\x48\x6b\x84\
\x90\x04\xf8\xa0\x8c\x62\x68\xc8\xdc\xbb\x6a\x8d\x3f\x54\x76\x58\
\x21\x19\x9f\x20\x45\x02\x2f\x31\x87\xaa\x76\x74\x28\x20\xc1\x1e\
\x46\xd5\x53\xcc\x47\xfc\xbb\x3b\x2d\x23\xd3\xc1\x7e\x45\x00\x3d\
\x5c\x7e\x37\x41\x9c\x3f\x7c\x7b\xa6\x27\xfc\x25\x6e\xe4\x78\xef\
\x14\x14\x3d\x6e\x46\x90\x6d\x02\x88\xe1\xfa\xbd\x7b\x39\xa7\x54\
\x0a\x84\xbe\x1c\xd9\xfd\x4e\xb2\x77\x2c\x67\xd9\xae\xf5\xdd\x8c\
\xe0\xa7\xcc\x9f\x4c\x03\x57\xd5\x63\xf3\xad\x5b\x18\xdd\x32\xc6\
\x71\x35\x1f\xdb\xa8\x58\xf2\x08\x91\xf2\x05\xa8\x35\x83\x6c\x8d\
\x60\x00\x3f\x38\x56\x46\x81\x0f\x8b\x96\x99\x3b\x56\xac\xf6\x57\
\x96\x14\x8b\x3d\xab\xc5\xb8\x86\x04\x96\x36\x98\xaa\x86\x8d\x65\
\x81\x39\xb4\x93\xbd\xea\x58\xf3\x77\xfe\xce\x4e\xcb\x4a\xb3\xd8\
\x6f\x08\xa0\x87\xcb\xe7\x03\x57\x13\x75\x61\xce\x13\xfe\x22\xc1\
\x8f\x6c\xfe\xe3\xbd\x13\x50\xf4\xc6\x76\xbc\x4d\x82\x24\x96\x7f\
\xff\xf6\x5d\xac\x29\x39\x94\xca\xa1\xc9\x13\x09\x7d\x59\xa7\x63\
\xff\x11\x09\xb2\x26\x4f\x54\xcb\x67\x1d\xdf\xe8\xd8\x08\xbb\xef\
\xdb\xc1\xc3\xf7\xee\xe0\xb4\xec\x77\xd6\x8b\xfb\xe7\x37\x06\x34\
\x41\x04\xa3\x8a\xb5\x41\x44\x02\x5f\x45\x1a\xc1\x2c\x3d\xcc\xbb\
\xe3\xd0\x43\xcd\x09\x46\x70\x63\x12\x18\x2b\x22\x14\x12\x20\xd2\
\x00\x95\x6a\x38\xd8\x26\x88\x0e\xdd\xcf\x16\x75\x82\xf9\xcc\xfe\
\x11\x22\xdd\x2f\x08\xa0\x87\xcb\x67\x03\xbf\x02\xfa\x93\x44\x9a\
\x13\x7e\x0d\xb8\xdc\xc6\x71\xde\xb1\x68\xfa\x4b\x19\x81\x2f\xa5\
\xb5\xc0\xd8\xf6\x5d\xf8\x1a\x86\x7a\x9c\xc0\xd1\x2d\x47\xa6\x8f\
\x93\x84\x3d\xb3\xc2\x9f\xd2\x02\x64\x42\xa0\x90\x32\x7d\xb6\xed\
\xe3\xb6\x5b\xb6\x70\x82\x4f\x38\xc0\x3d\x83\x6c\x97\x86\x6c\xfc\
\x3e\xb9\x30\xef\xcf\x19\x32\x44\xd7\x64\x85\xde\x26\x45\x64\x12\
\xf9\x21\x09\x42\x52\xf4\xf4\xca\xe6\x35\xc7\x55\x4b\xae\xc3\xb2\
\x88\x04\xd5\x28\x2a\x64\x85\x46\x2b\xe1\x16\x3b\xc5\x02\x8c\xab\
\x5f\x9a\xf7\x9b\x67\x77\x5a\x6e\x9a\xc1\xbc\x27\x80\x1e\x2e\xaf\
\x05\x6e\x00\x96\x27\x89\x34\x16\xfe\x28\xcd\xe5\x76\x8e\xf3\xd6\
\xa2\x19\x28\x65\x85\xde\xda\x97\x34\xec\xdd\xc7\xa6\x4a\x85\xd5\
\xe5\xb0\xd6\xb7\x85\xbf\x94\xd3\xf8\xa5\x2d\x02\xd8\xbd\x3d\x75\
\x28\x87\x76\x18\x14\xf0\x1f\xdc\xc1\xed\xf7\xef\xe1\xf4\x54\xe8\
\x33\x44\xca\x94\xcf\xe9\xcc\x96\x1a\xe2\x68\x75\x79\xa8\xf9\x33\
\xa4\x89\x50\xa4\x0d\x6c\x22\xe4\x92\x00\x94\x51\xde\xaa\x75\xd5\
\x7b\x17\x0e\xc9\x09\xc6\xa0\x8c\xb1\x84\xdf\xd2\x02\x11\x01\xaa\
\x91\x29\x64\x40\xc6\xd5\x57\xe5\x03\xe6\xdd\x9d\x96\x9f\x46\x98\
\xd7\x04\xd0\xc3\xe5\x43\x80\x0d\xc0\xba\xe4\x8d\xb1\x04\xbe\xc0\
\xe6\x8f\xce\x1d\xee\xe0\x78\x6f\x0d\x9a\xc1\x48\xc8\x63\x5b\x3e\
\x43\x00\x31\xfc\x61\xfb\x2e\x4e\x2d\x39\x50\xb6\x6a\xfe\xc8\x0c\
\x72\xc3\x68\x4f\x6c\xff\xdb\x5d\x1e\x92\x0e\x72\x49\xd8\x93\xb0\
\xdb\x83\x02\x31\x4c\xdc\xf1\x04\x0f\x6f\x9b\xe0\x78\xc8\x44\x82\
\x42\xa4\xfa\xf7\x90\xaf\x01\x4c\xce\xe6\x9b\x9c\x9b\x40\x63\x6d\
\x60\x47\x86\x24\xa9\xf9\xb3\x9a\x00\x1f\x86\x96\xf9\x77\xad\x3e\
\xc2\x3f\x56\x04\xd7\x8b\xb4\x81\x65\x02\x45\x61\xd1\x29\x2f\xe8\
\x47\x14\xde\x57\x64\xaf\xfa\x0b\xf9\x90\xb9\x82\x79\x8c\xf9\x4e\
\x80\x6f\x12\x4c\x51\x12\xbe\x2d\x8d\x6b\xff\xe4\x7c\x2f\xc7\x79\
\xfb\x28\xb1\xca\xcd\x38\xb1\xa9\x78\x7e\x20\xc8\xd5\x6d\x3b\x18\
\x57\xb0\xb0\x9c\x25\x80\x6b\x45\x87\x72\xa2\x3f\x51\x03\x58\x6a\
\x98\x23\x09\x11\x8c\xb0\xf3\x96\xad\x8c\xed\xab\x72\x78\xb6\xb1\
\xcb\x8e\xfd\xa7\xc6\xfa\x66\x85\x9f\x0c\x01\xac\xae\xcd\x71\xff\
\x7e\x93\xee\x1b\x94\xdc\xb8\x40\x1b\xa4\x4c\x20\xf2\x49\xe0\x87\
\xd7\x79\x30\xb8\xc8\xbf\xf7\xb0\xa3\xfd\xa3\x44\x28\x45\xed\x02\
\x71\x0b\xb1\x65\x0a\x55\xaa\x24\xda\xc6\xc3\x63\xaf\x7e\xa1\xf9\
\x88\xff\xf3\x4e\xcb\x52\x11\xe6\x2d\x01\xc2\x2e\xcd\xff\x92\xbc\
\x29\xb5\xb5\x7f\xb1\xf0\xc3\x0a\xff\x3a\x96\xca\xd3\x1c\x6d\x99\
\x31\x3a\x3f\x9c\x59\xa9\xf2\xfb\x9d\xbb\x39\xbb\xc7\x0d\x6a\xfb\
\xb2\x9b\x44\x7e\x4a\x6e\x12\xf3\x8f\x3a\xbc\xd5\xd4\xfe\xd4\x86\
\x3d\xb5\x82\xaa\xc7\xa3\x37\x6d\x63\x81\x27\x1c\x12\x45\x83\x60\
\x7a\x1a\xc0\xae\xfd\x25\x53\xf3\x67\x7b\x75\x46\xad\xb8\xa9\x1b\
\x42\x1d\x12\x90\x8e\x0a\x15\x91\xc0\x0f\x48\xd0\x77\x88\xd9\x78\
\xc4\x31\xde\x4a\x11\x7a\x53\x04\xb0\x4c\xa1\x4a\x35\x48\x8f\xef\
\x5f\x55\x7b\xd8\xa9\x0e\x33\x97\xfa\xf3\x72\xa6\xba\x79\x39\x3b\
\xb4\x1e\x2e\x1f\x07\x5c\x1e\x27\x34\xd3\xc8\x65\x6f\x3d\x72\x03\
\x4b\xe5\x69\x5a\x59\xd1\x1e\x4b\xf8\x33\x61\xcf\xbd\xbb\xf6\x70\
\x82\xdd\xa7\xdf\x55\x96\xdd\x1f\x3a\xbd\xae\xb2\x84\x5f\x25\xe1\
\x4e\x47\x25\x6d\x00\xb6\x03\x5c\xf5\x79\xec\x96\x6d\x1c\xaa\x14\
\xe5\x72\xd4\xf7\xdf\x6e\xf8\xca\xf9\xee\x54\xed\x4f\xa6\x23\x1b\
\xe9\xda\xdf\xd1\x49\xed\xaf\x15\x38\x02\x5e\x48\x3c\x3f\x1c\x14\
\x1f\xdf\x34\xea\x01\x2b\x99\x07\x87\x2a\x2a\x76\x5a\xe2\xe3\xa0\
\x77\x9d\x20\xa8\xa8\x97\x9d\x28\x26\x76\xeb\xa3\x1e\xba\xc7\x7d\
\xe4\x88\x75\xde\x32\x57\xd3\x87\x01\x71\xc2\xf7\x72\x82\x77\x70\
\x42\x12\x4a\x54\x6e\x25\x59\xa8\x06\xf9\x3e\xf0\xdc\x4e\xcb\x55\
\xae\xac\x75\xfa\x05\x6a\x5e\x68\xb8\x5c\x26\x98\xc9\x2c\x69\x09\
\x6d\x46\xf8\x13\xcd\xb0\x95\xb5\xfe\xf1\x2a\x14\x62\xc7\x49\x9b\
\x2e\xb6\x13\xec\x68\x98\x9c\xe4\x8f\x28\x06\x6d\x33\xc9\xee\xdc\
\x56\xd3\x2b\xd4\x6e\xf9\xd5\x89\x6f\x60\xf7\x03\x52\xb0\xf7\x8e\
\x9d\xf4\x39\x0e\xe5\x52\xc6\x91\xce\x73\xae\x73\xd3\x75\xfe\xef\
\x65\xd7\x32\xd1\xc2\xbd\xed\xaf\x94\x2d\x92\x47\x64\x4b\xf2\xd1\
\x52\x0b\x76\xbe\xe9\x50\x9b\x6a\x2b\x7f\xc3\x34\x89\x7e\x73\x04\
\x1c\x98\x1a\xd5\x47\x6c\x7a\xd0\xdd\xac\xc0\x8f\x83\x00\x8e\x15\
\x1c\x08\xf3\xcf\x7e\x86\xf4\xcb\x73\xf4\xdf\xeb\x37\x77\x5a\xb6\
\x72\xe5\xad\xd3\x2f\x90\x83\x4f\x03\x4f\x8a\xcf\xa6\x53\xf3\x2b\
\x11\x0e\x37\x8f\xe1\xb0\xc8\x6e\xd5\xb5\x0b\x26\xd3\xdf\x67\xd3\
\xce\xbd\x3c\xc9\x4e\x2b\xa9\x34\x59\xa2\x1a\xdf\xd1\x89\x36\x89\
\x87\x3c\x86\x35\x6e\xdc\x17\x28\xd0\x06\xde\xfd\xbb\xd9\x6a\x60\
\x49\x4a\x78\x5d\xcb\xa4\xca\x08\x6c\x4a\xf0\xdd\xe4\xfa\x72\x03\
\x72\x94\x9d\xda\x7b\x66\xaf\x8d\x06\xe0\x24\xf9\x99\x43\x02\xdb\
\xa4\xb4\x09\x11\x0a\xbd\x38\x92\x4a\x1f\xdf\xa5\x8f\x7e\x62\xab\
\xbe\x2b\xee\x0a\x12\xf9\x3f\x16\x19\x54\xb6\x6c\x16\x70\x85\xfe\
\xa4\xb3\xa4\xd3\xc2\x95\xc5\xbc\x32\x81\xf4\x70\xf9\x4f\x81\xf7\
\xc4\x09\xf5\x84\x5d\x67\x8f\x05\x06\xe4\x5a\x86\xe4\x19\x29\x01\
\xae\xb3\x8d\x8e\xf1\xb0\x52\xac\x2e\x59\xc2\x1d\xc7\xfa\x55\x6d\
\xed\x1f\x45\x7b\xe2\xc6\x2f\x32\x03\x5f\x80\x2d\x63\xfc\x71\xcc\
\xe7\xf4\x48\xf8\xf2\xba\x43\x64\x65\x32\x82\xed\x07\xd8\x26\x90\
\xb1\xfc\x01\x63\xc2\xc8\xa5\x24\x16\x8b\x6d\xfe\xe4\xf5\x40\xf5\
\xb2\xbe\x81\x92\xc4\x2f\x88\x5e\x24\x32\x87\xa2\xe6\x6b\xec\x97\
\x51\x88\x13\x9a\x43\xa1\x6d\xbf\xeb\x31\xf7\xe4\x9e\xc1\xea\x5d\
\x83\xfd\x72\x82\x91\xe0\x1d\x8c\x13\xbc\x5f\xd4\x50\xe8\xf9\x56\
\x39\xba\x32\x40\x89\x1f\x92\x9d\x99\xa3\xc3\x98\x37\x4e\xb0\x1e\
\x2e\xbb\xc0\x6d\x04\x0b\x4b\x84\x89\xa4\x05\x5c\x65\xd3\xb0\xd5\
\xf8\x83\x1c\xef\xaf\xd4\x9a\xbe\x7a\x66\x86\x65\xff\xef\x7b\x68\
\x33\x65\xd7\xa1\x5c\x76\xa1\xc7\x85\x9e\xb0\x36\xed\xc9\xb4\x0f\
\xd4\x68\x0e\x4b\xd0\xac\x31\xc2\x8c\x55\xb8\xfd\xde\x51\x4e\xc9\
\x8e\x05\x28\xea\x09\x9a\x45\x5d\x02\x14\x84\x40\x53\xd1\x20\x93\
\x08\xbb\xbd\x8f\x5b\x71\xb3\x0e\x72\xd6\x39\x4e\xb5\x0b\x58\xc7\
\xa1\x23\xac\x42\x67\x38\x72\x92\x01\x73\xc4\xa9\x95\xc7\x1d\xcd\
\x6a\xcf\x87\x8a\x9f\x84\x45\xa7\xaa\x81\x43\x2c\xd1\x33\xa3\xf6\
\x81\x3d\xea\x3d\xf2\x61\xf3\xe5\x4e\xcb\x9b\x2d\x62\xf3\x05\xff\
\x03\x5b\xf8\xa7\x67\xfa\xf8\xac\x31\x53\x28\xfa\x6a\xfa\xf4\xe7\
\x98\x3e\x8e\x86\xc9\x29\xb6\x29\x28\xbb\x96\xa3\x1c\xdb\xf4\xca\
\x32\x75\xec\xfb\x15\x69\x86\xc0\xf1\x7c\xf8\xfe\x31\x4e\xcc\x9a\
\x21\x6e\x01\x11\x63\xd3\xc5\xad\x35\x61\x9a\xd9\xdc\x8c\x49\xe4\
\xe6\x3c\x2b\x2f\xe2\x15\xe7\x2d\x14\x98\x43\x79\xa6\x10\x89\x3f\
\xe0\x58\xe9\xa0\x37\xdf\x57\x2a\x2b\x85\xa7\x74\x3a\x3f\x22\x8d\
\x99\x1a\xf0\xa0\x40\x0d\xf2\x69\x7d\xa9\x73\x68\xa7\x85\x2d\xc2\
\xbc\x30\x81\xf4\x70\x79\x39\x70\x49\x9c\xa0\xa6\xb3\x09\xf4\xb0\
\x81\x7e\x39\xc7\x16\xd6\xd8\x7e\xd7\x35\x76\x3f\xae\x86\x6d\xfb\
\x58\x92\x2a\x30\x4b\xd8\xb3\x2d\xbc\xb6\x93\x1b\x9b\x3c\x84\xbf\
\x05\x6f\xec\xdf\xbf\x8f\x72\xc9\xc1\x89\x35\x43\xd6\xfc\xb1\x34\
\x40\x5e\x24\xc8\x8a\x00\x4d\x89\x30\x29\xb0\xb0\x9e\x06\xd0\x12\
\x36\xde\x46\x56\x8b\xa9\x35\x81\x54\xe6\x19\xd1\x73\x3d\x3f\x3c\
\xc9\x9a\x43\x90\x36\x85\xe2\xca\x27\xb4\xb3\x44\x21\x22\xc1\xcc\
\xbe\xa1\x29\xe4\x4d\xa8\x65\x7b\x76\xeb\x3f\x0e\x2d\x34\x27\x9b\
\x28\x10\xe0\x04\xd1\x20\xc7\xc9\x34\xd4\x29\xc0\x91\x5e\xd5\xc3\
\x3f\x03\xe7\x77\x5a\xee\x60\x9e\x10\x80\xec\x74\x85\x59\x01\xaf\
\x49\x23\xad\x19\x56\x99\x25\x35\x43\x19\x2d\x81\xcf\x8c\xed\x45\
\x2b\x1e\x9e\x98\x62\x4d\x8f\x9b\x26\x48\xb6\xf6\x8f\x7b\x7b\x12\
\xda\xfe\xd1\x7d\x48\x8f\xfa\x9a\xa8\x72\xa7\xaf\x38\xa5\xa4\xa8\
\x19\x0d\x66\x0b\x63\xd4\x2f\x28\x24\xc2\x28\xf0\x23\xe0\xd7\xc0\
\xa3\xc0\x26\x11\x1e\xfb\xe1\x99\xb2\x1d\xe0\x45\x37\xaa\x92\x08\
\xcb\x44\x58\x66\x84\x33\x45\x78\x81\x2f\x3c\xcf\x08\x8b\x8c\x04\
\x02\xef\x5b\x82\xef\x2b\x50\xa6\x20\x9b\xec\x5a\x18\x40\x42\x73\
\x28\x4b\x82\xe8\x5c\x13\xfd\x90\x94\x90\x10\x93\x20\xb8\x46\x82\
\x4c\xf2\x61\xc7\x46\xf7\xc4\x81\xd3\x2a\x3b\xb5\x62\x71\x44\x7e\
\x2f\x09\x0a\x24\x21\xd9\x24\x2a\xf4\x22\xfd\x31\x67\x9d\xb9\xd8\
\xbf\xb7\xd3\x82\xd7\x71\x02\xe8\xe1\xf2\x93\x09\x56\x61\x09\x30\
\x2d\xd3\x07\x28\x71\x33\x7d\x72\x86\x2d\xe4\x99\x81\x2d\x35\xb5\
\xf9\xd8\x38\x5b\xb4\x66\x8d\x9b\xf9\x4f\xa4\x31\x6c\xdb\x3e\x43\
\x9c\x98\x0c\x91\x26\x50\x50\xd9\x34\xc9\x2a\x7b\x64\x58\x1e\x01\
\xc2\x50\xfb\x76\xa5\xf8\xbe\x0a\x7a\xb5\xfe\xfc\xea\xd3\x65\xaa\
\x28\x5f\x7e\x7c\x96\x54\x81\xcd\xe1\x76\x1b\xf0\xb5\xe7\xff\x4e\
\x39\x46\x38\xdb\x18\x5e\x60\x14\x2f\xd4\x86\xb3\x7d\x85\xa3\x4d\
\xba\x0d\x22\xb5\xce\x80\x25\xc3\x0a\x52\x3d\x4d\xbd\x54\xed\x9c\
\x21\x41\x4d\x84\x28\xbc\x91\x26\xd0\x02\x3a\xd1\x02\x08\xfa\x89\
\x87\xdd\x5d\x2b\xd6\x78\x8b\x7c\x15\xfc\xe4\x2a\xf0\xc3\xbc\x37\
\x59\x2d\xa0\x71\xe8\x97\xaf\x01\xe7\x76\x5a\xfe\x3a\xee\x04\xeb\
\xe1\xf2\x0d\xc0\x53\x92\x04\xd2\x8e\x6f\xd6\xe9\xcd\xb6\x04\x1f\
\x66\x6e\xd6\x43\x72\x46\xbd\xd0\xa1\xed\xcc\x96\x1c\xcc\xc6\x4d\
\x3c\x2a\xb0\xa6\xc7\x09\x9d\xdf\x70\x2b\x67\x9d\x65\x37\x1d\xfa\
\xb4\xb5\x49\x44\x82\xf1\x2a\xb7\x3e\x32\xc5\xe9\x35\xfd\x82\xd2\
\xc2\x3f\xae\xe0\x73\x4a\xf1\x99\xef\x9c\x2a\x63\x73\x95\x77\xcf\
\xbe\x41\x2d\xf2\x85\xb7\xf8\x86\xf7\xf9\x86\x55\x7e\x81\x03\x1c\
\xf5\xdd\xf1\xac\xce\x6c\x55\x93\x0c\x7b\x2c\x74\x8a\x6d\x47\xd8\
\xea\x30\x87\xaf\x50\x5e\x78\x1c\xed\x81\x95\x27\x55\x1e\x76\x1d\
\xd6\x54\xac\x11\x63\x95\x70\xcc\x40\xcd\x48\x35\x1f\x61\xb7\x3e\
\xc7\x7c\xd4\x5f\xdf\x51\xf9\xeb\xe8\xc3\x87\xcb\xcf\xc1\x16\xfe\
\xa6\x6b\xff\xf0\x37\x87\xbb\x59\x20\x67\xd4\xd8\xeb\x19\xbb\x3f\
\xe5\xb0\xc2\x0d\x15\x8f\xc3\xa3\x89\xab\x52\xb5\xbb\x15\xea\xb4\
\x4d\x22\xbb\xc6\x8f\x8f\x83\xf7\x9d\x7c\xbc\xca\xda\x6c\xff\x22\
\x8b\x44\x52\xd2\xfc\xab\xab\x39\xee\xbb\xa7\xc9\xc5\x73\x29\xfc\
\x00\xbf\x78\xaa\xec\xfa\xf5\x39\xf2\x45\x57\xb3\xd6\x75\x78\x97\
\xeb\xb0\x31\x76\xfa\x33\xc3\x3c\x33\xdd\xbe\x93\x3e\x4d\x76\x4c\
\x56\x65\x2a\xc3\xc8\x11\xb6\xf3\x3e\x74\x90\x25\x4a\x8f\xf6\xc0\
\xae\x4d\xae\xa7\xec\x4a\xc0\xaa\x0c\x52\xf7\x08\xee\xad\xe8\x95\
\x7f\xea\xa4\xfc\x45\x9f\xd8\x49\x7c\x30\x75\xa6\xa6\xb1\x57\xc0\
\x32\xb3\xcb\x76\x60\x73\xe6\xf2\x49\x45\x24\x1c\x05\x93\x53\x4c\
\x39\x1a\x1d\x9d\x47\x9b\x1b\xd9\xf6\x79\x02\x4f\x3a\x0a\x14\xfd\
\x3e\x51\xe1\x6e\xad\x19\xca\x76\xae\x0b\x85\x6b\x8b\xeb\x70\xee\
\xb7\x4f\x95\x37\x7d\xeb\x14\x79\xac\x95\x99\xf8\xab\x73\x64\xea\
\xda\xa7\xc9\x88\xab\x59\xe7\x6a\xde\xe4\x6a\xee\xca\x0a\xba\x1d\
\x11\x4b\x35\x10\x66\x5b\x8c\xf3\xd6\x41\xb0\xb5\x6e\x24\xf4\x71\
\xa4\x88\x58\x8a\x26\x77\xeb\xa3\x3d\x9f\x6d\x91\x39\x16\x4f\x07\
\x93\x95\xb2\xa8\xfc\x7a\xe5\x64\x7d\xa9\x73\x41\x2b\xf3\xa6\x11\
\x3a\x46\x00\x3d\x5c\x3e\x03\xbb\x7f\xc8\x74\x6b\x7f\xcd\xa3\x2c\
\x92\x27\xc7\x76\x7a\x46\xd0\xed\x38\xbd\xfd\xfb\xde\x31\x0e\xd7\
\x2a\xbf\x66\x8f\x46\x75\xd9\xf7\x89\xe7\xf5\x0c\x1f\x1f\xf9\x01\
\x0a\xc6\x9f\x30\xac\xcb\x1b\x55\xe6\x6a\x6e\x73\x35\x4f\xfe\xe6\
\xc9\x72\x7d\x3b\xf3\xf4\x37\x4f\x13\xef\xba\xa7\xcb\xbf\xba\x9a\
\x93\x5d\xcd\x1b\x5d\xcd\xee\x54\x37\x10\x55\x3b\xfb\x45\xb4\x15\
\xc2\x36\x37\x33\x6d\x2f\x89\x16\x48\x2e\x1f\xdd\xe1\x3c\x11\xcf\
\x81\x14\xee\xe3\xfb\x67\xb5\x80\x02\xfa\xe5\x4b\xed\xcc\xa3\x2c\
\x3a\xa9\x01\xfe\x26\x75\x96\xad\xdd\x1b\x6d\x8b\x64\xa3\x52\x38\
\x29\xe1\xcf\x73\x5a\x2d\x22\x68\x05\xa3\xe3\xac\xb1\xd3\x94\xce\
\x37\x81\x52\xd1\x9b\x1c\xb2\x54\x0d\x0f\x6b\x4d\xbf\x2d\xfc\x61\
\xcd\x7a\x8d\xa3\x39\xf7\xdf\x4e\x92\x47\x3b\x95\xb1\xd7\x3d\x5d\
\xcc\xfa\x67\xca\x37\x1c\xcd\x69\x8e\xe6\xb7\x71\x9e\x64\xfa\x44\
\xd9\x64\xc8\x35\x85\xf2\xca\x24\x36\x89\x32\xfd\x87\xc2\x78\xf0\
\xbe\xad\xce\x71\x40\xc5\xee\x16\xae\xec\x7b\x93\xb9\x67\x8f\x1c\
\xa5\x3f\xee\x3c\xbd\x53\x79\xd5\x11\x02\xe8\xe1\xf2\x31\xc0\x2b\
\x6b\x32\x23\xca\xd8\x28\xad\x66\x1f\xdb\x9b\x3b\x59\x66\xce\xce\
\xce\xc1\xe3\xd4\xa9\xfd\xc3\x02\xd9\xe6\x99\x60\x2a\xf3\xf8\x3f\
\x99\x88\x8d\x2d\xec\x0e\x69\xb3\xc8\x26\xc6\x84\xa1\x9a\x9d\x4a\
\xc5\xd5\x7c\xc3\xd1\xbc\xec\xff\x9e\x28\xf3\xa2\xeb\xef\x86\x67\
\xca\x23\xae\xe6\x59\xae\xe6\x63\xae\xc6\xaf\x69\xd8\xcb\xf8\x4b\
\xe9\xfc\xce\x86\x9f\x85\x3c\xb3\x28\xee\x2c\x17\xfe\x26\x06\x77\
\x72\x4a\x3d\x62\xaf\x78\x13\x0f\x0d\xad\x29\xeb\x60\x53\x65\xb9\
\xb8\x53\x79\xd4\x29\x0d\xf0\xbe\xd4\xb3\x8b\x6c\xfc\x22\xb3\xa8\
\x57\xee\x44\xd1\x97\x6d\x95\xad\x99\x8b\xdf\x36\x6d\x34\x4c\x55\
\x98\xc8\xab\xcd\xe3\xf0\x26\x39\x0d\x49\xd6\x6f\xd6\xb1\x3f\xa6\
\x38\x3a\x23\x40\x37\xba\x9a\x77\xfc\xeb\x09\x62\x9a\xc9\x80\x76\
\x61\xc3\x33\xc5\xbf\xe9\xd9\x72\x89\x13\x10\xe1\x91\x54\x0b\x79\
\x4e\xc8\xb8\x10\xb9\x66\xa8\x65\x02\x25\xad\xc3\xec\xdb\xa6\x45\
\x41\xba\x21\xd0\x29\xbe\xa7\xf4\xca\xd3\xf5\x27\x9d\x01\x3a\x80\
\xb6\x13\x20\x5c\x88\xfa\x8d\xd9\x4c\xa8\x11\x72\x28\x26\xc4\x90\
\xa8\x9a\xd9\xd7\x72\x7c\x81\x2c\x11\xc6\x27\x03\x7f\xac\x66\xde\
\x4e\xcb\xce\xcf\xd3\x08\xca\x22\x8d\x52\x20\xc2\xa3\x4a\x33\x60\
\xd5\xa2\xdb\x5c\xcd\x85\x5f\x3f\x5e\x26\x3b\x51\x88\xcd\xe0\xe6\
\x67\xcb\x6f\x1d\xcd\x69\x8e\xe2\xc7\x79\xc1\x82\x78\x4e\xa3\x54\
\xbe\x17\x68\x81\x4c\x54\x48\x32\xe5\x36\xb1\xcb\x39\xd6\xc0\x58\
\x6a\x0d\x34\xbb\x5d\x2d\x7b\x0f\x87\xb2\x88\x7c\xa8\x13\xf9\xd2\
\x09\x0d\x70\x21\xf6\xec\x0e\x8d\x22\x3d\x79\x84\x18\x92\x75\x79\
\xb5\x7c\x8d\x19\x93\xf1\x05\xf6\x8d\xd3\x1f\xcf\xcb\x9f\x43\x02\
\x7b\xed\x5e\xbb\x06\x4b\x45\x35\x14\x4c\x19\x76\x59\xad\xcc\x9e\
\xa3\x79\xd5\x55\xc7\xb5\x36\xd2\x33\x17\xb8\xe5\x39\xb2\xdb\xd1\
\x5c\xe8\x68\x7e\x92\x27\xfc\x11\x29\x0a\x91\xad\xac\x32\x4e\xb1\
\x2d\x4d\x53\x13\xea\x51\x20\x59\x06\x56\xd7\xbf\xa7\xea\xa1\x23\
\xe3\x05\x3a\x41\x80\xd7\xa7\x3e\x3e\xde\x4b\x1d\xa1\x17\x3b\xa3\
\xef\xc5\x65\x99\x5d\xeb\xe7\xf5\xbf\xc9\x23\xc7\x64\x95\xa1\x78\
\xde\x1e\xbb\x86\x0f\x9f\x13\xd7\xf4\x58\xf7\x23\x63\x16\x01\xfb\
\x34\xcb\x2d\xd3\xe1\x8b\x57\x1d\x27\xbf\xe9\x40\x3e\xce\x08\xb7\
\x3d\x57\xa6\xb4\xe2\x42\x47\xf1\xcb\x94\xe0\x5b\x9a\x33\xa5\x05\
\x6c\xe4\x69\x69\xdb\x07\x50\x89\x19\x34\xbe\x43\xbb\xf6\x62\x20\
\x35\x95\x58\x16\x3d\xb2\x5a\x7f\xcc\x79\x46\xbb\xf3\xa3\xad\x04\
\x08\x3b\xbd\xa5\x87\xc6\x65\x6b\xfc\xbc\x34\xfb\xb7\x7e\x79\xbc\
\x9e\x79\xa2\x73\xf6\x11\x09\xaa\x1e\xe5\x6c\x64\xc7\xae\xe5\xb3\
\xda\xc1\x76\xe4\x2c\xbf\x6f\xbb\x51\xac\x0e\x9f\xb9\xd7\xd1\x7c\
\xba\xcd\x65\x36\x6b\xdc\xfe\x3c\x99\xd0\x9a\xf3\xb5\xe2\xb7\xd9\
\x76\x92\x28\xec\x9b\x94\x4f\x9d\x88\x90\xb5\x65\xcd\xa0\xa9\xbd\
\xce\x91\x61\x77\xa5\x9a\x4e\x79\x85\xf7\xe9\x91\x4b\xda\x9d\x17\
\xed\xd6\x00\xaf\x06\x6b\x71\xea\x22\xe1\x4f\x09\x7d\x26\xfa\x30\
\x24\x83\xcd\x98\x3f\x39\x03\x43\xa6\xa2\x85\xe5\xac\x58\x7e\xaa\
\xc3\x98\x35\x87\x4f\x7a\x08\x82\x55\xfb\x8b\xb0\xdd\x32\x1b\x3e\
\xff\xcf\xc7\xca\x7e\x33\x0d\xa0\x8d\x3b\x9e\x2f\x63\x8e\xe6\xc5\
\x8e\x66\x83\xbd\x8e\x59\xdc\x72\x5b\x84\x54\x59\x59\x21\x51\xbb\
\x96\x20\x88\x06\xf9\x3e\x4f\xd8\xda\xb5\x68\xc1\xbf\xf8\xbf\x7d\
\x9c\xa3\xbf\xe5\xb4\xb7\x52\x6e\xe7\xc3\x28\x32\x7f\xb2\xb5\x4c\
\xf6\xf7\x24\x93\xa6\x18\x94\x13\x54\x81\xa0\x2b\x55\x87\x08\xc2\
\xbe\xa8\x80\x6d\x73\x27\x55\xf3\x43\x6d\xd7\x65\x15\xf9\x69\xc1\
\xf5\xa2\xa8\x84\x51\xa5\xed\x5a\xf3\xc5\x36\xe7\xdf\x9c\xe2\xae\
\x17\xc8\xa8\x56\xbc\xd0\x51\xdc\x96\x0a\x24\xd4\xeb\x22\x91\x35\
\x49\x93\xb2\x49\xd7\x1a\x40\x65\x52\xed\xb2\x7f\x56\xa9\x7b\x64\
\x13\x01\x2d\x3d\xdc\xc5\xcb\xda\x99\x07\x6d\x23\x40\xb8\x86\xd7\
\x9f\xa4\x12\xeb\x99\x3d\x50\x1b\x75\x70\xb8\x4b\x29\xfa\x75\x1d\
\x61\x57\x05\x64\x10\x61\x22\x63\xca\xa4\x56\x6b\x0c\xfb\xf6\xc4\
\xe9\x90\x7f\x5f\x24\x8c\xa7\x2b\x2e\xbf\xf2\x68\x19\x6d\x67\x61\
\xb5\x02\xf7\xbe\x48\x76\x6b\xc5\x6b\xb4\x62\x22\xdb\x78\x58\x83\
\x7a\x1a\xdb\x8e\x06\x85\x52\x35\xb5\x5b\x97\xa2\x08\x50\x61\xed\
\x6f\xdf\x43\x81\x94\xe5\x0d\xed\xfc\xfe\x76\x6a\x80\x97\xd4\x7c\
\x74\xf6\x58\xd5\x39\x06\x18\x90\xdd\x79\x43\x0d\x8b\x04\xdf\x4e\
\x33\x86\xa9\xec\x6d\x53\xe7\x19\x5b\xdf\xb6\xfd\x6d\x3b\xd6\xd7\
\xe8\xd0\xec\xfa\x5e\x1b\xf3\xae\xa5\x78\xe0\x25\x72\xb7\xa3\xf8\
\xdb\xac\x59\x59\x88\x3c\x73\x35\xe7\xbc\xb2\xd7\x39\x52\xac\xe1\
\x00\x75\x11\xfe\x47\x95\x39\xa7\x9d\xdf\xde\x4e\x02\x3c\x2b\xfb\
\xb1\x35\xe6\x4f\x5e\xa4\xc0\x4e\x5b\x20\x4b\xf3\xc2\x94\x76\xbe\
\xe7\x91\x41\x01\xc6\xe0\xa5\x1e\x6b\x39\x66\xaa\x40\xd8\xf3\xca\
\xd6\x87\xb2\xa3\x79\xf0\x1f\xd7\xca\x1d\x6d\xcc\xbb\x96\x43\x2b\
\x2e\xd7\x8a\x5f\x66\x35\x6a\xae\x19\x64\x67\x08\x92\x2e\x47\x92\
\xff\x88\xc1\x15\xc3\x58\x61\x44\x29\x2f\xad\x24\xcb\xf5\xc7\x9c\
\xc3\xda\xf6\xdd\x6d\x79\xc8\x70\x59\x01\xe9\x10\x97\x6a\x70\x9c\
\x6d\x7a\x07\x28\xcb\x52\x9d\x15\xfe\x1c\x61\xcf\x33\x75\xc4\xe0\
\xa3\xc8\xaf\xfd\x49\x54\x74\xd6\xfe\xb7\x1d\x38\x05\xe0\x30\xa8\
\x15\xd7\xb4\x23\xdf\xda\x89\x07\xcf\x13\x51\x8a\xb7\x68\xcd\xde\
\x94\xc9\x97\x45\x9c\xc1\x75\xa2\x43\x56\xe0\xc2\x37\xec\x4d\xfd\
\xb7\xf0\x7e\xf1\xa6\x44\xcb\x3b\xda\xf5\xdd\xed\xd2\x00\xa7\x00\
\xc9\x9c\x30\xcd\x9a\x3c\xd9\x6b\x1d\x96\xd4\x08\x3f\x75\xc8\x60\
\x6d\xae\x6b\xad\x2b\x40\x50\x7e\x92\x7d\x94\xa2\xf0\x15\x23\x33\
\xc9\x77\x58\xa2\x15\xff\xd9\xa6\x7c\x6b\x2b\x1e\xbd\x40\x1e\x51\
\xf0\x97\x71\x97\x0f\xd5\xe0\x0f\x79\x99\x94\x51\x9d\xbe\xa7\xc6\
\x52\xd3\x31\x36\x82\x02\x55\xe6\xbc\x76\x7d\x73\xbb\x08\xf0\xac\
\xbc\x0f\x8d\x73\xa4\x19\x7f\x00\x76\x2b\x45\x29\xbb\xd2\x4a\x76\
\xa5\xc5\x3c\x33\x48\x01\x25\x97\x52\x2a\x0a\x91\xfd\x5f\xe6\x55\
\xb2\xa3\x02\x11\x50\xc2\x14\x81\x13\xde\xd6\x6e\xce\xed\xc4\xe3\
\x17\xca\x55\x5a\xf1\x43\x3b\x24\x5c\x17\x99\xcc\xcf\xb6\x07\x78\
\x53\xaa\x2a\xcd\xdc\xc7\xbe\x57\x99\x93\xda\xf5\xbd\xed\x27\x40\
\x23\xd3\xa7\xe8\x58\xb3\x13\x5b\xf8\xf3\x8e\xad\x42\xcb\xda\xf4\
\x5a\xd3\x1b\xcd\xbc\x90\x07\x51\xf9\xee\x48\xea\x35\x82\xf1\xae\
\xbb\xfe\xe1\xc8\xf9\xdb\xe7\x67\x2e\xa0\x14\x1f\x4d\xcd\x63\xd4\
\xc8\x86\xaf\x23\xe0\xfe\x84\x8a\xd7\x2b\x96\x7a\xf7\xb0\xd3\x5c\
\xe9\xd1\x97\xb4\xa7\x8b\x74\xcb\x09\x30\x6d\xfb\x3f\xe5\x58\x59\
\xe9\x9a\x51\x5b\xa0\xb1\x8e\x6b\x2c\x29\xcb\x76\x8f\xd2\x1c\x87\
\xfe\x78\x40\x78\x34\x2e\xd5\x3a\x4c\x29\x1b\x6b\x0c\x78\x64\xfa\
\x84\xd7\x94\x1d\xc5\x03\xad\xce\xb3\x4e\x63\xdb\xcb\xe5\x56\xa5\
\xf8\x69\x4d\x0b\x6e\x9e\x23\x0c\xa4\x34\x79\xd6\x04\x9a\x54\x83\
\xb9\xc2\x9f\xbd\x4f\x86\x0c\xa2\xa5\x2d\xdd\x22\xda\xa1\x01\x8e\
\xc4\xb6\xff\xed\x8f\x2e\xca\x80\x7c\x0d\x30\x51\xb3\xbe\x6e\x36\
\x92\x63\xa5\x65\x23\x3c\x8e\x62\x30\x9a\xb1\xd8\x9e\x7d\x39\xf6\
\xd7\xac\x59\x99\xc3\x1e\x9f\xc9\x2b\x58\xc7\xae\xcf\xfd\x6d\xc8\
\xb3\x8e\x43\xc1\x65\x0d\x5b\x6f\x6b\xf6\xb5\x62\x2e\xbe\xea\x81\
\x64\x96\xbb\x46\x0f\x8d\x37\xd7\x9a\x1f\xb6\x85\x68\x07\x01\xd6\
\xa5\xce\xa6\x2b\xf8\x11\x5c\xa9\x40\xad\x90\xe7\x99\x3d\xf6\xb1\
\x4a\xee\xe5\x3a\x3a\x11\x7a\x92\x5d\x34\xfd\x65\xfc\x3f\xb1\x2a\
\xb4\x6c\x99\xb9\xc1\x34\x25\x07\x3c\x76\xbe\x4a\x7e\xa1\xe0\xa6\
\x5c\xf9\xcf\xab\xf9\xa3\x7d\x46\x03\x88\xa7\xfa\x1a\x6a\x80\xbc\
\x47\x38\x39\x2b\x67\xb6\x00\xed\x27\x00\x14\xc6\x8d\x0b\x8f\x83\
\x1a\xc1\xa4\x2a\x1b\x55\x7b\xcb\xf8\xd6\x19\x13\x88\x30\xad\xaf\
\x14\xf4\x07\x12\x49\x16\x74\x8b\x85\x5f\x12\xe1\xb7\x97\xd9\x85\
\x74\xe1\x95\x7c\x0e\x68\xfb\xdf\x86\x82\xcf\x34\x6c\xc1\x2d\x44\
\x90\x6b\x62\xe8\xaf\xe7\x7b\x15\xc2\xad\xb3\x66\xf2\x1c\xa2\xbd\
\x04\x68\x2a\x33\x25\x9f\x04\x6e\xdc\x5b\x21\x4e\xce\x8b\x9a\xa2\
\x32\x69\x96\x99\x54\x72\xd8\x15\x4f\x3c\xab\x92\xa9\x6f\x22\x22\
\x98\x3c\x89\xcf\x9a\xbd\x8a\xa5\x6d\xc8\xb3\x79\x01\xa5\xf8\x2e\
\xcc\xde\xe7\x11\xf0\x4d\x11\x01\xf2\xa2\x0d\x81\x23\xbc\x50\xff\
\x95\xd3\xf2\x89\xdb\xda\x41\x80\x63\x0b\x3f\x38\x37\xd4\x52\xf0\
\x5b\x49\x7a\xf2\xae\xcf\x8b\xe3\x17\xdc\x66\x4b\xa5\xc2\x03\x71\
\xad\x9f\xb5\x49\xc3\x63\x63\x4d\xde\x94\x55\xdd\xe1\xc4\x69\xc7\
\xb4\x21\xcf\xe6\x05\xf6\xbe\x5a\x7c\xe0\xab\x4d\x5d\x5c\x27\x52\
\x64\x0c\xde\xb4\xe7\x5f\x53\x68\x16\x64\xfa\x8e\xb5\x00\x9d\x33\
\x81\xf2\x8e\xeb\xff\x56\x3f\x9a\xdc\x38\xd6\xfc\x13\x57\x71\x67\
\xb4\xec\x68\xbc\x2c\x96\x35\xff\x7e\x54\x48\xf6\x64\x69\x90\x36\
\x87\x94\x70\x54\x1b\xf2\x6c\xde\x40\xc1\x7f\x4f\xe7\xe2\xd4\x3e\
\x84\x08\xaa\x29\x07\x38\x73\x2e\x8e\xb4\x3c\x14\xda\x52\x02\xe8\
\xe1\x72\x0f\xb0\xa6\xe1\x07\x37\xd3\xe2\x68\xa8\xce\xf2\x75\x7e\
\xac\xe1\x86\x68\x8d\x2d\xf1\x03\x3f\xc0\xd8\x1a\x81\x20\x2d\xb5\
\x38\x75\xa4\x0d\x92\x1c\x5b\xf1\x67\x9b\x83\xc8\xc6\xc1\x80\xc9\
\xd7\xcb\x1d\xc0\xe3\x85\x17\x34\xd1\xc8\x25\x50\x32\xcd\x68\x80\
\xec\xbd\x1c\x4e\x6b\xf5\xf7\xb5\x5a\x03\xac\x49\x3d\xa3\xd9\xda\
\x3e\x2f\x53\x7d\x55\xbf\x57\x61\xfd\x0c\x36\xc0\xcf\xdc\x7e\xbe\
\x05\x18\x13\x69\x00\xa9\xd5\x00\xf6\xc2\x14\xf1\x9a\x11\xa4\xae\
\xeb\x5b\x36\x41\xc7\xe6\xb1\xe9\x10\x8a\x97\x39\x95\xfa\x69\xca\
\x41\xc4\x34\x43\x93\xe8\x0f\xc4\xe5\xaf\x35\x8b\x5a\xfd\x61\xad\
\x26\xc0\xc2\x19\xff\x33\xab\x4e\x0d\xa6\x76\x45\xf4\x8c\xa9\x92\
\x09\x6f\x5a\xc7\xbf\xfb\xf7\x93\x64\xe7\xad\x2f\x96\xb1\x1e\xcd\
\x63\xc6\x12\x70\x3f\x53\xdb\xdb\xab\xb0\x8b\xed\x0f\x90\x90\x43\
\xc3\xf3\x5a\x9c\x6f\xf3\x0d\xff\xaf\xa9\xab\xf2\xc8\xe0\xca\xa4\
\x3f\xc3\xf9\x97\x45\xb1\xa0\xd5\x1f\xd6\x6a\x02\x0c\xd6\xa4\x64\
\x1b\x4c\x9a\x35\x85\x24\x69\xc4\x8d\xf6\xb9\x7d\xac\xb2\x24\x08\
\x84\xf7\xc7\xd1\x69\x8f\xe6\xfa\x68\x59\x21\xdf\x32\x77\xa2\xa5\
\x45\xe3\x49\x91\x8d\xe5\x27\x90\x08\x7f\x78\x7e\x76\x8b\xf3\x6d\
\xbe\xa1\x31\x01\x8a\x34\x81\x2b\x93\x33\x9a\x29\x29\x68\xfd\x6f\
\xf9\x5c\x41\xad\x26\x40\x3e\x83\x0b\x05\x5d\x8a\x7f\xf3\x93\x96\
\x83\xbc\xb8\x72\x8a\x18\x56\x14\x27\x44\xec\xc8\x95\x1c\xfe\x23\
\xbb\xc8\x74\xe4\x0b\x18\x13\xae\xb8\x9e\xf1\x0d\x22\x13\x28\x7a\
\xae\x81\x73\x2e\x7d\x50\x1d\x34\xe1\x50\xf3\x26\xd9\x04\xdc\x35\
\xbd\x7f\x85\x05\xe9\x48\x75\xc6\x33\x85\xa9\xfd\x9f\x00\x83\xb3\
\xfa\xb7\x4d\x06\x2f\x69\x52\xcf\x9a\x24\x11\xf2\x22\x37\x02\x15\
\x84\x1b\xa3\x74\xe7\x54\xae\x51\x8a\x8a\x6f\x92\x45\xa7\x3d\xdb\
\x27\xa0\x76\x51\x6a\xdb\x37\x30\x80\x2f\xf4\x88\xe2\xbd\x2d\xce\
\xbb\xf9\x86\x5f\xd6\xa4\x14\x75\x73\xb6\xcf\x5d\x66\x33\x4d\x64\
\x5f\xab\x3f\xaa\x33\x1a\x20\x8b\x66\xa2\x40\x3e\x8b\x25\xdf\xbc\
\x49\x47\x6b\x24\x63\x1e\x09\x5b\xbf\x73\x5a\xa2\x84\x6f\x3a\x46\
\xcc\x80\xc3\xef\xa2\x55\xd6\xe3\x95\x16\x4d\x5a\xe8\x63\x0d\x21\
\x89\x9f\x10\x87\x47\x83\xf3\x57\xb4\x38\xef\xe6\x1b\x36\xc6\x47\
\x71\x06\x67\xe3\x9d\xaa\x86\x14\xe2\xc8\xf8\x8c\x9f\xa8\xe9\x6d\
\xf5\x47\xb5\x8f\x00\xcd\xc7\x01\xf2\xaf\x15\x0e\x8b\xe5\x3c\x53\
\x2b\x5b\xb5\x7d\x9e\x09\xb4\x25\x7b\xab\x3e\x97\xcf\x65\x85\x3f\
\xde\x32\x24\xb0\x35\x4e\x64\x22\x19\xc0\x83\x75\x7f\xff\xb0\x3a\
\xb9\xc5\xf9\x37\x9f\xb0\x95\xbc\x8e\xfd\x8d\x06\xbb\x38\x32\x73\
\x19\x53\xb4\x3c\xdc\xdc\x39\x13\x68\x3a\x84\x08\xd0\x4f\x95\xc7\
\x73\x43\x95\x39\xad\xbb\x16\x21\xb6\x66\x6f\x74\xf3\x85\xf2\x83\
\xb2\x62\x57\x8a\x04\x96\x63\xec\xdb\x35\xbf\x09\x4c\x24\xdf\xa4\
\xcd\x23\x09\xd2\x3e\xd0\xe2\xfc\x9b\x4f\xd8\x56\x93\x12\x0b\xbf\
\x4a\xce\xed\x0d\xc0\x91\x99\xaf\x0e\xaf\x29\xb5\xfa\xa3\x5a\x4d\
\x80\x39\xbd\xbf\x54\xd4\xd6\x6c\x8c\xde\xee\xae\x10\xc7\xed\xd3\
\x84\xd8\x92\x77\xaf\x7e\x97\x6b\x7c\x3f\x10\x6e\xcf\x80\xef\x67\
\x34\x40\xb8\xd9\x1a\x21\x36\x85\xc2\xb4\xaa\xf0\xaa\x4b\x1f\x53\
\x6d\xe9\xb4\x35\x0f\x90\xae\x48\x24\x7f\xaf\x6c\x33\x28\x48\x9e\
\x39\x01\x64\x06\xd5\xe4\x34\xd1\x6a\x02\xcc\xed\x3c\xf9\x53\x8c\
\xd6\x08\x39\xb5\x2d\xb7\xd2\x04\x01\x7a\x1d\x2e\x35\x82\xf1\xfd\
\x50\xf8\x25\xbd\xa8\x5c\x6a\x35\xf6\x2c\x31\x12\xff\xa0\xd7\xaf\
\xf2\x95\x16\xe7\xe1\x7c\x41\x8d\x26\xad\xeb\xfc\x0a\xa8\x1e\x53\
\x61\x36\x2b\x91\x4a\xeb\x7b\xde\xb6\x9a\x00\xc5\x13\x47\xcd\xa4\
\x71\x64\x42\x95\xa5\x58\xd0\x53\xda\xd7\x8a\xe1\x6f\xcd\xbb\xd5\
\xef\x5e\x26\x0f\x0c\xb9\xdc\xe0\x19\x4b\x0b\xd8\xab\x2c\x66\xcc\
\x23\xe3\x5b\x1a\xc0\x04\x0b\x23\x86\x5a\xe0\xfc\x8f\x3f\xa6\x4e\
\x6d\x71\x3e\xce\x07\x3c\x41\xdc\x1a\x43\x61\xc8\x2d\x55\x00\xfd\
\x66\xcb\xb4\x9e\x90\x85\x61\xe6\x0e\x74\x93\x68\x9f\x06\x98\x8e\
\xc0\x17\x5d\x5b\xe5\x18\xbb\xe6\x8f\x1d\x55\x32\xe7\xe9\x96\xdd\
\xc2\x42\x18\xe8\xe1\xcf\x7c\x83\xf1\xc2\xa5\x43\xe3\xe5\x45\x25\
\x38\xb7\x1d\xe3\xd8\x3f\xb0\xb4\x41\x48\x08\xc7\xab\xf2\xbf\x5a\
\x9c\x8f\x1d\x87\x79\xb3\x78\xc0\x0e\x20\x89\x00\xa5\xc2\x6d\xf6\
\x50\xbb\xd0\x72\xe9\x9d\xc5\x1a\xbc\x01\x89\x5a\xbe\xd2\x4e\xab\
\xfb\x5b\x37\x37\x75\xa0\xd0\xa8\x15\x38\xfa\x7d\xa9\x4c\xf1\xa0\
\x71\x58\x9b\xd7\x7d\xc1\x3e\xb7\xd2\xf6\x16\xdd\x76\xc3\x05\x72\
\xfb\x29\xdf\x51\xd7\xef\xf3\x38\xd7\x73\xc2\x75\x74\x0d\x68\x3f\
\x98\xfb\xdf\x93\x60\xdc\x8e\xf6\x33\x33\x49\xeb\x80\x00\x4a\x05\
\x84\x98\x82\xa7\x7d\xec\x21\xf5\x55\xad\xb9\xc7\xd1\x3c\x52\x52\
\x3c\xec\x68\x36\xbe\x77\xe5\xfe\x39\x71\x6e\x1d\x8c\x02\x4b\x8b\
\x05\x9f\x34\x29\xb4\x2c\x9c\xd5\xd3\x0c\x7b\x5a\xfd\x41\xf3\x83\
\x00\x45\xc8\x21\x86\x8c\xab\x4d\xa6\x4f\xd6\xda\x82\x6e\xa4\xb0\
\xf6\xc7\x08\xcb\xeb\x3d\x62\xc0\xe5\x5d\x7b\x2a\xfc\xc1\xf3\xd1\
\x9e\x4a\xa6\x08\xaf\xfa\x81\x7a\x74\x54\x30\xc7\xb7\x36\x09\x01\
\x54\xb8\x30\x74\xdc\x69\x4b\xc1\x94\xe1\x5d\xd1\xf5\x55\x15\x64\
\xec\xa7\xb6\xa8\xaa\x56\xec\xd5\x9a\xdd\x8e\xc3\x4e\xa5\x78\xc2\
\x51\x6c\x55\x8a\x2d\xc0\xe6\xc7\x47\xd9\xf9\xf8\x5e\x1e\xfd\xdf\
\x27\xca\xb5\x2d\x2e\x87\xb9\x42\x92\x97\x29\x12\x04\x9b\xb2\xc9\
\xa0\xa8\x08\x0c\x4d\xfb\x09\xd6\x7d\x95\xb0\xab\xd5\x1f\xd4\x6a\
\x02\xe4\xab\xb0\xc2\x1a\x3f\x1c\x8d\x5e\x4f\x1b\x4c\x28\x57\x44\
\x0a\x05\x3f\x87\x14\x2b\xeb\xbd\xe0\xfa\x97\xc9\x1d\xa7\x7e\x47\
\xfd\x72\xd4\xe7\x39\x55\x03\x8e\x1f\xce\x90\x6c\xc2\xd9\x92\x8d\
\x35\xf7\x90\x3d\xfd\x8a\x06\x65\x82\x74\x4f\xa5\xbe\x20\xf8\x04\
\x01\xd1\x94\xc4\x61\x89\x18\x96\x18\x38\x5a\xab\x60\xb1\xf5\x1d\
\x13\xf0\xdd\xdb\xe1\xbe\x27\x82\xff\xfc\xfc\x3e\x35\x75\xc6\x61\
\xfc\xdd\xf7\xcf\x94\xcf\xb4\xb8\x3c\x66\x0c\x7d\x95\x5e\x80\x30\
\x90\x32\x71\xa4\x78\x53\x0b\xfc\xc7\x04\xd6\x4e\xeb\x21\x19\x83\
\xc9\xf8\xa1\xc9\xd5\xca\xef\x6a\xf1\xfd\x6b\xcd\x8f\x6c\x0b\x62\
\xd6\x99\x2a\x42\xf4\xbf\x2a\x27\x19\xc3\x94\x2d\xf8\xd9\xd6\xdb\
\x0c\x29\xea\x12\x00\x60\x41\x99\x8b\xb4\x30\xee\xf9\x81\x09\x54\
\xf5\x83\xcd\x0b\x8f\x53\xfe\x80\x49\xc2\xa6\xb6\xb3\xec\x49\x8e\
\xaf\x60\x02\xe7\xd9\xee\x67\x34\x56\x81\x2b\xd7\x27\xc2\x0f\xb0\
\x75\x1f\x3d\xbf\x7d\x88\xcb\x5e\x7a\xa3\xfa\x66\x8b\xcb\x63\x36\
\x48\xf2\xd1\x6e\x09\x8e\x08\x61\xdb\xff\x02\x6a\xd0\xef\x99\x96\
\x03\x90\x17\x51\x92\x9c\xb6\x87\x39\x46\xab\x09\xf0\xc8\x8c\xff\
\x59\xdc\xc2\x38\x64\xc6\xd4\xad\x59\x41\xf7\x33\xfe\x40\xb3\x1a\
\x00\xe0\xba\x0b\x64\xe7\x92\x5e\x3e\x54\xf1\x12\x67\x38\x22\x41\
\x35\x24\x85\x67\x6d\xbe\x4d\x0a\x6b\xef\xe5\x91\x25\xbc\x5f\xe4\
\x34\x3f\xb4\x03\x76\x5a\xb1\x0d\x57\x43\x5f\x29\x58\x65\xfe\xce\
\x6d\x5c\xf4\xfa\x3f\xa8\x7f\x6c\x71\x99\xcc\x14\x2b\x6b\xca\xa5\
\x68\x53\x88\x51\xac\x98\xf5\x13\xfd\xfc\x08\xde\x5c\xa2\xa5\x04\
\x30\x23\x95\x31\x60\x53\x9c\xd0\x20\x6e\x5c\x78\x5d\x16\x7b\x95\
\xd8\x8d\x53\x51\x78\xb2\x80\x04\x0d\x09\x00\xf0\xbb\x97\xc9\x97\
\x17\x94\xb8\xbd\x62\xa0\xe2\x25\xc2\x5e\x0d\x85\xb8\x12\xb5\x17\
\x58\x84\xf0\xa3\xeb\x32\x42\xef\x9b\x74\x9b\x42\xa4\x2d\x8c\x0f\
\x8f\x59\x3a\x51\x2b\xe8\x2d\x05\x5b\x5f\x09\xca\x2e\xdc\xb7\x83\
\xe1\xb7\xdf\xa9\x3e\xd9\xca\x72\x99\x11\x84\x95\xb5\xb5\x3d\x71\
\xc8\x53\x99\x64\x96\x01\x7d\x88\xff\x08\xf6\x4a\x40\xf5\xef\x5b\
\x7b\x1e\xf9\x00\x46\xb5\x7c\x0a\x9a\x76\x8c\x09\xbe\xb7\xe1\x87\
\x37\x12\xf8\xac\x36\xf0\x38\xdd\xf8\x8c\xa6\x3a\xaf\xe5\x74\x62\
\x0b\xd3\x9b\x22\x00\xc0\x21\x3d\x5c\x88\xa1\x52\x0d\x05\xbe\x12\
\x0a\x7b\xc5\x58\xa6\x51\x28\xd0\x55\xcb\x5c\x8a\x6a\xf9\x1a\x32\
\x64\x8e\x23\x42\x40\x3c\x4d\x0b\x7d\xa5\x84\x04\x3d\x6e\xb0\x7e\
\xef\xc3\xa3\xfc\xed\x9f\xdf\xa7\xfe\xba\x0d\x65\x33\x1d\xac\x4c\
\x47\x79\x72\xcc\x9f\x88\x04\x03\x7e\x7f\xea\x9f\x33\x09\x86\x06\
\xf7\x6d\xf9\xca\x9b\xed\x20\xc0\x7d\xb5\x1f\x57\x70\xdc\xe8\xb7\
\x04\xbd\x66\x54\xdd\x56\x23\xfc\x36\x09\x92\x98\x7d\xd3\x04\xb8\
\xee\x02\x79\xe0\xd0\x7e\xfe\xba\xea\x23\xb1\xf9\xe3\x25\xa6\x50\
\xc5\x80\xe7\x25\x24\xf0\x2c\x13\xc7\x0b\xcd\xa7\xd8\x27\xc8\x98\
\x40\xd1\xb5\x67\xae\x82\x5e\x37\xdc\x22\xe1\x77\x83\xda\xbf\xec\
\x42\xb9\x14\x38\xe1\x5b\xc7\xf9\xec\x7b\x1f\x50\x6f\x6d\x43\xf9\
\x34\x8b\x95\xb9\xe6\x8e\x5d\xfb\x0b\x28\x97\x29\x91\x39\x9a\x3a\
\xa6\xca\xc3\xad\xfe\xa8\xf6\x6b\x80\x99\x0a\x7f\x56\x0b\xec\x53\
\x43\xb6\xe9\x13\x37\x58\x65\xba\x32\x18\x61\xc1\xb9\xd7\xaa\xa6\
\x07\x56\xac\x7f\xa9\x7c\x65\x69\x0f\xdf\xa9\x78\x89\x16\xa8\x58\
\x64\x98\xca\x0a\x7d\x78\x1e\x6b\x02\x2f\xed\x0b\xc4\xa6\x52\x98\
\xe6\x2a\x78\xd6\xda\xda\x9a\x3f\xbb\x69\x8d\xda\x51\xe5\x5f\xde\
\xff\x80\x7a\x69\x1b\xca\xa8\x31\x84\x95\x85\x31\x7f\xab\xe9\x5d\
\x2d\xf2\x1e\x95\xda\x59\x44\x9b\x7d\x46\xb2\x79\x54\xcc\x65\xfe\
\xbd\x33\xb9\xcd\x74\xd0\x5e\x02\x34\x8c\xf2\x40\xac\x5a\xf3\xfe\
\x63\xab\x5e\xc3\xa9\x66\x42\xfd\xd1\xcf\xd6\xfa\x99\x48\x50\x48\
\x8c\x33\xa7\xf3\xc2\x37\xbf\x42\x2e\x1a\x74\xb9\xaf\x12\xd6\xfe\
\x15\x0f\xa6\x42\x0d\x50\x8d\x8e\xbd\x8c\xb3\x9c\x71\x9c\x23\x52\
\x44\x5a\x20\xf2\x1f\x3c\x03\xa7\x2e\x87\x67\x1d\x09\xfd\xe5\x7c\
\xe1\x2f\x3b\x50\x72\xc0\xd1\xa8\x55\x2e\xdf\xbb\x76\x8b\xfa\xf3\
\x36\x94\x53\x7d\x08\x67\xa6\x22\x3d\x86\xc0\xe4\x89\xfb\x9c\x84\
\xe5\xd2\x63\xa6\xdf\xf9\xcd\x6e\x53\x88\x50\x69\x7d\xed\x0f\x9d\
\xf2\x01\xec\x58\x72\x74\x5e\xef\x38\x8f\x38\x02\xb2\x4b\x8d\x67\
\xbb\x28\xc4\x1a\x20\xad\x09\x2e\x98\xee\x4b\x2f\xe9\xe3\xe9\x2e\
\x8c\x4e\x85\x24\xf0\x3c\x98\xf2\x12\x12\x54\xfc\xe0\xdc\xcb\xd4\
\xfc\x95\xc8\x6c\xf2\x42\x73\xc9\xb3\xa2\x48\x7e\x12\x2d\x3a\x6a\
\x21\x3c\x6d\x15\x0c\x94\xa0\xa7\x54\x2b\xfc\xe5\x70\x9b\xd4\xe8\
\xb3\x4b\x7c\x65\xfd\x16\x75\x51\x1b\xca\x2a\x17\xfa\x6b\xfa\x68\
\x8c\x3a\x21\xd7\xfc\x91\x94\xf3\x7b\x97\x48\x66\x26\x87\x19\xd9\
\xff\x20\x15\x36\xb4\xe5\xdb\xda\xf0\x8c\x07\x28\xea\x13\x24\x34\
\x16\xfe\xec\xb9\x9d\xee\x71\xb6\x99\xe2\xfe\xd4\xe8\x2e\x93\x89\
\x0c\x05\x44\x98\x36\x01\x7e\x73\x9e\x6c\x5d\x39\xc0\x33\x34\x8c\
\x4d\x45\xb5\x7e\x28\xf4\x15\x4b\xd0\x2b\xe1\x16\x13\xc1\xda\x2a\
\x19\x6d\x90\x75\x9a\x17\xf5\xc0\x93\x96\xc1\x60\x39\x23\xfc\xd6\
\xf1\x3e\x05\xbd\x0a\x4e\x29\xf1\xcd\xdf\x6f\x51\xcf\x69\x43\x79\
\xd5\xc2\x70\x5e\x36\xce\x8f\x15\xf5\x89\x34\x81\x1a\xf4\xa7\xd7\
\x35\xbc\x28\x02\x24\x0a\x55\x51\x3f\x6a\xc7\xa7\xb5\x9c\x00\x66\
\xa4\xe2\x01\xd7\xd5\xfd\xf8\x66\x89\x50\x4b\x02\x65\x76\xea\xad\
\x45\x5a\xc0\x1a\xf1\x75\xec\x93\x7f\xa5\xa6\x3d\xdb\xf0\xb5\xe7\
\xcb\xad\xab\x06\xf9\x53\x05\x13\x53\x96\x43\x1c\xf9\x07\x53\xc6\
\x3a\xb6\x49\x61\x0b\x7e\x46\x1b\xd8\x04\xa9\x1a\x28\x29\x38\x7a\
\x20\x20\x81\x5d\xf3\x97\xdc\xa0\x8d\xa0\xaa\xa0\x02\x0c\x28\x38\
\xa6\xcc\xcf\xae\x79\x44\x1d\xdd\xea\x32\xab\x2d\x2b\x75\x7e\xae\
\xbd\x6f\x99\x40\xce\x62\xff\x31\x23\x0c\xe5\x59\x33\xd3\x7b\x16\
\x20\x18\x3c\x7e\xd0\x8e\x4f\x6b\xd7\x0a\x31\xbf\xaa\xf9\xc8\xa2\
\xe3\x54\x88\xad\xe8\x7a\xcb\x4f\xa8\xf2\x27\xfe\x14\x0f\x9a\x1c\
\xe1\xcf\x0c\x7b\x9c\xb6\x16\x00\xb8\xf6\x3c\xf9\xfd\xea\x01\x9e\
\xad\x0c\x93\x93\xa1\x06\x98\x8a\x34\x81\x97\xec\xa3\xe3\x6a\xe8\
\x1c\x57\xbc\x4c\x04\x29\x32\x91\xbc\x84\x28\x91\x49\x84\xc0\x12\
\x07\x06\x9c\xa4\xe6\x2f\xe9\x20\x24\xea\x3a\x70\x8d\x81\xaf\x09\
\x5c\xa9\x50\xb7\xf5\x70\xcb\x07\xef\x55\xfd\x33\xf9\x96\x99\x40\
\xff\xb3\x33\x84\xf0\x8c\x42\xdb\x3f\x3a\xee\xf7\x67\x36\xf0\x25\
\x6b\x56\x01\x4c\xb1\xc5\x5c\xe6\x4f\xb4\xe5\xfb\xda\xf1\x10\x6c\
\x02\x4c\xa7\xa6\xb7\x8f\x8b\xd2\x04\xd7\x6c\xd7\xbb\xa2\x61\x8b\
\x71\xf8\x31\xab\x11\x66\xe0\x07\x44\xb8\xf6\x7c\x59\xbf\x7a\x80\
\xe7\x1a\x9f\xea\x54\x35\x31\x83\x62\x32\x58\x84\xa8\x54\x83\x73\
\xcf\x04\xd7\x44\x42\x5f\xb5\xf6\xd5\x2c\x19\xfc\x20\x52\x54\xf6\
\xa0\x24\x01\x01\x5c\x9d\x6c\x77\x2a\xd8\xaa\x83\xfe\x47\x7d\xfd\
\x2c\x18\x3a\x84\x47\x2e\xb9\x41\x95\xdb\x52\x72\xc2\x0b\x10\x4a\
\xe9\x78\x7f\x68\xfb\x87\x24\x70\x97\x7a\x0f\x18\xc9\x99\xc1\xa1\
\xa9\xa0\x47\x6d\xba\xaa\x72\x5b\x5b\xbe\x8d\xf6\x11\xe0\x46\x60\
\xac\x36\x03\xac\x4e\x55\xd9\x8c\xc9\xd3\x00\x45\x5a\xc1\xe7\x4c\
\x7f\x9f\xda\x90\xad\xfd\x33\x24\x78\xea\x93\x7e\x31\xf3\xb9\x7c\
\xae\xbd\x40\xae\x5b\xd5\xcf\xa7\xa6\x3c\x98\xaa\x86\x9b\x97\x6c\
\x95\xec\x71\x35\x6d\x0e\xd9\x1a\xc1\xd6\x02\xd9\xad\x32\x06\xde\
\x44\x50\xf3\x47\x44\x70\xec\x4d\xc1\x82\x41\x96\xf4\xac\xe5\xc1\
\xb6\x94\x9c\xe1\xfc\xa4\xb6\x27\xe8\x1a\x6b\xd5\xfe\x4a\x98\x94\
\x5e\xb3\x3a\x5b\x3c\xd3\x86\x65\xff\x9b\x49\x7e\xd6\x96\x6f\xa3\
\x4d\x04\xa8\xf1\x03\x8a\x04\xba\x51\x6d\x9f\x4a\x4b\x9b\x49\xb2\
\x4b\xad\xf2\x7c\x26\x53\xb5\x7f\xfa\xd8\xf1\x0d\x2f\x9e\xcd\x77\
\xac\x7f\x99\x5c\xbc\xac\x8f\xdb\x2b\x3e\x4c\x7a\x69\x02\xc4\x9b\
\x9f\x1c\x4f\x84\x44\x88\x35\x80\x1f\x68\x88\x6a\xa8\x29\x2a\x16\
\x31\x2a\x96\x86\x18\x1b\x85\x3d\xbb\x02\x61\x77\x9d\x60\xaf\xa3\
\xae\xda\x21\x11\x16\x1d\xc2\xea\x4f\x6c\x51\xf7\xb4\xb2\xdc\xf4\
\x95\x8e\x83\xf0\xe2\x94\xe3\x1b\xd9\xfe\x3e\x41\xed\xbf\xa2\x7a\
\xbf\x48\xce\xf4\x25\xd3\x61\x42\xc6\x04\x52\x95\xf6\x75\x0a\x6c\
\xe7\x4a\xf1\xbf\xaa\xf9\xe8\x7a\xc7\x39\x73\xcc\x34\xf8\xcf\xe1\
\xfe\x2e\xb5\xde\xab\x15\x7c\xfb\xf8\x5d\xb3\xfd\x88\x43\x7a\x78\
\x61\x8f\xcb\xa4\xe7\xc3\xa4\xa5\x05\x22\xff\x60\xd2\xda\x22\xad\
\x30\x69\x11\x21\xd6\x08\x96\x06\x98\xca\xf8\x0b\x53\x1e\xec\xd9\
\x0b\x8f\x6d\x06\x31\x89\xd0\xeb\x70\x8b\xba\x64\x2f\x5f\xc2\xba\
\x8f\x6f\x56\xbf\x6f\x59\x89\x09\xaf\x44\xd4\x92\xd8\xf1\xf5\x6d\
\xc7\x57\xe1\xf4\x9a\x6d\xbe\x23\x27\x4e\xbb\xe6\xcf\xab\xf4\xa2\
\xf3\x29\x76\x9b\xcf\xf8\x6d\x5b\x86\xaa\x9d\x04\xf8\x49\x7e\x06\
\x64\xfa\x96\x17\x65\x50\x33\x5a\x63\x5c\x3d\xd5\x9f\xe2\xde\x6c\
\xef\x4c\x6b\xb8\xe3\x53\x4f\xfe\xa9\x9a\xd5\x22\xcc\xbf\x39\x5f\
\x36\xaf\x1c\x08\x66\x85\x33\x12\x90\x60\x32\x34\x89\x62\xad\x60\
\x13\x23\x24\xc2\xa4\x6d\x3a\x55\x43\x5f\xc1\x4b\xb7\x32\x47\x3e\
\x44\x44\x90\xdd\xfb\xe0\xee\x07\x83\xb4\xc8\xfc\xd1\xd6\xa6\x14\
\xac\x3e\x94\xb3\x2e\x79\x54\x35\x37\x79\xed\x34\xa0\x47\x1c\x17\
\xc3\xc7\x6b\x9c\xdd\xc4\xf4\x31\x6a\xb1\xd7\x8f\x24\x32\x34\x17\
\xe6\x0f\x93\xdc\x3d\xd7\xdf\x52\xf7\x3b\xdb\xf5\x20\x33\x52\xb9\
\x19\x48\x54\x76\xb3\x76\x7f\xdd\xdf\x6b\xc8\xd3\x63\xb6\x6b\xd7\
\xf3\x19\xb7\xbb\x2f\xa7\xba\x32\x0b\x9f\x38\xe5\x67\x4d\x2d\x01\
\x5d\x88\x1b\x2e\x90\xaf\x2e\xef\xe7\x37\x71\x20\xca\x87\x89\x2a\
\x4c\x56\x60\xdf\x14\xec\xad\xc2\xee\x4a\x90\x36\x55\x4d\x34\xc5\
\xa4\x97\xd6\x12\x15\x8b\x30\xd1\x6f\x76\xf7\x8b\x8a\x0f\xa3\x13\
\x70\xcb\x3d\xb0\x77\x5f\xa8\x01\xac\x41\x39\x11\x11\x8e\x5c\xc5\
\x73\x3e\xf2\x88\xfa\xf6\x9c\x16\x98\xf0\x56\x44\x1d\x8b\x10\xd8\
\xfd\x76\xed\xef\x43\x79\x45\x75\x93\x21\x58\x79\x53\x6a\xff\xdb\
\xcc\xfd\x73\x23\x40\x32\xc9\xbf\xcd\xe9\x77\x34\x40\x3b\x35\x00\
\xc0\xff\xcd\xcd\x04\x28\x10\x72\x55\x93\x41\x85\xd7\x46\xe7\x86\
\xb5\xde\x76\x7d\x53\xd6\x14\xb2\x48\x70\xaa\x67\x78\xf5\x6c\x3f\
\x64\x41\x2f\xe7\xf7\xbb\xc1\x34\x2d\x10\x74\xbf\xde\xed\xc3\x23\
\x15\x78\x68\x02\x1e\xaf\xc0\x36\x81\xb1\x60\xb8\x64\xca\x34\x9a\
\x0c\x4d\xa6\x89\xd0\x4f\xb0\xfd\x89\x49\x4b\x5b\x44\xc7\xa3\x93\
\x70\xc3\x5d\xb0\xf9\x89\xb4\xe0\xc7\x4b\x42\x29\x38\xfa\x30\x5e\
\xf9\xfe\x07\xe6\x66\x2c\x81\xfe\x47\xa7\x17\xa3\x2e\x8e\x84\x3d\
\x15\xf3\xf7\x15\xa5\x21\xff\x61\x4f\xcb\xe1\x76\xde\x37\x5d\xfb\
\xd7\x89\xfe\x50\x61\x5c\x1d\xab\xae\x98\x8b\x6f\x68\xfa\x5b\xdb\
\xf9\x30\xb0\xd8\x5d\x64\x06\xc5\x69\xe4\xa7\xd5\x5c\x93\x43\x92\
\x0a\x4f\xf7\xf6\x25\xfe\x40\xd5\xee\x99\x19\xf4\xcf\xb9\xf4\xc4\
\x9f\xa8\x59\x0d\x07\xfd\xed\x4b\x64\xef\xf2\x7e\xde\xa2\x54\x30\
\x19\xdd\x98\x90\xb0\x81\x30\x86\xaf\x41\x95\xa1\x34\x04\xa5\xfe\
\xa0\x22\x8d\xb5\x41\x35\xed\x2c\x4f\x58\xc2\x3f\x51\xad\x75\xb2\
\xc7\x27\xe1\xb7\x77\xc0\x9d\x0f\xe5\xac\x46\x1a\x12\xe2\x84\x35\
\x0c\xbf\xf7\x3e\xf5\x89\x59\x97\x92\x51\x7f\x8e\xb0\xba\xc6\xf4\
\xf1\x15\xda\x91\x7d\xb2\xc0\x5f\x99\x57\x2f\xa5\xca\xab\x59\x58\
\xe6\x8f\x8c\xf1\x13\xf3\xd6\xa8\xc3\x78\x7b\xd0\x56\x02\x98\x91\
\xca\x03\xc0\xfa\xd4\xc7\xdb\x99\x90\x9b\xa6\x1a\x68\x09\x6a\xb5\
\x80\x80\xec\x51\xc7\x57\x2b\x3c\x14\x35\x36\x45\x43\x1c\xc3\x3e\
\x39\xc7\x7a\x86\x59\x77\x35\x5e\xff\x52\xf9\xde\xf2\x3e\x7e\x20\
\x90\x9a\xc1\xa9\x64\x35\x68\x45\x44\x58\x30\x08\x87\xad\x82\x55\
\x2b\xa0\x54\x0e\x85\xdd\x32\x8f\xa6\xaa\x69\xc1\x8f\xc9\x90\x39\
\xbe\xe1\x5e\xf8\xcd\x1d\xc1\x73\xec\xe5\x62\x15\xe0\xba\x70\xd2\
\x51\x7c\xe8\xdd\x77\xaa\x19\xcf\x5c\xad\xbf\xea\x0e\x21\xfc\x2d\
\x3e\x89\xe9\xe3\xc7\x51\x1f\x29\x2d\xaf\x8e\x1b\xa1\x3c\xab\xda\
\x5f\x72\x37\x51\x13\xea\xa3\xb3\x2d\x93\x69\x7f\x6f\xbb\x1f\x48\
\x9e\x19\x64\x67\x4a\xbd\x8c\x82\xe6\xb5\x80\x70\x88\xd9\xa6\x9d\
\xaa\xc7\x13\x76\x7f\xfc\xa8\x0b\x82\x67\xf8\xbb\x75\x3f\x9e\xfd\
\x5a\x5f\x03\x83\x5c\x34\xe8\xf2\x84\x6f\xa5\xc5\xad\xb9\x51\x8b\
\xae\xd5\xaa\x7b\xc8\x02\x38\x69\x2d\x3c\x69\x1d\x2c\x59\x08\x13\
\x7e\xc6\x34\xaa\x5a\x5b\x81\x86\xb8\xf5\x21\xb8\x7a\x43\x10\x72\
\x0d\xbf\x3e\xde\x97\x5d\x38\xed\x68\x3e\xff\xf6\x3f\xa8\x37\xcd\
\xe8\x83\x0c\xef\xc3\xb0\x24\x15\xf2\x0c\x35\x41\xef\xca\xea\x83\
\x5e\x38\x33\xc4\xdc\xd6\xfe\xc0\x38\x0f\x9a\xcb\xfc\x3b\x66\x5b\
\x1e\xd3\x45\x27\x08\xf0\x4d\xc0\x8b\x33\x20\xde\xab\x62\x81\xcf\
\x17\xf0\xc6\xe6\x91\x70\xb8\xbf\x4d\x6f\xaf\x7a\x8c\xc5\x63\x7d\
\x93\xfe\xfb\x87\x79\x86\x0f\xcd\xf6\x63\xae\x7f\x81\x54\x97\xf6\
\xf2\x4a\xd7\x9a\x22\x25\x12\xf6\xb0\x4b\x73\xac\x05\x52\x71\xfc\
\x01\x38\xe3\x58\x78\xd1\x59\x70\xec\xea\xa0\xb1\x6e\x32\xd4\x02\
\x13\x55\x18\xf7\x60\x22\x74\xa4\xe3\x7d\x35\xb9\xe6\xae\x4d\x70\
\xd5\x2f\x61\x34\x1c\x5f\x6c\xbb\xf5\xfd\x3d\x70\xd4\x2a\xfe\xf7\
\x19\x57\xab\x93\xa6\xf3\x2d\xfa\x0a\xf7\x58\x0c\x7f\x55\x53\xf3\
\xfb\x8a\x9e\xe5\xd5\x07\xab\x4a\x8e\x16\xbb\xdc\x98\xbe\xcc\xe7\
\xd7\xfe\x0a\x19\x63\x64\xb6\x65\x31\x13\x28\x99\xc5\xe4\x5d\x33\
\x85\x1e\x2e\x7f\x1b\x78\x65\xf0\x06\x04\x34\x54\x80\x96\xcc\xb9\
\xb5\xcf\xfe\x96\x3d\xb6\xff\x9f\xfd\xdd\xe1\xc6\xd2\x0a\x73\xba\
\xab\x71\x4b\x56\x0b\xab\xeb\x20\x25\xcd\x05\x0f\x9e\x27\x3f\x9c\
\xed\x37\xad\xfa\xa6\xfa\xe9\x96\x29\x9e\xd7\xe3\xc2\x82\x5e\x18\
\xea\x81\xc1\xde\xa0\xcf\xff\x40\xb8\xf5\xda\x43\x20\xdd\xa4\xd7\
\x67\x39\xf4\x46\xee\x7e\x2c\xa8\xdd\xf7\x86\x6d\xe6\x4a\x27\x2b\
\x4a\xd5\x58\x79\x12\x6c\x83\x7d\xf0\x86\xa7\xc3\x91\xcb\x82\xdf\
\x1e\xdf\x0d\xbf\xf8\x23\xdc\xbc\x11\x06\x4a\xdc\xb6\xf1\x35\x72\
\x7a\x53\x65\x72\x85\x3b\x88\x61\x03\xbe\x3a\xb1\x46\xf8\x97\x78\
\x0f\x7b\x65\xb3\x86\xe8\xb9\xe4\x68\x80\x46\x62\x94\xad\x9c\xc2\
\x79\x2b\xf1\x81\x8a\x9a\x64\xa7\x1a\x32\x9f\xf4\x67\xbb\x12\xe8\
\xb4\xd1\x29\x02\x9c\x05\x24\x0d\x38\x29\x81\x97\x50\xe0\xc9\x21\
\x83\xd4\xa6\xd7\x90\x41\xf2\x7f\x2f\x71\x5d\x69\xb9\x79\x8a\xab\
\x71\x22\xdb\x3c\xec\x70\xb6\xc7\xd5\x3c\x79\xe3\x79\x32\xeb\xd1\
\x47\xcb\xfe\x5d\xdd\x54\x51\x9c\x31\xd8\x17\x10\x60\xa0\x27\x10\
\xfc\xfe\x70\xb3\x85\xbf\xc7\x1e\x06\xe9\x26\x9d\xdf\x94\x82\xfb\
\x1f\x87\xf5\xf7\xc2\x63\x3b\x20\x5b\x3c\xd9\xd2\x8a\xce\x97\x84\
\x13\xd1\x6f\x1f\x4d\x5f\xb3\x62\x90\x53\xef\x7c\x85\xdc\x5e\xb7\
\x3c\xae\x70\x15\x86\xef\xe0\xab\x97\xc7\xe1\xce\x48\xf8\x0f\xf1\
\x1e\xf5\x7b\xcd\xe1\xb1\xd0\x4b\xf2\xdc\x69\x13\x20\xbb\x85\x8e\
\xb5\xda\xc5\x8f\xfc\x4b\xcc\xac\xda\x67\x66\x8a\x4e\x98\x40\x98\
\x91\xca\x8d\xc0\x2f\x52\x99\x93\xdd\x4f\x67\xa3\xc1\xb9\x00\x1e\
\x4f\xab\x6e\xd3\xbf\xf3\x0c\xd5\x54\x1f\x7d\x9f\x85\x55\x9f\xef\
\x1f\xf9\x43\x35\xeb\x15\x09\x9f\x78\xad\x9c\x79\xa8\xcb\xd9\x03\
\xb0\x41\x1b\xa6\x22\x73\xc7\x75\xd2\x7d\x79\xec\x56\x5d\x3b\xa4\
\xa9\x54\xa0\x99\x4e\x3a\x1c\xde\xf9\x5c\x78\xc7\x73\x82\x63\xa5\
\x6a\x05\x2e\xf5\xa9\x12\x08\xbe\x2d\xfc\x91\x9c\x4a\xb9\x89\x3e\
\x43\x3e\x1f\x8a\x85\xdf\xae\xf9\x87\xfc\x7b\xfc\x3e\x73\x58\xfc\
\xac\xd9\x08\x7f\x5e\x5a\xa8\x05\x64\x4c\x5d\x3c\xdb\xbc\x9f\x29\
\x3a\xa2\x01\x00\xf4\x70\xf9\x05\xd8\xab\x90\xd7\x33\x77\x9a\xd5\
\x02\xf6\x35\xb9\xda\x81\xc0\x1c\x5a\x6e\x4e\x72\x1d\xfa\x5c\xbb\
\xd7\xa5\xc3\xd5\xae\xe6\x15\x0f\x9f\x3f\x77\x19\xf2\xd4\xff\x52\
\x67\xe9\x32\x97\xf5\xf7\xf3\xd4\x25\x43\xf4\xf5\x97\xad\xc1\xf0\
\x0e\x9c\x5a\x82\xa3\x1c\xf8\x55\xe4\x23\xd8\x9d\xdf\xac\x78\xff\
\xae\x7d\x70\xed\x5d\xb0\xe1\xc1\xa0\xc5\x38\x42\xbd\xf5\xd1\x00\
\x7a\x1c\x76\x6c\x79\x83\xd4\xed\x00\xa8\x2f\x77\x5f\x84\xaf\x7e\
\x88\x41\x27\xd1\x1e\x45\xcf\x42\xff\x36\xd3\xe7\x9f\x62\x82\xd2\
\x88\xcd\xb0\x59\x9b\x3e\x99\xda\x9f\x7d\x3c\x6c\x3e\x6c\x8e\x9c\
\xab\x3c\x9f\x2e\x3a\x46\x00\x00\x3d\x5c\xbe\x05\x38\x3d\x78\x13\
\x9a\xf3\x05\x9a\x32\x83\x48\x93\x20\x7b\x9d\xc3\x6d\xee\x32\x73\
\xb4\xeb\x32\xe8\x5a\x4e\xaa\xab\xf9\xc8\xe6\x97\xc9\xec\xe3\xe8\
\x39\x78\xee\x2f\xd4\xba\x72\x99\xcf\xae\x1c\xe2\xd9\x17\x2d\x67\
\xf0\x9c\x72\xb0\x7e\x94\x00\xff\xe0\x82\x67\xbd\x47\x24\xf8\x4e\
\x46\x3b\x8c\x4f\xc1\xf5\xf7\xc2\xb5\x77\x07\x2d\xc4\x50\xd0\x3e\
\x18\x1e\x2c\xee\xe3\x03\x0f\xbe\x5a\x3e\x5b\x98\xff\x5f\x76\x8f\
\xc1\xa8\xdf\x63\x38\xc4\x16\xfe\xde\xa5\xde\x46\xcf\x35\x47\xa5\
\x64\x57\x6a\x15\x75\xed\x49\x0e\xea\xd9\xfe\x46\x21\x5b\xd4\xfb\
\xe5\xd3\xfe\xe7\x5a\x91\xe7\xcd\xa0\xd3\x04\x78\x2d\x76\xe3\x58\
\x23\x41\x2f\x22\x48\xae\x36\x90\x62\x2d\x11\xa4\xdf\xeb\x2c\x31\
\xbd\xa5\x32\x47\x38\x09\x01\x8c\xab\x79\xf9\xe3\x17\x4a\x4b\x47\
\x23\xdd\xb0\x51\x9d\xb3\x53\xb8\xfc\xb8\x7e\x4e\x5b\x5b\xc6\xfd\
\x2f\x07\xee\x73\x13\x6d\xa4\x33\x24\x00\xab\xd5\x97\xa0\x51\xef\
\x77\x0f\xc0\xf7\x7e\x1f\x68\x84\x14\x09\xc2\x93\x3e\x97\x47\x1f\
\x7f\x83\x1c\x51\x98\xf7\x5f\x2e\x2d\xc2\xe7\x5a\x0c\x27\x29\x3f\
\x0e\x79\x9a\xde\x15\x95\x27\xaa\x70\x68\x56\xf8\xe3\x63\xfb\x26\
\x33\xa9\xfd\xed\xbe\x45\x63\x3c\x6a\x3e\x64\x8e\xa0\x83\xe8\x88\
\x0f\x60\xe1\x5b\x04\x63\x86\x03\xc8\x34\xf6\x8d\x6c\xfe\x46\x13\
\xb8\x1a\xd6\xf9\xdb\xf5\x21\x53\xfb\xd4\x06\xcb\x1f\xd0\x15\x9f\
\xef\x1e\xfa\x3d\xf5\x67\xad\xfc\xe8\xa7\x1e\x25\xd7\xbf\x64\xad\
\x9c\x75\xcc\x0a\x29\xfd\x7c\x2f\xab\x6f\x7c\x8c\x6f\xdc\xbf\x99\
\xad\xe3\x53\x48\x34\x11\x6f\xdc\xca\x9b\xb3\x77\x1d\x38\x67\x1d\
\xbc\xfd\x4f\xd3\xb5\xbe\x15\xa1\x91\xbe\x32\xef\x2c\x7a\xbe\xfe\
\x52\xe9\x18\x3c\xd6\x2b\x5f\x9d\x14\xd5\xfc\x8e\xc3\xae\x9e\x55\
\x95\xb1\x39\x13\xfe\x3c\xa4\xf3\x5f\xd8\xa5\xde\xde\xca\x7c\x6e\
\x06\x1d\xd5\x00\x00\x7a\xb8\xfc\x32\xe0\xea\x24\x81\xe6\xb4\x40\
\x23\x5f\xa0\xc8\x1f\xa8\xdd\x84\x1e\xf9\x4d\x69\xb1\x9c\xeb\x6a\
\x1c\x47\xc7\x76\xf8\x97\x1c\xcd\x5f\x6f\xb9\x70\x46\xeb\x9c\xcf\
\x08\x1f\xd8\xa2\x9c\xc7\xb7\x70\xc9\xe2\x01\xde\x7c\xf8\x72\x0e\
\x5b\xb2\x20\xa8\xf8\xed\xda\x1f\x95\x9e\x3c\xfb\x93\xdf\x0f\x42\
\x9f\x90\xc8\xe4\x60\x89\xf5\x9b\x5e\x2f\x4f\xcd\xcd\xef\x2f\x96\
\x9e\x8e\xe1\x6a\xe5\xab\x25\x51\x4d\xdc\x33\xe0\xdf\x27\x0b\xfc\
\x23\x8d\x04\x8b\xd2\xd9\x4e\xef\x8c\xec\x7e\xfb\x9a\xa2\xda\x7f\
\x37\xbf\x35\x17\x9b\xa7\xb7\x2b\x6f\x8b\xd0\x71\x02\x00\xe8\xe1\
\xf2\x4f\x80\xe7\x07\x6f\x44\x7d\x21\x4f\x99\x3d\x75\xcc\x9c\x22\
\x7f\xa0\x68\x73\xb8\xcd\x59\x6c\x16\xb9\x25\x8e\xb0\x46\x61\x5d\
\xe3\x68\x5e\xbb\xed\xe5\x32\xd6\xc4\x67\xcc\x39\xde\x78\xab\xfa\
\xcb\xc5\x03\xfc\xf9\xea\x65\xac\x5d\xb1\x30\x98\x6b\x33\xdb\x8f\
\xf5\xdf\xaf\x87\xeb\xc2\x00\xae\x04\x9f\xed\x2d\x2a\xb3\x6e\xe3\
\xeb\x64\x63\x4d\x3e\x7f\xa1\xf4\x46\xe5\xab\x7f\xc6\x50\x0e\x87\
\x35\x56\xfb\x96\x57\xef\xaf\x6a\x39\x5e\x24\x6e\x72\x48\x84\x5f\
\xa8\xe2\x33\x21\x46\x55\x44\x4b\x19\x87\x1e\xa0\x4c\xa3\x35\x3e\
\xf3\x1a\x2b\x6d\xe1\xaf\x52\x65\x93\x3a\xc6\x7c\xc1\x9f\xf9\xe4\
\xc9\x73\x84\xf9\x42\x80\xe3\x81\x3f\x40\xb8\x2c\x66\x56\x0b\xd4\
\x73\x88\x1b\x3a\xc3\x34\x47\x80\x60\x9b\x50\x7d\xb2\xc1\x5d\x28\
\xe7\xba\x1a\x37\x24\xc1\xad\x8e\xe2\xbc\xed\xaf\x94\x4d\x8d\xbf\
\xa4\x75\x78\xed\x4d\xea\x75\x8b\x16\xf0\xc1\x23\x96\x72\xc2\xaa\
\xc5\x44\x0d\xcf\xfc\xfa\x2e\xf8\x96\x35\x83\xce\x82\x1e\xfe\xcf\
\xa6\xd7\xca\x9b\x53\xf9\xfb\x85\x92\xc2\xf0\x31\x65\xd4\x47\x23\
\x41\xec\x59\x60\x36\xaa\x41\x7f\xb9\x6f\xe8\xa5\xaa\xb6\x98\x2a\
\x63\xc6\xc3\x13\x4f\x39\xe2\x33\x80\x61\x29\x42\x6f\x6c\x4e\x46\
\xce\xab\xb0\x07\xd8\x8a\x66\x0a\x57\xfa\xe8\x93\x7e\x06\x64\x09\
\xda\x5a\xd3\xb7\xc8\xf1\x0d\x23\x3f\xf2\x84\xba\x52\x3e\xe1\x0f\
\x77\x32\x3f\x23\xcc\x0b\x02\x00\xe8\xe1\xf2\xe7\x80\x60\x42\xd8\
\xbc\xc6\xad\x66\x4c\xa1\x7a\xe6\x50\xf3\x24\x00\xcd\x7d\x7a\xa1\
\x4c\xb9\xbd\x72\x72\x18\xb7\xdf\xec\x68\x2e\xd8\xf5\x2a\xb9\xa9\
\xd3\xf9\x04\xf0\xea\x9b\xd4\x0b\x16\xf6\xf3\xb1\xc3\x96\x72\x7a\
\xc5\xa3\xe7\xf2\x9f\x84\xad\xc2\x65\xae\x79\xfc\xf5\x92\x1a\xfc\
\xaf\x3f\x5f\xea\x53\x46\x7d\x0d\xc3\x6b\x30\xe0\x38\xec\xeb\x5d\
\x56\x1d\xad\x56\x95\xf2\x26\xd4\x4e\xbf\xc2\x51\x08\x7d\xc5\xed\
\x2c\x2a\x2d\xc4\x79\xe1\x4c\xa1\x8a\x56\x37\xcb\x90\xe9\x53\x0b\
\xcd\x3a\x9c\x88\x38\xd4\x9a\x3e\x13\xec\x66\xb7\x5a\x6e\x3e\xdd\
\xfe\x56\xdf\x3c\xcc\x27\x02\x0c\x11\x0c\x98\x09\xe6\x95\xcf\x8b\
\xfa\x34\x32\x85\xea\x12\x80\xe6\x49\x10\x3c\xdf\x50\xe2\x7a\x67\
\xa1\x59\xe3\x96\x38\xdc\xd1\x78\x5a\x71\xa5\xa3\xb9\x78\xd7\xab\
\x64\x7b\xa7\xf3\x2b\xc2\xf3\x7e\xa5\x4e\xbe\xe7\x71\x56\x21\x6c\
\x7f\xe4\x75\x72\x73\x9c\x9f\x9f\x2f\x29\x65\xd4\x6b\x31\x7c\x0a\
\xc3\x11\x18\x55\x71\x07\xfd\x3f\x88\x92\x7e\xaf\xc2\x2a\x84\x43\
\x1a\x37\x3c\xaa\x3c\x61\x2f\x48\x8b\xb5\x84\xc1\x65\x23\x4b\xfc\
\x1e\x86\xe4\xb0\xec\x80\x7a\x79\x5c\x0d\xcb\x67\xfc\x2b\x3b\x9d\
\x6f\x11\xe6\x0d\x01\x00\xf4\x70\xf9\x4d\xc0\xd7\x93\x04\x1a\x9b\
\x42\x79\xce\xee\x6c\x49\x40\xea\xdc\xa3\xc4\x7a\x67\x28\x20\x82\
\x56\xec\x71\x34\x9f\xd4\x8a\x2f\xef\xbe\x48\xa6\x3a\x9d\x67\x79\
\x70\x3e\x5b\x3e\x17\xc3\x17\x30\x9c\x8d\xa8\x29\x71\xcc\x6f\x29\
\x99\x35\x08\xc7\x34\xd5\x92\x9e\x27\xfc\x45\x24\xb0\xe7\x05\x35\
\x99\x74\x87\x5b\x59\x61\x56\xd1\x67\x96\xe3\x2b\x18\xe5\x1e\xf3\
\x51\x73\x7c\xa7\xf3\xc7\xc6\xbc\x22\x00\x80\x1e\x2e\x5f\x0d\xbc\
\x2c\x78\x3b\x9a\x33\x77\xa6\x45\x00\xea\x93\x80\xc2\x34\x8f\x12\
\xeb\xf5\x02\xb3\xc2\x2d\x71\x8c\xd6\x3c\xa4\x15\x1f\xdc\xf7\x1a\
\x99\x37\xcb\x1a\x39\x9f\x29\x1f\x8d\x70\x19\x86\x57\x80\x1a\x15\
\xd7\xbf\x0b\x47\xd6\x22\xd9\x95\x1d\x0b\xb6\xf8\x37\x55\x7c\x4d\
\x0d\x01\x54\xfe\x6f\xb1\xdd\xaf\x0c\x25\xb9\x85\x65\xfe\xb1\xec\
\x52\xcf\x35\x9f\xf3\x5b\x37\x88\x7f\x06\x98\x8f\x04\x58\x0c\xdc\
\x06\x1c\x16\xbc\x21\xcd\xd9\xfb\xd3\x26\x41\xe6\x7a\x68\x8e\x10\
\x00\x9a\x3b\x55\xaf\x6c\x77\x06\xe5\x34\x47\x73\x97\x56\xfc\xcd\
\xd8\x6b\xe5\x37\x9d\xca\x33\xe7\xb2\xf2\xa1\x08\x7f\x83\xe1\xdd\
\x68\x6e\x97\x92\x99\x40\xc9\x93\x91\xd0\x31\x6d\x58\xdb\xdb\xbf\
\xab\xfa\xbf\xa7\x84\xbc\x8e\x89\x14\x69\x86\x24\x6d\x0c\xe1\x62\
\x33\x52\xf9\x7c\xa7\xf2\x29\x0f\xf3\x8e\x00\x00\x7a\xb8\xfc\x0c\
\xe0\x97\x44\x0d\x75\xcd\xd8\xfb\x8d\x7a\x83\x36\x22\x01\x34\x27\
\xfc\xe9\xe3\x49\x1c\x6e\x56\xfd\xe2\x3a\xbd\xb2\x40\x6b\x7e\xaa\
\xe1\x7b\x4a\xf1\xdb\xb1\xd7\xb6\xb6\xfd\xc0\xb9\xac\x7c\x38\xc2\
\xcb\x11\x2e\x44\xb1\x52\x5c\xb3\x1b\x47\x56\x23\xac\xce\x0d\x43\
\x36\x45\x02\x55\xff\x1a\x93\xb9\xd6\x50\x5f\xf8\xb3\xe9\x01\xbe\
\x0e\xbc\xd3\x8c\x54\x2a\xad\xcc\x9f\x66\x31\x2f\x09\x00\xa0\x87\
\xcb\x1f\x03\xfe\x2e\x78\x4b\xf2\x6b\xfa\xe9\x76\x89\x2e\x34\x7b\
\x64\x26\xc2\x9f\x4d\xf3\x50\xdc\xa5\xca\xb2\x43\xf5\x82\x2e\xcb\
\x5d\x4a\xf1\x03\xad\xf8\xc5\xc4\xeb\x64\x4e\x22\x1e\xce\xa7\xcb\
\xeb\x10\x5e\x89\x92\x73\xd1\xf4\x88\x23\x3d\x68\x39\x05\x09\xd7\
\xe3\xad\x27\xf8\xf6\x71\xae\x80\xe7\xd4\xfc\xd1\x35\x26\xe7\xda\
\x5c\x5f\x80\x62\x02\xa4\xf1\x6b\xe0\xe5\x66\xa4\xb2\x73\x2e\xf2\
\x65\x36\x98\xcf\x04\x70\x08\x32\xea\x69\xc1\x9b\xd2\x9c\x90\x4f\
\x87\x04\xf6\x7d\xeb\x99\x43\xd0\x1c\x11\xec\x3d\x4c\xa2\x79\x04\
\xc5\x16\x14\x77\x29\x2d\x9b\x28\xf1\x84\x76\xd9\x81\x23\x5b\xb5\
\xcb\xe3\xc0\xe6\xa9\xd7\x27\x8d\x6c\xce\xa7\xcb\x0e\x70\x28\x5a\
\xd6\x00\x2b\x50\x2c\x11\x25\x03\x68\x0e\x43\xc9\x89\x08\xc7\xa2\
\x58\x03\xd6\x98\xdc\x7a\xfb\xa6\x48\xd0\xc0\xe4\x69\x26\x2c\xda\
\xc8\x2f\xc8\xc7\x7d\xc0\x4b\xcc\x48\xe5\x3e\x3a\x88\x79\x4b\x00\
\x00\x3d\x5c\x3e\x82\x60\x5e\xd1\x60\xbc\x53\x8d\x40\x4b\x83\x46\
\xb0\x3a\x3e\x01\x14\x10\x43\xd2\xbf\x35\x3a\xae\xb7\xa7\xe0\x3c\
\x48\xf3\x09\xd6\x4d\x18\x25\x58\x4f\xb9\x17\x38\x04\x18\x42\x72\
\x16\x30\x97\x9c\xe3\xa2\xb4\x22\x32\xd4\x9c\xab\xda\xdf\xa7\x2b\
\xfc\x8d\x4c\xa3\xfa\xe2\xb5\x13\x78\x91\x19\xa9\xfc\xae\x29\x81\
\x68\x01\xe6\x35\x01\x00\xf4\x70\xf9\x29\x04\x83\x67\x82\xd9\x87\
\xa7\xa5\x05\x20\x77\x6c\x00\x50\x5f\x23\xc8\xcc\x85\xde\x16\xf6\
\xa2\xe3\xe9\x42\x1a\x1c\x4f\x5b\x0b\xa8\x3a\xa4\xa0\x58\xf8\xeb\
\xb6\x03\x30\x5d\xe1\x8f\xb0\x19\x38\xd3\x8c\x54\xb6\xcc\x22\x87\
\x66\x8c\x79\x4f\x00\x88\x3b\xcc\x7d\x97\x3c\xa7\x38\x4f\x0b\x14\
\x99\x43\xd3\x8b\xff\x93\xf2\x0d\xa0\x39\x42\xd4\x3b\xae\x97\x96\
\x85\x34\x48\x6b\x46\xf8\xa3\x7d\x5e\x88\xd3\xfe\xad\x11\x09\xb2\
\xd1\x9e\x5c\x02\x14\x3a\xbd\xcd\xe0\x3a\xe0\x4f\xcd\x48\xa5\xed\
\xad\xc3\x9d\xee\x0e\xdd\x14\xcc\x48\xe5\xfb\xc0\x7b\x92\x04\x1a\
\xd7\x4c\x35\x85\x33\x8d\xf0\x5e\xca\x91\x6b\x50\xf3\x99\x9c\xff\
\xd7\xb3\x8f\xa3\xb4\x46\x5b\xde\xf5\x52\xe7\x99\x4d\x85\x2c\x9b\
\x10\xe4\x9a\x77\x69\xb9\xf0\x43\xe0\xe7\x7d\xb1\x6d\x02\x65\x61\
\xbf\xd0\x00\x11\xf4\x70\xf9\xf3\x40\x32\xe9\x53\xb3\x9a\x20\xa5\
\x15\xc4\x8e\xe7\x4f\xd3\xe9\x95\x99\x9b\x40\x79\xe7\x8d\x30\x17\
\x5a\xa0\x68\xf2\xe1\xa6\x4c\x1f\xd5\x98\x58\x99\x85\x33\x66\x20\
\xfc\x36\xde\x6a\x46\x2a\x57\xcd\xea\x0e\xd3\xc4\xfe\x46\x00\x45\
\x30\xaf\xd0\xab\x82\xb7\xcf\x6e\x4d\x44\x7e\x1a\x99\x44\x14\x9c\
\xdb\xe9\x79\x44\x80\xe6\x48\xd0\x28\x5d\x9a\x48\x6f\x48\x02\x55\
\x40\x86\xcc\x71\x21\x09\x54\x03\xa1\xa7\x15\xc2\x0f\xc1\x04\x7b\
\x4f\x36\x23\x95\xdb\x67\x7d\xa7\x26\xb1\x5f\x98\x40\x11\xcc\x48\
\x45\x80\xd7\x03\xff\x01\xd4\x8f\x52\xd4\x35\x0f\x72\xcc\x81\x22\
\xd3\x23\x57\x10\x54\x62\x52\x34\x72\x0c\x0d\xd3\x33\x83\x8a\xae\
\x6b\xca\xbc\x52\xf9\x42\x59\x37\x64\x49\xad\xc9\xd3\xcc\x7b\xe5\
\xe5\xf5\xec\xd1\x0b\x5c\xd6\x6a\x39\xb2\xb1\x5f\x69\x80\x08\x7a\
\xb8\xac\x81\xaf\x82\x35\xec\x6f\xc6\x63\x00\xa4\xb1\x26\xa0\xd9\
\xbd\x34\x67\xf6\xcc\xa5\x13\x5c\x54\xdb\x67\xf7\x8d\x6a\x7f\x9f\
\x60\x49\x4a\x8f\x60\xf3\xc3\x4d\xa8\xed\x80\x18\x0d\x54\x9e\x5e\
\xb4\x67\x3a\x78\x86\x19\xa9\x5c\x3b\xe7\x77\xcd\xc1\x7e\x49\x80\
\x08\x7a\xb8\xfc\x59\xe0\x7d\x49\x02\x33\x24\x81\x75\x2d\xcc\x8c\
\x08\xb9\x69\x52\x7b\xcd\x4c\x60\x0b\x7b\xea\x9c\xe6\x84\x3f\xda\
\x67\x8f\x0d\xc1\xca\x6d\x63\x0a\xec\x7e\xad\xcd\x98\x67\x0e\xc1\
\x1a\xaf\x0e\xad\xb0\x23\xae\x33\x23\x95\x73\xe7\xfc\xae\x39\xd8\
\xaf\x09\x00\xa0\x87\xcb\x1f\x06\xfe\x3e\xf9\x22\x9a\x1f\x0e\x09\
\xc5\x84\xa1\xe0\xda\x7a\xfb\x7a\xc7\x79\xe7\x8d\x30\x5d\x27\x38\
\x2f\x2d\x4f\xf0\x27\x09\x56\xe0\x1e\x0f\xcf\x8b\xf2\x26\xef\xd9\
\xd1\xbd\x6c\xb3\xc7\x01\x7a\x42\x32\xcc\x1d\xce\x33\x23\x95\x96\
\x2f\x96\xbd\xdf\x13\x00\x40\x0f\x97\xdf\x0d\x5c\x4e\x54\x04\xd3\
\x71\x7a\x73\x49\x21\xf5\x35\xc0\x74\x1d\xdf\xd9\x6a\x00\xa8\x25\
\x43\xb3\x5a\xc0\x3e\xf6\x81\x1d\x0a\x26\xac\xef\x28\xaa\x2c\xea\
\xbd\x43\x1c\x56\x25\xd1\x24\x02\xb8\x04\x44\x98\x1b\x8d\x70\x1b\
\xf0\xa4\xd0\xef\x6b\x19\x0e\x08\x02\x00\xe8\xe1\xf2\x33\x09\x9c\
\xe3\x64\x44\xd9\x4c\xec\xfd\x46\x44\x80\xfc\xb4\x7a\xc7\x79\xe7\
\xcd\x62\x4e\x42\xa1\x04\x42\xbf\x33\x74\x70\x53\x73\xaf\x36\xc8\
\x93\xa2\x7b\x66\xdb\x18\x6c\x67\xb8\x04\xf4\xce\x05\xeb\x79\xb1\
\x19\xa9\xfc\x78\x2e\x6e\x54\x84\xfd\x2a\x0a\x54\x0f\x66\xa4\xf2\
\x6b\xe0\x0c\x20\xe8\x97\x5f\x2f\xae\xdd\x74\xa3\x93\x4a\x0a\x37\
\xaf\xb1\xa8\x5e\x84\xa5\x5e\xb4\x45\x1a\x6c\x33\x6d\x10\xcb\xdb\
\x47\xb5\xfe\x13\xe1\xb7\x3b\xd1\x26\xe0\x4a\x50\x6b\x67\x37\xc7\
\xda\x3b\x39\xbf\x39\xd9\x6b\xad\x7b\x69\xa0\x0a\x8c\xcb\x5c\x38\
\xc7\x2f\x6d\xb5\xdc\x1c\x30\x1a\x20\x82\x1e\x2e\xbb\xc0\x27\x81\
\xf7\x27\x5f\x49\x63\x6d\x90\xbd\x8e\xa2\x63\xc9\xd7\x08\x79\x7b\
\x0a\xce\xa7\x8b\x99\x9a\x3f\xbe\x82\x6d\x04\x24\xd0\x84\x0e\xab\
\x24\x1a\x20\x6f\x78\x69\xde\x77\x64\x9f\x97\x47\xc0\x6c\x18\xd5\
\x0f\xff\xdf\x37\x2b\x93\x68\x13\x70\x78\x2b\xcd\xa0\x03\x8e\x00\
\x11\xc2\xfe\x43\x57\x01\x0b\x83\x2f\xa5\xbe\x89\x33\x1d\x22\x44\
\xff\x85\xd6\x99\x42\x4d\x87\x41\xc9\x08\xbf\x65\x9b\x6f\x23\x08\
\x69\x46\x91\x1a\x47\x12\x22\x44\x82\x9f\xd7\x5f\xaa\xe8\x3d\x8b\
\xb4\xa9\x4d\x02\x3f\xda\x5b\x9a\xb3\x6f\x56\x0e\xf2\x59\x66\xa4\
\xd2\xb2\xd9\x38\x0e\x18\x13\x28\x8b\xb0\xff\xd0\x49\x44\xb3\xce\
\xe5\x9a\x16\x8a\xc2\xc6\xb3\xac\x99\x53\x63\x5e\x44\x0d\x61\x56\
\x83\x58\x91\x59\x94\x77\xaf\x66\xfb\x02\x99\x06\xef\x12\xd7\xba\
\xd6\x7b\xf8\xc0\x76\x12\xe1\x8f\xcc\x9d\xac\x69\x13\xfd\xe6\x48\
\x3a\xcd\x36\x83\xdc\xa2\xf3\x9c\xff\xa5\xfe\x2b\x09\xd1\x26\x66\
\x65\x0e\x9d\xdf\x4a\x39\x39\x60\x35\x80\x0d\x3d\x5c\x7e\x29\xf0\
\x15\xec\x71\xc6\x45\x61\xd0\x6c\x1a\x34\xd0\x04\xd6\x83\xb2\x8d\
\x62\xe4\x5c\x93\x85\xfd\x5b\xbd\xa2\xc8\x6b\xf8\xb2\xd3\xed\xfd\
\x2e\x82\xb8\x7e\x2c\xe0\x58\x5a\x80\xda\x29\xe8\xf3\xba\x8e\x14\
\xbd\x43\xa1\x09\xa4\x72\xb4\x00\xf1\x42\x7b\x28\xa0\x7f\x46\xb6\
\xe0\xcd\x66\xa4\x72\xe6\x4c\xfe\xd8\x0c\x0e\x0a\x02\x00\xe8\xe1\
\xf2\x02\x82\xf6\x82\xff\x41\xa4\xf9\x0a\x4d\x9f\x0c\x11\xa0\x3e\
\x09\xa0\xb1\x29\x44\x5e\xba\x14\xfd\x30\x73\x13\x68\x2f\x41\xc4\
\x27\x2b\xfc\xb1\xe0\x93\x98\x42\x8d\xe6\x56\x0d\xee\x29\x28\x54\
\xa1\xb3\x9e\xd5\x44\x3e\x69\x22\xf8\x80\x17\xa6\xbb\xcc\x34\x3a\
\x74\xa8\x19\xa9\x6c\x9b\xc9\x1f\x1b\xe1\xa0\x21\x40\x04\x3d\x5c\
\x3e\x1b\xb8\x02\x38\x3b\xc9\x05\xf2\x89\x10\x39\xbc\xcd\x3a\xbd\
\xed\x08\x83\xd6\x23\xc1\x3e\x82\x96\xdd\xd8\x1c\xc9\xab\xfd\x25\
\xad\x09\x22\x02\x08\x06\x5f\xdd\x8e\xe1\x57\xc0\xdd\x08\xb7\x02\
\xb7\x9a\xcf\x79\x93\xfa\x7d\xee\x20\x70\x2a\x8a\x53\x80\x27\xe3\
\xf0\x62\xb4\x04\xe1\xe6\xac\xe9\x16\xce\xfb\x1f\x13\x21\xee\x56\
\xa1\x82\xe3\x3e\x05\xd3\x5f\xa1\xf9\x29\x66\xa4\xb2\x61\xda\xff\
\x6a\x02\x07\x1d\x01\x22\xe8\xe1\xf2\x79\xc0\xc5\xc0\x59\x49\x6e\
\x50\xa7\xd6\x2f\x70\x7a\x3b\xd1\x10\x96\x77\x3c\x41\x30\xb8\x52\
\x63\xd9\xe8\x64\x34\x80\x64\x34\x01\xe0\xab\x2d\xf8\x7c\x1d\xe1\
\x52\xf3\x39\x6f\xbc\xe9\xfc\xfb\x6b\x77\x29\x8a\x0f\xe0\x30\x8c\
\x96\xa1\x1a\xd3\x27\x5e\x67\x8c\xb4\x16\x10\x60\x60\xda\x19\x72\
\x61\xe8\xd3\xcd\x39\x0e\x5a\x02\x44\xa8\x4b\x04\x28\x30\x7d\x66\
\x18\x01\x2a\x4a\xcb\x43\x23\x13\xc8\x3e\x9f\x24\x10\x7e\x85\x15\
\x97\x27\x53\xfb\xa7\x84\x5f\xf0\xd5\x06\x0c\xff\xd3\x7c\xde\x9b\
\xf5\x44\x55\xfa\x7d\xee\xe7\x70\xe4\x7f\x02\xa5\x54\x14\xc8\x26\
\x40\x44\x02\x8f\xa0\xb5\xb8\x34\xad\x47\xfc\x99\x19\xa9\xfc\xe3\
\x6c\xdf\x33\x0f\x07\x3d\x01\x22\x84\x44\xf8\x28\xf0\xe4\x38\xb1\
\x19\xe7\x57\x15\x38\xbb\x73\xa9\x05\x8a\x04\x5f\x80\x0a\x81\xe9\
\x63\x37\x72\x65\x09\x10\x09\x3f\x54\x31\xea\x6a\xe0\x2f\xcc\x17\
\xbc\x39\xb5\xa9\xf5\x7b\xdd\x41\x14\x3f\xc3\x95\xa7\x24\x42\x9f\
\xd5\x02\x04\x24\x30\x04\x5a\xa0\xf9\x3c\xb9\xd4\x8c\x54\x2e\x9e\
\xcb\xf7\x8d\xd0\x25\x40\x06\x7a\xb8\x7c\x1a\xf0\x56\x82\x71\x07\
\xc9\x02\x73\x4d\x3b\xbf\xd2\x7a\x13\x08\x02\x21\x1a\x27\xe9\xc5\
\xa9\x48\x4c\x9f\x54\xab\xad\x80\x52\xbb\x11\xae\x00\x2e\x31\x5f\
\xf2\xbc\x66\x1f\x39\xa3\xfc\x7b\xaf\xfb\x4f\xb8\xf2\xf6\x5c\x12\
\x44\x04\xf0\x80\xb2\x0a\x56\x1a\x68\x0e\xff\x64\x46\x2a\xef\x6c\
\xfa\xea\x69\xa0\x4b\x80\x02\xe8\xe1\x72\x09\x38\x0f\x78\x0b\xf0\
\x62\x6c\xd7\xad\x59\xfb\x3f\x25\xf8\x33\xe8\x1a\x2d\x99\x3f\x44\
\x45\xe5\x03\x63\x12\xec\xe3\x17\x0e\xdf\x30\xe9\x92\x30\x89\x56\
\x37\xa3\xe4\x4b\xe6\x1f\xbc\x6f\xb7\x35\xef\xfe\xca\xfd\x0b\x1c\
\xf9\x32\x06\x55\x4b\x00\x02\x12\xc0\x74\xc2\xa2\x3f\x34\x23\x95\
\x96\xb4\x07\x74\x09\xd0\x04\xf4\x70\xf9\x50\xe0\x15\xc0\xf3\x80\
\x67\x11\xcc\xdf\x13\xa0\xdd\x9d\xe1\xa6\x08\x1a\x96\xb2\x70\x81\
\x92\xec\xc6\x51\xbf\xc6\xe1\xcb\xe6\xab\xd5\x5f\x76\x34\xcf\xde\
\xe3\x7e\x04\x57\x3e\x9e\x72\x80\xb3\x5a\xa0\x79\x33\xe8\x5a\x33\
\x52\x79\x46\x2b\xde\xb3\x4b\x80\x69\x22\x9c\xb1\xee\x2c\xe0\xb9\
\xe1\x76\x0e\x59\x65\xde\x4c\x5b\x40\xf6\xf7\x7a\xc5\x60\x80\x29\
\x09\x3a\x99\xd9\xd7\x69\xaa\x28\xb6\xe0\xf0\x63\x1c\x3e\x67\xfe\
\xa5\xda\xd1\x59\xd6\x6a\xf2\xea\x3d\xee\x4f\xd0\xf2\xfc\x38\x04\
\x9a\x67\x06\x35\xe7\x0c\x5f\x63\x46\x2a\x17\x34\x75\xe5\x34\xd1\
\x25\xc0\x2c\xa1\x87\xcb\xfd\xc0\x99\xc0\x09\xc0\xf1\xd6\xb6\x86\
\xa2\xae\x26\xcd\xd4\x7a\x42\x20\x24\x55\x01\x1f\x41\xb1\x0f\xcd\
\xa3\x28\xfe\x80\xe2\x57\xc0\x7f\x9a\xab\xaa\x8f\x77\xfa\xfb\x1b\
\xe6\xcf\x5f\xb9\x9b\x10\x59\x15\x93\xc0\x36\x83\xa2\xce\x72\x8d\
\xf1\x75\x33\x52\x79\x4b\x2b\xde\x6f\xfa\x4d\x12\x5d\xa4\x60\x46\
\x2a\xe3\xc0\xb5\xe1\x16\x43\x0f\x97\xfb\x80\x75\xc0\x5a\x60\x88\
\x60\x5d\xec\x60\x93\xf8\x78\x10\xc1\x03\x46\x51\xec\xc0\x50\xc5\
\xe0\x63\x10\x44\x82\x1e\x3d\x9a\x7b\xd0\xfc\xda\x5c\x55\x6d\xdb\
\x6a\x95\x73\x0a\x9f\x57\xe0\x70\x43\xee\x00\x9c\xe6\xdd\xf1\x96\
\x4d\xa2\xdb\xd5\x00\x5d\xb4\x1c\xfa\x2f\xdd\x5b\x81\xd3\x62\x33\
\xa8\xca\x74\xfd\x80\x8f\x9a\x91\xca\xdf\x37\xbc\x6a\x26\xef\xd6\
\xe9\xcc\xe9\xe2\x20\x80\xe1\x0d\x35\xe3\x28\xa2\xad\xb9\xfa\xb7\
\x65\xcb\xa9\x76\x09\xd0\x45\xcb\x61\xfe\xc1\xfb\x23\x46\xdd\x03\
\xd4\xf6\x36\x6d\xce\xb0\xbb\xb1\x55\xef\xd6\x25\x40\x17\xed\x81\
\xf0\x9d\x74\x74\x4c\xa2\xf4\x46\x18\x05\xee\x6e\xd5\x6b\x75\x09\
\xd0\x45\xbb\xf0\xa5\x9a\x94\xe6\x4c\xa0\x9b\xcc\x48\xa5\x65\x01\
\x80\x2e\x01\xba\x68\x0b\xcc\x57\xbc\xed\x08\x4f\xd4\xfc\xd0\x58\
\x02\x5b\xba\x78\x46\x97\x00\x5d\xb4\x0f\xa2\x1e\xac\x49\x6b\x1c\
\x01\xfa\x41\x2b\x5f\xa9\x4b\x80\x2e\xda\x07\x21\x59\x05\x26\x1a\
\x8b\x5d\x5f\x02\x1f\x30\x23\x95\xeb\x5b\xf9\x4a\x5d\x02\x74\xd1\
\x4e\x3c\x9a\x9a\xad\xae\xde\xf8\xe3\x00\xdf\x68\xf5\x0b\x75\x09\
\xd0\x45\xfb\x20\x3c\x68\x1d\x37\x83\x7f\x6d\xf5\x2b\x75\x09\xd0\
\x45\x3b\xd1\x97\x1a\x54\x5f\xbf\xf6\xff\xa9\x19\xa9\x3c\xd0\xea\
\x17\xea\x12\xa0\x8b\xf6\x41\x58\x9b\x22\x80\x5b\x97\x01\x1f\x69\
\xc7\x2b\x75\x09\xd0\x45\x3b\x71\x78\x3c\x7d\x0a\xd4\xeb\x8a\x79\
\xb5\x19\xa9\xcc\x7a\xac\x72\x33\xe8\x12\xa0\x8b\xf6\x41\x58\x11\
\xd7\xfe\xd1\xac\x71\xb5\x30\x04\x63\xb3\xdb\x82\x2e\x01\xba\x68\
\x1f\x8c\x1c\x15\xcf\x1f\x54\x6c\xfe\x8c\x98\x91\xca\x1d\xed\x7a\
\xa5\x2e\x01\xba\x68\x0b\xf4\x70\xe9\x64\x0c\x0b\x30\x2a\x99\xbe\
\xa5\x16\xf7\x62\x2f\x79\xd5\x8e\xf7\xea\x74\xc6\x74\x71\x90\x40\
\x78\x7f\xb2\x92\x4c\x6e\xed\x5f\x05\x5e\x1f\x0e\x30\x6a\x1b\xba\
\x04\xe8\xa2\x3d\x30\xf2\xc2\xb8\xf6\xcf\x1f\x07\x7c\x89\x19\xa9\
\xb4\xac\xdb\x73\x11\xba\x04\xe8\xa2\xe5\xd0\xef\x28\x9d\x8f\xa7\
\x96\x23\x04\xb3\xc2\xd5\xe2\xdf\x80\x4f\x77\xe4\xdd\x3a\x99\x31\
\x5d\x1c\x24\xf0\xe5\x4a\x0c\xc1\xb2\xaa\xb5\xf2\xff\x9f\xc0\x9b\
\x5b\xd9\xe5\xb9\x1e\xba\x04\xe8\xa2\xa5\xd0\x6f\x2f\xbd\x03\x5f\
\xad\x88\x67\xae\x4b\xe3\xe7\xc0\x45\x66\xa4\xd2\xd2\xd9\xea\xea\
\xa1\x3b\x28\xbe\x8b\x96\x41\xbf\xa3\xb4\x90\x2a\x9b\x30\x0c\xd0\
\x5b\x53\xfb\xff\x08\x78\xb5\x19\xa9\x8c\x75\xf4\x1d\x3b\xf9\xf0\
\x2e\x0e\x70\x78\xdc\x84\xe4\x0a\xff\xa7\x80\x0b\x3a\x2d\xfc\xd0\
\x9d\x17\xa8\x8b\x16\x41\xbf\xad\x74\x0d\x86\xa3\x33\x2b\xc2\x8c\
\x03\x6f\x33\x23\x95\x6f\x76\xfa\xfd\xe2\xf7\xec\xf4\x0b\x74\x71\
\xe0\x41\xbf\xad\x74\x1e\xc2\x4b\x32\xc2\xff\x6b\x82\x15\x1f\xe7\
\x8d\xf0\x43\x57\x03\x74\xd1\x0a\x68\xe5\xe0\xc4\x46\xcf\x56\xe0\
\xfd\x66\xa4\xd2\xf2\xbe\xfd\x33\x41\x97\x00\x5d\xcc\x3d\x1c\xfe\
\x13\x18\x06\x56\x00\x97\x9b\x91\xca\xee\x4e\xbf\x52\x11\xba\x51\
\xa0\x2e\x0e\x6a\x74\x7d\x80\x2e\x0e\x6a\x74\x09\xd0\xc5\x41\x8d\
\x2e\x01\xba\x38\xa8\xd1\x25\x40\x17\x07\x35\xfe\x3f\x03\x96\xe2\
\x1d\xff\x99\x30\x98\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\
\x82\
\x00\x00\x14\x3c\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x30\x00\x00\x00\x30\x08\x06\x00\x00\x00\x57\x02\xf9\x87\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x25\x00\x00\x80\x83\
\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x6f\x92\x5f\xc5\x46\x00\x00\x00\x06\
\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\
\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\
\x9a\x9c\x18\x00\x00\x13\xb0\x49\x44\x41\x54\x68\xde\xbd\x59\x77\
\x7c\x5c\xd5\x95\xfe\xee\xbd\xaf\x4c\x1f\x8d\x66\xd4\xad\x62\x4b\
\xb2\x64\x6c\xe3\x86\x3b\x98\x62\xc0\x80\x29\xa6\xc5\x31\xb0\x60\
\xb0\x09\x09\xcb\xee\x12\x02\xa1\x24\x10\x08\x4b\xe0\xb7\x21\xd9\
\x04\x42\x0b\x84\x04\x02\xa1\x85\x40\x08\x60\x03\xae\x18\xdb\x92\
\x2b\x56\xb5\xac\xde\x65\x49\xa3\xd1\x8c\xa6\xbc\x7e\xf3\x87\x46\
\x8e\xc6\x16\x98\xb0\x64\x8f\x7e\xe7\x77\x35\xf3\xde\xcc\x3b\xdf\
\x3d\xdf\x69\x77\x08\xe7\x1c\x63\x32\x6b\xd1\x22\x9c\x4c\x28\xe7\
\x20\x16\x00\x70\x98\x9c\x33\x26\x89\x66\x53\x5d\x1d\x0a\xf3\xf3\
\x21\x88\x22\xc0\x11\x50\x41\x6e\x33\x74\xb3\xae\xb3\xb5\x69\x5f\
\x71\x69\x59\x97\x4c\xed\x1a\xc7\xe8\x73\x38\xe1\x30\xa9\x05\x42\
\xf9\x49\x9f\xf5\x55\x44\xf8\x5a\x9f\x22\x00\x2c\x4e\xb9\xc5\xa7\
\x81\xf3\x2e\x00\xc3\x9c\x30\x80\x30\x70\xc2\xf3\x1c\xb2\x6d\x85\
\x21\x9a\xd3\x9c\x1e\x4f\x31\xa5\xf4\x23\x0e\x1c\x02\x60\x26\x3f\
\xf8\x8d\x18\x7e\x6c\x43\xbf\x8e\xf1\xba\xa6\x2d\x80\x20\x7c\xe0\
\x4d\xf3\xee\x35\x2d\xeb\xbf\x38\x38\x38\xa5\xb0\xb8\x08\x42\xc5\
\x6b\x6f\xff\x8f\xdb\x16\xbd\xf9\xea\xcb\xdf\xfa\xfd\x0b\x2f\xfc\
\xcc\xe1\x72\xfe\xde\x20\x6a\x9e\x49\x75\x98\x54\x05\xa7\x1c\xe4\
\x5f\x06\x80\x7f\xb1\xf2\xa4\x6a\xba\xb9\x3a\xbf\xb0\x70\xd3\x2f\
\xfe\xe7\xb1\x0b\xde\x7b\xfb\x6d\xdb\x9c\x53\x67\xdd\xc1\x2d\xb2\
\x5c\xd7\x75\x71\x24\x1a\x2d\x27\x94\x5e\xba\x74\xf1\x22\xe4\xe6\
\xe4\x60\xe9\xe2\x45\x10\x05\xc1\x72\xbb\x1d\x7d\x76\x9b\x88\xf6\
\x96\x26\x50\xf6\xcf\xef\xd9\x97\x49\x0a\x85\xb8\x39\x31\x2f\x09\
\x01\x18\x2c\x80\x03\xd1\x91\x91\xdc\x8b\x57\x5e\xe4\x3e\x7b\xd9\
\x99\x18\x18\x38\x8a\xeb\xaf\x59\xe3\xa9\xae\xad\xfb\x5d\xba\xcf\
\xdb\x5c\x52\x5c\x32\xb3\x7c\x6a\x49\xc6\xe0\xe0\x51\x74\xf7\x74\
\xc2\xb2\x08\x74\x5d\xef\xca\x09\xa4\x6b\xb1\x78\x02\x96\xf9\xcd\
\xd2\xe7\x04\x00\xe0\x13\x3b\x97\x10\x0e\xd3\x10\x21\x12\x13\x4e\
\x87\xed\x8d\x0f\x36\x7e\xb4\x76\xc1\xdc\xb9\xa7\x1a\x5c\x43\x5e\
\x5e\x0e\x9e\x7f\xea\xc9\x42\x5f\x7a\x5a\xa1\x2f\xcd\x07\x49\x12\
\x11\x89\x44\x10\x8f\xc7\xd0\xd6\xde\x09\x55\xd7\xca\x07\x43\xc3\
\x33\x14\x45\xad\x31\x4c\xd3\xa3\xaa\xea\xed\x84\xb2\x66\x46\xc9\
\xab\xdf\x04\x00\x32\x3e\x0b\x9d\x3a\x6f\xe2\x2c\x44\x18\x87\xa9\
\x89\x20\x12\x01\x11\x4d\x04\xfb\x83\x3f\x7f\xe8\xde\xbb\xef\xbc\
\xec\x92\x8b\x21\x08\x02\x06\x83\x83\xe8\xed\xeb\x41\x24\x12\x06\
\xc0\xe1\x72\xbb\xe1\x4f\x0f\x20\x23\x90\x89\x23\x8d\x8d\x78\xe4\
\xb1\x9f\x77\x35\xb7\xb6\xfd\xb9\xac\xb4\xf4\xe2\x39\xb3\x66\x95\
\xec\x3d\x78\x20\xd2\xdd\xdd\xbd\x82\x09\xac\xe2\xff\x03\x00\x03\
\x35\x0b\x0d\x55\xec\x96\x65\xa6\xc6\x94\xe1\x69\x25\xc5\xc5\x1b\
\x9f\xf8\xc5\xe3\x05\x36\x9b\x84\xfd\x07\xf6\x22\x1a\x8b\x41\x14\
\x45\x30\xc6\x40\x19\x05\x25\x00\x07\x81\xcb\xe9\x44\x59\x69\x39\
\xba\xbb\xfb\xb0\x69\xcb\x56\x9c\xb7\xfc\x1c\xcc\x9c\x31\x03\x1f\
\x6f\xde\x82\x9f\x3c\xfc\xc8\x16\x0b\xd6\x72\x42\xfe\x6f\x21\x9d\
\x0a\xe0\xb4\x85\x27\xdc\xc0\x39\x4a\x65\x59\xfc\x50\x96\xe4\x58\
\x42\x51\x5a\xed\x76\xf9\x94\x5f\x3f\xfe\xf3\xa9\x93\xf2\x72\xb0\
\x7d\xc7\x56\x70\x00\xa2\x28\x80\x52\x0a\x51\x14\x41\x29\x05\xe7\
\x00\xa1\x00\x01\x01\x63\x0c\xa5\x25\x53\xc1\x2d\x82\x9a\x9a\x6a\
\x94\x14\x97\xc0\xe3\xf5\x61\xcd\x0d\x37\x0e\x84\x42\xa1\x95\x94\
\xd2\xbd\xa9\x16\x91\xd1\x6c\x71\x82\xa5\x63\x06\xa5\xbe\x7d\x5c\
\x0c\x1c\x97\x94\x38\x07\x21\xd6\xea\xef\x7e\x67\x5d\xc9\xa5\x2b\
\x2f\x42\x55\x75\xcd\x2c\x5d\x37\x50\x56\x56\x8a\xed\x3b\xb6\x00\
\x04\x90\x44\x11\xa2\x20\x21\x10\x48\x07\xe7\x04\xa2\x28\xc2\xe1\
\xb0\x63\x20\x38\x08\x5d\xd3\x40\x28\x45\x77\x4f\x17\x72\x73\xf2\
\x90\x96\xe6\x43\x43\x63\x33\x5e\x79\xfd\x8d\xa1\xa3\x3d\xbd\xbf\
\x71\xd9\xbc\xd5\x38\x2e\x71\x70\x58\xa3\xe8\x53\x6c\x27\xe0\xcc\
\x84\xc1\x4d\xf0\xe3\xc0\xa5\x00\x30\x15\x7b\xca\x45\xcb\xe2\x70\
\xfb\x70\xce\x05\xe7\x2e\x87\x65\x9a\x70\x38\x24\x14\x15\x94\xa1\
\xbb\xbb\x0b\xb1\x58\x0c\x92\x24\x41\x92\x24\xf8\x7c\x7e\xec\xaa\
\xd8\x83\x86\x86\xc6\x18\x21\x94\xce\x9e\x7d\xaa\x7d\xe5\x05\xe7\
\xa3\xbb\xb7\x1b\xaa\xa6\xc0\xb4\x4c\x98\xa6\x01\xcd\x30\xf0\xd3\
\x9f\x3d\xda\x19\x8d\x8c\xac\xef\xef\xe9\xf9\xd8\x3b\xd5\x07\xd3\
\x34\x47\x0d\x27\x00\x81\x05\xcb\x52\x40\x04\x79\x14\x48\x52\x28\
\x61\x27\xec\xfc\x84\x00\x44\x9b\x76\x3c\x7d\xec\x9a\x61\xa6\x7f\
\xb4\x69\x33\xe6\xcf\x9b\x07\x51\x90\xc1\x04\x01\x83\x43\x83\x60\
\x4c\x80\x20\x08\xf0\xfb\x03\xd8\xb9\xb3\x12\xbf\x7f\xe9\x95\xbf\
\x18\xba\x71\x7f\x47\x5b\x3b\xdb\xb0\x31\xeb\x01\x59\x92\xaf\xba\
\x70\xc5\x72\x34\xb7\xb5\x80\x50\x02\x45\x51\xe0\x72\x39\x90\x96\
\xe6\xad\x57\x55\x6d\x77\x5e\xfe\x24\x18\xd0\xc1\x49\xb2\xc5\xa0\
\x1c\x04\x04\xb0\x08\xbe\xd0\xda\x09\x24\xc5\x57\xa5\x53\xb2\x53\
\xb4\xbc\x34\x37\x21\x49\xe2\x0d\xbf\x7c\xe2\xa9\xc7\xd6\xdd\x72\
\x6b\xc3\x1b\x6f\xbe\xc3\x19\x13\xa0\xa9\x2a\x44\x51\x80\x2c\xc9\
\x70\x3a\x9c\xf8\x6c\x77\xa5\xa6\x26\xcc\x7b\x1c\x36\x47\x9d\xd3\
\x69\xaf\xa6\x84\x3c\x74\xa8\xaa\x3a\x2a\x8a\x32\x64\x59\x06\x63\
\x0c\xa6\x65\xc2\x9f\xee\x47\xfe\xa4\x49\x53\x15\x45\x29\x74\x39\
\x1c\xe0\xe3\xfe\xbe\xae\xa4\x00\xa0\x94\xa6\x28\x1b\x5d\x0f\x71\
\xc3\xb8\x37\x1c\x0c\x5e\xb1\xb3\xb2\xa2\x5b\x51\x12\x20\x8c\x80\
\x09\x02\x98\x20\x40\x96\x65\x80\x13\xcb\xe2\xb2\x6e\x1a\x16\x26\
\xe5\x17\x42\xa0\x84\x31\x41\x10\x65\x9b\x1d\x82\xc0\x20\x30\x06\
\xca\x08\x04\x81\x61\xfa\xb4\x69\x45\x19\x81\xc0\x8b\x23\x09\xe5\
\x21\x70\x2c\xc1\xd7\x69\x67\xbe\x08\x80\x65\xf1\x14\x35\x2d\x0e\
\x70\x13\xa6\xa1\x41\x74\x39\xea\x54\x4d\xeb\x56\x14\x05\x76\x9b\
\x1d\x8c\x51\x58\x96\x09\x4a\x19\x2e\xbf\x6c\xa5\xcd\xe1\xe6\xcf\
\xc6\x95\x91\xe2\x91\x68\xac\x44\xb0\xc9\xf7\xcf\x9f\x37\x57\x16\
\x45\x06\xce\x39\x18\x63\x90\x44\x19\x4a\x22\x81\xe9\xe5\x53\xf1\
\xd0\xfd\xf7\xcd\xbf\xe5\x96\x9b\x1f\x60\xa2\xf0\xa8\x05\x78\x09\
\xfd\xa7\x58\x93\x22\x29\x31\xd0\xd8\xd9\x79\xc2\x0d\x96\x65\x82\
\x70\x0b\xba\x61\xce\xf7\x7a\xe4\x6c\x45\x51\x90\xee\x4b\x47\x24\
\x32\x0c\x4a\x29\x86\x42\x41\x5c\xb2\xf2\x22\x08\x82\xb8\xe2\x9d\
\xf7\xde\xaf\x13\x04\x01\xab\x2e\xbe\x48\x5a\x75\xe9\x25\x68\x6d\
\x6f\x01\x21\xa3\xa9\x54\x92\x6d\x88\x84\xc2\x18\x1a\x1a\xc2\xec\
\xd9\xb3\x21\xcb\x36\xbc\xa8\xe9\xbd\x04\x64\xd8\x12\x08\xb8\x6a\
\xf8\x08\x48\x84\x72\x9e\xec\x5a\xbf\x1a\xa2\xd4\x5e\x68\xa2\xfc\
\x0b\xe2\x22\x82\x74\xd7\x8c\xf2\xf2\x5b\xaf\xbe\x62\x55\x20\x1a\
\x8d\x22\xdf\x37\x09\x2e\x97\x07\xba\xae\x41\xd1\x12\xe8\xe8\x6a\
\xc3\x85\x2b\xce\xc3\x95\x97\x5f\x26\x51\xca\xa0\xe9\x1a\x5a\x3b\
\x5b\xa0\xa8\x09\x08\x82\x00\x41\x60\x00\xe7\xd8\xbc\xed\x53\x38\
\x6c\x4e\x3e\x1c\x89\x90\xca\xfd\xfb\xa0\x6b\xc6\x21\xd9\x66\xe7\
\x9a\xa5\xcf\x72\xd9\xed\x2f\x6a\x9a\xd6\x1c\x57\xf4\x83\x36\x51\
\xfc\x98\x10\x1c\xc4\x57\xe8\xbd\x53\x00\x58\x13\x00\xd0\x75\x3d\
\x23\xbf\xa0\xe0\xee\x7b\xee\xba\x43\x96\x25\x11\x6d\xed\x6d\x08\
\x04\xfc\x28\x2a\x2c\x42\x5b\x47\x2b\x00\x20\x9e\x88\xa3\xa9\xe5\
\x08\x1c\x0e\x07\x08\x21\xd0\x0c\xed\x58\x11\x23\x84\xc0\xe3\xf6\
\x62\xff\xfe\xcf\xf9\xcb\xaf\xbc\x46\xd2\xbc\x69\x44\x7e\x4f\xd6\
\xc2\xe1\x70\x9b\x2c\x09\x1b\x0d\xa2\x01\x86\x79\xe1\x8d\x37\xad\
\x9f\xbb\xf2\xa2\x0b\xe6\xee\xaa\xac\xbc\xfa\xe9\x67\x9e\xbb\x36\
\x34\x1c\xba\x88\x31\xd6\x71\x32\x4f\xa4\xc4\x00\x99\x40\x05\xc6\
\xda\x07\x07\x07\x3f\x6c\x38\xd2\x88\xac\xac\x2c\x80\x53\xbc\xf7\
\xfe\x87\x38\x7a\x74\x00\x25\x53\x4a\x61\xb3\xd9\x20\x08\xa3\x95\
\x58\xd5\x54\xe8\x86\x06\x4a\x46\x93\x80\x20\x88\xf0\x7a\xd3\xd0\
\xd8\xd8\x82\x3f\xbc\xfc\x1a\xb9\xe7\x87\x77\xf2\xf7\xfe\xf2\x67\
\x04\x02\x7e\x33\x38\x38\x70\xa7\xa2\xc6\x1b\xc2\xc1\x70\x71\x76\
\x20\xf3\xba\xf3\xce\x3d\x1b\x99\x19\x7e\x2c\x3c\x6d\x1e\xdc\x2e\
\xe7\x88\x61\x58\xf1\xd1\xfd\xfc\xf2\x56\x23\xb5\x0e\xd0\x09\x12\
\x02\xa5\x96\xaa\xe9\x8f\xbd\xfe\xd6\x3b\x67\x1c\x6e\x38\x12\xd8\
\xbb\x6f\xff\xa1\xc6\xe6\x96\xcd\x9f\xee\xf8\x6c\xf5\xba\x9b\xd6\
\xe6\x2d\x5c\x38\x0f\xd1\x58\x0c\x9a\xa6\x26\x2b\x37\x01\xa3\x14\
\x82\x28\xc2\x34\x2d\x1c\xed\x3d\x8a\xbf\xbc\xf3\x37\xd8\xed\x0e\
\x7e\xd9\xca\x8b\x49\x34\x3a\x82\xf5\x6b\x6f\xb0\xd7\xd7\x1f\x7e\
\x2e\x33\x2b\xab\x76\xca\xe4\xc2\x29\xa5\xc5\xc5\x93\x63\xf1\x28\
\xaa\xaa\xab\x31\x30\x10\x44\x73\x7b\xfb\x41\x59\x90\x07\x81\xd1\
\xce\xe2\xcb\x24\xa5\x17\x72\xb9\xdd\x13\xde\x64\x59\x16\xd2\x7c\
\xfe\x6f\xb9\xbc\xe9\x79\x92\x4d\x78\x9d\x89\xac\x37\xa6\xe9\x73\
\x5d\xa2\xf4\xd6\xcd\x6b\xaf\x9f\x72\xc6\x19\x4b\x60\xb3\xc9\x10\
\x45\x09\xba\xa1\x43\x51\x14\x84\xc3\x11\x0c\x05\x87\xd0\xdd\xdd\
\x05\xb7\x3b\x0d\x6f\xbf\xfb\x37\xdc\x7b\xd7\x1d\x30\x2d\x1d\x3e\
\x6f\x3a\x46\x62\x09\x64\x66\x04\xe0\x4f\xf7\x01\x20\x88\xc6\xa2\
\x30\x0c\x03\x9f\x7f\x5e\x85\xbb\xef\xbf\xff\x20\xb3\x84\xeb\x89\
\x44\x6a\x0c\x5d\x4f\x23\x84\x7c\x97\x10\x32\x48\x18\x79\xe1\xf8\
\x38\x4d\x01\xf0\x65\x9d\x61\x76\x6e\x2e\x72\xf2\xa7\xc0\x82\x01\
\x8b\x5b\xd0\x09\x85\x4c\xe8\x1f\xd6\xae\x59\x7d\x83\xdf\xef\x81\
\xdd\xee\xc4\x27\x5b\x3e\xc5\xde\x7d\x07\x3a\x25\x51\xe8\x32\x0c\
\x23\xe7\x9a\xd5\x57\x15\x4d\x2f\x2f\x43\x76\x4e\x36\x1e\xff\xf5\
\x53\xb0\x49\x12\x7f\xf4\x91\x87\x09\xa3\x14\xc1\x50\x10\x3d\x3d\
\x5d\x88\x44\xc2\x20\x84\xc0\xe5\xf2\xc0\xef\xf7\xc3\x9f\xee\x47\
\x43\x43\x23\x9e\x78\xfa\x99\xde\xea\xda\xfa\x57\xa6\x96\x96\x5c\
\x31\x6f\xce\x9c\xe2\x3d\x7b\xf7\x0d\x75\x74\x77\x5e\x42\x29\xdd\
\xf5\x85\x14\xfa\x2a\x42\x4c\x03\x94\x73\x30\x5d\x95\x32\xf3\xf3\
\x17\x4f\x9f\x3e\x0d\xb2\x28\xa0\xbb\xa7\x0f\x55\x55\xd5\x35\x8a\
\x1a\x5f\x13\xec\x0f\xd7\x78\xd3\x7c\x97\x6e\xde\xba\xe3\x95\xc9\
\x45\x45\xee\x4d\x5b\xb7\xa3\xa7\xb7\x97\xaf\x5b\xbb\x96\xa8\x6a\
\x02\x7b\xf7\xef\x41\x2c\x16\x85\x24\x49\xc9\x40\xa7\x18\x1e\x1e\
\x42\x28\x3c\x84\xde\xbe\x1e\x94\x14\x97\xe2\x81\xfb\xee\xc9\xd9\
\xf8\xc9\xe6\xbb\xce\x39\x73\x19\x66\x4e\x9f\x81\x6d\x3b\x3e\x4d\
\xbf\xeb\xbe\x1f\xdf\x09\x60\x35\x21\x44\x9f\x30\x88\x4f\x2a\x9c\
\x03\xba\x09\x62\x18\x60\xdc\x72\x85\x42\xa1\x9c\x6d\x3b\x3e\xc3\
\x48\x4c\x41\x53\x4b\x2b\xa2\x91\xf0\xdb\xfd\x5d\x9d\x35\x47\xfb\
\xfa\xe6\x38\x3d\xde\x5f\x0d\x0e\x05\xd9\x93\xcf\xfe\x16\x2f\xbf\
\xfa\x1a\xbe\x7b\xf3\x7a\xb2\xfc\x9c\x65\xd8\xf1\xd9\x76\x28\x89\
\x04\x64\x9b\x0d\x82\x20\x42\x92\x46\xdb\x0d\x41\x14\x21\x89\x22\
\x0c\x43\x47\x53\xcb\x11\x78\xd3\x3c\xb8\xfa\x8a\xcb\xd0\xdb\xdb\
\x8d\xbe\xfe\x3e\xe4\xe5\xe6\x22\xc3\x9f\xb1\x8a\x9b\x38\x95\x82\
\x61\x4c\x4f\x1e\x03\x84\xc0\x34\x4d\x04\xb2\xb3\x10\xc8\xca\x81\
\x19\x27\xa0\x82\x0a\x58\x16\x74\x4a\x0b\x89\x65\xad\xf1\xb8\xdc\
\x17\x26\x14\xcd\x46\x4c\xf3\x0e\xdd\xd4\x76\x26\x54\xed\xb6\xbb\
\xef\xba\xf3\xc9\xab\xae\x58\x85\x0d\x1b\x3f\x42\x76\x76\x36\x5f\
\xb4\x60\x3e\xd9\xb2\x7d\x13\xe2\xf1\x38\x98\x20\x40\x12\x45\xf8\
\xd3\xd3\x01\x42\x21\x89\x22\x6c\x36\x19\xc1\xd0\x10\x54\x55\x05\
\x63\x14\x36\xd9\x86\xcc\x40\x36\xfa\xfb\xfb\xd1\xdb\x7b\x14\xaf\
\xbf\xfd\xce\x48\x63\x63\xd3\x2f\xa9\x85\x47\x41\xa0\x4e\x48\xa1\
\xf2\x19\x33\x4e\x0c\x60\x42\x46\x0f\x26\xb8\x05\xd3\x30\x00\x08\
\x20\x82\x08\x95\x5b\x60\x26\x0f\xa9\x89\xe8\x6f\xd2\xb2\x32\x1f\
\x53\x14\x53\x82\xc5\x74\x8b\x58\xa4\xa0\x30\x7b\xf9\xc2\xf9\xf3\
\xc0\xb9\x89\xa2\xc9\x93\x30\xb9\x60\x0a\x69\xeb\x68\x45\x42\x49\
\x40\x14\x45\xc8\xb2\x04\x8f\xc7\x8b\x7d\x07\x0e\xe1\x50\x55\x4d\
\x1c\x84\xf0\xf9\x73\xe7\x3a\xcf\x5d\x7e\x16\x82\xc1\x41\x24\xd4\
\xf8\x68\x0b\xce\x0d\x50\xc6\xf0\xc4\x33\xcf\xc5\xfa\x06\x06\x56\
\xd9\xec\xf6\x2d\xe0\xa9\xb5\x2d\x75\x1e\x48\xf6\xe6\x13\x01\x38\
\xe6\x0d\xcb\x9a\x4c\x4c\x73\x9d\xdb\xee\x38\x47\x14\x58\x41\xc4\
\xb2\xfe\x3c\x10\x8a\x7c\x9f\x5b\x44\xa3\xd4\x84\x8d\x70\x39\x1e\
\x8f\xcf\xdc\x55\x51\xc1\x97\x2e\x59\x44\x04\x26\x82\x32\x82\x60\
\x70\x30\x59\x1b\x18\xd2\xd3\x03\xa8\xae\xa9\xc1\xb3\xcf\xbf\xf4\
\x96\x69\x18\x0f\x80\x70\xb2\x6b\x67\xc5\x23\x7e\xbf\xff\xf2\xc5\
\x8b\xe6\xa3\xb5\xad\x09\x94\x31\x28\x6a\x02\x6e\x8f\x1b\x69\x5e\
\x4f\x6f\x30\x18\xda\x49\x2d\x02\x4e\x48\x4a\xc7\xf0\x95\x62\x80\
\x10\x02\x41\x10\x00\xf0\xd9\x3e\xbf\xb3\xe6\x07\xb7\xdf\xfe\xa3\
\x77\xdf\xf8\xd3\xe2\xb7\x5e\x7d\x25\x6f\xfa\x8c\x69\x57\x8f\x84\
\x43\xc5\xd9\x59\x5e\x98\xa3\x9e\x3d\x65\x70\x30\xb8\xf3\xe9\xdf\
\xbe\xf0\xcc\x8d\x37\xdf\xba\xfb\x9d\xbf\x7e\x70\x2c\xbd\x0a\x8c\
\x41\x92\x24\x88\x82\x88\x77\xff\xfa\x61\xaf\x65\xe1\x71\xa7\xc3\
\x79\xd8\xe9\x70\xd4\x83\x92\x87\x5f\x7b\xe3\xad\x4e\x49\x94\x60\
\xb3\xdb\x41\xe9\xe8\x10\xe3\x76\x3b\x41\x08\x09\x28\x8a\x9a\x0e\
\x90\x13\x8e\xac\x4e\x0a\x80\x11\x0a\x43\xd5\xd0\xd1\xd4\x84\xf6\
\xa6\xa6\xde\xa2\xa2\x02\xb2\x66\xf5\xd5\x08\x85\x87\xa0\x6a\x2a\
\x4a\x4a\x4a\x72\x75\xcb\x3a\x8b\x51\x0a\xdd\xb0\x96\x7a\xfd\x19\
\x7f\xbb\xf2\xf2\x55\xd7\xdb\x6c\xb6\xbc\xa1\x50\x70\x7d\x73\x73\
\x73\xd0\xb2\x2c\x50\xca\xc0\x04\x06\xca\x18\x64\x59\x86\x24\xcb\
\x84\x31\x80\x1b\x1c\xba\x61\x80\x50\x6a\xa3\x94\xca\x92\x24\x43\
\x14\x45\x08\x02\x03\x63\x0c\x96\xc5\x71\xea\xcc\x99\x69\x99\x19\
\xfe\x37\x15\x35\xf1\x53\x80\x9f\x9e\x62\xdf\x83\x0f\x3e\x78\xec\
\xc5\x73\xcf\x3f\x3f\x01\x04\x8a\x51\xe4\x14\x43\xfd\xfd\x5a\x66\
\x56\xe6\xaa\xa2\x82\xc2\x9c\xae\xee\x4e\x0c\x0d\x05\x41\x29\x43\
\x53\x6b\xfb\x92\xe1\x70\xf8\x42\x7f\x46\xc6\x6d\xb7\xac\xbf\x31\
\xe7\xea\x2b\x57\xc1\x26\x4a\xe5\xd5\xb5\x75\x73\x1c\x2e\x27\xce\
\x3d\xfb\xec\x80\xa2\xc6\xa1\x6a\x1a\x08\x21\xc8\xca\xcc\x42\xba\
\xcf\xe7\xfa\x64\xd3\x96\x6c\x43\x33\x2b\x54\x43\x4d\xe3\xa6\xf5\
\xf0\xbd\x77\xdd\x39\xaf\x78\xca\x64\xf4\x0f\xf4\x41\x10\x04\xc8\
\x36\x19\x4a\x42\x45\x9a\xc7\x83\x33\x97\x9d\x5e\x30\x29\x37\xef\
\xcc\x86\xc6\xc6\xa5\x9a\x6e\xfc\x96\x32\x6a\x12\x42\x52\x63\x60\
\xa2\x32\xa6\xe9\xda\x4c\xd3\xe2\x9a\xc7\xe7\xeb\x70\x3a\x1d\xce\
\xbe\xfe\x01\x77\x7b\x47\x07\x4e\x9b\x37\x1b\x94\x32\x94\x4d\x2d\
\xc7\x82\xd3\xe6\x05\x3a\x3a\xbb\xce\x1e\x89\x44\x60\x99\x3a\xb6\
\x7f\xba\x0d\x17\x5d\xb0\x02\x7b\x0e\x1c\x5c\x52\x55\x53\x6b\x1e\
\x3d\x7a\x14\x7e\xbf\x1f\x23\x23\x91\xd1\x91\x34\x38\x88\xf3\xcf\
\x3d\x17\xaa\x6a\x5c\xf2\xfe\x87\x1b\x56\x80\x00\x57\x5e\xbe\x4a\
\xba\xe0\xfc\x73\xd1\xd8\x72\x04\x8c\x09\xa3\x99\xc8\x66\xc7\xc8\
\xf0\x20\x06\x07\xfb\x51\x56\x56\x86\xd9\xb3\x66\xe0\xdd\xf7\xde\
\x6f\xb2\xcc\x98\x41\x93\x45\x37\x05\x80\x7e\x3c\x02\x8b\x9f\x92\
\x93\x9d\xb3\xa1\x20\x7f\x92\x58\x55\x5b\xbb\x37\x38\x12\x71\xad\
\xbd\xfa\x9a\x92\xb3\x96\x9d\x8e\xb6\xf6\x56\xb4\x77\x74\xc0\x34\
\x0d\x38\x9d\x4e\xe4\xe4\xe4\xa2\xb0\x20\x0f\xfb\xf6\xef\x03\xa3\
\x02\xb6\xed\xf8\x14\x5d\xdd\xdd\xb8\xe1\x9a\x6f\x53\x42\x38\x6c\
\x36\x3b\x9c\x2e\x17\x74\x5d\x87\xaa\x2a\x68\xef\x6c\xc5\xa5\x97\
\x5c\x80\xd5\xdf\xba\x42\xa2\x84\x40\xd7\x75\x34\xb6\x1c\x49\xb6\
\xe0\x0c\xa2\x20\x82\x5b\x1c\x87\xaa\x6b\xa1\x26\x14\x18\x26\x47\
\xc5\x9e\x7d\x08\x0e\x87\x36\x4a\x92\x68\x8d\x75\x0d\x29\x75\x60\
\xda\xc2\xd4\x73\x21\x25\x16\x7f\xf0\xf1\x87\x1f\xfe\xc9\xb2\x33\
\x96\x62\xdf\xfe\x03\xe8\xed\xed\xc5\x39\x67\x2f\x43\x65\x65\x05\
\x5a\xdb\x5a\xe1\xf1\xb8\x21\xcb\x76\x78\x3c\x1e\x70\x58\xf0\x7a\
\xbc\xc8\xca\xca\xc6\xee\xdd\x7b\xf0\xd2\x9f\xde\xc0\xf7\xbe\xb3\
\x0e\x8b\xe6\xcf\x43\xfd\xe1\x7a\x4c\x9f\x3e\x03\x4e\x87\x1d\xad\
\x9d\x2d\x20\x20\xc7\x12\x83\xdd\x3e\xda\x82\x1b\xc6\x68\x71\x65\
\x8c\x01\x00\xfc\xe9\x7e\xec\xdd\x77\x10\x0f\x3c\xf8\xb3\x06\xca\
\x68\xa5\xdf\x97\xbe\x28\x16\x8b\xe5\x25\x74\x75\x29\x65\xec\xd0\
\x84\x69\x54\xb2\xfe\xe1\x02\xd3\xb2\x6c\x7e\x8f\x77\xde\xf4\x53\
\xa6\x21\x38\x34\x00\x45\x8d\x62\xc1\x69\x73\xd0\xd2\xd2\x84\x8e\
\xce\x0e\xf8\x7c\x3e\x7c\x5e\x55\xab\x56\x55\x1f\xee\x5d\xb0\x60\
\x6e\xce\xc5\x2b\x57\xc8\x86\xa6\xc1\x30\x74\xb8\x3d\x6e\x74\x76\
\x76\x19\xb2\x24\x11\xaf\xc7\xcb\x08\x11\xf0\xee\x5f\xdf\xc7\x39\
\x67\x9f\x85\x92\xc9\xa5\xe8\xee\xe9\x82\x69\x9a\x20\x94\x40\xd3\
\xd4\xd1\xf9\x9b\x8d\xc6\x1a\x63\x0c\x4e\x87\x13\xb5\x75\x87\xf1\
\xe4\x6f\x9e\xeb\x6f\x6b\x6e\xba\x21\x3c\x14\xac\x04\x20\x65\x64\
\x64\xd8\xb2\xb3\xb3\xe3\xda\xb8\x74\x9f\x1a\x03\xe3\x1a\x3d\x4b\
\x37\x02\xb9\x85\x05\xc5\x4e\xa7\x13\x96\x65\xc0\x32\x2d\x48\xb2\
\x8c\xae\xee\x6e\xe4\xe6\xe6\x62\xf3\xb6\xcf\xf8\xee\x8a\xbd\x8f\
\x30\x51\xfc\xe3\x87\x1f\x7e\x72\x1d\x40\x7e\xba\xfe\xa6\xeb\x48\
\x34\x3a\x82\xbc\xbc\x6c\x3e\x65\x72\x81\xf9\xd4\xb3\xcf\x0b\x35\
\xb5\x75\xa8\xa8\xdc\xb3\xff\x50\x6d\xdd\x8e\x8d\x9f\x6c\xb9\xea\
\x87\x3f\xf8\xcf\x49\x73\x66\xcf\x44\x2c\x1e\x83\xaa\xa9\xe0\xdc\
\x02\x25\x04\x84\x32\x48\xa2\x00\xdd\x30\xd1\xdb\xd3\x87\x4d\x9b\
\xb6\xa1\xb9\xb1\x69\x6f\xd2\x78\x19\x80\x3e\x30\x30\x10\x19\x18\
\x18\x48\xcd\x92\xe3\xb3\xd0\xb3\x2f\x3c\x7f\x6c\x92\xa1\x8c\x92\
\x78\x22\x5e\xb6\x73\x77\x45\x79\x4b\x4b\x9b\x54\x54\x58\x80\xdc\
\xec\x1c\xb4\x75\xb4\x21\xe0\xf7\xe3\xe3\x4d\xdb\x95\xd0\x51\xe5\
\x4a\x66\x93\xfb\x15\x62\x1c\x20\xa6\xf9\xfd\x2b\x2e\xbd\x58\xd4\
\x74\x0d\x92\x24\x11\x91\x09\x4c\xd3\x34\xba\x7d\xc7\x2e\x44\x63\
\xb1\xc3\x03\x3d\x5d\xff\xde\xd6\xd6\xb2\xe5\x60\x75\xcd\x9c\x0c\
\x7f\x20\xd7\xeb\xf5\x42\x12\x24\xc8\x92\x0c\x0e\x02\x4d\xd5\x10\
\x1c\x1c\x46\x5b\x6b\x1b\x1a\x8f\x34\xa2\xb0\xb0\x10\x0d\x8d\xcd\
\xfa\xd0\x50\x70\xb3\xa6\xaa\x11\x00\xd2\x44\x79\x26\xc5\x03\x82\
\x94\xf2\x32\x4c\x09\xbd\xa5\xfe\xc8\x91\x7b\xab\x6b\xea\xd7\x1e\
\xaa\xaa\x7a\xe8\xbf\x1f\x7a\xc0\x95\xe6\xf5\xc0\xe9\x74\x61\x6a\
\x69\xb1\xdc\x70\xa4\xe9\x7b\x54\x93\xdf\x30\xe2\x89\x6b\xa7\x4f\
\x2b\xb7\x4b\xd2\x68\x53\x16\x1d\x89\x21\x91\x88\x91\xd3\x97\x2e\
\xc4\xe2\x05\x0b\xf0\xc7\xd7\xdf\xcc\x33\x34\x3d\xdf\xb2\xcc\xea\
\xf0\x70\xa4\x71\x78\x38\x3c\xaf\xae\xa6\x06\x76\x9b\x03\xef\x7e\
\xb0\x91\xd7\xd5\x1f\xee\xd2\x54\x35\x2c\x49\x62\xe6\xf5\xd7\xae\
\xc9\x2c\x29\x9e\x0c\x9f\xcf\x87\x45\x0b\xe6\x4f\x6f\x69\x6a\x5a\
\x17\x8d\x44\x7e\x9c\xf4\x02\x05\xa0\x27\xd5\x3c\x01\x40\x77\x5b\
\x7b\x0a\x3a\xce\x39\x7c\x99\x99\x43\x2e\x97\xfd\x97\x47\x9a\x9a\
\x66\xb5\xb6\xb6\x5c\x9f\x9f\x9f\x87\xa1\xd0\x10\xd6\xdf\xf4\x6f\
\x34\xe0\xf7\x3f\xda\xd7\x7f\xf4\x47\x05\x93\x26\x39\xaf\xbb\xe6\
\xdb\x88\x44\x23\x90\x24\x09\xc3\xa1\x2e\x44\xc2\x61\xf8\xbc\x69\
\x20\x8c\x21\x1a\x8b\x76\x72\xf0\x08\xe7\x98\x52\x90\x5f\x50\x5a\
\x52\x3c\x05\x5e\xb7\x0b\x5d\xdd\xbd\x68\x6e\x6e\xe9\x38\x5c\x5d\
\x7d\x9b\xaa\x24\xda\x6d\x2e\xd7\xa9\x1f\x6f\xda\xfa\x7c\x61\x7e\
\xbe\x7d\xcf\xfe\x03\x38\x50\x55\xa5\x10\x46\xda\x29\xa5\x6e\xcb\
\x3a\xd6\x03\x59\x18\x57\x8c\x53\x00\xf4\x75\x77\x9f\x50\x07\x5c\
\x69\x69\x20\x20\xe7\x4d\x2b\x2b\x5f\x69\x59\x1c\x5e\x8f\x17\x09\
\x25\x81\x78\x3c\x86\xf5\xeb\xae\x67\x84\x13\xaf\xdd\x61\xc3\xd0\
\x70\x08\x20\x1c\xa6\x6e\x62\xcf\xbe\x03\x48\xf3\xf9\xe1\xf6\x78\
\x50\xd7\xd0\x84\xe0\xc0\x40\x95\xae\xaa\xfd\xb2\xcd\x56\xa4\xea\
\x6a\x76\xe5\xbe\xfd\x58\xb6\x74\x09\xea\x1a\x1a\xd1\xd1\xd1\xb1\
\x21\x16\x8d\xf4\x01\xb0\x47\x87\x43\x87\xea\x6b\x6b\x7f\xf7\xab\
\xa7\x9f\xbd\xad\xbf\xbf\x7f\x6b\x6f\x47\xc7\xaf\xc3\xc3\xa1\x4a\
\xcb\xb2\xa4\xe4\x8e\x5b\x48\x1d\xd9\xf9\x49\x27\xb2\xa2\xa9\x65\
\x67\x95\x97\x97\xbf\x75\xcb\x4d\x6b\x03\xaa\xa6\x60\x24\x1a\xc1\
\xd2\x25\x8b\x61\x5a\x16\x54\x55\x81\x28\x8a\x90\x24\x11\xb2\x6c\
\x43\x38\x12\xc1\xcb\x7f\x7c\x9d\x6f\xd8\xf0\x49\x2d\x63\x4c\x73\
\x7b\xbc\x93\x2c\x6e\xc6\x1a\xeb\xeb\xd6\x85\x06\x07\x6b\x00\xb8\
\xbc\x3e\xdf\x54\x5f\x20\xb0\xd2\x6e\x77\x2c\x14\x25\x39\xb7\xe5\
\x48\xfd\x7d\xd1\xc8\xc8\xa1\x24\x45\x34\x49\x96\x65\xd1\x66\x4b\
\x8f\x45\x22\x2d\xe0\x3c\x9a\xa4\x8d\x95\xa4\x8d\x31\x8e\x42\x3a\
\x00\xeb\x8b\x26\x32\x92\xa4\x97\x61\x68\xfa\xaa\x69\x65\x65\x81\
\x65\xcb\xce\xc0\x86\x8d\x1f\x40\x14\x24\x6c\xdd\xfa\x19\x64\xbb\
\x8c\x39\xb3\x66\x82\x51\x86\xa1\xa1\x61\x84\x86\x42\xd8\x55\xb1\
\x17\x1b\x3e\xda\xd4\xd6\xda\x74\xe4\x46\xd3\x30\xa3\x6e\xaf\xb7\
\x18\xe0\x8e\x70\x28\xd4\x01\xc0\x03\xc0\x0a\x87\x42\xf5\xe1\x50\
\xa8\x86\x50\x4a\xed\x0e\x67\xba\x9a\x48\xf0\xe4\xee\x26\x00\x58\
\x9a\xaa\x26\x34\x55\x1d\x48\x52\x84\x25\xaf\x8d\x71\x7e\xcc\x0b\
\x63\xeb\x84\x23\x25\x49\xee\x86\x1d\x80\xed\x68\x4f\xf7\x1f\xb6\
\x6c\xdb\x76\xc6\xf4\xe9\xd3\xe6\x9e\xb1\x64\x31\x2a\xf7\xec\xc3\
\x4b\xaf\xbc\xa8\x0c\x87\x86\x1b\x26\x4f\x29\x2a\x5f\xb2\x68\x81\
\x2c\x8b\x14\xc1\x60\x10\x36\xbb\x0b\x5e\xb7\x5b\x55\x13\x09\x37\
\x00\x29\x34\x38\xd0\x03\x40\x03\xe0\x4e\x1a\xa1\x8d\x71\x97\x5b\
\x16\x89\x47\x47\x42\x18\x6b\xb6\x92\x61\x97\x34\xcc\x3a\xce\x58\
\x63\x1c\x90\x63\x01\x3c\x11\x00\x96\x54\x5b\x52\x1d\xba\xa6\x8e\
\x1c\xae\xae\x7a\xe0\x17\xff\xfb\xc4\xc3\x35\x75\x87\xe7\x54\x54\
\x54\x0e\xd4\x55\x57\x3d\x17\x8d\x84\x6b\xda\x5a\x5b\xcf\xcf\xc9\
\xc9\xbe\x69\xe9\xc2\xd3\xe0\x76\xbb\x90\x96\x96\x8e\x5d\x95\x7b\
\x8b\x99\x28\x15\x9a\xba\xd6\x99\xfc\x7e\x0d\xf8\xc7\x04\x95\x34\
\x52\xc5\x3f\xba\xe1\xf1\xbc\x1d\x1f\xa0\xe3\x01\x8c\x81\x30\x70\
\xdc\x49\xd7\x97\x51\x68\x4c\xed\xaa\xa2\x74\xb5\x34\xd4\xff\xf8\
\xf5\x60\x70\x4d\x6c\x24\x52\x1d\x8d\x84\x5b\x00\x38\xb8\xa5\xb7\
\xb7\xb7\x77\x44\x16\x2d\x38\xcd\x63\x58\x04\xbb\xf7\xec\x47\x2c\
\x16\x8f\x3b\x9d\x4e\x4f\x64\x58\x1b\x3b\xe0\x34\xc6\x81\x18\xd3\
\x44\xf2\x1a\x19\xf7\xbc\x31\xc3\x38\x52\x3d\x30\xf6\xff\xc4\x86\
\x4e\x10\xc4\x74\x8c\x3e\x00\x9c\xc9\xd5\x0e\x40\x20\x84\x50\xce\
\xb9\x33\x49\x09\x81\x09\x4c\x28\x98\x5c\x7c\x6b\x7e\x61\xe1\xe9\
\xd1\x91\x91\x96\xce\xce\xae\x5d\xd1\x70\xa8\x52\x55\x94\x16\xcb\
\x34\xe3\x00\x94\x71\x1a\x4f\x1a\x1e\x4f\x02\x1a\x33\x7c\x6c\x1d\
\xcb\x30\xe3\x81\x9c\x54\xbe\x28\x0b\x8d\xc5\x81\x3c\x8e\x4e\x32\
\x46\xab\xa1\x38\x4e\x09\x21\x44\xe4\x9c\x0b\x49\x6e\xc6\xc7\xd1\
\xc5\x38\x6e\xc7\x95\xe4\x6a\xe2\x1b\x94\x93\xa5\xd1\xb1\x6c\x34\
\xde\xe8\x31\x10\x0c\x27\xfe\x76\x38\xe6\xee\xb1\x80\x1d\xbf\x7e\
\xf3\x3f\xd3\x7f\x05\x00\x13\x01\x1a\x1b\x43\xe9\xb8\xf7\x8e\x07\
\x31\xc6\xe3\x7f\xb9\xfc\x1d\x74\x4a\x51\x78\x72\xbc\x11\xfc\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x0a\x0e\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x30\x00\x00\x00\x30\x08\x06\x00\x00\x00\x57\x02\xf9\x87\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x26\x00\x00\x80\x84\
\x00\x00\xfa\x00\x00\x00\x80\xe8\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x70\x9c\xba\x51\x3c\x00\x00\x00\x06\
\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\
\x09\x97\x49\x44\x41\x54\x68\xde\xd5\x9a\x5b\x6c\x1c\xd5\x19\xc7\
\x7f\x73\xdd\x5d\xaf\xb3\xbb\xb1\xb1\x63\x3b\x0e\x4e\x22\x20\x45\
\x95\x02\x45\x2a\x2f\x11\xbc\x05\x04\xa8\x0f\x5c\x84\x04\x14\x52\
\xc4\xa5\x84\x70\xa9\x5a\x95\x42\x2b\x55\x2a\x94\xaa\x55\x2f\x38\
\x6d\x69\xd3\x56\x08\x42\x20\x6d\xa9\x8a\x92\x0a\x1a\xca\xed\x01\
\x4a\x5b\xab\xe2\x96\x12\xaa\xc4\x24\x36\xc4\x80\xbd\xbb\xb6\x77\
\xbd\xb3\x33\xb3\x3b\x97\xd3\x87\xd9\x71\x76\xed\xbd\x1a\xd3\xaa\
\x47\x5e\xcd\x65\xc7\xe7\xfc\xfe\xdf\xff\xfb\xce\x9c\x19\x5b\x12\
\x42\xf0\xbf\x6a\xd2\x0f\xa4\x08\x9b\xd9\x85\xc1\xc5\x58\xf4\x11\
\x41\x47\xa6\x0b\x19\x1d\x97\x37\x28\x72\x80\x7f\xb0\x4f\xec\x17\
\x4e\xc3\x3e\xea\x09\x90\x24\x49\x05\xa4\x4f\x8d\xfc\x06\x52\x6c\
\xe5\x3b\x68\x5c\x83\x4c\x4a\x8d\xa8\x8c\xac\x1d\xe1\x8c\xbe\x33\
\xa0\x04\xef\x15\xde\xe3\x7d\xeb\x7d\xdc\xbc\x0b\x82\x3c\x51\x9e\
\xc7\xe6\xab\xe2\x0e\x31\xd5\x52\x40\x05\xfe\x0b\xc0\xe9\x9f\x8a\
\x88\x1d\x7c\x9e\x73\xb8\x9c\x18\xb1\xde\xb5\xbd\xdc\xf4\x99\x9b\
\xb8\x70\xdd\x85\xa8\xb2\x7a\x6a\x34\x1f\x5c\x5c\x5e\x99\x7d\x85\
\x47\x5e\x7f\x84\x59\x63\x16\x6c\x0a\x14\xb8\x5e\x7c\x5b\x1c\x6c\
\x25\x40\x03\x6e\x17\x42\x8c\x96\x4a\xa5\xf0\x6c\xf8\xb3\x54\x7f\
\x83\xf3\x4b\x2f\x93\x10\x42\xf0\x87\x89\xa7\xd8\xf1\xd2\x0e\xbc\
\xa8\xc7\x25\x23\x97\x70\xc5\x99\x57\x10\x51\x22\x08\x04\x8e\xeb\
\x80\x38\x15\x32\x4d\xd5\x90\x24\x89\x92\x57\xe2\xe9\x13\x4f\x73\
\xe8\xd8\x21\x30\xb1\xd1\xb9\x51\xdc\x26\x7e\x17\x76\xad\x36\x1a\
\xb2\x54\x2a\x93\x9d\x9d\x05\x24\x24\x49\x42\x92\x40\x92\xa4\xca\
\x71\xbd\xfd\x53\xdb\x7a\xfb\x87\x26\x0f\x71\xf3\x6b\x37\xe3\xa9\
\x1e\x17\x8c\x5c\xc0\xf6\x33\xb6\x53\x10\x05\x16\xdc\x05\x8c\x5c\
\x8e\x73\xbb\x3f\x47\x97\x1a\x03\xc0\x72\x2d\xde\xce\xbf\x49\x22\
\x95\xc2\x43\x62\xfb\xc8\x76\x0c\xcf\xe0\xd5\x63\xaf\x46\x71\x79\
\x42\x7a\x52\x8a\xf2\x45\xf6\x0a\x21\x44\x23\x01\x80\xa8\x82\xac\
\x0f\x5d\x0f\x78\x71\x5b\x89\x3c\xc0\x8c\x35\xcd\xed\x7f\xdb\x89\
\x1d\xb1\x39\x6b\xf8\x2c\xb6\x6d\xd8\x46\xc6\xc9\xa0\xca\xa0\x4b\
\x82\x6c\x61\x9a\x6d\x67\xde\x49\x6f\xac\x0f\x80\x59\x3b\xc3\x4b\
\xaf\xff\x09\x2d\xe9\xe0\xfa\x12\x25\x01\xdb\x86\xb7\x31\x63\xcd\
\x70\x6c\xe6\x98\xc2\x2c\xbb\xb9\x81\x67\x81\x8c\xdc\xcc\x76\x49\
\xaa\x85\x5f\xfa\x09\x81\x6b\x3e\xa1\x88\xca\xb1\xef\xfb\xdc\xf3\
\xc6\x3d\xe4\xc9\x43\x04\xb6\x0e\x6d\x25\xe3\x65\xc8\x7b\x69\x8a\
\x7e\x1a\xcb\x4f\xb3\x60\x4c\x23\x84\x7f\x2a\x74\xbe\x4f\xc1\x9c\
\xa6\xe4\xa7\xb1\x44\x9a\xa2\x97\x26\xef\x67\xd8\x3a\xbc\x15\x34\
\x20\x42\x82\xcf\xf2\x3d\x49\x92\xe4\x86\x0e\x84\x71\xac\x27\xa0\
\x5e\xa4\xab\x23\x5e\xfd\x9d\x69\x9b\xbc\x3c\xf9\x32\xa8\xd0\x33\
\xd8\x43\xd9\x2f\x93\xf7\x32\xc4\x2b\x97\x2b\xb2\xc0\xb4\xb2\xf8\
\xbe\xb7\x38\xb6\xef\x7b\x94\xac\x2c\x9e\xd0\x70\x08\x1c\x30\x7d\
\xf0\x44\xd0\xc7\xdc\xc7\x73\x60\x71\x35\xdb\xf9\x5a\x23\x01\xa2\
\x36\xba\xd4\x87\x6f\xb4\x0d\x0b\xd7\xf7\x79\xfc\xe8\x5e\x8a\x4e\
\x11\x06\x40\x97\x75\x32\x6e\x9a\x24\x20\x0b\xd0\x24\xd0\x7c\x81\
\x6d\xce\xd5\x3a\x20\x7c\x6c\x73\x0e\xdf\x57\x70\x85\x44\xc9\x07\
\xcb\x03\x53\x80\xae\xea\x81\x0b\x2a\x09\xae\xe0\xeb\x4d\x6a\x80\
\x86\xf0\x4b\xa3\xde\x68\xeb\xfa\x3e\xcf\x7c\xf8\x0c\x24\x82\xfe\
\xa6\xfd\x69\x16\xfe\x99\xa5\xd7\xeb\x22\x21\x6b\xc4\x14\x88\xc9\
\x50\x48\x97\xa9\x9e\x0d\x85\x10\x7c\x30\x91\xa1\x60\xe6\xb1\x7d\
\x30\x3d\x58\xf0\x1c\xf2\x9a\xc9\xdc\x06\x17\x22\x40\x12\x98\xe5\
\xa2\x26\x02\x1a\xc0\xb7\xe9\x02\x42\xe0\x38\x0e\xe3\xe5\xf1\xc0\
\xcf\x6e\x60\x06\x6e\x5b\x7b\x07\x77\x5f\xf4\x95\x4a\xdf\x81\x5b\
\x12\xd0\xbf\x66\x68\x71\xe4\xfe\x35\x43\x1c\xd8\xf9\xaf\xc5\x54\
\x10\x80\xf0\xe1\xa7\x2f\x8c\xf2\x70\x6e\x14\x27\x01\x94\x00\x9b\
\x75\x6a\x13\xfe\x9a\xc2\xa4\x43\x17\x00\x4a\xb6\x0d\x2e\xb0\x16\
\xf0\x00\x07\x92\xd1\x14\xa7\xf7\x8c\x34\x33\x1e\x59\x52\x18\x4a\
\x2d\xbf\x26\xa1\xa6\x50\x2c\xc0\x27\x70\x41\x23\xd2\xb4\x88\x5b\
\xc2\x37\x12\x04\x88\xca\x0c\xa4\x2a\x6a\x20\x42\x00\x8d\xe7\xbc\
\xb6\x9b\x04\xc1\xdd\x4b\x00\x31\xb4\xe6\x45\x5c\x05\xda\xb1\x0b\
\x80\x2c\xcb\x28\x28\x41\x6f\x41\xe1\x91\x77\x73\x7c\x30\xf7\xfe\
\xb2\x14\x1a\x48\x0e\x23\x4b\x0a\x00\xbe\xf0\x98\xc9\x4f\x9d\x4a\
\x1f\x40\x08\x58\x70\x73\x41\x3f\x65\x40\x07\xe2\xc8\x4d\x8b\xb8\
\x1e\x7c\x33\xe8\x9a\x3a\x10\x02\x59\x96\x49\x25\x52\x7c\xa4\x7c\
\x04\x0e\xd0\x03\x7b\x4e\x8c\xf2\xf8\xe3\xa3\x74\x2b\xd0\xad\x42\
\xb7\x0c\x56\x16\x9e\xbb\x73\x92\x81\x44\x90\x36\x99\xc2\x14\x97\
\xff\x72\x23\x6b\xfa\xa1\xe8\x41\xc1\x03\xc3\x03\x4b\x83\xf2\xa6\
\x20\x15\x51\x00\x83\x93\x2d\x67\x21\x9a\xc1\xb7\x10\x24\x2b\x0a\
\xe7\x69\xe7\xf1\xae\xf7\x6e\x30\xa8\x04\x62\x33\x74\x45\x60\xad\
\x0e\x7d\x11\xe8\x55\x20\x7d\x78\xf9\xd8\x7d\xa7\xc3\xc6\x73\x61\
\xde\x83\x4c\x19\x32\x25\xf0\xcb\x50\xb6\x08\xea\x49\x02\x26\xf8\
\x6b\xd3\x3b\xf1\x62\xaa\x54\x0b\x69\x00\x2f\xd5\xd9\xaa\x8a\xc2\
\xb5\xf1\x6b\x89\x11\x0b\xf2\xd6\x05\xab\x0c\x66\xe5\x63\x94\x61\
\xc1\x05\x47\x0b\xf3\xf5\x54\xa2\x97\x55\x98\x77\x21\x5f\xb9\xce\
\x74\x20\x67\x81\x08\xa3\xef\x63\xb1\x87\xdf\x36\x2d\x2b\xa9\x2a\
\x9f\xa5\x16\xf0\x8b\xf5\x11\x7e\x2f\xcb\xc4\x62\x31\x86\xfa\x87\
\x38\xdf\x3d\x3f\x00\x54\x82\xe8\x19\x16\x2c\xd8\x01\xd0\xbc\x05\
\xa6\x0c\xa2\x4a\x80\x90\x82\x73\x19\x13\xe6\x2c\xc8\x99\x30\x67\
\x42\xa1\x5c\x25\x34\xc3\x61\x8a\x4c\x37\x14\x10\x16\x57\xab\x3a\
\x58\x84\xae\xe3\x82\xae\xeb\x9c\x76\xda\x69\xec\xec\xde\x49\x54\
\x44\x83\x59\x48\x03\xbb\x04\x39\x03\xe6\x8b\x30\x5b\x04\xc3\xaf\
\xcc\x1a\x55\x02\x0a\x3e\x64\x8b\x30\x6b\xc0\xac\x09\x59\x03\x84\
\x4c\x30\x7d\xba\x98\xfc\x82\x9f\x03\xf9\xd6\x45\xdc\x48\x48\x1d\
\x78\x96\x9c\x93\x65\x99\x44\x22\xc1\xd9\x1b\xce\xe6\xba\xb7\xae\
\xe3\xd1\xa1\x47\xf1\x25\x1f\x62\x60\x15\xc1\xf3\xc0\x91\x21\x22\
\xe0\x37\x47\x46\x49\x45\x52\x48\x40\xbe\x9c\x63\x41\x80\x63\x40\
\xae\x14\x14\xb2\x58\x43\x10\x00\x09\x87\x03\xfc\x9a\x77\x39\x02\
\x98\xcd\x8b\x38\xac\x85\xa5\x42\xea\xc1\xd7\x13\x52\x71\xa1\xaf\
\xaf\x8f\x2b\xfb\xae\xc4\x3d\xe9\xb2\x7f\xe3\x7e\x9c\x88\x03\x2a\
\x94\x73\x90\xf5\x40\x75\xe0\xc7\x47\x47\x51\x64\x40\x04\xf7\xa9\
\xb2\x0e\xa5\x02\x08\x15\x88\x56\x22\x1f\xc1\xe7\x49\x9e\xe0\x31\
\x0e\x00\x27\x01\xbb\x23\x07\x3a\x85\xa7\xe2\x42\x32\x99\x64\xcb\
\x96\x2d\x5c\xe5\x5d\x45\xf4\x64\x94\x7d\x1b\xf7\x61\x26\xcc\x60\
\x79\x91\x01\x37\x02\x05\xa3\x52\x23\x2a\x81\x02\x05\xe8\x09\x52\
\x8e\x38\xb0\x80\xc3\x63\x3c\xc5\x63\xfc\x1e\x18\x07\x72\x42\x08\
\xaf\x63\x07\x3a\x81\x0f\x8f\x35\x4d\x63\x60\x60\x00\x59\x96\xd1\
\x8f\xe9\x6c\x3a\xb2\x89\xfd\xc9\xfd\x1c\xdd\x70\x94\xd2\x48\x09\
\x72\x40\x2f\x90\x27\x10\x15\x01\xac\xe0\x5c\xd4\x8b\x72\xce\xd4\
\x39\x8c\xdd\x32\xf6\x67\xe6\x78\x0a\xf8\x37\x90\x15\x42\xb8\xd0\
\xf8\x91\x72\x99\x03\x21\x2c\xd5\xc7\x4b\xf7\xeb\xc0\x87\x5b\x5d\
\xd7\x19\x18\x18\x40\xd7\x75\x62\xb1\x18\x83\x13\x83\x64\xdf\xcc\
\xf2\x5a\xea\x35\xde\x5e\xfb\x36\x33\xda\x0c\x72\x42\x46\x45\x45\
\x91\x14\xe2\x56\x9c\x2d\xaf\x6f\xe1\xe2\xd2\xc5\x0c\xf6\x0d\x72\
\xcd\xdc\x35\x93\xc0\x5b\xc0\x34\xc1\x5d\x85\x66\x02\x6a\x96\x12\
\xcb\xc0\x9b\xac\x4e\xeb\x09\xa9\x76\xa2\xb7\xb7\x97\x68\x34\xca\
\xe0\xe0\x20\xe9\x74\x9a\xcd\xe9\xcd\x5c\x9a\xbe\x14\xdb\xb6\x11\
\x95\xbb\xb7\x24\x49\xa8\xaa\x4a\x32\x95\x64\xfd\xfa\xf5\x0c\x0f\
\x0f\x03\x7c\x0c\x64\x84\x10\xe5\xea\x7e\x9b\xac\x46\xa5\xb6\x1c\
\x68\x05\xbf\xf4\x58\x55\x55\x92\xc9\x24\xf1\x78\x9c\xfe\xfe\x7e\
\x4c\xd3\xa4\x50\x28\x60\x18\x46\xb0\xf8\x53\x55\x14\x45\x41\xd7\
\x75\xe2\xf1\x38\xdd\xdd\xdd\x74\x75\x75\x41\xb0\x02\x72\x97\x62\
\xb6\x78\xa4\x6c\xdf\x81\xfa\x31\x68\x2c\x46\xd3\x34\x54\x55\xa5\
\xab\xab\x8b\x9e\x9e\x1e\x5c\xd7\xad\x71\x40\x96\x65\x14\x45\x59\
\xdc\xa7\xf6\x56\xd1\x86\x03\x21\x70\x75\x2a\x55\x9f\x6f\x90\x26\
\xed\x3a\x01\x84\x60\x28\x8a\x82\xa6\x69\xac\xa4\xb5\x2c\xe2\xa5\
\xd1\x6f\x27\x75\x5a\xc1\x37\x73\x6d\x55\x05\xd4\x8b\x7e\x08\xd0\
\xaa\x68\x3b\x85\x17\x42\x2c\x3e\x17\x57\x3f\x1f\x57\xa7\x6c\xc7\
\x02\x6a\x84\xb4\xe8\xa8\x1e\x60\x3b\xf0\xbe\xef\xd7\xc0\x87\xc7\
\x35\xbf\x23\x2b\x2b\x14\xb0\x14\x80\xd5\x4b\x23\x21\xc4\x22\xac\
\xe7\x79\x78\x9e\x87\xef\xfb\x35\x82\x16\xc7\x52\x75\x58\x71\x11\
\xb7\x19\xf9\x4e\x5c\x09\x41\x7d\xdf\xc7\x71\x1c\x5c\xd7\xc5\xf3\
\x3c\xca\xe5\x72\x70\xec\x05\x2f\xb9\x64\x49\x42\xd6\x74\xc6\x8f\
\xbf\x33\x05\xa4\xeb\x89\x58\xb5\x14\x6a\xd7\x89\x10\xdc\x75\x5d\
\x1c\xc7\xc1\x71\x1c\x6c\xdb\xc6\x34\x2d\x2c\xdb\xc2\x29\x3b\x78\
\x15\x01\xf1\x44\x92\x7c\x3a\x6b\xde\x7a\xe3\x8e\x5f\x01\xcf\x86\
\xcb\x87\x8e\x05\x2c\x13\xd2\x00\xae\x55\x0b\xd3\x26\x84\x2f\x97\
\xcb\x2c\x14\x0a\xe4\x72\x39\x2c\xd3\xa4\x68\x9a\x94\xcb\x0e\xa9\
\x54\x92\xc1\xa1\xf5\xcc\xce\x66\xbd\x5b\x6e\xfe\xd2\x4f\x8e\x1f\
\x3f\xfe\x33\x21\xc4\x42\xbd\x3e\x9b\x2e\x25\x56\x92\x3a\xcd\x04\
\x86\x02\x42\xf8\x5c\x2e\xcf\xd4\xd4\x49\x8a\x96\x49\x77\x57\x37\
\xfd\xfd\xfd\x0c\xac\x5b\x87\xae\xeb\x9c\x98\x98\xf0\xef\xdc\xb5\
\xeb\xa1\x23\x87\xdf\xf9\x91\x10\x22\xdf\x68\xbc\xf6\x56\xa3\x1d\
\x40\xb7\x13\x7d\xd7\x75\x31\x0c\x83\xf9\xf9\x79\x0a\x45\x83\xcb\
\x2e\xb9\xb4\x52\xcc\x2e\x86\x51\x64\x62\x72\x42\xdc\x73\xdf\x37\
\xf6\x8c\x8d\x8d\x3d\xd0\x28\xf2\x61\x6b\xf9\xaa\xa9\x51\xfe\x7f\
\x92\xf4\xb1\x6d\x1b\xc3\x30\x28\x14\x16\xe8\xee\x8a\xd7\xc0\x4f\
\x4e\x4e\x70\xff\x83\xf7\xef\x7b\xf9\xf9\x17\xef\x6b\x05\xdf\x96\
\x80\x65\x42\xda\xbd\xbe\xea\xda\x70\x5a\x0c\xa7\x4b\xcb\xb2\x29\
\x16\x4d\x0c\xd3\x24\x99\x4c\xd6\xc0\x3f\xb4\x7b\xf4\xe0\xc1\x3f\
\x1e\xbc\xa3\x1d\x78\xe8\xb0\x88\x3b\x81\x6e\xe6\x40\xa9\x64\x63\
\xdb\x36\x25\xdb\x66\x70\x70\x20\x4c\x1b\x46\x77\x8f\x1e\xdc\xb7\
\x77\xdf\xf5\x42\x88\x42\xbb\x63\x36\x2f\xe2\x55\x6a\xd5\x77\x59\
\xd7\x75\x29\x57\x8a\x38\x95\x4a\x11\x89\x44\x99\x98\x9c\x10\xf7\
\x7f\xf7\x81\xbd\x07\x9f\x3e\x70\x57\x27\xf0\xcd\x04\xac\x7a\xab\
\x49\x23\xd7\xc3\x17\x82\x91\x91\x4d\x9c\x98\x98\xf0\xef\xfd\xe6\
\xbd\x0f\xbf\xf8\x97\x17\xbe\xd5\x29\xfc\x7f\x45\x40\x98\x56\xd5\
\x0b\x35\x21\x04\xc9\x9e\x5e\xf2\xf9\x9c\x77\xd7\x5d\xbb\x7e\x38\
\xf6\xf7\xb1\x07\x85\x10\xc6\x4a\xfa\x6f\xfc\x40\xb3\x4a\x4b\xde\
\x10\x3c\x5c\xd7\xc8\xb2\x8c\x16\x8b\x63\x2d\xe4\xcd\x5d\x5f\xbe\
\xe5\xfb\x87\x0f\x1f\x7e\x68\xa5\xf0\xcd\x04\x08\x59\x96\x89\x44\
\x22\x2b\x16\x55\x73\xad\x24\x21\x24\x19\xa1\x68\x48\x91\x2e\x3e\
\xf8\x78\x7c\xea\xee\x9d\xb7\xee\x3e\x31\x3e\xbe\xe7\x93\xc0\x03\
\x0d\xff\xd5\xe0\x32\x60\x23\xb5\xaf\x5c\x57\xab\x09\xe0\x43\xe0\
\xb9\x4f\x0a\x5f\x57\x40\x45\x84\xc2\xaa\xfc\x3d\xa5\xb1\x88\x7a\
\x0b\xb3\x55\x13\xf0\xff\xd4\xfe\x03\x04\xe4\x96\x06\x3c\x08\x04\
\xfb\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x0a\xbb\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x30\x00\x00\x00\x30\x08\x06\x00\x00\x00\x57\x02\xf9\x87\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x26\x00\x00\x80\x84\
\x00\x00\xfa\x00\x00\x00\x80\xe8\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x70\x9c\xba\x51\x3c\x00\x00\x00\x06\
\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\
\x0a\x44\x49\x44\x41\x54\x68\xde\xd5\x99\x7b\x70\x55\xc5\x1d\xc7\
\x3f\xe7\x71\x6f\x6e\x9e\x08\x57\x84\x60\x40\x10\x34\x4a\x10\x12\
\xa0\xa5\x4a\xc5\x06\xa8\xb5\x3c\xfa\x1a\x8b\xf8\xea\xc4\x22\x54\
\xf1\xed\x50\x86\xfa\x00\xb4\x3a\x2a\xe8\x28\xc5\x71\xb0\x43\x2d\
\x23\xd4\xe2\x8c\x83\x0f\x10\x3b\x15\x0d\x93\x02\x53\x47\x0a\x04\
\x85\xd0\xf0\x0e\xa4\x85\xdc\x80\x11\xc2\xcd\x3d\xb9\xe7\xec\xf6\
\x0f\xb2\x67\xce\x3d\x39\xe7\x26\xb1\x76\x3a\xee\xcc\xce\x39\xfb\
\xdb\xfd\xed\xfe\x9e\xdf\xdd\xb3\x47\x93\x52\xf2\x4d\x2e\xfa\xff\
\x5b\x80\x9e\x96\x92\xe2\xc2\x05\xb3\x67\xcf\x9e\xe1\x12\xa4\x94\
\xdf\x98\xfa\x78\x15\xf7\xf6\x2a\xd0\x45\x5e\x4c\x13\xe5\xe5\xe5\
\x7f\x02\x34\xd3\xab\xdd\xd8\xb1\x63\xd7\x3b\x8e\x33\x4d\xd3\x34\
\x74\x5d\x27\x1e\x8f\x9f\x3a\x7d\xfa\x74\x0a\x40\xd3\xb4\x0c\x4b\
\x54\x55\x55\xf5\x19\x31\x62\x44\xae\x94\x12\xc7\x71\xd0\x34\x0d\
\xd3\x34\x89\x46\xa3\x44\x22\x11\x84\x10\x28\x3e\xc3\x30\x5c\x3e\
\x21\x04\x42\x08\x2c\xcb\x22\x9d\x4e\x93\x4e\xa7\x01\x30\x0c\x83\
\x1d\x3b\x76\xac\x59\xb7\x6e\xdd\xef\x0c\xc3\x78\x5b\xad\xa7\x69\
\x5a\x11\x50\x38\xb6\xac\x28\xf1\xd4\xec\xd2\x3e\x27\xbf\xd8\x7f\
\x7c\xe5\x7a\xad\x44\xd3\xb4\x9b\xa3\xd1\xe8\x8e\x0c\x05\x1c\xc7\
\x99\xe6\xf5\x0a\x10\x57\xc2\xa9\xa2\xde\x6d\xdb\x76\xdb\x4a\x70\
\x5d\xd7\x31\x4d\x93\x48\x24\x42\x24\x12\xc1\x34\x4d\x77\x8c\xca\
\xb5\xba\xba\x3a\xf6\xec\xd9\xe3\x2a\xe2\x38\x0e\x95\x95\x95\x00\
\xa4\x52\xa9\x9b\x0d\xc3\x98\x09\x98\x52\x4a\x97\xef\x8a\x21\x05\
\x4c\xb8\xfe\x17\x7d\xdf\xdb\x7f\x50\xfe\xad\xfe\xf5\x81\x83\x06\
\x25\x39\x72\x60\x97\x7d\x49\xff\xe8\x82\x0c\x05\xc2\x8a\x9a\xcc\
\xfb\xee\x4d\x7e\xc3\x30\x10\x42\x90\x93\x93\xe3\x0a\x1e\x8d\x46\
\x31\x0c\x83\x68\x34\xea\xf2\x09\x21\xa8\xaf\xaf\xe7\xdd\x77\xdf\
\xcd\x30\xca\xe4\xc9\x93\x71\x1c\x07\x29\xa5\xa1\xe6\x55\x6b\xf4\
\xce\x6f\xe3\x89\x99\x47\xd9\xd6\xb4\x93\x6f\x4f\x98\xad\x2d\x89\
\x8f\xc0\xfe\xe7\x9c\xcf\xca\x2e\x1d\x34\xf4\xa1\xa5\x47\x17\x9b\
\x61\x82\xfa\x2d\x17\x44\x13\x42\xa0\xc2\xcd\x34\x4d\x0c\xc3\xc8\
\xf0\x82\x61\x18\xae\x87\x94\x02\xba\xae\x77\x5a\xcb\x30\x0c\x6c\
\xdb\x76\x3d\xaf\xd6\x28\x88\xb5\xf3\xd2\x9c\x46\x3e\x3b\x28\x58\
\xf8\xca\x5b\x4c\xaf\x13\x54\x55\x55\x71\xb6\x68\xe9\x88\x86\xda\
\x39\xd3\x37\x6c\x93\xef\x77\xf2\x80\x77\x62\xaf\x32\xde\x89\xfd\
\x74\x35\xd6\xb6\x6d\x72\x72\x72\x5c\xc1\x75\x5d\x77\x2b\x90\xf1\
\xee\xe5\xf5\x2a\xa5\x9e\xb1\x88\xcd\xd2\x5f\x1e\xa5\xf9\x6c\x84\
\x79\x2b\xf2\x49\xdb\xa0\x25\x56\x63\x1d\xb3\x71\x2c\x5e\x9d\x74\
\xdf\xb9\xf7\xc1\x07\xa3\xfe\xac\x0f\x52\xce\xbf\xb0\x0a\x97\x68\
\x34\x4a\x5e\x5e\x9e\xfb\xae\xf2\x40\x25\xb0\x12\xde\x0f\x06\xca\
\x03\xba\xae\x23\x84\x40\x4a\x89\xa9\x3b\xfc\xf6\xb6\xc3\x44\xa2\
\xf9\xcc\x7d\x31\x87\x54\xbb\xce\xa4\x8a\x14\x8b\x67\xe5\xf2\x97\
\x8d\xef\x38\xdf\x99\xf6\xfc\xdd\x8a\x37\x70\x1f\xf0\x5a\x35\xa8\
\xcf\x1b\x42\x4a\x40\x15\x4e\xaa\x2a\xab\x7a\x43\x4b\xf5\xf9\x0d\
\xa4\xeb\x3a\x86\x61\x20\xa5\x44\xd7\x24\x8f\xdc\x78\x90\x92\xe2\
\xde\xcc\x7a\x0e\xce\x24\x0d\xbe\x55\x9a\x66\xc9\x5c\x93\x55\x1b\
\x05\x1b\xb6\x17\x3b\x5e\x79\xcc\x30\xc1\xfd\xd6\xf6\x2b\x23\xa5\
\x74\x05\x33\x0c\x83\x9c\x9c\x9c\x8c\xf0\x50\x3c\x42\x08\x4c\xd3\
\xcc\x08\x4b\x7f\x38\xba\x4f\x21\xb8\x6f\xea\x41\xc6\x5e\x15\xe7\
\xa6\x85\xed\x24\xbe\x34\xb9\xf2\x12\x87\x65\x0f\x08\x36\xfe\x5d\
\x63\xf5\xe6\x12\x40\x66\x08\xa2\x07\x29\x90\x4d\x11\xbf\x80\x86\
\x61\xb8\x02\xea\xba\x4e\x34\x1a\x75\xf9\x54\x9f\x37\x07\xfc\xf3\
\x49\x29\x31\x4d\x13\xc7\x71\x18\x19\xff\x2b\x53\x27\xf4\x61\xd6\
\x33\x69\x1a\x9a\x4c\x06\xf5\x93\xbc\xf2\x90\xcd\xf6\x7d\xb0\x7c\
\xfd\x80\x40\x19\x42\x93\x38\x0c\x3a\x83\x50\x4a\xa1\x8b\x8a\x7d\
\xe5\x19\xe5\x1d\xc5\xa3\xc2\xcc\x0f\xc5\x52\x4a\x92\xc7\xd6\xf0\
\x93\xf1\xad\xfc\x6a\xa9\xc1\xde\xa3\x82\x0b\x7b\x49\x56\x3c\xdc\
\xce\xd1\x13\x92\x67\xde\x1c\x80\x94\x5a\x86\xe2\xa1\x21\xe4\x87\
\xce\x20\x9a\xbf\x4f\x29\xe3\x38\x8e\x9b\x17\x7e\x14\x52\xfc\xe5\
\xe5\xe5\xae\xd2\x8e\xe3\x60\xdb\x36\xcd\x87\xde\xe1\xfb\x97\x6f\
\x65\xf1\xea\x8b\xd8\x52\xfb\x2f\x0a\x72\x25\x2b\x1e\x4e\xd3\x66\
\xc1\xa3\xab\xfa\x91\x76\x74\x20\x38\x94\x43\x3d\xe0\x38\x8e\x9b\
\xa0\x41\xde\x38\x74\xe8\x10\xe3\xc6\x8d\xcb\x48\x50\x15\x2a\x1d\
\x9b\x12\x8e\xe3\xb8\x5e\x91\x52\x92\x4e\xa7\x29\x2d\x2d\x65\xd8\
\xb0\x61\x38\x8e\x43\x2a\x95\x22\x71\xe4\x43\xc6\x14\xfe\x91\x55\
\xd5\x97\xf3\xe7\x8d\x75\xe4\x44\x60\xf9\x03\x36\xf9\x79\x70\xf7\
\xb2\x0b\x49\xa5\xcd\x0c\xe1\x3b\xed\x4b\x5e\x42\x59\x59\x99\xf4\
\x6a\xd9\xb7\x6f\x5f\x0a\x0a\x0a\x5c\x61\xbc\xe8\x23\xa5\xa4\x5f\
\xbf\x7e\xe4\xe5\xe5\x65\xa0\x8f\x17\x62\xbd\xde\x53\x47\x07\x6f\
\xd8\x14\x98\xff\xe6\xe9\xdb\xea\xd9\xb0\x7d\x00\x8b\x56\x9c\x44\
\xd7\x24\x2f\xde\xeb\x30\x6a\x98\xc3\x9d\x2f\x5c\xc0\xe9\xd6\xdc\
\xa0\xbc\xb3\xb7\x6d\xdb\x16\x09\xf5\x80\xd7\xca\x89\x44\x82\xe6\
\xe6\xe6\x8c\x98\xf7\x3e\x9b\x9a\x9a\x02\x37\x3d\x7f\xa2\x07\x3d\
\xe3\x05\x6d\xac\x79\xb4\x95\x8f\x6b\xfb\xf2\xf4\x6b\xa7\x91\x52\
\xb2\xf0\x0e\xc1\xb8\xe1\x82\x59\x4b\x0b\x38\x75\x36\x06\xc8\x40\
\xf4\xf3\x96\xc0\x8d\xcc\x3b\xb0\x27\xb4\x30\x63\xf8\x9f\x85\x31\
\x8b\x57\x1f\xfa\x92\xcf\x8e\xf6\x61\xd1\x4a\x0b\xcb\x6a\xe7\xc1\
\x19\x30\x7d\xbc\xe0\x81\xe5\x39\x34\x9e\xce\xef\x72\x53\x0d\x55\
\xe0\xab\x0a\xdf\xdd\x1d\x3c\x37\x6a\xf3\xf2\x7d\x09\x9a\x5a\x7b\
\x33\xef\x65\x8b\x73\x49\x8b\xdb\x6f\xd0\xb9\xe3\x87\x36\x0b\x56\
\x68\xec\x6b\x2c\xea\x04\x22\xd9\x0c\xa5\x67\x5b\xec\xeb\xa0\x79\
\xfb\x22\x86\xc3\x0b\x73\x4e\x80\xd1\x9b\xbb\x96\xb4\xd1\x9a\xb4\
\x99\x7a\x8d\xce\xbc\x99\xf0\xcc\xeb\x36\x9f\xd4\xf7\x09\x34\x46\
\xb6\x79\x43\x73\x40\x95\x8a\x8a\x0a\xe2\xf1\xb8\xdb\x0e\x8b\xe9\
\xb0\xe2\x8e\xc3\x61\x66\xc5\x46\x8a\xfb\x5e\xc8\x8c\xc7\x2d\x5a\
\xdb\x74\x7e\x70\x75\x01\x4f\xcf\x69\xe7\x83\xed\x85\x24\x0b\xa7\
\x71\xdd\x75\x99\xbc\x75\x75\x75\x24\x12\x89\x0c\x70\xe8\x12\x46\
\xbd\xe5\xd9\x67\x9f\x65\xe2\xc4\x89\x44\x22\x91\x4e\x7d\x0a\x51\
\x54\xf5\xb7\xbd\x34\x21\x1c\xe4\x81\xb9\xe4\x89\x18\x37\x2d\x72\
\x68\xfe\x52\x67\xd2\xd5\xfd\x59\x7a\xbf\xc1\xa9\xd4\x25\x8c\xf9\
\xf1\x13\x8c\x0e\xe0\xb3\x2c\x8b\x37\xde\x78\x83\x4d\x9b\x36\x85\
\x1a\x2a\x30\x07\x54\x19\x35\x6a\x94\x7b\x96\xf7\x4e\xae\x36\x2c\
\xf5\xee\x6d\x07\xd1\x9c\x83\xbf\xa1\x48\xee\x65\xd6\x73\x92\x83\
\xc7\x6d\xca\xaf\xb8\x80\xe7\xef\x4a\xf2\x45\xdb\x45\xb4\x17\x3f\
\xee\x8e\xb7\x6d\x1b\xdb\xb6\xdd\xb6\x61\x18\x54\x54\x54\x64\xcd\
\xb1\xac\xdf\x03\xde\x6f\x59\xd5\x17\x66\xf1\x30\x0f\xb4\xec\x7b\
\x9e\x4b\x63\xdb\x98\xbb\x2c\x87\xda\xfa\x14\x43\x4a\xf2\xf9\xc3\
\xc2\xfe\x9c\x4d\xe9\xb4\x15\x3f\x89\x94\x20\x65\xf8\x5c\xfe\x0d\
\x34\x6b\x0e\x64\x83\xab\xae\xc2\x24\x88\x76\xfc\xf3\x55\x8c\xee\
\xb5\x91\xf9\xaf\xe6\x51\xb3\xf3\x1c\x7d\x7b\x47\x59\xfb\x5c\x29\
\xc9\xb6\x16\x5a\xfb\x2d\x41\xc3\xec\xf6\x5c\x61\xc7\x99\xac\x1b\
\x99\x37\xde\x2d\xcb\x62\xf3\xe6\xcd\x9d\x3c\x11\x54\x01\xec\x96\
\x4f\xb8\x75\xec\x26\x1e\x5b\xe9\xf0\x5e\xcd\x39\x72\x73\x74\x56\
\x3f\x55\x42\xf2\xdc\x19\x3e\x3e\x3c\x05\xfb\xc0\x3f\x42\x11\xa7\
\xac\xac\x8c\xa2\xa2\xa2\x4e\xf4\x0e\x99\xb4\x50\x05\x82\x8e\xd1\
\xca\x02\x91\x48\x84\x49\x93\x26\x75\xe9\x01\x21\x04\xcd\xc7\xb6\
\x70\x05\x1f\xf2\xda\x07\x26\x6f\x55\xb7\x63\xe8\xf0\xf6\xb2\x51\
\x14\x9a\x27\xf8\xa2\xcf\x93\x5c\x33\xb0\xa4\x5b\xde\x54\x57\x33\
\xdd\xf6\x40\xd8\x51\xba\x27\x88\xd3\x72\x72\x37\x97\x89\x27\xf8\
\x68\xa7\xc6\x92\x35\x6d\x00\xac\x7d\x61\x34\xbd\xa2\x09\x9a\xf2\
\xe7\x13\x89\x5d\x9c\xc1\xdf\x95\x22\x41\xc6\xed\x56\x12\xfb\x0f\
\x60\xdd\x59\xec\xd4\x89\x3a\x06\x9f\xfb\x35\xb5\x47\xe0\xc1\x97\
\x2c\x40\xe3\xf7\x8b\xaf\x62\xf0\x80\x02\x1a\xed\x9f\x12\x2d\x2c\
\xed\xf6\x5c\xd9\x92\xb9\x5b\x1e\xc8\x96\xbc\x41\x8b\xa7\xd3\x69\
\x0e\x6f\x5f\x4e\x6b\xe4\x0c\x77\x2e\x89\x20\xa4\xc6\xa2\xbb\x06\
\x32\x6a\xa8\xa0\xb1\x7d\x3c\xd1\xf8\x98\x1e\x83\x80\xf7\x5d\x15\
\xbf\x07\x42\x8f\x12\x2a\xfe\xfd\x93\x05\xb5\x85\x10\x7c\xf4\xe1\
\xfb\x54\x54\x2e\xe0\xed\xdd\x93\x69\xb7\x35\xaa\xa6\xc7\x99\x38\
\x6c\x0b\x47\x5a\xc7\x10\xe9\xf3\xdd\x40\x23\x84\xcd\xe5\xcf\x01\
\xaf\x5c\x52\xca\xae\x93\xd8\x4f\xeb\xca\x03\x89\x44\x82\xcb\x87\
\xf4\xc3\x4a\x9e\x64\xc6\x0d\x25\x14\xe9\xfd\xb9\x75\xfc\x27\x7c\
\x7e\x76\x26\x83\xaf\xbc\xa9\x47\x61\x13\x96\x03\x61\xf2\x65\x85\
\x51\xff\xb6\x5e\x53\x53\xe3\xf6\x7b\xeb\x89\xe3\x07\xf8\xd9\xd4\
\xd1\x9c\x69\xda\xc5\xd9\xe6\xdd\x94\x5e\x7c\x96\x95\x35\x95\x0c\
\x1e\x31\x81\x86\x9a\x9a\x0c\x34\x09\x83\x4e\xef\x98\xe1\xc3\x87\
\x53\x58\x58\xd8\x29\x7c\x82\x4a\x56\x18\xf5\x2a\x60\x9a\x26\x95\
\x95\x95\x9d\x2c\xb4\xbb\x76\x17\xe5\x97\x95\x71\xa6\xa9\x96\x33\
\x89\xdd\xec\xd9\xd7\x40\xff\x11\x0f\x72\xfb\xd4\xab\x7b\x6c\xed\
\xcc\xf3\x53\x26\xbd\xc7\x49\xec\x55\x22\x6c\xf1\xf6\xf6\x76\x4e\
\x34\xec\xe4\xe2\x5c\x83\x33\xcd\x7b\xd9\xb2\xeb\x1c\x37\xcc\x58\
\x41\x61\x61\x51\xb7\x84\xec\x0e\xcd\x2f\x5b\x97\x21\xe4\x17\x3e\
\xdb\x62\x8f\x3d\x32\x1f\x3b\x79\x9c\x5e\x5a\x09\x27\x53\xa5\xcc\
\xb8\xf3\x8e\xff\x2a\xde\x83\xda\x01\xe8\xa8\x65\x55\xc0\x97\xf1\
\xa1\x13\x1f\x3a\x74\x90\xf5\x1b\x36\x72\x41\x51\x0e\x3f\xba\x71\
\x0e\xd7\x4f\x9e\xf0\x95\x85\xcc\x46\xf3\xe6\x86\x0a\xef\x2e\x15\
\xc8\x86\x40\x8a\x36\x7f\xde\xfd\x54\x8c\x1c\xc6\xcb\x2b\xd6\x90\
\x9b\x9b\xfb\x95\x84\xec\xae\x22\x41\x47\x9c\xac\x39\xa0\x4a\xd8\
\x2e\xfc\xe9\xa7\x9f\x72\xed\xb5\xdf\x63\xc6\xcc\xdb\x68\x69\x69\
\xa1\xb9\xb9\x39\xe3\x1c\xef\xbf\x3e\x09\x8b\xdf\xb0\xaf\x3b\x75\
\x45\x03\x60\x59\x16\x8e\xe3\xb8\xd7\x34\xfe\xfb\xd7\xac\xdf\x03\
\x5e\x25\x2c\xcb\xa2\xa5\xa5\x85\x53\xa7\x4e\x91\x4c\x26\x19\x34\
\x78\x18\x5b\xb7\x6e\xed\x74\x23\xed\x7d\x76\x45\x03\x3a\xd1\xbd\
\x0a\x68\x9a\x86\x65\x59\x48\x29\x5d\x25\x3a\xae\x27\x23\x80\x00\
\x9c\x4e\x1e\x18\x3a\x74\x28\xb1\x58\x8c\xc3\x87\x0f\x67\xb8\xd0\
\x34\x4d\x0a\x0a\x0a\xc8\xcf\xcf\x67\xe0\xc0\x81\x81\xbb\xa7\xdf\
\x03\xea\xeb\x2c\x2c\x39\x83\xf2\x0e\x70\x2f\x8b\x85\x10\x24\x93\
\x49\x77\x4d\xcb\xb2\x38\x76\xec\x98\x26\xa5\x4c\x07\x7a\xa0\xb8\
\xb8\x98\x95\x2b\x57\x02\xb0\x76\xed\xda\x8c\x85\xd3\xe9\x34\x7b\
\xf7\xee\x0d\x8c\xcf\xa0\x78\xf5\xef\xa6\xfe\xcd\x2a\xa8\xad\xea\
\x90\x21\x43\xdc\x7f\x6b\x42\x08\x6e\xb9\xe5\x16\xa6\x4c\x99\x02\
\xc0\x3d\xf7\xdc\x13\x1e\x42\xea\xaf\xa2\x7a\xf7\x4e\xaa\xeb\x3a\
\x23\x47\x8e\xfc\xda\xd0\xa5\x27\x09\xee\xfd\xb4\xf5\x7f\xe6\x66\
\x28\xd0\xd0\xd0\xc0\xe2\xc5\x8b\x89\xc5\x62\xec\xdc\xb9\x93\x29\
\x53\xa6\x7c\x6d\x42\x7e\x55\x3e\x80\x75\xeb\xd6\x51\x5f\x5f\x4f\
\x2a\x95\xa2\xb1\xb1\x31\x3b\x0a\x55\x57\x57\x77\xfa\x89\xf7\xbf\
\x16\x32\xdb\x5c\x70\xfe\x0e\xb6\xba\xba\x9a\xa0\xe2\x47\xa1\x5a\
\x60\x94\x62\x6c\x6c\x6c\xc4\x71\x9c\x0c\x65\xc2\x62\xdb\x0f\x9b\
\x61\xf1\xdd\xd5\x18\x3f\xad\xa5\xa5\xc5\x9f\xe0\x7b\xbc\x02\x6b\
\x1d\xb0\x69\x00\x66\x3c\x1e\x1f\x12\x8b\xc5\x7e\x2e\xa5\x2c\x04\
\x74\x29\xa5\x0e\xe8\x9a\xa6\xe9\x9c\xff\x76\xd0\x15\xbd\x83\x47\
\xd7\x75\xdd\x30\x0c\xc3\xec\xa0\xe9\x9c\xdf\xea\x75\x5f\x5b\x02\
\x42\xd3\x34\x01\x88\x0e\xb8\x74\xdb\x9a\xa6\xb9\xd5\x37\x4e\x7a\
\xfb\x0d\xc3\x68\x6d\x6b\x6b\x7b\x73\xff\xfe\xfd\x07\x80\x76\xc0\
\xce\xf8\x3f\x00\xee\x66\xa2\x07\x54\xc3\xf3\xcc\x56\xf5\x0e\xcf\
\x2a\x3e\x38\x8f\xd9\xaa\x3a\x9e\xaa\xda\xb6\xaf\xed\x04\xf0\x08\
\x40\x48\x9f\xc0\xff\x01\x1e\xbf\xd8\x70\xe6\x62\x9f\x2b\x00\x00\
\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x44\xcc\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x80\x00\x00\x00\x80\x08\x06\x00\x00\x00\xc3\x3e\x61\xcb\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x26\x00\x00\x80\x84\
\x00\x00\xfa\x00\x00\x00\x80\xe8\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x70\x9c\xba\x51\x3c\x00\x00\x00\x06\
\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\
\x44\x55\x49\x44\x41\x54\x78\xda\xed\xbd\x79\x90\x5d\xd7\x79\x1f\
\xf8\xfb\xce\x39\x77\x79\x5b\xbf\x7e\xbd\x03\x8d\x85\xd8\xb8\x00\
\x5c\xc5\x45\xa2\x36\x92\x22\x29\x6b\x97\xbc\xd0\x5b\x79\x4b\xc6\
\x71\x66\xc6\x56\x55\x2a\xa9\x99\xcc\x1f\x33\x16\x3c\xa9\xd4\x94\
\x33\x1e\x27\x76\x26\x2e\x27\x29\x39\xb6\x33\x96\x1d\xc9\x1e\x2f\
\xb2\x25\xcb\xa2\x44\xca\xda\x48\x11\xdc\x09\x10\x1b\xb1\x36\x80\
\x46\xaf\xaf\xdf\x76\xef\x59\xbf\xf9\xe3\xbe\xd7\x78\x80\xe0\x38\
\x9e\x84\x00\x9a\xf6\x57\x75\xfa\xae\xef\xf6\x7d\xef\xfb\x9d\x6f\
\x3b\xdf\xf9\x0e\xf0\x77\xf4\xb7\x9a\xe4\xf5\x7e\x81\x6b\x45\xcc\
\x8c\x7d\xfb\xf6\xd1\xbe\x7d\xfb\xe8\xa9\xa7\x9e\xa2\x8f\x7e\xf4\
\xa3\xb4\x79\xf3\x66\xf1\xf0\xc3\x0f\xd3\xf7\x7d\xdf\xf7\xd1\x17\
\xbe\xf0\x05\x3c\xfc\xf0\xc3\xb4\x63\xc7\x0e\x7a\xea\xa9\xa7\x40\
\x44\x78\xfa\xe9\xa7\xaf\xf7\x6b\xbf\xe9\x44\xd7\xfb\x05\xde\x0c\
\xda\xbf\x7f\x3f\xf6\xee\xdd\x4b\x93\x93\x93\x04\x80\x76\xee\xdc\
\x49\x2f\xbd\xf4\x92\x08\x21\xc8\x6a\xb5\x4a\x00\x48\x08\x41\xdf\
\xfa\xce\x2b\xc9\xf9\xf9\x85\xe4\xce\x7d\x37\xf7\xf6\xec\xdc\xea\
\x84\x10\x9c\x65\x19\xe7\x79\x1e\x26\x27\x27\xbd\xf7\x9e\x77\xef\
\xde\x1d\x3e\xfd\xe9\x4f\xe3\x53\x9f\xfa\x54\x20\x7a\xeb\xfd\x5c\
\x6f\x99\x6f\xb4\x7f\xff\x7e\x00\x10\x0f\x3d\xf4\x10\x49\x29\xc5\
\xc2\xc2\x82\x7a\xe3\xf4\x7c\xf9\xf3\x5f\x7c\xfa\xce\x76\xa7\xb7\
\xcb\x79\xbf\xc5\x39\xb7\xcd\xfb\xb0\x99\x11\x4a\x60\xa4\x00\x88\
\x01\x80\x41\x44\x70\x00\x69\x22\x6a\x29\x25\xe7\xa4\x94\x67\x62\
\xa5\xe6\x1a\x8d\xfa\xeb\x3f\xf7\x0f\x7e\xe8\x88\x20\xb6\xf5\x7a\
\xdd\x2f\x2e\x2e\x06\x00\xfc\x83\x3f\xf8\x83\x7c\xbd\xbf\xf3\x7f\
\x0b\xda\xf0\x00\xf8\xec\x67\x3f\x4b\x69\x9a\xd2\xf8\xf8\xb8\xea\
\xf5\x7a\xea\xff\xfa\x37\xff\xf1\xe6\xf3\xf3\x0b\xef\x36\xda\xde\
\x6d\x9d\xbf\x99\x08\x82\x88\x02\x11\xc2\xec\xa6\x19\xb5\x6b\xe7\
\xf6\x74\xfb\xb6\xcd\x69\xad\x5a\x55\xa5\x52\x2a\x4b\x69\x22\x9d\
\x0f\x9c\x67\x99\xef\x66\x79\x58\x5e\x59\xb5\x27\x4e\x9c\xd1\xc7\
\x4f\x9c\xd2\xad\x76\x17\xcc\x2c\x01\xea\x45\x91\x7a\x35\x89\xa3\
\x17\xee\xdc\x77\xf3\x57\x7f\xea\x47\x3f\xb6\x18\x42\x70\xab\xab\
\xab\xe1\x89\x27\x9e\xd8\xd0\x92\x61\xc3\xbe\xf9\x67\x3f\xfb\x59\
\x4a\x92\x44\x8c\x8f\x8f\x47\xbf\xf1\x3b\x7f\x34\xf9\xe2\xcb\x87\
\x1f\xeb\xf6\x7a\x8f\x46\x51\xbc\xaf\x54\x4a\xd3\x76\xbb\x75\xf6\
\xc1\x07\xee\xa9\xbe\xfb\x9d\xf7\x8f\xde\xb9\xef\xb6\xd1\x3d\xbb\
\x6f\x1a\x2d\xa5\x49\xc4\x7c\xf9\xf7\x26\x22\x06\x30\xd4\x9b\x8b\
\x5d\x66\x60\x71\x69\x39\x3f\xf8\xfa\xb1\xb5\x17\x5e\x3e\xd8\xfe\
\xd2\x97\xbf\xd6\x3c\x3f\xbf\xc8\xcc\x2c\xa2\x48\xbe\x50\xad\x54\
\xfe\xe2\x7b\x3f\xfc\xc8\xd7\xde\xf9\xf6\xbb\xbb\xb3\xb3\xb3\xfe\
\x33\x9f\xf9\x4c\xe8\x4b\xa1\x0d\x45\x1b\x0e\x00\x9f\xfd\xec\x67\
\x09\x80\x98\x9c\x9c\x8c\xfe\xed\x6f\xfe\xc1\xd6\x83\xaf\x1f\x7b\
\xc2\xf9\xf0\xa1\x5a\xad\x36\x51\xad\x54\x46\x1b\xa3\xf5\xf8\x7b\
\x1e\x7b\x4f\xfc\xd0\xbb\xef\xf7\x53\x93\x13\xa5\x81\x7c\x07\x20\
\x2e\xff\xda\x03\x24\x10\x88\x60\x88\xc8\x0f\xae\x06\x66\x01\x66\
\x39\x8c\x8a\xe0\x43\x78\xe5\xe0\xe1\xd5\x3f\xfc\x93\x3f\x9f\xff\
\xf3\x2f\xff\x65\xdb\x87\x90\x00\x68\x55\xca\xe5\xdf\xff\xe0\x63\
\xef\xfa\xc3\x07\xde\x76\x5b\x7b\x6e\x6e\xce\x7d\xf2\x93\x9f\xdc\
\x50\x12\x61\xc3\xbc\xe9\xfe\xfd\xfb\xf1\x93\x3f\xf9\x93\xe2\xc4\
\x89\x13\xd1\x7f\xf8\x9d\x3f\xde\xf4\xd2\x2b\x87\x7f\x0a\x44\xef\
\x6f\x8c\x8d\x6d\xae\x54\x2a\xd5\x38\x52\xfc\xd8\x23\xef\x4a\x3e\
\xf4\xfe\xf7\xa6\xd5\x4a\x99\x00\xe2\x82\x0f\x84\xcb\xb7\x57\xf9\
\x11\x88\x9c\x52\xa2\x03\x2e\x60\xe1\x9c\x1f\x61\x66\xc1\xeb\x08\
\x60\x62\x66\x06\x10\x00\xb8\x37\x4e\x9c\x5e\xf9\xd5\x5f\xff\xcd\
\x93\x2f\xbd\x72\xc8\x7a\x1f\x12\x66\xe8\x72\x29\xfd\xfd\x1f\xfb\
\xc1\x0f\xff\xee\xdb\xee\xba\xb5\xfb\xd8\x63\x8f\xf9\xbe\x64\xb9\
\xe1\x69\x43\x00\xe0\xa9\xa7\x9e\xa2\xc5\xc5\x45\xe9\x59\x94\x7f\
\xe9\x5f\xff\xd6\x27\x72\x6d\x7e\x72\xac\x31\x76\x53\x6d\xa4\x36\
\x92\x44\x8a\x2b\x95\x32\xff\xc8\x13\x1f\x29\xdf\xbe\xf7\xe6\x84\
\x88\x98\xd0\x17\xed\x44\xc5\x17\x24\x82\x14\xa4\x85\x20\xdd\xef\
\xe9\x04\x86\x60\x40\x30\x40\x52\x50\x26\x88\x1c\x50\x00\x80\x43\
\x50\x81\x59\x86\xc0\x91\xf3\xa1\x0a\x66\x30\x73\xa1\x2b\x98\x09\
\x60\xe3\x9d\xef\x7d\xfa\xb7\xfe\xd3\xe1\x3f\xfc\xfc\x97\x9a\x3e\
\x84\x92\xf7\xa1\x44\x44\x0b\x93\x13\x63\xff\xfa\x17\xf7\xff\xa3\
\x6f\x1f\x3a\x74\xc8\xfc\xcc\xcf\xfc\xcc\x0d\x2f\x0d\x6e\xf8\x38\
\xc0\x2f\xff\xf2\x2f\x8b\xd1\xd1\xd1\xf8\xd3\xbf\xf3\x27\xb7\xfd\
\xe1\x9f\x7e\xf5\x7f\xaf\xd5\x46\x7e\x74\x7a\x66\xfa\xa6\x7a\xad\
\x16\x97\xcb\x25\x5f\x2e\x95\xfd\xf7\x7e\xec\xf1\xd2\xbe\x5b\xf7\
\xc4\xcc\x1c\x38\x04\x66\x66\x66\x30\x87\xc0\x41\x0a\x6a\x2b\x25\
\x16\xa4\xa0\x35\x41\xe8\x11\x28\x23\x20\x23\x81\x9e\x20\x74\x05\
\xa1\x0d\x40\x33\xd8\x32\xb3\x05\x07\xc7\x80\x26\x20\x27\xa0\xc7\
\xcc\xda\x3a\x5f\x0e\x21\x70\xd1\x98\xbd\xf3\xd2\x07\x8e\xef\xbe\
\x63\x6f\xfd\xec\xb9\xf3\xa7\x2f\xcc\x2f\xb4\xfb\xc0\xaa\x75\xda\
\xbd\xef\xf9\xb3\x2f\x7f\x7d\x57\x92\x94\x5e\x72\xba\xa3\x6f\xbb\
\xed\x36\xbe\x91\xe3\x09\x37\x2c\x3c\x99\x19\x4f\x3f\xfd\xb4\x74\
\xce\x25\xff\xdb\x3f\xff\x37\x1f\xec\xe5\xe6\xe7\x66\xa6\xa7\x6f\
\x2e\x97\xcb\x51\x9a\xc4\x3e\x49\x92\x10\x27\xb1\xdf\xbc\x69\x4a\
\xfc\xf0\xf7\x7d\xa8\x21\x04\x31\x11\xf5\xf5\x3d\x41\x08\xb8\x52\
\x12\x9f\x11\x92\x4c\x5f\xcf\xf3\xfa\xd7\x25\xf0\xd5\xbe\xf8\x25\
\x89\xcf\x54\x88\xff\x62\x9b\x1b\x37\xe9\xac\x1f\x67\x66\x30\x18\
\x21\x30\x02\x07\xe2\x10\x68\x61\x61\x69\xe9\x17\xff\xd5\xaf\xbf\
\xbc\xba\xb2\xda\xed\xf6\x32\xe9\x7d\xa8\x7a\x1f\x2a\x0c\x5e\xdd\
\x32\x3b\xf3\xcf\xfe\x8f\x9f\xff\x47\xaf\x28\xa5\xec\x7d\xf7\xdd\
\x77\x43\xaa\x84\x1b\x12\x00\xcc\x8c\x27\x9f\x7c\x52\xbd\x76\xf8\
\x8d\xda\x6f\xfc\x3f\x7f\xf4\x73\x95\x4a\xf5\xfb\x27\x27\x26\x36\
\xa7\x69\x12\xd2\x24\xf1\x69\x9a\xf8\x38\x49\x42\x12\xab\xb0\xef\
\xb6\x3d\xe9\xdb\xef\xbb\xab\x2a\x88\x00\x2a\x44\x3f\x81\x20\x04\
\xe9\x38\x56\x0b\xb1\x92\x2d\x21\xc8\x61\x9d\xbf\x74\xb5\x6f\xcd\
\x28\xd4\xc2\xd0\x21\xa8\x60\x38\x10\x7c\x88\x8d\xf5\x0d\x6b\xfd\
\x98\x67\x4e\x98\x19\xdc\x07\x81\x73\x0e\x9f\xfe\xed\x3f\x58\xed\
\x65\x19\x2d\x2e\x2c\x9c\xbc\x70\xe1\xc2\x7c\xa6\x75\xe2\x5d\xa8\
\xba\x10\xa2\x7a\xad\xfa\xeb\xbf\xfe\x2f\x7f\xfe\x0f\xca\xe5\xb2\
\xbd\xfd\xf6\xdb\xc3\xf5\xfe\x6d\xaf\xa4\x1b\x4e\x05\xec\xdf\xbf\
\x1f\x5a\x6b\xf5\xf5\x67\x5e\x1a\xff\xbd\xdf\xff\xe2\xff\x3a\x39\
\x35\xf5\x43\xe3\xe3\xe3\x13\x95\x72\xd9\x55\xca\x65\x5f\x29\x97\
\x5d\xa9\x54\xf2\xe5\x24\xf1\x69\x5a\x0a\xd5\x4a\x19\xb3\x9b\xa7\
\x62\x0e\xcc\x0c\x98\x48\x8a\x0b\x52\x8a\x65\x21\x68\x95\x80\x4c\
\x10\x5a\xcc\x6c\x99\xd9\x15\xdb\x60\xb0\xbe\xcf\xa6\x7f\xde\x31\
\xb3\x0d\x21\x58\xe6\x60\xfb\xd7\x6c\x08\xc5\x79\x80\x73\x21\xd0\
\xb4\xce\xa7\xce\xfb\x72\xf0\x81\x43\x08\x08\x81\x39\xcb\x73\xff\
\xc6\x89\xb3\x3d\x29\x04\x4a\xe5\x72\xa3\x31\x36\xb6\xad\x5a\xa9\
\x8e\x28\xa5\xa4\x31\x26\xef\x75\xb3\xb7\x7f\xe1\xcb\xdf\x28\x3f\
\xf4\xce\xb7\xbd\xfc\xf0\xc3\x0f\x85\x3f\xfe\xe3\x3f\xbe\xa1\x24\
\xc1\x0d\x05\x00\x66\x86\xd6\x5a\xfd\xd1\x17\x9e\xde\xf4\xe5\xa7\
\x9e\xf9\xf9\x4d\x9b\x37\x7f\x78\x74\xa4\x5e\xae\x94\xcb\xae\x5c\
\x29\xfb\x72\xa9\xec\x4b\xa5\xc4\x97\xd2\x52\x88\x93\x24\x38\xd3\
\xfb\x56\x73\x75\xe1\xab\xbb\x77\xee\xdc\x2e\x84\x28\x17\x7a\x1c\
\xda\xf9\x10\x79\xef\x05\x83\x8d\xb5\x3e\xd1\xd6\x55\x8c\x75\x15\
\x25\x69\x89\x99\x5d\x08\x61\xb8\xd9\xc1\xf6\x12\x48\x2e\x01\xa6\
\x7f\xde\x86\x10\xac\x36\x6e\xd4\x3a\x5f\x19\xd8\x02\x21\x04\x3e\
\x7d\xfa\x7c\xb6\xba\xd6\x36\x42\x48\x16\x52\xb0\x10\x02\x2a\x8a\
\xd2\x34\x4d\xeb\xb5\x5a\x6d\x4a\x45\x4a\xb5\xda\xed\xed\x5f\x7c\
\xf2\x9b\x33\xef\x7e\xf0\xbe\xe7\xde\xfb\x9e\x77\xf9\x2f\x7d\xe9\
\x4b\x37\x0c\x08\xd4\xf5\x7e\x81\x01\xf5\x75\xbe\xfa\x8b\xa7\x9e\
\x99\xfa\xd6\xb3\x2f\x7d\x6a\x76\xcb\x96\xc7\x46\x6a\xd5\xb4\x94\
\xa6\xae\x54\x2a\xf9\x34\x4d\x7c\x12\xc7\x21\x8e\xe3\x10\x45\x2a\
\x34\x57\x2e\xfc\xde\xea\xe2\xb9\x83\x51\xa4\xfc\x33\xdf\xfe\xfa\
\x85\x07\xde\xf1\xde\x7f\x16\x29\xa9\x2c\xf9\x09\x00\xdc\x77\x00\
\xa6\x89\xa8\x2b\x04\x65\x91\x14\xab\x56\x22\x03\x80\xbf\xce\x32\
\x67\xbe\x2a\x7f\x28\xcb\x4d\x23\x30\x02\xfa\xd7\x9b\xcd\x96\x3b\
\x3d\x37\xdf\x49\xe2\x98\x84\x14\x24\x95\x80\x92\x12\x52\x4a\x96\
\x42\x40\x08\x01\x21\xe4\x28\x11\xe1\xfc\x85\xf9\x87\xff\xc5\xaf\
\xfe\xa6\xf8\xa5\x7f\xf6\x4f\x7e\x91\x99\xf3\x1b\xc5\x4d\xbc\x21\
\x00\xc0\xcc\xf8\xe2\x17\xbf\xa8\x0e\xbc\x74\x68\xf4\xa9\xaf\x3f\
\xf7\xbf\x6c\xd9\xba\xed\xd1\x5a\xad\x9a\x94\xd2\xd4\x95\x4b\x25\
\x57\x4a\x93\x90\x24\x49\x88\xe3\x38\x28\xa5\x82\x73\xf9\xcb\xed\
\x95\x0b\xaf\xa6\x69\x12\x88\x88\x57\x9a\xbd\xda\x2b\xaf\x1d\x5d\
\xde\x77\xeb\xf6\x33\xe3\x8d\x91\x05\x66\x56\xce\x87\x9a\x52\xa2\
\xa5\xa4\xe8\x01\x05\xd3\x9d\x73\x18\xec\xff\x97\xbc\xd3\x30\x79\
\x1f\x4a\xce\xf9\x2c\x04\x56\x0c\xe6\x85\x85\x15\x7d\xea\xec\x85\
\x9e\x14\x02\x88\x23\x92\x5e\x90\x14\x82\x85\x10\x4c\x42\xb2\x20\
\x09\x22\xc1\x82\x04\x33\x87\x2a\x40\x9b\x16\x16\x16\x1f\xf9\xa7\
\x9f\xfa\x97\xab\x91\x12\xff\xf6\xc0\x81\x03\xfa\x46\x30\x0c\x6f\
\x08\x00\x5c\xb8\x70\x41\x2c\x2d\xaf\x95\xff\xe0\xf3\x4f\x7e\x72\
\xcb\xec\xd6\x0f\xd7\x6a\xd5\xb4\x5c\x2a\xf9\x52\xa9\xe4\x4a\xa5\
\x24\x24\x71\xe2\x93\x38\xe6\x28\x52\x01\x84\xe5\xa5\xf9\xb3\x7f\
\x16\xc5\xb1\x97\x42\x04\x29\x05\x46\x46\x27\xef\x5f\xbc\x78\xf6\
\x37\x3e\x7f\xe4\xb9\x63\xf7\xdc\x73\xcf\xa6\xdd\xbb\x77\xef\x48\
\xd3\xb4\x1c\x7c\x60\xed\x21\xbc\xe7\x2a\x00\xa1\xa4\xe8\x14\x83\
\x3e\x05\x08\x98\x99\xae\x00\x03\x0f\x18\xdf\xbf\x3e\x38\x89\x6e\
\xe6\x77\x6b\xeb\xcb\xed\x76\x37\xbf\x30\xbf\x94\xf7\xb2\xdc\x0b\
\x22\x90\x94\x80\x20\x08\x41\x0c\x21\x48\x14\x4c\x87\x20\x40\x08\
\x30\x09\x00\x04\x29\x85\x1c\x8f\xa3\xa8\x7c\x7e\xfe\xc2\xf7\xfe\
\xd3\xfd\xff\x6a\xe9\xd7\xff\xe5\xa7\x3e\xc7\xcc\xf6\x7a\x4b\x82\
\xeb\xee\x05\x1c\x38\x70\x80\xda\xed\x76\xfa\x3f\x7f\xea\x97\x7f\
\x70\xac\x31\xfe\x8f\x47\xc7\xc6\xa6\x2a\xa5\x92\x2f\x97\xca\x2e\
\x2d\x25\xbe\x94\x26\x21\x8a\xe3\x10\x49\xc5\x81\xed\x91\xf9\xb3\
\x47\x7e\xa7\x52\x1d\xad\x8f\x8e\x4d\xdf\x2f\x55\x54\x5d\x38\x77\
\xf4\x3f\x09\x21\x04\x07\xef\xfa\x6e\x20\x0b\x21\xb0\x7d\xfb\xf6\
\xfa\xec\xd6\x9d\xf7\x46\x69\xe5\x31\x29\x55\x03\x00\x24\x61\xb5\
\x56\x51\x4f\x4b\x41\xbd\xcb\x7e\x84\x21\x66\x0f\x68\x88\xf9\xa1\
\xd7\xeb\x75\x2f\x2e\x36\xab\x4b\x6b\xe6\xc3\xd6\x3a\xd9\xbf\x80\
\x81\x5b\xc8\x5c\xdc\xef\x43\x20\xef\x3d\x79\xe7\xc8\x68\x23\xb4\
\xd1\x22\xcb\xb4\xec\x66\x99\xea\xf5\x7a\xb2\xd7\xeb\xa9\x3c\x37\
\x34\x3f\x3f\x7f\x64\x74\xb4\xfa\x4f\x7e\xfe\x7f\xfa\x99\xe7\x1f\
\x7e\xf8\x61\x77\x3d\x83\x45\xd7\x55\x02\x30\x33\x3d\xfd\xf4\xd3\
\xd1\x2f\xfe\xca\x7f\xb8\xab\x52\xa9\xfe\xfd\xc6\xd8\xd8\x64\xa9\
\x94\xae\xf7\xfc\x34\x4d\x43\x14\xc7\x3e\x8a\x14\x2b\x21\x19\x10\
\xa5\x9b\x76\xdf\xfd\x49\xa9\xa2\x31\x22\x42\xaf\xbd\xfc\xbb\x04\
\xf6\x04\xf6\x42\x4a\x0c\xe2\x00\x51\x52\x2a\x67\xbe\xf4\x3d\xa7\
\xcf\x37\xdf\x06\x34\x29\x8a\x54\x3b\x4d\x12\x19\x45\xaa\xb4\xd2\
\x94\xf7\x34\xaa\xf4\x87\x52\x4a\x29\x0a\x92\x00\xd8\x7b\xef\x43\
\x08\xc1\x39\xe7\xb2\x2c\x33\x59\x96\xe5\xad\x56\x2b\x9b\x9f\x9f\
\xef\x18\x63\x02\x33\x23\x29\xd7\x8f\xd4\xc7\x67\xff\xbe\x20\x39\
\xce\xe8\x03\x07\x40\x08\xa1\x1f\x66\x16\x5c\x74\x78\x80\x80\x00\
\x41\x60\x22\x30\x78\x00\x18\x02\x48\x4e\xcf\x4c\xef\x9e\x9b\x9b\
\xfb\x47\xbf\xf7\xff\x7e\xe9\x1f\xdf\x72\xcb\x2d\x17\x51\x84\x98\
\xff\x76\x01\x80\x99\xf1\xec\xb3\xcf\xca\x3f\xfa\xc2\x57\x27\x9b\
\x6b\xed\xff\x61\xfb\xf6\x9b\xf6\xa4\x69\x1a\x4a\x69\xc9\x95\x4a\
\xa9\x4f\x93\xc4\xf7\x0d\x3e\x56\x52\xf5\x2d\x6c\x35\x5b\xf8\xf8\
\x02\x0c\x36\xed\xd5\x0b\xaf\x4a\x29\x59\x08\x01\x22\x42\xb9\x36\
\xb6\x29\x49\x2a\x9b\x93\x4a\xfd\xdd\x42\xa8\xd9\xc1\xff\xf2\xde\
\x73\xb7\x97\xb9\x10\xdc\xc9\xd6\xf2\xd9\xdf\x0c\x4e\xf7\xfe\xba\
\x77\x1b\x6c\x99\x99\xa4\x94\x28\xd7\xa7\xf6\x54\x6a\x13\x7f\x8f\
\xb9\xf8\xcd\x08\x05\x63\x09\x0c\x21\x05\xd8\x07\x08\x41\x60\x80\
\xd5\x20\x20\x45\x14\xd0\x1f\x41\x60\x06\x02\xf3\x40\x70\xa8\xe9\
\xa9\xa9\x7b\xbf\xf9\xed\xe7\xff\xc1\x4b\x2f\xbf\xfc\x7f\x1e\x38\
\x70\x20\xbf\x5e\xf6\x80\xf8\xaf\x7f\xc4\xff\x3f\xfa\xdc\xe7\x3e\
\x27\xd6\xd6\xd6\xe2\x6f\x3e\xf3\xe2\xc7\x37\x6d\xde\xfc\xf6\x34\
\x49\x42\x29\x4d\x7c\x9a\x26\x21\x49\xe2\x10\x27\x49\x9f\xf9\x92\
\x85\x1a\x30\xb9\xb0\xac\x01\x40\x90\x88\xcb\xb5\xc6\xb8\xe8\x5b\
\xdb\xe5\xda\xd8\xf4\xe8\xf8\xd6\x9f\x2d\xd7\xc6\x7f\x48\x0a\x35\
\x4b\xfd\xa0\x90\x10\x82\x89\x04\x13\x11\x23\xb8\xd3\xec\x4d\x8f\
\xa8\x00\x91\x2c\x2c\xf6\xef\xda\x1f\x34\x39\xb0\xe8\xa5\x64\xdd\
\x59\x3e\x9a\xf7\x9a\xbf\x4b\xe0\x25\xd1\x67\x70\xd1\x04\x08\x04\
\x21\x25\x44\xff\xb9\x42\x08\x96\x4a\xb1\x54\x8a\xe3\x38\x0a\x49\
\x12\x87\x52\x9a\x86\x52\x9a\xfa\x52\x9a\x84\x34\x4d\x7c\xad\x56\
\x1b\xad\x8d\xd4\x1f\xfd\xe7\xbf\xf4\xef\xdf\xb5\xb4\xb4\x24\xff\
\x0a\xcf\xe3\x4d\xa7\xeb\x22\x01\xf6\xef\xdf\x8f\x99\x99\x19\xf9\
\xaf\x7e\xed\xb7\x6f\xa9\xd7\x1b\x1f\xab\x56\xaa\xd5\x24\x4d\x43\
\x92\x26\x3e\x89\x93\x90\x44\x71\x50\xaa\xcf\x7c\x29\x21\x0b\x6e\
\xae\x33\x9f\x88\x00\xe6\xcc\x5b\xd3\x2a\x24\x80\xa4\xfa\xf8\x96\
\x1f\x27\x21\x62\x02\xf1\x65\x96\x0d\xf5\x7b\x2b\x87\x05\xdd\x6b\
\x7e\x5b\x4a\x19\x70\xc9\xf6\xe9\x4b\xf2\x4b\x1f\xe8\xef\x33\xfa\
\x91\x40\xa0\x10\xf1\xd5\xc6\xec\x3b\xa2\xb8\xfc\x00\x83\x7b\x44\
\x02\x20\x66\x02\xd1\x65\x8c\x13\x02\x60\x86\x60\x06\x15\xa2\x03\
\x54\xf4\xfa\x10\x98\xc9\x87\x40\x21\x04\xef\x03\x23\x04\xa6\xc9\
\xa9\xa9\xdd\xa7\x4f\x9d\xf8\xa1\x2f\x7d\xe5\xdb\x2f\x25\x49\xb2\
\x82\xeb\xa0\x0a\xae\x0b\x00\xf6\xee\xdd\x2b\xce\x5d\xb8\x58\x3a\
\x37\xbf\xf8\x03\xbb\x76\xef\xb9\x35\x89\xe3\x90\x24\x71\xc1\xfc\
\x24\x2a\x7a\xbe\x92\x2c\x94\x82\x20\x62\xe7\xcc\x2b\x56\x77\x0f\
\x56\x46\x26\x7e\x44\x90\x80\x35\xdd\xaf\x75\x5a\x8b\xdf\x0c\x5e\
\xf7\x84\x10\x48\x2b\x23\x0d\x21\xd4\x24\x10\x9a\xce\x66\xcf\x5a\
\xdd\x3d\x1a\xbc\x6d\x31\x7b\x23\x84\x8c\xa2\xb4\xba\xdd\xf4\x9a\
\x87\x39\xb8\x5c\x4a\x09\x00\x3c\xc4\xf4\xcb\xba\xde\x10\x43\x99\
\x99\x11\x42\x80\x94\x12\x49\x5a\x7d\x1f\x48\x34\xfa\x9f\x28\x46\
\x0a\x18\xdc\x8f\x40\xaf\x7b\x14\x0c\x02\x53\xf1\x39\x21\x25\x2b\
\xf4\xcf\x15\x03\x55\x14\x82\xc7\x00\x08\x40\x90\x53\x53\x33\x77\
\x7f\xed\x5b\xcf\x7d\xe8\xa1\x77\xdd\xf3\x7b\xfb\xf7\xef\x37\xd7\
\x3a\xa9\xe4\x9a\x03\x60\xff\xfe\xfd\x98\x9c\x9c\x54\xff\xfc\x97\
\xfe\xdd\xdb\x26\x27\xa7\x1e\x4c\xe2\x44\x26\x49\xe2\x25\xf1\x85\
\xe0\xed\x42\xab\xd9\x3e\x28\xd8\x9e\xde\xb4\x6d\xd7\x27\x9c\xc9\
\x17\x92\xb4\x7c\x77\xd6\x59\xf9\xba\xce\x3b\x8b\xb5\xd1\x29\x66\
\xf6\x4b\x3a\x5b\x7b\xc9\xe9\xee\x8a\xec\x1b\x7e\x1c\x6c\x27\x78\
\xfd\xed\xce\xca\xdc\x9f\x30\x7b\x4b\x43\x22\x3a\x04\x82\xe9\xea\
\x55\xe0\x92\xff\x7f\xe5\x16\x00\xa2\xa4\x3a\x1d\x95\x46\xdf\x09\
\x50\xb9\xb3\x72\xfa\x77\x06\xd7\xa5\x54\xa2\x3c\x3a\xfb\x03\x24\
\xe4\x80\xf9\x97\xc6\x94\xa8\xd0\xed\x7d\x31\xc2\xcc\x5c\x18\x83\
\x0c\x88\xc2\xf8\x23\x08\x82\x52\x92\x03\x2b\x4e\x91\x06\xe7\x3d\
\xbc\xf7\x21\x38\x4f\x1c\x02\x8d\x8e\x8e\xce\x2e\x2d\x2d\x3c\xfc\
\xf9\x2f\x7d\xfd\x2b\xdf\xf3\xc8\xdb\x2f\xe0\x1a\x4b\x81\x6b\x1e\
\x0a\xfe\xd4\xa7\x3e\x25\x56\x9b\xad\xf2\x57\xfe\xf2\x99\x1f\xde\
\xba\x75\xfb\x23\xa5\x34\x25\x20\x9c\x38\x7b\xea\xe8\xef\xf5\xda\
\xcb\x2f\x5a\xd3\x3d\xf7\x81\xf7\xbf\x6f\x76\xd7\x4d\xb3\x0b\x87\
\x5e\x3d\xf0\x94\x77\xe6\x50\xaf\xbd\x7c\x76\x6c\x72\xdb\xbd\x24\
\xc8\xac\xcc\x1f\xff\xf7\xce\xe6\xcd\x21\xdd\xcd\x04\x76\x36\x6f\
\x1f\x01\xd8\x0f\x74\xf7\xc0\x30\xa4\x21\xbd\x2c\x84\x80\x52\x6a\
\xfd\xdc\xe0\x7a\x5a\x19\xbf\x2d\xa9\x8c\xff\x43\x21\xa2\x1d\x24\
\x44\xe4\x75\xeb\xeb\x83\xeb\x49\x75\xe2\x4e\x15\x57\x3e\x34\x3c\
\xd2\x08\x2a\x92\x4b\x86\x81\xc4\x5c\x6c\xfb\xb9\x27\x05\x3a\x30\
\xf8\x0c\x68\xac\x51\x8f\xf6\xde\xb2\xb3\xb6\xb8\xb4\x62\x9c\xf3\
\x85\x41\x08\x26\xe6\x40\x4a\x29\x71\xf2\xd4\xe9\x85\x4f\x7c\xe4\
\xb1\x23\x3b\x76\xec\xf0\xd7\x72\xf8\xf8\x9a\x1a\x81\xfb\xf7\xef\
\x47\xbd\x5e\x97\x9f\xfe\xed\x3f\xd8\x37\x35\x31\x7d\x5f\x12\x47\
\x14\xc5\x91\xeb\xb6\x9a\xcf\x2b\x29\x7a\x49\x1c\xb9\x07\xee\xbf\
\x77\x7c\xe7\xce\x9d\x37\x4d\x8c\x8f\x4f\x7d\xe2\x13\x9f\x78\xbb\
\x60\x73\x51\x08\x81\xa4\x5c\x7b\x3b\x01\x1a\x60\x5b\x18\x76\xc4\
\x03\x0f\xa0\x90\x04\x08\x42\x88\xc1\xb9\xf5\xeb\x4a\xa9\xf5\xfb\
\x06\x4c\x1d\x36\xf0\xe2\xb4\x52\x8b\xcb\x8d\x9f\x00\x09\x05\xa2\
\x40\x24\x1a\x95\xc6\xf6\xbf\x97\x56\x27\xf6\x49\xa9\x64\x92\x8e\
\x7c\x58\x90\x08\xa2\x9f\x58\x7a\xa9\x51\x00\x8a\x2d\x01\x41\x08\
\x0a\x82\x10\x48\x50\x71\x8e\xa8\xd8\x17\xf0\x93\x13\x63\x6a\xef\
\x2d\x3b\x2a\x23\x23\x15\x71\xc7\xde\x3d\x69\x12\xc7\x2e\x89\x63\
\x1f\xc7\x91\x8b\xa3\xd8\x8f\x8e\x8e\x6e\xf3\x81\x1f\xfc\xfc\x17\
\xbf\x36\xfa\xd0\x43\x0f\x5d\x53\x9e\x5c\x6b\x2f\x40\x2c\x2e\x2e\
\xc6\x8b\xcb\xcb\xf7\x8d\x4f\x4e\xec\x8a\xa2\x28\x44\x4a\xf5\x3a\
\xed\x95\xe3\x49\xac\xec\xf4\xf4\x94\x7c\xcf\xbb\xde\x75\x77\x1c\
\x47\x51\x1c\x47\x71\xa3\xd1\x98\xfa\xd8\xc7\x3e\xf6\xee\x46\xa3\
\x11\x11\x78\x8d\xd9\xcf\x0f\xac\xf5\x81\xf8\xef\x33\x95\xfb\xd6\
\xfa\xe0\x1c\x2b\xa5\x58\x0c\x42\xb3\x97\xae\xf3\x30\x48\x84\x10\
\x48\x2a\x13\x1f\x10\x82\x20\x25\x1c\xb1\x79\x4a\x4a\xe1\x65\x14\
\xdd\x1c\x97\x46\x7f\xb8\xd2\xd8\xf2\x03\x24\xe5\x08\x49\xf2\x24\
\x85\x13\x52\x7a\x21\x85\x17\x92\xfa\x4d\x5c\xb6\x25\x29\x3c\x09\
\x1a\xba\x4e\x61\x72\xa2\xa1\xf6\xec\xda\x52\x8e\xe3\x98\xe2\x38\
\xe2\xad\x5b\x66\x92\x1d\x3b\xb6\xa8\x38\x89\x6c\x1c\xc7\x2e\x8a\
\x94\x8b\xa2\x08\x53\x53\x53\xbb\x9e\x3d\xf0\xd2\xdb\x56\x57\x57\
\xaf\xa9\x54\xbe\xa6\x00\x78\xec\xb1\xc7\xe8\x4f\xbe\xf8\xd4\x64\
\xad\x3a\x72\x67\x9a\x24\x71\x1c\x45\xb6\xd7\x6d\x3d\x23\x25\xe5\
\x71\xa4\xec\x07\xde\xff\xd8\x6d\xe5\x72\xa9\x1a\x45\x51\xac\x94\
\x8a\x95\x52\xf1\xc8\xc8\xc8\xd4\x8e\x1d\x3b\xc6\x5a\x2b\xe7\x7e\
\x5f\x77\x9b\xaf\x0e\x89\xf3\xcb\x18\x3a\xcc\xf8\x61\x70\x0c\x83\
\x65\x58\xf4\x4b\x95\x94\xca\xf5\xcd\x1f\x57\x51\x7c\xb7\x10\x14\
\x08\x98\x13\x42\xb0\x20\x0a\x92\x28\x08\x01\x21\x55\x74\xa7\x14\
\xe4\xfb\x2d\x08\x42\x7f\x5f\x78\x41\xc2\x4b\x22\x2f\x88\xbc\x10\
\xc5\x76\xe8\x5e\x2f\x05\xf9\xc9\xb1\x51\xb5\x73\xfb\x6c\x35\x56\
\x11\xa2\x48\x21\x8e\x14\xc5\x71\xc4\xf7\xdc\x71\x4b\xb5\x31\x5a\
\x47\x1c\x29\x17\x45\xca\x47\x4a\xba\x7a\x7d\x74\x53\xb7\x97\xdd\
\x2e\xa5\x8a\xfa\x89\xaf\xd7\x84\xae\x99\x11\xb8\x7f\xff\x7e\x9c\
\x3d\x7b\x56\x1d\x3a\xfc\xc6\xbe\xc9\xa9\x99\x4d\x4a\x29\xa7\x22\
\xe5\x3b\x17\x57\x8f\x46\x91\x32\x5b\xb7\x6c\x49\x6f\xba\x69\xfb\
\x6e\xa5\x94\xe8\xbb\x7b\x4c\x44\x7c\xee\xdc\xb9\xd7\x0e\x1e\x3c\
\x78\x41\x08\xc1\xec\x0d\xfa\x56\xfc\x7a\xcf\x1f\x12\xeb\x3c\xec\
\x26\xf6\x7b\xfe\xfa\xf1\x95\xfb\x49\x6d\xf2\x51\x21\xe2\x7b\x81\
\x62\x6c\xc0\xea\xe6\x7f\x8a\xca\x8d\x9f\x04\x06\xb9\x81\x7d\x6f\
\x60\xb0\x61\xe0\x52\xd8\xbe\x7f\xb5\x1f\x42\x1e\x18\x7f\x03\x3b\
\x80\x99\x21\x08\x34\x3d\x35\x9e\x28\x25\xa1\x94\x24\xa9\x24\x64\
\x11\x28\x22\x29\x25\xdd\x7d\xc7\x2d\x95\xaf\x7d\xe3\xb9\x35\x17\
\x39\xf6\xde\xcb\x4a\xa9\x34\x52\x2a\x95\xb6\xfc\xce\xef\x7f\x61\
\xf6\xfe\x3b\x77\x9d\x00\xe0\x71\x0d\xe8\x9a\x49\x80\xbd\x7b\xf7\
\x8a\xb1\xb1\xb1\xa8\xdd\xe9\xdd\x32\x32\x52\x9f\x51\x4a\xfa\x28\
\x52\x4e\x2a\x91\x47\x4a\xba\xc7\x1e\x7b\x64\x6f\x1c\xc7\xa9\x52\
\x2a\x96\x52\x46\x4a\xa9\xd8\x39\xd7\xfb\xca\x57\xbe\xf2\xc2\x80\
\xb9\xfd\x9e\xbe\x2e\xce\x87\x0d\xc1\x61\xa9\x30\x24\x11\xd6\x03\
\x39\xa5\xda\xd4\xbd\x71\x5a\x1d\x5f\xbf\x57\xaa\x11\x92\xc2\x0f\
\x1a\xd8\xae\xb0\xeb\xfe\x21\x11\x56\x48\x90\x17\x42\x14\xed\x32\
\xb1\xde\xbf\x7f\x70\x5e\x0e\x9f\x97\x41\x48\x11\x48\x50\x10\x92\
\x02\x04\xf9\xb3\xe7\x2e\xae\x0a\x21\x82\x54\x12\x4a\x29\x28\xa5\
\x10\x47\x11\xe2\x48\x61\xdb\xd6\x4d\xa5\xb1\xb1\x3a\xa2\x38\x72\
\x51\x14\x79\x19\xa9\x30\x35\x35\x35\x7b\x76\xee\xfc\x6d\xdb\xb7\
\x6f\x17\xd7\x2a\x30\x74\xcd\x00\x90\x24\x09\xfd\xf9\x57\xbe\x39\
\x5e\xad\x56\xb6\xc4\x49\x5c\x8a\x94\x72\x4a\x0a\x1d\x9c\x59\x9b\
\x9a\x9c\x8c\x76\xee\xd8\x71\x9b\x52\x2a\xea\x03\x20\x91\x52\xc6\
\x87\x0e\x1d\x7a\xc6\x7b\xef\x87\x98\xbf\xde\xf3\x07\x51\xbe\x61\
\x03\xef\xaf\xd0\xf9\x50\x51\x92\x46\x69\xe5\x83\x49\x65\xec\x7d\
\xeb\xc6\x20\xdb\xa3\x14\xf4\x57\x24\xe1\x62\x91\x31\x0c\xeb\x6d\
\xef\x04\xb1\x7d\x4e\x0a\x51\x88\xf9\x75\xd1\xde\xdf\xef\x8b\xf6\
\xf5\x7d\xea\xab\x02\x41\x85\x3a\x18\x52\x0f\x42\x90\x37\xc6\xe8\
\xb9\xf3\x0b\x2b\x85\x14\x10\x88\x94\x82\x52\x92\x06\x40\xb8\x63\
\xdf\xcd\xd5\x81\x0a\x50\x52\xb8\x6a\xad\x36\x69\x8d\xdb\x56\xad\
\x56\xd5\xe7\x3e\xf7\xb9\x6b\xa2\x06\xae\x09\x00\x98\x19\x8d\x46\
\x43\xbc\x7e\xe4\x8d\xed\xe5\x72\xa9\x22\xa5\x70\x44\x68\xad\x2e\
\x2f\x3c\x09\xb0\x7e\xfb\x03\xf7\xcd\xc6\x71\x54\x96\x52\xc6\x52\
\xca\x58\x29\x15\x6b\xad\x97\x0f\x1c\x38\x70\x6a\x58\xc7\xf7\x7b\
\xfc\x70\xcf\xc7\x15\xcc\xff\xae\xf3\xa5\xea\xf8\xde\xb4\x3a\xf1\
\x88\x20\xe1\x09\x58\x1b\x3c\xcf\x65\xcd\x67\x09\x68\x22\xd8\x97\
\x04\x91\x8b\x92\xda\xe6\xb4\x36\xfd\xfd\x32\x2e\xbd\x67\x88\xf1\
\x4e\x14\xad\xd8\x27\xe1\x05\x91\x2b\x18\x4f\xa1\x0f\x84\x40\x24\
\x82\x10\xe4\x04\x91\x13\x03\xe9\x41\xe4\x89\xc8\x2d\x2d\xad\xb6\
\xb2\x4c\x67\x4a\x4a\x92\x52\x92\x92\x8a\xa2\x48\x89\x28\x52\xd8\
\xb1\x6d\x73\x79\xb4\x5e\x63\x15\x29\xa7\x94\x72\xe5\x52\xa9\xc6\
\xec\x47\xff\xe2\xab\xdf\x1e\xe9\xf5\x7a\x6f\x1d\x00\x7c\xee\x73\
\x9f\xa3\x4e\xa7\xa3\xda\x9d\xee\xb6\x76\xbb\xb5\x24\x48\x98\xd5\
\x95\xc5\x67\x2f\x5e\x3c\x7f\x58\x29\x11\x76\xed\xda\xb9\x4d\x4a\
\x19\x0d\x00\x20\x84\x88\xe7\xe6\xe6\x5e\x1d\x72\xdd\xd6\x7b\xf6\
\xba\x11\x37\x24\xf6\x87\x99\x3f\x7c\x4e\xaa\x28\x8a\xd2\x91\x8f\
\x48\xa9\xa6\x09\xfe\x25\x15\xc7\x77\x4b\x15\x45\x03\x1b\x21\x8a\
\x4b\xef\x00\xb8\xe9\xf5\xda\xa7\x09\xa1\x2b\x85\x50\xfd\x1e\xee\
\x48\x92\xa3\x7e\xaf\x26\x22\x47\x34\x90\x02\x22\x08\x49\x5e\x08\
\xe1\xa8\xcf\x64\x21\x2e\x31\xfe\xd2\x71\x01\x1c\x12\xe4\x4e\x9e\
\x9e\xbb\x28\x85\x80\x54\x82\x94\x92\xa4\x64\x61\x17\x44\x51\x44\
\x7b\x6f\xd9\x55\x89\x94\xb4\x52\x0a\x17\x45\x91\x2a\x57\x2a\xe9\
\xa1\xc3\xc7\xb6\x28\xa5\xae\x09\x6f\xae\x95\x11\x48\x00\x64\x9e\
\xeb\x6d\x49\x12\x3b\x9d\x75\x4f\x2d\x2d\xcc\x1f\x2f\x97\xe2\xa0\
\xa4\xf4\xd3\x53\x93\x3b\x85\x94\x4a\x0a\x21\x88\x48\x00\x08\x07\
\x0e\x1c\x78\xfd\xca\xc1\x99\x81\x61\x77\x85\x55\xcf\x57\x06\x76\
\x06\x79\x01\xe0\xa0\xbd\x69\x7f\x56\x48\x59\x56\x71\xe5\x03\x00\
\x59\xa5\x92\x8a\x90\xf1\x98\x8c\xcb\xef\x22\x12\x23\x71\x52\xf9\
\x10\x50\x19\x7e\xd7\x75\xe3\x8b\x07\xc3\x02\xdc\x37\xfb\xfa\xc9\
\xe4\xdc\x1f\x0b\x1c\x36\x02\xd1\x1f\x1d\x04\x11\x02\x87\xfe\x08\
\x44\xf1\xcd\x5b\xed\x6e\xa7\xd9\xea\xb4\xa6\x27\xc7\xc6\xfb\xdf\
\x0f\x00\x82\x97\x01\x5b\x66\x67\xca\x52\xca\x65\x25\x25\x79\xe1\
\x51\xaf\xd7\xeb\xad\x66\x73\x6a\x76\x76\x96\x0a\xe3\xf2\xcd\x15\
\x04\xd7\x04\x65\x0f\x3e\xf8\x20\x8e\x1c\x3f\x5d\xf2\x21\xd4\x09\
\xc0\xd9\xb9\x33\x2f\x82\xc8\x0b\x29\xfd\xad\xb7\xdd\x52\x4f\x92\
\xb4\x2e\x88\x94\x10\x22\x12\x42\xc4\x5a\xeb\xa5\x76\xbb\xad\x07\
\x3d\x7f\xd8\xe2\x1f\x0e\xe6\x5c\x8d\xf9\x43\xc6\x20\x84\x10\xf0\
\xa6\x73\x02\xc1\x2f\xb1\x37\xdf\xe2\x60\xbe\x21\xe3\x74\x3c\x2e\
\xd5\x1e\x17\x52\x4e\x8a\xcb\x7d\xf6\xa2\xb7\x0f\xb5\x7e\xef\xf7\
\xeb\x46\xa1\x1c\xe8\x7f\x14\x7e\xbf\xc0\xba\x81\x48\x02\xfd\xcf\
\x23\x14\x06\x24\xd6\x9f\x27\x04\xf9\xf3\x17\x16\x16\xa4\x14\xa4\
\x94\x14\x52\x4a\x92\x52\x90\x94\x02\xb5\x4a\x59\x8d\x35\xea\x24\
\xa5\x70\x42\x0a\x97\x26\x49\xe4\x43\xa8\x1b\x63\xae\x09\x6f\xae\
\xc9\x3f\x79\xf5\xd5\x57\xc5\x73\x2f\x1c\x9c\x24\x08\x45\x44\x30\
\xc6\xf4\x0a\x3d\x0b\xde\x34\x3d\x5d\xed\x0f\xab\x12\x15\x7f\x44\
\xa7\xd3\x39\x3b\xc4\xe0\x75\xd1\x0f\x00\x73\x73\x73\x95\x21\x63\
\x0f\x43\xaa\xe1\x32\x55\x30\x94\x1d\xc4\x2a\x4a\xc6\xc0\xee\xb4\
\x14\x64\x93\xb4\xf6\x21\x21\x44\x2a\x07\x56\x3e\x0d\xb5\xbe\xf1\
\xb7\x6e\x04\xd2\x90\x51\x47\xc2\x0b\x92\x7d\xef\x40\xae\x9f\x2b\
\xae\x4b\x2f\x45\x71\x2d\xf8\x10\x9c\xb5\x2c\xfa\xc7\x04\xe1\x89\
\x84\x5f\x58\x5a\x5d\x31\xc6\x69\x29\x04\xf5\x53\x51\x48\x4a\x41\
\x42\x4a\x6c\xd9\x3c\x1d\x2b\xa9\x1c\x07\x6e\x2d\x2e\x2e\x9c\x71\
\xde\xd7\x4b\xa5\x92\xfc\x85\x5f\xf8\x85\x37\xdd\x0e\xb8\x26\x00\
\x18\x1d\x1d\xc5\xc2\xe2\xca\x14\xfa\x2a\x47\x10\xa1\xc8\xe7\x93\
\xbe\x5e\x1f\x29\x01\x1c\x00\xf6\x83\x1c\xfd\x6e\xb7\xbb\x84\xfe\
\x88\xdd\x80\x91\x03\xcb\xfe\xdc\xb9\x73\x63\xaf\xbe\xfa\xea\x3e\
\x66\xa6\x2b\x74\x3f\xae\xc2\x7c\x08\x21\x10\xbc\x3e\x1b\xa7\xb5\
\x87\x54\x94\x3e\xd8\x0f\xd3\x3a\x41\xe4\x48\xac\xeb\x6f\x27\x04\
\xb9\x81\xfe\x26\x42\xd1\x8a\xfd\xa5\x76\x6b\x75\x11\x6c\x5f\x00\
\xbb\x37\x04\x91\x13\x04\x77\xb9\xee\x87\x23\x82\xd7\x3a\x17\x2f\
\xbd\xf0\xec\x64\x9a\x26\x56\x12\x3c\x11\x9c\x94\xfd\xfb\xc1\x6e\
\xb5\xd9\x5a\x11\x52\x0a\x29\x84\x94\x42\x50\xf1\x7e\xc4\x33\xd3\
\x13\x36\xcf\x7b\xa7\x9d\x33\x4b\x04\xf2\x1c\x42\xe9\xf9\x17\x0f\
\xc6\x1f\xf9\xc8\x47\xde\x74\xde\xbc\xe9\x00\x60\x66\x2c\x2e\x2e\
\x8a\x4c\xe7\x0d\x10\x44\x7f\x96\x6e\x10\x44\x2c\xa5\xe0\x4a\xa5\
\x52\x02\xe0\xfb\xf9\xfa\x36\x84\xa0\xbb\xdd\x6e\x73\x98\x99\xc3\
\x3a\x7f\xd7\xae\x5d\xcb\x0b\x0b\x8b\x3b\x9e\x7b\xee\xc0\x27\x2e\
\x5e\xbc\xd8\xb8\x62\xb0\x87\xaf\x50\x1b\xf8\xe6\x37\xbf\xb9\xf5\
\xd8\x91\x43\x09\x98\xa5\xa0\x4b\xee\x9c\x18\xb8\x6e\x43\x4d\x8a\
\x41\x64\xaf\x90\x06\x08\xa1\xf9\x85\x3f\xf9\xbd\xd3\x5f\xfd\x8b\
\x3f\x3e\x7a\xfa\x8d\x83\xcf\x77\xd7\x16\xbe\x18\xbc\x79\x4e\x10\
\x19\x29\xc8\x1b\x6d\x68\x65\x69\x29\x99\x3b\x73\xb2\x7e\xe8\xb5\
\x97\xa6\x5f\x3c\xf0\xed\xad\x95\x6a\x2d\x17\x24\x1c\x51\xe1\x35\
\x10\x68\x5d\xd2\xac\xac\xae\xad\x14\x7c\x17\x7d\x81\x57\x24\x93\
\x4e\x8c\x37\xd2\x0b\xe7\xe7\x9e\xef\x75\x3a\xe7\x85\x20\x96\x82\
\x70\xec\xc4\xe9\x72\xbb\xdd\x7e\xd3\x25\xc0\x35\x31\x02\x1b\x8d\
\x06\x82\x0f\x25\xf4\x93\x2f\x88\x04\x20\x88\x05\x11\x27\x49\x12\
\x85\x10\x2c\x8a\xba\x3d\x83\x94\xba\xcb\xf4\x3e\x80\xf5\x38\xc0\
\xec\xec\x6c\xe7\xcc\x99\x33\xcf\x4f\x4c\x8c\xcb\x43\x87\x0e\xbd\
\xeb\xe8\xd1\xa3\xba\x5e\xaf\x9f\xab\xd5\x6a\x2d\x21\x44\xc8\xb2\
\xac\x92\x65\x59\x8d\x84\xda\x96\x96\x2a\xef\x48\xd2\xea\xea\xab\
\xaf\xbe\x76\x66\x65\xad\x7b\x47\xad\x36\x62\x47\x1b\x63\xdd\x52\
\xb9\x6c\x93\x38\xf5\x71\x92\x38\x81\xf0\x4c\xa9\x52\xdb\x02\xd0\
\x94\xf7\x5e\xe8\x3c\x53\xdd\x6e\x27\x5e\x5d\x59\x2e\x9f\x3d\x7d\
\xa2\xbc\xba\xb2\x72\x71\x6a\x6a\xaa\xfd\xc2\x0b\x2f\xbc\xd7\x39\
\x57\x66\x66\x1a\x9b\x98\x1e\x4b\xd2\xf2\x64\x1c\x47\xbe\x54\xaa\
\x98\x72\xa5\x92\x4f\x4c\x4d\x37\xcb\xd5\xda\xea\x8e\x9d\xbb\x3b\
\xfd\x40\x21\x11\x03\x21\x14\x06\x22\x11\x68\x71\x79\x65\x49\xf4\
\xd3\x80\x07\x52\x8d\x88\x38\x52\x2a\x54\xcb\x25\x77\xf6\xfc\x85\
\x53\x42\x88\x58\x08\x11\xb2\x5c\xab\xdd\xbb\x77\xbf\xe9\xbc\xb9\
\x26\x00\xc8\xb2\x8c\xbc\x0f\xa5\xfe\x14\x7d\x26\x81\x40\x42\x04\
\x22\xc1\xce\x5a\xcd\xcc\x9a\x99\x45\x08\x81\x88\x88\xa4\x94\x62\
\x48\x84\x5f\x69\xe8\xf1\xec\xec\xec\x9a\xf7\x5e\x3c\xfe\xf8\xe3\
\x7f\x7a\xe6\xcc\x99\x89\xe5\xe5\xe5\x99\xe5\xe5\xe5\xcd\x21\x04\
\x99\xa6\x69\xa7\x56\xab\xad\xec\xbe\xe5\x8e\xb1\x4d\x9b\xb6\x1d\
\x97\x4a\x84\x13\x27\xde\xa8\x6f\xdf\xbe\xe3\x68\xb3\xb9\x92\xbe\
\xfa\xe2\x33\x7f\xe9\x43\x18\xd3\x5a\x47\xb5\x91\xc6\xed\xde\xfb\
\x0f\x2c\x5c\x3c\x7f\x6c\x7c\x62\x7a\x32\x8e\x93\x4a\x1c\x27\x36\
\x2d\x95\x4c\xbd\x3e\xda\x9b\xdd\xb2\xfd\xc8\x3d\x77\xdf\xf1\xe4\
\xc4\xf4\xd6\x5d\x24\xe4\x1a\x40\x6d\x22\x31\x0a\xd0\x02\x80\x85\
\xfe\x54\x71\xf4\x73\x3e\x71\xe2\xc4\xb1\xaa\x24\xe1\x18\x01\xe0\
\x62\xb8\x50\x50\x91\x25\x14\x00\xe8\xdc\x38\xe7\xbc\x11\x42\x28\
\x00\x4c\x20\x26\x10\x0b\x22\xd3\x18\x1b\x8d\xce\x5d\xb8\x68\x40\
\x21\x90\x90\x30\xc6\x44\x17\x2f\x5e\x7c\x6b\x00\x40\x08\x41\x81\
\x39\xa5\x22\x3f\x8b\x89\x0b\x87\x4a\x10\x85\x6e\xb7\xdb\x1e\x48\
\x80\x3e\x83\x29\x4d\xd3\xf2\x20\x5f\x7e\xc0\xfc\xfe\x73\x58\x08\
\x81\x5d\xbb\x76\x75\xbe\xf1\x8d\x6f\x6c\xba\xed\xb6\xdb\xd6\x76\
\xee\xdc\xb9\xb0\x7b\xf7\xee\x05\x00\x03\x55\xc1\x42\xaa\xa8\x3a\
\x3a\xfd\x6e\x02\x59\x00\xd8\xb5\x6b\x77\x7e\xf0\xb5\x97\x97\xf7\
\xec\xde\xf1\xd4\x83\x0f\xbe\xa3\x29\x84\xa0\xa4\x32\xf6\xb8\xee\
\x35\x7f\x25\x2e\xd5\x1f\x20\x88\x11\xab\xdb\x7f\x1c\xa5\xb5\x77\
\x09\x12\x9b\x19\x80\xd1\x79\xfb\x68\xaf\xb3\x3a\x33\xbb\xf3\x3e\
\x21\xa3\x7d\xfd\xd0\x6c\xff\x0f\x7b\xe6\xa2\x87\x17\x1e\x20\xe3\
\xd4\xc9\x13\x95\xed\x3b\x76\xb4\x84\x28\x92\x7f\x03\x73\x9f\xbd\
\x00\x33\x0b\xc1\xc5\xa4\x63\xad\x4d\x37\x4d\x93\x1a\x98\x03\x83\
\x03\x8a\xe1\x65\xdd\x68\x8c\xc6\x24\xa0\xfb\x89\xc6\xb0\xd6\x46\
\x27\x4e\x9c\x78\x6b\x18\x81\x05\x91\x47\x51\xaf\x05\x7d\xd4\x07\
\x12\x02\xdd\x6e\xb7\x17\x42\xd0\x21\x04\xd3\x6f\xba\x54\x2a\x55\
\xaf\x70\xf9\xd6\xad\xfe\x81\x14\x98\x98\x98\xe8\xcd\xcd\xcd\x95\
\x87\xad\xfd\xbe\x48\x05\x81\x2d\x10\xce\xd3\xba\xa1\x27\xf2\x5e\
\xa7\xf9\xca\xd2\xc2\x85\x2e\x11\x21\x30\x33\x09\x99\x80\x83\x31\
\xbd\xd5\x6f\x30\xbb\xa3\x71\xa9\x76\xaf\xd3\x9d\x6f\x82\xb0\x06\
\xe2\x73\x5f\xfe\xf3\x3f\xfa\xcb\xdb\xef\x7a\xdb\xb8\x8c\x92\x9b\
\x49\x90\xa5\xbe\x91\xd8\x7f\xa6\x27\x51\x3c\x9b\x44\xa1\xef\xbd\
\xf7\x41\x45\xca\x42\x08\x07\x12\x03\x23\xd1\x93\x20\x0f\x22\xcb\
\xe0\x95\x10\xfc\xa2\xb6\xb6\x1b\x98\x6d\x60\x0e\x00\x07\x30\x02\
\x08\x6e\xa4\x56\x2d\x8a\x5b\x14\x99\xe4\x41\x48\x11\xea\xf5\xfa\
\x9b\xce\x95\x6b\x02\x80\x52\xa9\x14\xa4\x14\xdd\xfe\x21\x63\x90\
\xe4\x49\xc4\x6b\x6b\xad\xae\x2f\x18\x9f\xf7\x81\xa0\xd3\x34\x6d\
\x0c\xc7\xfc\x87\x3d\x81\xc1\xf1\xed\xb7\xdf\xbe\x7a\xf2\xe4\xc9\
\xd1\x61\x49\x31\x0c\x84\xac\xb5\xf4\xa4\xf7\xfa\x59\x0e\xee\x90\
\xd5\xed\x3f\xbb\xeb\xce\xdb\x4f\xbe\xf6\xda\x6b\x33\x42\x08\x96\
\x42\xb0\xcb\x9a\x7f\x2e\xa5\xe4\x24\xad\x4e\x48\x21\x54\x70\xf9\
\x2b\x60\xb7\x06\xf6\x27\x57\x97\x97\xcb\x9b\x37\x6f\xee\x04\xa7\
\x5f\x11\x84\x81\x61\x68\x84\xa0\x4c\x10\xbc\x00\xb9\xe2\x3c\xbc\
\x24\xf8\xb9\xb3\xa7\xd2\xed\xdb\xb6\xb7\x05\xc8\x4b\x14\xe7\x64\
\x91\x40\xe2\x8b\xfb\xd1\x5e\x9a\x3f\xfd\x67\xf3\xe7\xde\xf8\xb3\
\x48\x89\x39\x02\xd6\xc0\x6c\x0b\xcd\xc0\x81\x19\xa1\x52\x29\xc7\
\x44\x82\xa9\x5f\x88\xa4\x5c\x4a\xf5\xf8\xf8\xf8\x9b\x3e\x22\xf4\
\xa6\xab\x80\x5f\xf8\x85\x5f\xc0\x3d\xf7\xdc\x03\xa9\x44\x66\x0c\
\x06\x39\x94\xfd\xb4\x3a\xc2\xc9\x93\x27\x57\x8c\x31\xbd\x38\x8a\
\xd4\x50\x30\x47\xde\x74\xd3\x4d\x63\x67\xce\x9c\x59\xbe\x72\x18\
\x77\xb8\x55\xab\x55\xbd\xbc\xbc\x1c\x4f\x4d\x4d\x99\x61\x09\xa1\
\xe2\xb4\x9a\x96\x6b\xb7\x2a\x15\xef\xe4\x10\xd6\x94\xaa\xbd\x43\
\x94\xeb\xa3\xf7\xbd\xfd\x3d\xa9\xe5\xc8\x8c\x8e\x36\xba\x00\x82\
\xc9\x5a\x7f\xc1\xec\x7b\x44\x91\xf3\x36\x5f\x02\x00\x6f\xf5\xf1\
\xb3\xa7\x4f\x56\x6f\xbf\xfd\xf6\xb6\xd3\x9d\x36\x38\xb4\x01\x52\
\x56\x67\x4b\x52\x45\xf5\xb8\x54\x7b\x8c\x86\x22\x7f\x0c\x86\x36\
\x06\x69\x29\xed\x39\xa3\x4f\x91\x90\x0d\x08\x31\x8e\x10\x32\xaf\
\xf5\x05\xe7\x5d\xaf\xd5\x5c\x3e\xe1\x9d\x33\x60\x86\x20\xca\x01\
\xee\x12\x51\xc6\xcc\x25\x66\x0e\x21\x84\xd8\x39\x5f\x48\x48\x14\
\xc9\xa6\x23\xb5\xaa\x1e\x1f\x1f\x7f\xb3\xd9\xf3\xe6\x03\x60\xff\
\xfe\xfd\x78\xf6\xd9\x67\x39\x56\xaa\x9b\x73\x3f\x40\x2a\x30\xf8\
\x01\xc9\x39\x1b\xd6\xd6\xd6\xce\x8f\x35\x1a\xd3\x03\x7d\x9f\xe7\
\x79\x13\x45\xba\xd5\x65\x63\xfe\x43\x92\x00\x44\x84\x07\x1e\x78\
\x60\xe9\xe9\xa7\x9f\xde\xf4\xd8\x63\x8f\x9d\xef\x7f\x96\x2a\x23\
\xe3\xb7\x44\x71\xe9\x76\x42\x11\x72\x15\x52\xd4\xfb\xb9\x79\x7e\
\xf3\xe6\xad\xdd\xc3\x87\x0f\x8d\x8c\x8d\x4d\xda\xe2\x9a\x4c\x9c\
\xc9\x57\x6d\xf0\xc7\x06\x92\xe4\x9b\xdf\x78\x5a\xdc\x7c\xf3\xcd\
\xdf\x18\x18\xeb\xde\x66\xf3\x21\x04\x30\x33\x64\x54\xd9\x22\x88\
\x1c\xaf\xe7\x00\x32\xce\x9d\x3d\x53\x9e\x9d\xdd\xd2\x16\x24\x42\
\xb7\xbd\xfa\xb2\x73\xd6\x25\x69\x79\x3c\x04\x16\xdd\x4e\x73\xc9\
\x39\x17\x42\x08\x7d\x0c\x17\xe0\x0e\x21\x68\xa0\x88\x0d\x30\x23\
\xf2\x9e\xeb\xb9\xb1\x11\xd6\x7b\x06\xf3\xe4\xc4\xb8\xfd\xfc\xe7\
\x3f\xff\xa6\x03\xe0\x9a\xa8\x00\x63\x0c\x47\x2a\xca\x18\x4c\x81\
\x99\xc0\x10\xa1\x5f\x7e\x85\x01\x2c\x2c\x2c\xce\x7b\xef\xf3\x95\
\x95\x95\x53\xcf\x3f\xff\xfc\x53\x5f\xf8\xc2\x17\x9e\x3c\x7d\xfa\
\xf4\xca\x80\xd9\x03\x1a\x66\xfe\x00\x0c\x51\x14\x85\x3c\xcf\x65\
\x14\x27\xe5\x52\xa5\x36\x19\xc5\xe9\xbe\x22\x3b\x9b\x1d\x09\x38\
\x08\x2a\xb6\xc4\x0e\x02\x6e\x6a\x7a\xaa\x33\x3f\x7f\x4e\x11\xb1\
\x8b\x93\xf2\xae\x28\x49\xab\x44\x60\x29\x25\x8c\x31\xa2\x54\xaa\
\x8e\x6f\x9a\xdd\x3e\xae\xa2\x64\x44\x45\x71\xa2\x54\x5c\x4e\x4a\
\xb5\xe9\x52\xa5\xbe\x2d\x8a\xd3\x5d\x28\x82\x44\x9e\x08\x1e\x04\
\xdf\xe9\xb4\x45\xad\x56\xcb\x41\xe0\x91\xb1\xa9\xfb\x85\x10\x94\
\x67\xdd\x95\xca\x48\xe3\xae\xa9\xd9\x1d\x1f\x9d\x9c\xd9\x76\xff\
\xe0\xdd\xcb\xe5\xb2\x12\x42\x54\x8a\xc2\x14\x41\x83\xb0\x1c\x42\
\x30\x8c\xb0\xb2\xd6\x5c\x6b\x63\x30\x85\x81\x99\x37\x4f\x4f\xda\
\x4f\x7e\xf2\x93\x1b\x5f\x05\x00\x40\xbb\xdd\x46\xa3\x51\x5f\xba\
\xb8\xb4\x4c\x3e\x04\x19\x02\x13\x87\x62\xb2\x0e\x43\xd0\xa1\xd7\
\x0f\x9f\x3b\x73\xfa\xd4\xdc\xda\xda\x5a\xa6\x94\xba\x2c\x9b\x67\
\x78\x44\xf0\x6a\x52\xe0\xa1\x87\x1e\x9a\x7f\xf5\xb5\xd7\xef\x7e\
\xe4\xb1\xef\xb9\x09\xc5\x2f\xe8\xe9\x52\xb9\x98\x70\xe5\x64\xf0\
\xc9\x89\x89\xde\xa1\x83\xaf\x8d\xce\x6e\x9e\xf5\x50\xd1\x56\xa9\
\xea\x5b\x98\xc3\x45\xa7\xf3\x13\xdf\xfc\xf6\x33\xb5\xc7\xdf\xff\
\xe1\x1d\x04\x2a\x15\x05\x85\x78\x50\x0a\x00\x0c\x78\x6f\xf5\x61\
\x21\xd5\x4e\x80\xca\x20\xc6\xf9\xf3\xe7\xd3\x4d\x9b\x36\x75\x84\
\x20\x0f\x26\x10\xa9\x89\xf1\xa9\xd9\xc7\x03\x73\x20\xa0\xcc\xcc\
\xb0\x1c\xb2\xc1\x1b\x6c\xdf\xbe\x7d\xa2\x5f\x7c\x02\x21\x04\xcf\
\x21\x18\x41\x7c\x86\x43\xd0\x4b\xcb\x2b\x6b\xfd\x81\x25\x26\x21\
\x3b\xb3\x9b\xa7\xec\xe7\x3e\xf7\xb9\x37\x9d\x37\xd7\x44\x02\x74\
\x3a\x9d\xf0\xe1\xc7\xdf\x3d\x07\xe6\x00\x86\x08\xcc\x92\x99\x45\
\x60\x08\x0e\x8c\x56\xb3\xa9\x97\x97\x97\xf3\x2b\xe3\xfe\x43\xb3\
\x74\x86\x03\x43\xc3\xe0\xa0\x91\xd1\x89\x9d\x53\x33\x9b\x6e\x25\
\x60\x30\x1e\x3f\xb0\xfc\xed\x60\xfc\x7e\x30\x4e\xdf\x8f\xff\x87\
\xed\x3b\x76\xae\x9d\x3a\x75\x22\xd5\xc6\xd0\x52\xab\x15\xe7\xd6\
\x6d\xea\xe5\xe6\xdd\x77\xdc\x75\xef\xdb\x84\x90\x31\xf5\x87\x71\
\x05\x09\x37\x18\xe7\x97\xa2\xa8\x3b\x68\xf2\xee\x77\x88\xc8\x12\
\x09\xd7\x6c\xae\xca\xf1\xb1\xf1\x5e\x11\xf9\x1b\xfc\x7f\x21\xa5\
\x10\x31\x0a\x8f\xc1\x67\x59\xe7\xec\x00\xb0\x53\x53\x53\x33\xfd\
\x98\x47\xdf\xeb\x61\xeb\x43\xd0\xde\x7b\xd3\x5c\x6d\x9a\x62\xa6\
\x11\x71\x92\xc4\xcb\x52\xca\xf0\xc4\x13\x4f\xbc\x35\x24\xc0\xe4\
\xe4\x24\x6b\xad\xad\x92\x6a\x3e\x84\x50\x09\x21\x48\x1f\x58\x38\
\x1f\x08\x60\x62\x22\x22\x12\x97\x7d\xd9\x01\xf3\xd7\x87\x76\x71\
\x79\x5f\x16\x42\x70\xa9\x5c\x1b\x4f\xd2\xf2\x2d\x7b\xf7\xde\xde\
\x3a\xf2\xfa\xa1\x91\xdb\xf6\xdd\xbe\xb6\x2e\x21\xfa\xc5\xa0\x06\
\x75\x02\xfb\x0f\x05\x01\x18\xa9\xd5\xdc\x91\xa3\x87\x47\x9a\x3e\
\x94\x2b\xa5\xd4\xcc\x2d\x5c\x1c\xe1\x5e\xd7\xde\x7f\xcf\xbd\x8b\
\x83\x1e\x4f\xc3\x7f\x8a\xf9\xdf\x88\xe3\x74\xb7\xb3\x66\xc1\xda\
\xfc\x60\xb3\xd9\xba\x7b\x62\x62\xb2\x47\x34\x18\x3e\xbe\x54\x7d\
\x16\x28\x7a\x96\x0b\xbe\x9d\xf7\x3a\xab\x44\x44\x42\x08\x1e\x1b\
\x1b\xdb\x1e\x42\x30\x85\x04\xe0\xe0\xbd\x77\xde\x7b\xdb\xe9\x74\
\xdb\x9d\x4e\xd7\x31\x23\x62\xe6\x90\xc4\xd1\xea\xe4\xe4\xa4\xbf\
\x16\xd3\xc6\xaf\x89\x04\x78\xf8\xe1\x87\xd9\x7b\xef\xa3\x38\x9a\
\xf3\x81\x55\x08\xac\x82\x0f\x92\x39\x08\xc7\x2c\x8a\x29\x53\x3c\
\x08\x04\x01\xfd\x5e\x3f\xb4\x7f\xf9\x4b\x8b\x02\x2c\xde\xbb\x4c\
\x10\xbc\x52\xd2\xf9\xe0\xc3\xfa\x70\x2e\xa1\x18\xaa\x1d\x6c\xfb\
\x43\xb3\x62\x7d\x2b\x5c\x3a\xda\x08\xbd\x4e\x9b\x6a\xa5\x52\xe6\
\x9d\x0d\x5a\x88\x88\x08\xfd\x7b\xd6\x87\x72\x9d\x10\x58\x1f\xf6\
\x85\x80\x4f\x4b\x95\x9b\x6d\xde\x3d\x7b\xe0\x3b\xdf\x7c\x65\x66\
\xd3\x4c\x73\xfd\xff\xf5\x3f\x4b\x02\x83\xc1\x24\xdb\x6e\x2e\xbf\
\x3c\x08\x20\x95\xcb\xe5\x28\x4d\xd3\xa9\x10\x82\xf6\x3e\x68\x1f\
\xbc\x0e\x21\x18\xef\xbd\x9b\x5f\x58\x58\x0c\x08\xc4\xcc\xf0\xde\
\xf3\x68\x7d\x64\x99\xaf\x51\x52\xe0\x35\x01\x00\x11\xa1\x5c\x2e\
\xfb\x72\x29\x9d\xf3\x3e\x28\xe7\x59\xf9\xc0\x91\x75\x41\x7a\x1b\
\x0a\xc7\x97\x06\x89\x15\xb8\xaa\xae\x1f\x8e\x05\xf4\x9f\xc9\x49\
\x5a\xaa\x03\x45\x70\x66\xd7\xee\x3d\xcd\x63\x47\x8f\x94\x09\x70\
\x04\xf2\x04\x0c\x32\x79\x1c\x81\xae\x68\xf0\xb9\x73\x62\x62\x62\
\xa2\x53\xab\x94\x7b\xb5\x6a\xb5\xd7\xb3\x36\x22\xba\xfc\x3e\xc1\
\xe4\x89\xc9\x09\x14\x81\x1d\x02\x39\xa9\x54\xad\x97\xbb\xbb\xcb\
\xa5\x74\x29\xef\x76\x5e\x04\x90\x0f\xae\x01\xe4\x88\x8b\xe7\x67\
\xdd\xce\xc1\x3c\xeb\x0e\x24\x12\x9c\x73\xee\xd4\xa9\x53\x4f\x67\
\x59\x76\x21\x04\xaf\x83\xf7\xd6\x3a\x67\x9c\x73\xf6\xcc\x99\xb9\
\x25\x30\x11\x23\xb0\xf3\xde\xef\xbb\xed\xe6\x65\x29\xe5\x35\x01\
\xc0\xb5\x9c\x1b\x18\x6e\xbb\x79\xe7\xd1\x6f\x3e\xfb\xe2\xc7\xbd\
\xf7\xb1\xb5\x56\x15\x15\x37\xbd\x74\xd6\x89\x10\x47\x45\x9c\x98\
\x99\xd0\x77\x01\xaf\xa0\xf5\x1f\x64\x00\x8c\xb4\x54\x9e\x25\x82\
\x03\x01\x95\x4a\xc5\x65\x59\x56\x2e\x98\x3e\x28\xde\x30\x54\x27\
\x78\xa8\x3e\x60\x3f\x1a\xe8\x93\x38\xd2\xa5\xb4\xa4\x93\x28\x32\
\x3e\x04\x10\x91\xef\x9b\x7e\xc5\xfd\xcc\x03\xb3\x1c\x03\x7d\xc2\
\xcc\x58\x58\xb8\xb8\xe5\xbd\x0f\x3f\x7a\xb1\xd5\x5c\x3e\x0d\xe6\
\x97\xd2\x4a\xed\x4e\x22\x08\x14\xd1\x5f\xee\x75\x3b\xc7\x5a\x6b\
\xcb\xe7\x87\xde\x9b\xac\xb5\xe1\xb9\xe7\x9e\x3b\xec\x9c\x3b\xba\
\x63\xe7\xae\xcd\xdb\xb7\x6d\xdb\x9b\x96\xd2\x19\x6b\x5c\x38\x72\
\xe4\xf8\x6a\x08\xac\x42\x00\xe2\x28\x5a\x7c\xdf\x7b\xdf\xde\x3b\
\x74\xe8\xd0\x5b\x0f\x00\x3f\xfc\xfd\x1f\x58\xfc\xce\x0b\xaf\x9e\
\x75\xce\x8f\x3a\x1f\x62\xeb\x7d\xe4\x7c\x10\x2e\x30\x05\x1f\xc8\
\xfb\x40\x4a\xc9\x75\xe6\x5f\x45\x0a\xac\x1b\x80\x52\xaa\x48\x0a\
\x99\x92\xa0\x00\x10\x04\x11\xb6\x6d\xdb\xde\x3a\x75\xf2\x44\x69\
\xe7\xae\xdd\x6d\xa0\xc8\x3b\xb8\xdc\x16\xe8\x83\xa1\x08\x44\x86\
\x28\x52\x56\x09\x61\x85\x94\x36\x30\x07\xea\xd7\x0b\xa6\x7e\x35\
\x71\xee\x6f\x0b\x30\x14\xe7\xd7\xda\xcd\xa8\x52\xad\xe8\x38\x49\
\xb7\x95\xab\x23\xbd\x6e\x7b\x6d\x51\x46\xf1\xd9\x38\x49\xb6\xb2\
\x67\xd7\x6e\xad\x1e\xec\x75\x5a\x2d\x21\x04\x79\xef\x87\xd5\x18\
\x01\xc5\xe8\xe0\xc1\x83\x07\xe7\x0f\x3c\xff\xd2\x52\xa5\x52\x19\
\x9d\x99\x9e\xd9\xd2\x6c\xb5\x1c\x13\x47\xce\xf9\x50\xad\x56\xce\
\x5e\xb8\x70\xc1\x5f\xab\x05\x29\xae\xd9\x58\xc0\xc3\x0f\x3f\xcc\
\x71\x1c\xbb\x5a\xb5\xfc\xb2\x75\x3e\x76\x2e\x24\xd6\xfa\x58\x1b\
\x1b\x39\xe3\xa4\x0f\x2c\x02\x80\x50\x4c\x9d\xbe\xea\x33\x2e\xe5\
\xdf\x15\x1e\x00\x09\x58\xe2\x41\x82\x06\xdc\xf8\xc4\x78\x6f\x6d\
\xad\x29\x09\x08\x82\x06\xa9\xda\x45\x38\x96\x86\xc7\xfc\x05\x79\
\x00\x5e\x80\x6c\xa4\x94\x15\x24\x2c\xc0\x41\x0a\x72\x52\x0c\x7b\
\x0d\xd4\xcf\xee\xed\x87\x74\x09\xfe\xc4\xf1\xe3\xe5\x5b\x6e\xbe\
\x65\x55\x08\x0a\xe5\x4a\xed\xe6\xfa\xe8\xf8\x76\x6b\xf2\x15\x6f\
\xed\xc5\x5e\xb7\x75\xd4\x3b\xa3\x27\xa7\x67\x6f\x2f\x57\x6b\x75\
\x5c\x65\xea\x79\x08\xc5\x58\xb1\x0f\x8e\xce\x9d\xbf\x90\x3f\xf5\
\x97\xdf\x98\xf3\x8e\xa5\x77\x9e\xac\xb3\xfe\xae\xdb\x6f\x3b\xe3\
\x9c\xbb\x26\x93\x42\xae\x29\x00\x88\x08\x59\x96\xf9\x87\xde\x75\
\xff\x4b\xce\x39\x69\x9d\x4b\x8d\x71\x89\x31\x2e\xca\xad\x55\xd6\
\x38\xc1\x21\x10\xf3\xba\x1a\xe8\x17\xe7\xe6\x41\xe9\xae\xa1\x5a\
\xc0\x80\xb5\x46\xf7\xba\xdd\x37\x20\xd8\x73\x91\xc8\xe9\x01\xf8\
\xe9\xe9\xe9\xce\xdc\xdc\xd9\x18\x04\xcf\xb8\xd4\x40\xf0\x20\x2a\
\xf6\x99\x1c\xc0\x1e\xa0\xf5\xeb\x81\xd9\x33\xc3\x33\xc3\x01\xf0\
\x60\xb8\x4b\x9f\x83\x67\x82\xcf\xf2\x8c\xa3\x38\xb2\x0c\x78\xe6\
\xe2\xff\xc5\x49\x69\x53\xa5\x52\x9d\xed\x75\x5b\x67\x88\x88\xc7\
\xc6\xa7\x6f\x97\x52\x56\x50\xb8\x36\xc0\x90\x31\xcb\xcc\x08\x60\
\x78\xe7\x85\x77\x81\x9c\xf3\x22\x04\x16\x81\x83\xf0\x2e\x40\x4a\
\xb5\xf4\xc1\xc7\xdf\xd3\xc2\x55\x0c\xdf\x0d\x0f\x80\x3e\x85\x77\
\xbf\xe3\xee\x56\xa4\xa2\x13\xc6\xda\xc4\x58\x9b\x68\x63\x13\x6b\
\xbc\x32\xce\x09\x6b\x9d\xf0\xfd\xb0\xeb\x95\xe0\xb9\x82\x18\x00\
\xda\x6b\x2b\x17\xbd\x73\xab\x54\x30\xcd\x11\xe0\xb6\xcc\x6e\x69\
\x5f\x9c\xbf\x10\x13\x17\xc7\xeb\xf1\x01\xe0\x52\x23\x38\x66\xf6\
\xd4\xdf\x47\xc1\xfc\xd0\x8f\x19\xf8\xf5\x2d\xe0\x04\xa8\xdf\xe0\
\x5e\x7b\xf5\x95\xda\x1d\x77\xdc\xb9\x34\x78\x06\x81\x1d\xc0\x4e\
\xca\xa8\x54\x1f\x1d\xdf\x5b\xa9\xd6\x76\xf4\xdf\xd4\x6a\x93\x75\
\x87\x6b\x0d\x85\xbe\xa7\xc3\x81\xc9\xfa\x20\xac\xb5\xd2\x79\x2f\
\x9c\xf3\xca\xb9\x20\x8d\x75\x5c\xab\x55\x4f\x2c\x2f\x2f\xbb\x6b\
\xb9\x1e\xd1\x35\x05\xc0\x23\x8f\x3c\xc2\xce\x39\xb3\x65\x76\xfa\
\x2f\x8d\x75\x89\x31\xae\xac\x8d\x4d\xb4\xb1\xb1\xd6\x26\xb2\xd6\
\x8b\x10\x82\xf0\xbe\x5f\x9f\xbf\xdf\xf3\xd7\xb9\x3e\x34\x26\x3f\
\x38\xd5\x69\x35\x4f\x33\xb1\x21\x62\x0f\x62\x0f\x82\x6f\x8c\x8e\
\xf6\x16\x17\x2f\x2a\xf4\x5d\x38\x10\xf7\xb7\xf0\x24\xc8\x81\xe0\
\x01\xf6\x4c\x21\x14\xe7\xd8\x03\x1c\x20\xe0\xd7\x5b\x3f\x27\x10\
\x54\x1c\x7b\xf6\x41\x14\x13\x5a\x7c\xdf\xf7\xef\xbb\x7f\xfd\xff\
\x0b\xf6\x28\x0c\x52\xd7\xed\xb6\x4e\x5a\xad\xf3\xe1\x77\x66\x66\
\x04\x06\xbc\x73\xe4\x9c\x23\xe3\xbc\xb4\xc6\x29\xe7\xbd\x34\xce\
\xc2\x07\xdf\xfe\xe8\xf7\xbc\xf7\x64\xaf\xd7\xbb\xa6\x05\x22\xae\
\x79\x91\xa8\x56\xab\x15\xfe\xbb\x1f\xfb\xf8\x31\x25\xe5\x69\x6d\
\x6c\x6a\xad\x2b\xe5\xda\xa4\xda\xb8\x48\x1b\x5d\xa8\x02\x02\x79\
\xef\xaf\x56\xb2\xf5\xbb\xdc\x63\xad\xf3\xac\xdd\x5c\x39\xca\x8c\
\x9c\x18\x9e\x00\xb7\x7b\xcf\xcd\xcd\x13\x27\x4e\x94\xa9\x10\xe5\
\x9e\x40\x1e\x85\x6b\xe8\x06\xc7\x21\x20\x50\xd1\xbb\x3d\x81\x3c\
\x33\xfb\xc1\xfe\xe0\x9e\xc1\xf3\x08\xe4\x5f\x38\x70\x60\xf4\xde\
\xb7\xdd\xb7\x88\xbe\xca\xe8\x3f\x6b\xa0\x2e\x3c\x40\x1e\x81\x75\
\xab\xb9\x72\xac\xd5\x5c\x5d\x1e\xbc\x6f\xdf\x05\xa4\x10\x18\x3e\
\x38\x32\x2e\x08\x67\x9d\x34\xc6\x49\xeb\x9c\xb2\xd6\x4b\x63\x2d\
\x1a\x8d\xc6\x91\x52\x1a\xe5\x4f\x3c\xf1\xc4\x5b\x1b\x00\x4f\x3c\
\xf1\x44\x68\xb5\x5a\xe6\xa6\xad\xb3\x5f\x35\xc6\x25\xb9\xb6\xe5\
\x5c\x9b\x34\xcf\x75\x9c\x69\x1b\x69\x67\xa4\xb3\x8e\x7c\x18\xac\
\xce\x71\xf5\x9e\x3f\x7c\xac\x75\x9e\xad\xad\x2e\x1f\xb7\xce\x2c\
\x7b\xef\x3b\xcc\x21\x2f\x97\x4b\x79\x6b\x6d\x8d\xd6\x33\x7c\xfb\
\xea\xa0\xef\x36\x16\x3d\xbc\x70\xfb\x2e\x31\xb2\x7f\xaf\x28\x7a\
\x7e\xbf\x91\x07\xb3\x0b\xc1\x3b\x21\xc9\xa1\x98\x4d\x5c\x34\x82\
\x07\xd6\x25\x8a\xeb\x76\xda\xe7\x7b\xbd\x6e\x7b\xf8\xdd\xfa\x23\
\x89\xe4\x02\x93\xb7\x85\xe8\xd7\xc6\x49\x63\xac\xb2\x36\x28\xeb\
\xac\x70\xd6\xf5\x3e\xfe\xa1\x47\xde\xb8\xeb\xae\xbb\xae\x49\xf4\
\xef\xba\x02\x80\x88\xa0\x94\xf2\x3f\xf1\xc3\x1f\x7e\x43\x2a\x75\
\x5a\x6b\x5b\xca\xb5\x29\x67\xb9\x29\xe5\xb9\x8e\xb5\x76\xd2\x58\
\x2b\x42\xf0\x22\x04\x1e\x78\x04\x97\xc5\x00\xae\x26\x19\x8c\xd1\
\xba\xb9\xb2\x7c\x76\x75\x65\xf1\xf8\xca\xf2\xc2\xe1\x9b\xb6\x6f\
\x7b\xee\x6b\x4f\x7f\xb5\x97\x65\xbd\x33\x59\xaf\x7b\xa6\xd7\xeb\
\x9e\x61\xc0\x00\x05\xb3\xb8\x18\x97\xe8\x8f\xea\xb1\x67\xf0\xba\
\x68\x2f\x0c\xc4\xbe\x98\x07\xbb\x67\x9f\x7d\xa6\x7e\xef\x7d\xf7\
\x2f\xa0\xb0\x1b\x3c\x81\x2f\xdd\xc3\xc1\x83\xd9\x19\x9d\x2d\x77\
\x3a\xad\xd5\x81\xa7\x32\x68\x21\x04\x78\x0f\x0e\xde\x51\x6e\xad\
\x30\xc6\x4a\x6d\xac\x32\xd6\x45\xda\x1a\x95\x6b\xcb\x23\xb5\xda\
\x91\x9b\xb6\xcd\x66\x7b\xf7\xee\xbd\xe6\x55\xc2\xae\x4b\x9d\xc0\
\x27\x9e\x78\x22\x28\xa5\xf4\x9e\x5d\x5b\x9f\xd4\xd6\xc6\xda\xb8\
\x72\x6e\x6c\x29\xd7\x36\xc9\x32\x13\xe7\xda\x2a\xef\x02\x39\xcf\
\x83\x1e\x34\x68\x3c\xec\x22\x0e\xce\x5d\x59\xd9\x0b\x28\xbc\x84\
\x76\xbb\xb5\xb6\xb2\xb4\xd8\xc9\xb3\xde\x9a\xd1\xd9\x5a\xaf\xd7\
\x39\x6b\xad\x59\xe2\x10\xda\xcc\xa1\x00\x02\x8a\x1c\x7f\x66\xf6\
\x18\x48\x08\xa0\xef\x25\xb0\x63\x66\x6f\x8c\xe1\x28\x52\xae\xf0\
\x18\xd8\xaf\x6f\xb9\xf8\xbc\xd6\xf9\xca\xca\xca\xf2\x85\x61\x37\
\x75\xf0\x5e\x21\x30\x02\x02\x59\xeb\xc8\x1a\x2b\x72\x63\x95\xd6\
\x36\x32\xd6\x46\xd6\x7a\xe1\x9c\x6f\xff\xd0\xf7\x7d\xf0\xc8\xf8\
\xf8\xf8\x35\xef\xfd\xd7\x0d\x00\x44\x84\x56\xab\x15\x7e\xfa\xc7\
\xbf\xef\x64\x9a\xc4\xaf\x68\x63\x4a\x5a\x9b\x72\x37\xd3\x69\xa6\
\x75\xac\x33\xa3\x32\xad\x85\x0f\x8e\xbc\x07\x85\x10\xbe\xab\xdb\
\x0f\x24\xc3\xd5\x6a\xfc\x0e\xce\xbd\xff\xfd\xef\x9f\x7f\xf2\xc9\
\x27\xa7\x07\xf7\x5b\xa3\x7b\xdd\x4e\x6b\xa1\xdb\x69\x9d\x0b\xcc\
\x1e\xa2\x60\x76\x60\x76\x85\x04\xa0\x42\x1d\x14\x92\xc0\x81\xe0\
\x9f\x7b\xee\x3b\x23\x77\xdf\x7d\xd7\x22\xfa\xc0\x60\xb0\x03\xb3\
\xe3\x50\x78\x00\xc1\x07\xd3\x6e\x35\x17\x87\xdc\xd5\xf5\xff\xef\
\xbd\x27\x1f\x98\x9c\x75\x42\x6b\x2b\x73\x6d\x95\xd6\x46\x69\x63\
\x62\x63\x9c\xca\x32\x8d\x2d\x9b\xa7\x5f\x96\x82\xf3\xeb\xd1\xfb\
\x81\xeb\x58\x29\xf4\x89\x27\x9e\x08\x9b\x36\x6d\x32\x8f\x3e\xf4\
\x8e\x2f\x5b\xe3\x42\x96\xdb\xb2\xc9\x4d\xa5\xd7\xd3\x69\xa6\x75\
\x94\xe7\x45\x6c\xa0\x5f\x53\x8f\xaf\xe8\x5d\xfd\xcd\x25\xe6\x0f\
\x5f\xbf\xf2\x9e\x81\xd4\x18\x5c\x97\x52\xc6\x00\x7b\x04\xf6\x02\
\xf0\x60\xf6\x1c\x38\x10\xb3\x27\x62\x4f\x5c\xd4\x1f\x06\xb3\xef\
\x75\x3b\x54\xab\xd5\x34\x02\x3b\x70\xf0\x14\xd8\x33\x87\xc1\xf5\
\x90\xf5\x3a\xcb\xce\xb9\x30\xf4\x7f\xfb\x03\x55\x9e\xbc\x0f\x70\
\xce\x92\xb5\x8e\xb4\xb1\x4a\xe7\x5a\xe5\xda\xc4\xda\xd8\x48\x6b\
\x23\x20\x68\xfe\x27\x7e\xe4\xe3\x67\x76\xee\xdc\x79\x5d\x7a\x3f\
\x70\x1d\x6b\x05\x13\x11\x9e\x7a\xea\xa9\xf0\xbe\xf7\xdc\xbf\xf2\
\xcc\x81\x57\xbe\xb6\xb6\xd6\x7e\x34\x33\x52\x47\x5a\xe7\xbd\x2c\
\x36\x71\xaa\x5d\xa2\x63\x1f\x45\x2a\x08\x41\x24\x84\x87\x10\x82\
\x87\xc5\xfe\xb0\x9e\x1d\xba\x36\x28\xee\xc8\x00\xf0\x9e\xf7\x3e\
\xb2\xf8\xab\xbf\xf1\x47\xef\x2f\x35\x66\xa3\x58\xc9\xac\x5e\x2b\
\x2d\xef\xd9\x36\xb5\x6c\x82\xb5\xde\x05\xc7\xcc\x0e\x5c\x78\x01\
\x27\xce\x2e\x96\x5f\x7f\xe3\xc2\xf4\x72\xab\x33\xda\xe9\xea\x5a\
\xd6\x5e\xc6\xfb\xdf\x7d\xe7\xf3\x1c\xe0\xfa\x6e\x29\x06\xeb\x05\
\x15\xc7\x01\x81\xc3\x65\xef\xd2\x5f\x2a\x86\xbd\xf7\x70\x81\xe1\
\xac\xa7\xdc\x18\x99\x6b\xad\x32\x63\x63\x9d\xdb\xc8\x68\xab\x7a\
\x99\xe6\x7d\xb7\xed\x7e\x71\x7a\x7a\xda\xdc\x7e\xfb\xed\xd7\xad\
\x64\xfc\x75\x5d\x32\xe6\xb7\x7e\xeb\xb7\xf0\xe8\xa3\x8f\xe2\x3d\
\x0f\xde\xb7\xf0\x97\xdf\x7a\xe1\x76\x66\x54\x84\x94\x4e\x0a\x72\
\x52\x4a\xa7\x22\x0a\x52\x4a\x2f\x95\x80\xfc\xee\xe9\xdf\xf8\xab\
\x46\x0d\x87\x41\xf6\xb9\xaf\x1c\x7a\xe8\xc2\x6a\xfe\x70\x1c\x25\
\x1d\x10\xf1\x5a\x27\x9f\x3e\x7e\x76\xf1\x96\x93\xe7\x16\xf6\x74\
\x7a\x56\xa5\x49\xdc\x7e\xf1\xf5\x53\x3b\x0e\x9f\x9c\x7f\xdb\xd1\
\x37\x56\x36\x2f\x35\xdb\x0d\x6d\x5c\x64\xad\x57\x17\x96\x5b\xbb\
\x4f\xcd\x77\x36\x3d\x78\xcf\x8e\xd7\xfa\x16\x48\x51\xf0\xb3\xcf\
\xee\xc0\x81\x9d\x75\x46\xeb\xdc\x0a\x21\x65\xb5\x5a\x1d\x49\x92\
\xb4\xbc\xb6\xd6\xcc\x9d\x0b\x64\x8c\x16\xbd\x2c\x57\xbd\x9e\x8e\
\x3a\xbd\x3c\xee\xf5\xf2\xb4\x97\xeb\xb4\x97\xe5\x52\xa9\xe8\xd8\
\x8f\x7c\xff\x07\x8e\x5e\xb8\x70\xc1\x5d\x8b\xcc\x9f\xbf\x8a\xae\
\xfb\x82\x11\x27\x4e\x9c\xf0\xef\x7b\xdf\xfb\xb2\x9b\xb6\x6f\xfe\
\xe2\xc9\x93\x73\x3f\x16\x47\xaa\x9c\x45\x52\xa7\x59\xa4\xb3\x38\
\xb2\x49\x64\x65\x12\x45\xc1\x2b\x86\x2c\x0c\xc2\xcb\x7a\xdc\x55\
\x8e\x01\xac\x47\xdf\x90\x6b\x37\xb2\x63\x76\xe6\xc9\x87\xee\xbd\
\xe9\xf9\x5a\xb9\x6c\x2c\x07\xb1\xd2\xec\x96\x5e\x3e\x71\xfa\x96\
\x95\x95\xce\xc4\x17\x9f\x7e\xed\x5d\x52\xb2\xdd\x34\x31\x7a\xe8\
\x3d\xfb\xf6\xbe\x36\xd6\xa8\xf6\xd2\x24\x72\xce\x79\x3a\xf0\xda\
\xc9\x6d\x2f\xbe\x3e\xf7\xa0\x35\x3e\x88\x62\x3a\x01\x98\x43\x11\
\xcf\x67\xa6\x10\x02\x39\x67\x1d\x00\xae\x8d\x8c\xd4\x09\x90\xab\
\xab\x2b\xad\x10\x98\x6d\xf0\x64\x8c\x15\xda\x58\x95\x69\xad\x72\
\xad\x63\x6d\x4c\xac\xb5\x51\xb9\x31\xfa\x13\x1f\x7a\xf4\xe5\xf1\
\xf1\x71\xfb\xce\x77\xbe\xf3\xba\x2e\x18\x71\xdd\x01\xb0\x7f\xff\
\x7e\xec\xdd\xbb\xd7\xff\xf4\x8f\x7d\xe2\xf8\x2f\xfc\x8b\x7f\x77\
\x38\x37\x76\x57\x6c\xac\xee\x64\x5a\xc7\x49\x62\x93\x38\x77\x51\
\x24\x83\x8c\x24\x17\x29\x5d\x9e\x86\xc5\xfd\xb0\xd8\x1f\x54\xe8\
\x02\xc0\x83\x1a\xbf\x8d\x5a\x7a\xbe\x5e\x4d\x9b\x9b\x26\x6a\xdd\
\x38\x56\x41\x08\x81\x99\xb1\x72\x76\xeb\xce\xc9\x67\x5b\x6d\x1d\
\x59\x88\xc6\x68\xad\x2c\x1a\xb5\xb2\x91\x52\x04\x21\x8a\x67\x78\
\xef\x31\x33\x31\xb2\x38\x39\x56\x3d\x46\x14\x6c\xe0\x4b\x1e\x49\
\x60\x26\x0e\x01\xce\x39\x67\x8c\x71\x71\x9c\xa4\xcc\x81\xb5\x31\
\x79\xb7\xdb\x35\xc6\x79\x72\xc6\x8a\x5c\x5b\x99\x6b\xa3\xf2\x5c\
\xc7\x79\x66\x62\xad\x6d\x94\xe7\x46\x34\x46\xeb\x2f\xbf\xe7\x9d\
\xf7\x75\xfe\xf4\x4f\xff\xf4\xba\x2f\x23\x77\x43\xac\x1a\xf6\xd9\
\xcf\x7e\x96\x4f\x9e\x3c\xc9\xce\xf9\x8b\xc7\xde\x38\xf3\x80\x20\
\x41\xaa\x5f\x86\x45\x49\xe1\x95\x8a\x42\xa4\x94\x97\x52\x72\xb1\
\x06\x43\x91\x62\x35\x2c\xf2\x07\x79\x84\xb8\xbc\x10\x34\x00\xe8\
\xe5\x56\x56\xdb\x32\x5d\x5f\x89\x94\x18\xe4\x67\x43\x10\x71\xa5\
\x14\x87\x89\xd1\x1a\x55\x4a\xb1\x23\x42\x00\x73\x51\xcb\x3b\x04\
\xb6\xd6\xe3\xf4\xf9\x95\x91\x6a\x39\x6e\x6d\x9d\xae\x37\xbd\xf7\
\xbc\xbe\x7a\xa8\xf7\xa1\xdd\x6e\xb7\xba\xdd\x6e\xee\x9c\x0b\x0c\
\xc0\x3b\xe7\x5b\xad\x56\x6e\xad\x87\x73\x56\xf4\x7a\x5a\xf6\x7a\
\x59\xd4\xe9\x65\x71\xb7\x97\xa5\xbd\xcc\x94\x7a\x59\x1e\x5b\xef\
\x96\x7f\xe6\x27\xbe\xff\x99\xe5\xe5\x25\xf3\xb3\x3f\xfb\xb3\xd7\
\x7d\xcd\xa0\xeb\xe6\x05\x0c\xd3\x20\x38\xf4\xbe\xf7\xdc\xb7\xd8\
\xa8\xd7\xbe\xae\x8d\x2e\x65\xb9\x2d\x67\xb9\x2e\xf5\x72\x9d\xf4\
\xb4\x56\x59\x6e\xa4\xb5\x56\xf8\x00\x2a\xd6\xec\xbb\xd4\xf8\x52\
\xef\xe4\x2b\xaf\xdd\xbc\x6d\x7c\xd9\xf9\x20\x17\x56\xba\xa9\x2f\
\x0a\x35\xc3\x7b\x0f\xe7\x1c\x84\x10\x11\x80\xe0\x5c\xf0\xfd\x16\
\xbc\xe7\xe0\x5c\xf0\xed\x4e\x8e\x8b\xcb\xed\xca\x2d\xdb\xa7\xce\
\x39\x17\x9c\xf7\xec\x9c\x63\xe7\x9c\x77\xad\x56\xab\x6d\x8c\xf1\
\x7d\x40\x40\xe7\xb9\x6b\xb5\x5a\xb9\x35\x26\x58\xe7\xc9\x68\x2b\
\x72\xa3\x55\xa6\x4d\x94\x65\x3a\xee\xe5\x26\xc9\xb5\x89\xb2\x5c\
\x63\xd7\x4d\xdb\xbe\xb3\x79\xf3\x8c\xbe\xd6\x21\xdf\xbf\x8a\x6e\
\x08\x00\x00\xc5\x40\xd1\xc2\xc2\x82\xfb\x87\x7f\xef\x07\xbe\x15\
\x18\x6b\x59\x11\x1d\x2c\xf7\xba\x3a\xed\xf5\xf2\x24\xcb\x33\xa5\
\x8d\x15\xd6\x59\x78\x1f\x2e\x0b\x10\xf5\x99\xfd\x5d\x6a\xa1\x3f\
\x96\xc4\x5b\xa7\x46\xe6\x17\x56\xdb\xb5\xcc\x38\x31\x0c\x1a\xad\
\xb5\xcd\xf3\xcc\x7a\x6f\xdd\xa0\x39\x67\x9c\xb1\xda\x5f\x58\x6e\
\x26\x95\x92\x6a\xd5\xab\x51\xcf\x7b\xeb\xbd\xb7\xde\x39\xed\x7a\
\xbd\x5e\x3e\x98\xec\x31\xf8\xbf\x45\xb4\xcf\x93\xf3\x4c\xce\x19\
\xca\x8d\x11\x79\x6e\x54\x96\xe9\xa8\x97\xe9\x44\x6b\x9b\x64\x79\
\xae\x92\x38\x3e\xf6\x03\x1f\x7f\xec\x42\x96\x65\xd7\xcd\xed\xbb\
\x92\x6e\x08\x15\x30\xa0\xbb\xee\xba\x8b\xef\xb8\x63\x5f\x38\x72\
\xfc\xd4\xda\xca\x72\xf3\x6e\x29\xfa\x5e\x80\x20\x17\x45\xca\x29\
\x25\x7d\x24\x65\xb1\x2c\x9b\xb8\x34\x65\x1c\xdf\x2d\xf6\x07\xc4\
\x00\x30\xd1\x28\xf7\x4e\x9e\x5f\x1b\x17\x24\xdc\x48\x25\x32\xc3\
\x00\x19\x24\x5f\x30\x33\x0f\xc4\xfc\xf2\x5a\x57\x1d\x3f\xbb\x3c\
\x7e\xd7\x9e\x99\x39\x41\xf0\xde\x7b\xb6\xd6\x86\x2c\xcb\xac\x73\
\x8e\x87\x41\xd4\xb7\x17\xc8\x39\x27\xac\xf7\x94\x67\x5a\x76\x7b\
\x79\xd4\xe9\x66\x71\xa7\x9b\x95\x7a\x99\x2e\x75\xb3\x3c\xc9\x72\
\xad\x3f\xf2\x3d\x0f\x7f\x75\xf7\xce\xed\xbd\xf7\xbe\xf7\xbd\xd7\
\x5d\xf4\x0f\xe8\x86\x91\x00\x40\x61\x10\xce\xcf\xcf\xfb\x7f\xf0\
\xe3\xdf\x7b\x2c\x8a\xe3\x23\x99\xb1\xe5\x4c\xeb\x72\x96\x9b\xb4\
\xd7\xcb\x93\x5e\x4f\x47\xb9\xb6\xd2\x7b\x27\x9c\xf3\xe4\xbd\xbf\
\xac\xe7\x0f\xab\x83\x7e\xaf\x2c\x82\x40\xcc\x68\x54\x93\xf6\xe9\
\x0b\x2b\x13\xbd\xdc\x88\x2b\x24\x07\xb4\xd6\x2e\xcf\x73\x67\x8c\
\xf1\x59\x96\x87\x93\xe7\x56\x2a\xde\x39\x23\x29\x68\xad\xf5\x60\
\x11\x29\x3b\x50\x1f\x57\x7e\x3e\x84\x00\xeb\x19\xd6\x3a\xca\x73\
\xa3\xb2\x3c\x57\xbd\x2c\x8f\xb3\xbe\xe5\x9f\x65\x5a\x4e\x34\x46\
\x5f\x78\xe8\xdd\xf7\xb7\x1f\x7c\xf0\xc1\x1b\x42\xf4\x0f\xe8\x86\
\x02\x00\x50\x44\x08\x85\x10\xe6\xa1\x77\xdd\xfb\x17\xc6\x58\x68\
\x6d\x4a\x59\xae\xcb\xbd\x4c\x27\x59\x6e\x22\xad\x8d\xb4\xc6\x91\
\x2b\xac\xf0\xef\xb2\x05\x06\x4c\xbf\xd2\x1e\xd8\x31\x5b\x6f\x2e\
\x37\x7b\xe3\xdd\x9e\x8e\x06\x36\xc0\xe0\xde\xfe\x31\x1b\x63\x42\
\xb7\x9b\xe3\xfc\xc5\x66\xbd\x51\x4d\x9a\xd6\xda\x60\x8c\x09\x43\
\xff\x87\x43\x08\x3c\x00\xc2\xba\xe8\x77\x1e\xce\x59\xd2\xda\x88\
\x5c\x1b\x99\x65\x3a\xce\xb5\x89\xb5\xb6\x49\x2f\xd7\x11\x80\x85\
\xff\xf1\xa7\x7f\xf8\xf5\x13\x27\x4e\xdc\x30\xa2\x7f\x40\x37\x1c\
\x00\x06\x06\xe1\x43\xef\xbc\x67\x69\x6c\x74\xe4\x9b\xb9\xb6\x69\
\xae\x6d\xb9\xa7\x75\xa9\x97\xe7\x49\x37\xd7\x51\xa6\xb5\x74\xce\
\xa1\x2f\x05\x68\xb8\xc7\x5f\x1a\x81\xf3\x97\xf5\x52\x49\x08\x3e\
\x04\xd5\xed\x19\xe5\xbd\x87\xf3\x1e\x67\xce\x2f\x97\x9f\x79\xf5\
\xc4\xcc\x89\x33\x8b\xd5\x2c\xd7\xc2\x7b\x8f\xdc\x38\xd1\xed\xe9\
\x4a\x39\x91\xe6\x4a\x23\x73\xc0\xf4\xe1\xe6\x9c\x83\x75\x9e\xac\
\xb1\x94\xe5\xb9\xec\x65\xb9\xea\xe5\x3a\xce\x33\x9b\x68\x6d\x62\
\x9d\x1b\xba\xf5\xe6\x9d\xdf\xda\xb6\x6d\xab\xb9\x51\x0c\xbf\x61\
\xba\xee\x71\x80\xab\xd1\x23\x8f\x3c\xc2\xbf\xfd\xdb\xbf\xed\xfe\
\xe1\x4f\x3d\xf1\xad\x7f\xf1\xab\xbf\x75\xbb\xd6\xb6\xa4\x73\x5b\
\xea\x25\xb9\x4e\x93\xc8\x26\x71\x64\xe3\x48\x49\x21\x84\x13\x85\
\x35\xbf\x1e\x07\x00\x2e\xa5\x8d\x0f\x0a\x2d\x0e\x5a\xf0\x81\xfe\
\xec\x6b\xaf\x7d\xb8\xd3\xcb\xb6\x18\xeb\xc6\x10\x41\x4a\x81\x8c\
\x19\x8a\x9c\xc8\xa3\x48\x35\xcb\x69\x3c\x5f\xaf\xd7\x57\xd3\x58\
\xba\xc1\x52\xb3\xc3\xea\xe5\x0a\xf1\x4f\xce\x05\x18\x67\x49\x1b\
\x2b\xf2\x22\xe0\x13\xe5\xb9\x8e\x73\xab\x93\x5c\x1b\x95\x24\xf1\
\xd1\x1f\xf9\xfe\x0f\x5c\x7c\xe5\x95\x57\xc2\xad\xb7\xde\x7a\xbd\
\x7f\xda\xef\xa2\x1b\xca\x08\x1c\xa6\xbb\xee\xba\x8b\xef\xba\xeb\
\x0e\x7e\xf5\xe0\xd1\x7c\x75\xad\xbd\x4f\x49\xe9\x95\x10\x56\x4a\
\x69\xa3\x48\x3a\x25\x65\x88\x22\x15\x8a\xa5\xda\x2e\x31\xfe\x6a\
\xcf\x22\x22\x7c\xe5\x99\xa3\x37\x1f\x3a\x7e\xfe\x47\xa3\x48\x2c\
\x6f\xdb\x3c\xf6\xed\x89\xe9\xca\xe1\xea\x68\x72\x7a\xdb\xa6\xc6\
\x8b\xe5\x5a\x7c\x6e\x6a\xba\x76\x70\xc7\xcc\xd4\x2b\xd6\x58\x79\
\xee\x62\xf3\xdd\xab\x6b\xad\xb5\x3d\xdb\x26\xe7\xaf\xc6\xfc\x41\
\x8e\x9f\x73\x8e\x6c\x11\xf4\x91\xdd\x2c\x53\x9d\xae\x8e\xbb\xdd\
\xac\xd4\xcd\xf3\x52\xde\xd3\x69\x2f\xd3\xfc\xce\x07\xee\x7a\x72\
\xdb\x96\x99\xee\xc7\x3e\xf6\xb1\x1b\xae\xf7\x03\x37\xa0\x0a\x18\
\xd0\xfe\xfd\xfb\x71\xf4\xe8\x51\xf7\x8f\x7f\xf6\xc7\x5f\x97\x52\
\xce\x6b\x63\x53\x63\x5c\xaa\xb5\x4b\xb4\x36\x91\xb1\x4e\x5a\x63\
\xa5\xbb\x24\xee\xf9\x6a\xbd\x74\x60\x17\x1c\x3c\x76\xe1\xe3\xbb\
\xb7\x8d\xfd\xee\xf7\x3f\x7e\xfb\x67\xdf\xff\xe0\x9e\x57\x93\x2a\
\x65\x8d\x91\x78\x61\xeb\xf4\xc8\xf9\x7a\x2d\x5a\xf4\x64\xd5\x3b\
\xee\xd8\x72\xfa\x23\x0f\xed\xfd\xf6\x03\xfb\x36\xff\xf6\x1b\x67\
\x96\xde\x7f\xa5\x2a\xb9\xd2\xc0\xf4\x3e\xc0\x79\x47\x99\x36\x42\
\xe7\x56\x69\x63\x23\x6d\x6d\x6c\x8d\x8f\x73\xe3\x64\xb5\x5a\x3e\
\x78\xff\x3d\xb7\xad\x3d\xfe\xf8\xe3\xd7\x2c\xcd\xfb\x6f\x4a\x37\
\x2c\x00\x00\xe0\xf4\xe9\xd3\xc1\x18\xa3\x77\xdd\xb4\xe5\x29\x6d\
\x6c\xa4\x8d\x4b\xb5\x31\x71\xa6\x6d\x94\x1b\x23\x8d\x71\x54\x18\
\x61\xdf\xad\xf3\xaf\x64\x18\x88\xfc\xcd\xdb\x26\x4f\xd5\x4a\x89\
\x21\x80\x8d\xb5\xb1\x12\x42\x57\xe2\x38\x17\x04\xdb\x33\xba\x1a\
\x42\x60\x29\x29\x4c\x4d\xd6\x9a\xfd\x24\x91\xab\x3e\x6b\xb0\x6f\
\x5d\x80\xb7\x8e\x8c\x75\x42\x5b\x27\xb5\x31\x05\x08\xb4\x8b\xac\
\x75\xfa\x7b\x3f\xf2\xd8\x4b\x6b\x6b\x6b\xd7\x75\x6d\xe0\xbf\x8e\
\x6e\x68\x00\xec\xdf\xbf\x1f\x2b\x2b\x2b\xfe\xa7\x7e\xf4\xa3\x27\
\xa3\x48\x9d\xd1\xd6\xa6\xd6\xf8\x92\xd1\x2e\x31\xda\x2b\xe3\x9c\
\x0c\x2e\x88\x80\xb0\x3e\xa1\xe4\x4a\x10\x0c\x3c\x02\x02\x3b\x29\
\xe0\xfb\x92\x82\x3d\x07\x8a\x04\xb9\x52\x1c\x1b\x25\xa4\x65\x66\
\xd1\xff\x2c\x0b\x42\x00\x21\xfc\x55\xa0\xea\x7b\x0d\xc4\x08\xe4\
\x3c\x0b\x67\xbd\x74\xd6\x29\x63\x5c\x6c\xad\x8f\xb5\x35\xb2\x56\
\xad\xbc\xb6\xf7\x96\x1d\xdd\xd3\xa7\x4f\xdf\x90\xa2\x7f\x40\x37\
\x34\x00\x80\xc2\x2d\x5c\x58\x58\xb0\x5b\x66\xa7\x9f\x33\xc6\x28\
\xed\x4c\x62\xac\x8d\xb5\x31\x91\xb5\x41\x5a\x1f\x48\x0c\x2d\x15\
\x3a\x48\x25\x1f\xa6\xbe\x81\xe8\x19\x97\x5c\x43\x0e\x0c\x21\x84\
\x8f\x89\x02\x11\x17\x2b\x7b\xf6\xad\x7c\x22\x0a\xfd\x4c\xe1\x4b\
\xf1\xff\x22\xde\x30\x20\x00\x92\x43\x08\x64\x9d\x23\xeb\xbd\xd4\
\xc6\x29\x6b\x7d\x64\x8c\x53\xd6\x38\xfb\xbe\x87\x1e\x38\x38\x33\
\x33\xe3\xaf\xf5\x42\x90\x7f\x53\xba\xe1\x01\x40\x44\xb8\xef\xbe\
\xfb\xfc\xcf\xfc\xc4\xf7\x9d\x20\x12\x4b\xd6\xf8\xd8\x18\x1f\x1b\
\xeb\x22\x6b\x9d\x0a\x21\x50\x60\x08\xe0\x52\x15\xf1\xe1\x41\xa2\
\xa1\x21\x62\xcf\xe2\xd2\xd0\x31\xc0\x4c\xa0\x00\x80\x05\x44\x51\
\x10\x7e\x70\x2d\x14\xf7\x0f\x7a\xfc\xe0\x3d\x86\x6b\x16\x32\x31\
\x81\x01\xe7\xbd\x74\xc6\x4b\x6d\x6d\x64\x9d\x8b\xb4\x75\xa2\x5c\
\x2e\x1d\x79\xdb\x9d\xb7\xf6\x3e\xf3\x99\xcf\xdc\xd0\xbd\x1f\xd8\
\x00\x00\x00\x80\xcf\x7c\xe6\x33\x61\x75\x75\xd5\x4c\x34\x46\x5e\
\xd4\xc6\x2a\xe3\x5c\x5c\xf4\x36\x2b\xad\xf3\xd2\x39\x4f\x81\x2e\
\x2d\x16\x75\xb5\x46\x44\x41\x92\x1c\xa4\x93\xf7\xcb\xff\x33\x5b\
\x80\x69\xbd\x5a\x51\x71\x4d\x92\x08\x83\x75\x08\xaf\x6c\x00\x40\
\x24\x01\x06\xac\x63\xe1\x5c\x10\xda\x59\xe5\x7d\x50\xc6\xd8\xc8\
\x68\xcd\xf7\xdc\x7e\xf3\xc1\xe5\xe5\x65\x77\xa3\xf7\x7e\x60\x83\
\x00\x60\xff\xfe\xfd\xf8\xda\xd7\xbe\x16\xee\xde\x77\xd3\x31\xe7\
\xac\x33\xc6\xc4\xc6\xba\xc8\x38\xaf\xac\x73\x32\x04\x26\x0a\x97\
\x7a\xf0\xd5\xb2\x86\x00\x78\xd9\x9f\x79\x3c\xa8\x21\x84\xf5\x7d\
\x62\xc2\x50\x99\x39\x25\x18\xfd\x5a\x43\x57\x79\xce\xfa\x04\xcf\
\x10\x3c\x59\xe7\x84\xb7\x5e\x5a\xeb\x22\xad\x8d\x04\x78\x3e\x12\
\xba\xfd\x6b\xbf\xf6\x6b\x37\x4c\xbc\xff\x3f\x47\x37\x64\x20\xe8\
\x6a\xf4\xdc\x73\xcf\x85\xed\xdb\xb7\x3b\x01\x9e\x33\xc6\x6c\x75\
\xd6\x45\xd6\x5a\x65\x8c\x17\xd6\x39\x11\x90\x10\x86\x98\x38\x00\
\xc2\x80\x84\xa0\x20\xf0\xdd\x4b\xc9\x15\xc7\x54\x08\x84\xf5\x6b\
\xcc\xfd\x15\x40\xbf\xeb\x3d\x88\x08\x90\x02\x41\x7b\x61\x9c\x93\
\xd6\x05\x69\x83\x97\xc6\x59\x95\xe7\x3a\x92\xe4\xe7\x0e\x1d\x3a\
\x14\x9e\x7e\xfa\xe9\xbf\x03\xc0\x7f\x4b\xfa\xf6\xb7\xbf\xcd\xcc\
\xec\xe2\xca\xf8\xa9\x3c\xc7\x2e\x63\x5d\xe4\x5c\x90\x3e\x78\xe9\
\x83\x27\xf6\x9e\x38\xe9\x97\xda\xbb\x22\x37\xb0\x38\x09\xaf\x86\
\x0a\x4c\x81\x89\x05\x44\x5f\x22\x88\x7e\x89\xd6\xe2\x9a\x52\x8a\
\xd7\xab\x8d\x5d\x8d\x98\xe1\x19\x14\x5c\xa0\xe0\xbd\xf0\xd6\x4b\
\x93\x69\x95\xe9\x8c\xeb\x89\x3b\xff\xca\x2b\x6f\xdc\xb0\x7e\xff\
\x95\xb4\x61\x00\x00\x80\xcf\x9d\x3b\xe7\xb7\xdf\x14\x2d\x38\xae\
\x59\x63\x4c\xec\x82\x57\xce\x06\xe1\x1d\x28\x90\x20\xc2\xa5\x35\
\x81\xaf\xfc\x30\x81\x42\xa4\x44\x18\x5c\x1b\x54\x23\x93\x42\x30\
\x09\x62\xf4\x17\xa2\x02\x00\x25\x45\x20\x81\x70\xb5\xe7\x84\x10\
\x40\xa2\xa8\xe7\xc3\x08\xe4\x98\x85\x75\x5e\x65\x3a\x8f\x83\xd5\
\x0b\x17\x57\x2f\xea\x66\xb3\x79\xc3\x1b\x7f\x03\xda\x10\x36\xc0\
\x80\xb4\xd6\x7e\x79\x69\xc9\x7a\xab\x57\x8d\x35\xb1\xf7\x5e\xfa\
\xe0\x64\x80\x17\xc1\x07\x50\x5f\xec\x5f\x59\x5b\x50\x08\x01\x12\
\xf0\x42\x5d\xaa\x3a\x5a\xd4\x2a\x46\xb1\xd4\xac\x20\x26\xbe\x94\
\x5d\x2c\x23\xc5\xa2\xa8\x3d\x74\xd9\x33\x80\xbe\x01\x18\x40\x00\
\x93\xf7\x10\xec\x82\x30\xd6\xca\x3c\xcf\x23\x67\x7a\xcd\xb5\xb5\
\x35\xb7\xb4\xb4\xb4\x21\xc4\x3f\xb0\xb1\x24\x00\x16\x16\x16\x42\
\x1c\xc7\x3e\x4a\xb3\xb6\x35\x66\xdc\x59\xa7\xbc\x0f\xc2\x5a\x16\
\xc1\x33\x85\x7e\x81\xb6\x2b\xca\xca\x15\xfb\xa0\x10\x29\x15\x2e\
\xa5\x90\x83\x19\xcc\x42\x08\x30\x0a\x15\x30\x58\xb2\x76\xe0\x35\
\x0c\xac\xfe\xc1\x33\x84\x10\x08\x08\xc4\x81\xe1\x3c\xc8\xbb\x40\
\xde\x07\xe1\x9d\x55\x46\xe7\x51\xd6\x6d\xb5\xd6\xd6\xd6\x1c\x70\
\xed\x0a\x3c\xfc\xd7\xd2\x86\x92\x00\x00\x42\x96\x65\xd6\xe8\x6e\
\x4b\xe7\x79\x6c\xad\x8d\x9c\xf3\xca\x07\x2f\x5d\x70\x32\x38\x2f\
\x3c\x8a\x81\x9a\xc1\x07\x2e\x49\x04\xe9\x63\x19\x0d\x2d\x2a\x25\
\x58\x2a\x05\x12\x02\x02\x02\x24\x2e\xad\x31\x24\x0a\xe9\xe0\xae\
\x28\x58\x09\xe7\x82\x08\xc1\x0b\x17\x82\x08\xde\x09\xc7\x41\x5a\
\xef\x44\x96\x65\x91\xd1\x9a\x74\xd6\xee\x84\x10\xd6\x8b\x3e\x6f\
\x04\xda\x50\x12\x00\x00\xb2\x2c\xb3\x59\xb7\xd9\x89\x2b\x63\x91\
\x31\x26\xb6\xce\x4b\xad\xad\xd4\xda\xc9\x3c\xf6\x42\x29\x2f\x28\
\xa6\x20\xd8\x83\xb8\x58\x94\xa9\xf0\xfa\x42\x08\x28\xc6\x0c\x00\
\x0c\x56\x75\xe8\x07\x7a\x42\xbf\x16\x6c\x71\x2d\x30\x93\x28\x4a\
\x14\xa0\x88\x0b\x79\x0a\x00\x5c\xf0\xb0\xd6\x91\x31\x56\xe6\xda\
\x4a\x6b\x9c\xb0\xd6\x46\x79\x9e\xa7\x5a\x67\x5a\x6b\x6d\xae\x65\
\x7d\x9f\xff\x16\xb4\xd1\x00\xc0\xcc\xec\xb2\x2c\x33\x65\x9d\xdb\
\x5c\xe7\x25\x63\x6c\xa2\x73\xad\x7b\x59\xe6\x95\x12\x0c\x62\x4a\
\x5c\xe4\x23\x29\x83\x10\xc5\x82\x53\xdc\xaf\x2f\x68\x83\x17\x8e\
\x95\x80\x07\x10\x40\xc1\x33\x59\xeb\x05\x73\x20\x30\xc3\x05\x16\
\x0c\x86\xf5\x5e\x40\x80\x73\xa3\x15\xf5\xc7\x02\x9c\x0f\x64\x9d\
\x17\x5a\x5b\xd9\xe9\xe9\x28\xcb\x4d\xac\xb5\x8d\xb3\x5c\x27\x59\
\x96\x95\x75\xde\x6b\x6a\xad\x4d\x96\x65\x7f\x07\x80\x37\x91\x38\
\xcb\x32\x17\xc7\xb1\xd6\xba\xe7\xf2\x4c\x57\xb2\x4c\xa7\x4a\x29\
\x4b\x42\xb2\xf7\x4c\xb9\x76\x22\x4d\x23\x47\x80\xec\xf5\xda\xe3\
\x79\x9e\x4f\x38\x63\xc6\x04\xe7\x77\xbc\xf6\xf2\xcb\x92\x98\x23\
\x06\xc7\x3a\xd3\x5b\x2f\xb6\x57\x4d\x73\xee\x8c\xc9\xbd\x57\xc1\
\xba\xfa\x77\x9e\x79\x76\x1b\x11\x19\x06\x59\xc5\x7a\xef\x6b\xaf\
\xbe\xb6\x14\x25\xc9\x72\x5a\x2a\x2d\x25\x71\xa9\xed\x3d\x8b\xdc\
\x68\x99\x65\x3a\xea\x76\xb3\xb4\x97\x65\x69\x96\xe7\xe5\x2c\xcb\
\x2b\x46\x67\x17\xac\xb5\x1a\x1b\x48\xfc\x03\x1b\x0f\x00\x00\xe0\
\x9c\x73\x99\xd5\x59\xd6\xcb\xb2\x2d\xdd\xac\xd7\x15\x82\x67\x4c\
\xde\xd9\xda\x14\xbc\x8b\x88\xb7\x11\x30\x0e\x70\x7d\xbd\x4e\x17\
\x00\x11\x80\xb5\x35\xf3\xe1\x7e\xee\x08\x7c\x60\x68\x57\x2c\xd8\
\x63\x02\x03\x21\xa0\xe5\xdd\xfb\x81\xc1\x7a\x3f\xc0\xe2\xe2\xc2\
\xc7\x80\x7e\xa9\x60\xc0\x02\x58\x66\xd0\x02\x33\x1d\xf3\x4c\x27\
\xad\xc7\x5c\xa7\xdd\x4d\xb3\x3c\x1b\x31\x79\x77\xcd\x18\xa3\x01\
\x6c\x18\x17\x10\xd8\x98\x00\xf0\xd6\xda\x6e\xd6\x6d\x2e\x06\x3f\
\xf3\xbd\x6c\xb3\xbb\x4c\xee\xab\x41\x0e\x16\x8e\x2c\x0a\x41\x8a\
\x7e\xb8\x57\xf4\xab\x44\x86\x40\x70\x6e\x3d\x02\x5c\xd4\x83\xf3\
\x04\x26\xc0\x31\xc3\x83\x61\xd7\x17\x81\x02\x42\x00\xac\xe5\xf5\
\x81\x03\x0e\x88\x18\x98\x09\x21\xcc\x30\xe3\x4e\x1f\x02\x9c\xf3\
\x1c\x5c\x76\xda\x59\xfd\x54\x9e\x75\x9b\xcc\xac\xaf\xf7\x8f\xf3\
\x37\xa5\x8d\x08\x80\xe0\xbd\x6f\xf7\xba\x9d\x95\xbc\xd7\x9a\x59\
\x5e\x15\xd5\xd1\x7a\x1d\x71\x1c\x17\xcb\xc6\x53\xb1\x54\x1b\x89\
\xc1\xc2\x24\x83\x5a\xbd\x04\x63\x2f\x49\x67\xdb\x97\x04\x12\x45\
\xc1\x1f\x0b\x40\xf3\xa0\x8e\x00\xe0\x1c\x21\xef\x23\xa2\x18\x63\
\x00\xc2\xd0\x3e\x03\xd0\xb9\xa6\x6e\xb7\xb7\xdd\xe9\x9e\x36\xc6\
\x2c\xff\x9d\x0a\xb8\x36\xc4\xde\xfb\xcc\x5a\xbb\xba\x7c\xf1\xec\
\x89\xe5\xe5\xe5\x5d\x17\xcb\x23\x18\x6b\x34\xb0\x69\x66\x1a\xb5\
\x91\x1a\x22\x35\x58\x69\x14\x20\x12\x88\x22\x85\x46\x43\x15\xcb\
\xca\x09\x82\x00\x41\x28\x09\x21\x24\xa4\x94\x88\xe3\x18\x51\x1c\
\xf7\x81\x53\x8c\x0d\x48\x21\x11\xc5\x11\x88\x44\xb1\xd8\x21\x03\
\xc6\x18\x2c\x2c\x2e\xe2\xe0\xc1\x83\x38\x79\xf2\x24\x7a\xdd\x2e\
\x26\xc7\x47\x7a\x8b\x17\xcf\x3e\x67\xad\x5d\x41\x81\xa5\x0d\x45\
\x1b\x11\x00\x40\xb1\xd4\xac\x66\xf6\xc1\xf6\x56\x10\x91\x41\x32\
\x59\x43\xb5\x12\xa3\x56\x49\x11\x18\x30\xc6\xc1\x58\x8f\x62\xd1\
\x10\x09\x12\x80\x77\x01\xbd\x4e\x07\xd6\x39\x44\x91\x42\x1c\x27\
\x48\xd2\x04\x49\xec\x21\xa5\xe9\xa7\x78\x3b\x90\x10\x68\x8c\x36\
\x50\x53\x31\x40\x80\x14\x02\xe5\x52\x8a\x5a\xad\x06\x67\x0d\xca\
\x89\x02\xdb\x2e\xf2\xf6\x3c\xa8\x11\x07\xef\xbd\xee\xaf\x03\xb4\
\xa1\x7a\x3f\xb0\x71\x01\xd0\x1f\xed\xeb\x2f\xb1\x14\x02\x62\x19\
\x30\x33\x5e\xc1\xb6\xad\x9b\x90\xa6\x49\xbf\x5a\x38\xc3\x39\x86\
\x8a\x12\x80\x04\xce\x9c\x3a\x83\x73\x27\xcf\xa2\xd5\xea\xc0\xf9\
\x80\xc0\x97\x02\x45\x52\x16\x01\xa0\x48\x49\xdc\x79\xd7\x1d\xf8\
\xe0\xe3\xef\xc6\xee\x3d\x7b\x30\x3a\x52\x43\xb9\x5c\x02\x33\xa3\
\xd5\x6a\xe1\x3b\xdf\xf9\x0e\xce\x9c\x38\xd4\x2f\x2f\xb4\xf1\x69\
\xc3\x02\x00\xeb\x2a\x99\x11\xd8\xc3\x68\x8d\x3c\xcf\xe1\x9c\x05\
\x73\x5c\xa8\x00\x10\xd2\x58\xa1\x5c\x49\x21\x84\xc2\x59\x18\xe4\
\xdd\x26\xf2\x5e\x07\x46\x5b\x18\xab\xa1\x8d\x83\xf3\x0e\x60\x20\
\x8a\x22\x54\x2b\x15\x38\xb3\x07\xe3\x63\x75\x4c\x4d\x8c\x41\xa9\
\x4b\x3f\x91\x94\x12\xa5\x52\x09\x23\x23\x35\x94\x4a\xe9\xf0\x94\
\xf4\x1b\x37\xeb\xf3\xaf\xa1\x1b\x76\x5e\xc0\x5f\x47\xcc\x5c\xaa\
\xd7\xeb\x93\x42\x88\x3b\x83\xf7\xa5\x28\x8e\x50\xab\x55\x51\xaf\
\x8f\x20\x4d\x53\x08\x29\x40\x20\x08\x12\x88\x92\x18\x51\x14\xc1\
\x1a\x8b\xf9\xf9\x8b\x68\xf7\xd5\x80\xb5\x0e\xd6\x15\xd3\xc4\x98\
\x8b\x65\xa2\x04\x09\x58\x6b\x90\x26\x29\xa6\xa7\x67\x50\xa9\x54\
\xd6\x57\x2f\x0f\x21\x20\xcf\x73\x2c\x2c\x2c\xe0\xdc\xb9\x73\x68\
\x36\x57\x51\xad\x56\xcd\xf2\xf2\xca\x9f\x79\xef\x8f\x03\xc8\xae\
\xf7\xef\xf2\x37\xa5\x8d\x2a\x01\x18\x40\x67\x79\x79\xe5\xc8\xe8\
\xe8\xc8\x51\x06\xde\xee\x9c\x43\x9e\x6b\x68\x63\xfb\x13\x42\x71\
\x59\xbf\x94\x4a\x61\xdb\xf6\xed\xb8\xb3\xd3\x06\x83\xb1\xbc\xb4\
\x02\x5b\x4a\xe1\x9d\x07\x83\x21\x84\x44\x14\x45\x28\x95\x4a\xa8\
\x55\x2b\x58\x58\xb8\x88\xa5\xa5\x45\x4c\x4c\x4c\x20\x8a\x8a\x3c\
\x83\x7e\x11\x28\xd4\xeb\x75\x8c\x8c\x8c\xf4\xcf\xd3\x77\xd7\xaa\
\xdb\x40\xb4\x51\x01\x20\x84\x10\x8d\xf1\xf1\xc6\x3d\xde\xfb\xbb\
\x95\x52\x60\x06\x56\x56\x57\x71\xec\xf8\x31\xcc\x5f\x9c\x47\x12\
\xc5\x50\x91\x42\x9a\x96\x30\x32\x32\x82\x7a\x7d\x04\xf5\x91\x51\
\xec\xdc\xb9\x0b\x69\x92\xe2\xcc\x99\x33\x30\x5a\x43\x08\x81\x24\
\x4d\x51\xaf\x8f\xa2\x31\x36\x86\x89\x89\x49\x4c\x4f\x4f\x63\xf3\
\xe6\x59\x6c\xd9\xb6\x15\x2a\x52\x85\x71\x68\x0d\x96\x96\x96\x70\
\xfc\xf8\x31\x1c\x3f\x7e\x1c\x2b\x2b\xcb\x70\xce\x21\x4d\x93\x55\
\x66\xee\x62\x03\x7a\x00\xc0\xc6\x05\x80\x02\x30\x26\x88\x3e\xda\
\xd3\x3a\xa9\x56\x2a\xa8\x56\x2b\x98\x9a\x9a\xc4\x96\xd9\x59\x8c\
\x8e\xd6\x91\xa6\x29\x22\x15\x41\x29\x85\x4a\xa5\x82\x4a\xb5\x8a\
\x6a\xa5\x86\x24\x4d\x30\xda\x68\x60\xcb\xd6\xad\xf0\xce\x23\x70\
\x51\x4b\x68\x6c\x6c\x02\xb3\x5b\x66\x31\x3e\x3e\x81\x28\x8e\xc1\
\x21\xc0\x68\x8d\xb3\x67\xcf\xa0\xd7\xeb\xa1\xb9\xda\xc4\x89\x93\
\x27\xf0\xfc\x81\x03\x78\xe5\x95\x57\x30\x77\x6e\x0e\x8d\x46\x23\
\x7f\xed\xb5\x43\xff\xc1\x7b\x7f\x1e\x40\x8e\xbf\xf3\x02\xae\x19\
\x05\x00\x3d\x10\xbd\xce\xcc\x37\x3b\xe7\xd0\x6a\xb5\x31\x37\x77\
\x0e\xbd\x5e\x86\xd1\xfa\x08\xca\xe5\x32\xd2\x34\x41\x9a\xa6\x18\
\x19\xa9\xa3\x5e\xaf\x23\x1f\xc9\xe1\xac\xc3\xc9\x53\x27\xb1\xb4\
\xb0\x84\x5e\x9e\xa3\xd3\xe9\xa0\x5a\xad\xe2\x23\x1f\xf9\x28\x66\
\x36\x6d\x46\xb5\x52\x85\x75\x16\x79\x9e\x21\xcb\x33\x68\xad\xd7\
\x6d\x84\x10\x02\xb4\x31\xe8\xf6\xba\xc8\xb2\x1c\x21\x70\x34\x35\
\x35\x39\x32\x37\x77\x2e\xc6\xc6\x1b\x5a\x07\xb0\x71\x01\xe0\x42\
\x08\x17\x97\x97\x57\x7f\x7f\xac\x51\xdf\x9b\xe5\xf9\x9e\x34\x78\
\x94\xcb\x25\x6c\x99\xdd\x84\x99\x99\x19\xd4\xaa\x55\x44\x51\x84\
\x28\x8a\x8b\xde\x5f\xad\x42\x0a\x89\xc3\xaf\xbf\x8e\x23\xaf\x1f\
\xc6\xe2\xe2\x02\x9a\x6b\x6d\x74\xba\x3d\x4c\x4c\x4e\x60\x71\x69\
\x09\xbd\x6e\x0f\xa3\xa3\x0d\x54\x6b\x35\x44\x51\xb4\x5e\x12\xce\
\x5a\x8b\xd5\xd5\x55\x18\x6b\x71\xf4\xe8\x51\x54\x2a\x55\x24\x71\
\x8c\x76\xbb\x2d\x27\x27\x27\x7f\x5a\x4a\x79\xc0\x7b\x7f\x18\x85\
\x11\xb8\xa1\xa4\xc0\x46\x05\x00\x50\x94\x82\x63\x06\xba\x45\xa1\
\x26\x8d\x66\x73\x0d\xf3\xf3\x0b\x90\x52\x01\x20\x8c\xd4\xaa\x10\
\x52\x0e\xd5\x0b\x00\xac\x35\x10\x82\x90\x24\x29\x46\x46\x08\x23\
\xf5\x3a\xc6\xc6\xc6\xb0\xb4\xb8\x88\xe3\xc7\x8f\x21\x8a\x14\x2a\
\xd5\x2a\x4a\x69\x09\xbd\xac\x87\xd5\xd5\x15\xac\xae\xae\x62\x65\
\x79\x19\x73\x73\x67\x91\xf5\x7a\x28\xa5\x29\x2a\xd5\x2a\x72\xad\
\xa1\xb5\x2e\x03\x88\xf0\x77\x12\xe0\xda\xbe\xb7\x10\x62\x7a\x7c\
\x7c\xec\xa3\x59\xd6\xbb\x7b\x90\xe3\xd7\xe9\xf6\x70\xfe\xc2\x05\
\xe4\x5a\xa3\xb9\xda\x44\x7d\xb4\x8e\x4a\xa5\x82\x7a\xbd\x50\x01\
\xb5\xda\x08\x66\x36\x6d\x42\x14\x27\x70\xd6\x22\xee\x1b\x7f\x13\
\x13\x93\x18\x9f\x98\xc4\xd8\x58\xe1\xf7\x13\x01\x9d\x6e\x1b\x79\
\x9e\x83\x48\x60\x74\x74\x14\x49\x9c\xc0\x58\x87\x73\xe7\x2e\x20\
\x8a\xa2\x41\x79\x5a\x94\xcb\xd5\x3f\x0c\x61\x7e\x0e\x40\x0f\x1b\
\xac\xf7\x03\x1b\x17\x00\x02\x40\x95\x08\xf7\x7a\xef\x91\xc4\x31\
\x2a\x95\x32\x36\xcf\xcc\x60\xcb\x96\x2d\x98\x99\x99\x42\x63\xb4\
\x81\x52\x39\x45\x12\x27\xa8\x54\xab\x28\x97\x2b\xa8\x54\xaa\x05\
\x43\x1b\xe3\x58\x5e\x5a\x42\xbb\xdb\xc5\x85\xf9\x79\x9c\x38\x79\
\x12\x49\x9c\xe0\x1d\x0f\x3e\x88\xd9\xd9\x59\x34\x1a\x63\x20\x22\
\x68\x9d\x23\xcb\x32\xf4\x7a\x3d\x18\x63\x60\xb4\xc6\x5a\x6b\x0d\
\xab\xcd\x26\x7a\xbd\x2e\xa6\xa7\xa7\xbb\x47\x8f\x1e\x7f\xde\x39\
\xb7\x80\x62\x2d\x82\x0d\x47\x1b\x15\x00\x9e\x88\xd6\xac\x75\x4f\
\xc7\x71\xbc\x0b\x04\xb2\xc6\xa2\xd9\x5c\x03\x09\x42\xaf\xd7\x45\
\xad\xba\x88\x52\xb9\x8c\x72\xa9\x70\x03\x47\xea\x75\x54\xca\x15\
\x2c\xaf\x2c\xe3\xf0\xe1\xa3\x98\x9b\x9b\xc3\xea\x6a\x13\x9d\x6e\
\x07\x1c\x18\x7b\xf7\xee\xc3\x23\x8f\x3e\x0a\xa5\x14\x5a\xad\x35\
\x78\xef\x10\x42\xb1\x66\x60\x1c\x45\xa8\xd5\x46\x30\x36\x3e\x81\
\x7a\xbd\x8e\x28\x52\xf0\x3e\xe0\xc2\x85\x0b\xe5\xad\x5b\xb7\xdc\
\x7e\xf2\xe4\xa9\xe7\x9c\x73\x27\x51\x0c\x2a\x6e\x28\x29\xb0\x61\
\x01\xe0\xbd\x5f\x6c\xb5\xda\x4f\xd5\xeb\xb5\x3b\xad\xb5\x0f\x30\
\x02\xd2\x72\x09\x33\x33\x33\xd8\x32\xbb\x19\x8d\xd1\x51\xa4\x49\
\x0a\x15\x47\xa8\xf6\x25\x40\x9e\xe7\x78\xe3\xf8\x71\x1c\x3d\x72\
\x18\x6b\xad\x36\x7a\xbd\x0c\xda\x18\x28\x29\xb1\xb2\xb2\x82\xd7\
\x5e\x7d\x15\x5b\xb6\x6c\xc1\xe6\xcd\x9b\x51\xaf\xd7\xa1\xb5\xc6\
\xda\xda\x1a\x9a\xcd\x26\x96\x96\x16\x71\xfe\xfc\x1c\xba\x9d\x0e\
\x88\x08\x51\x14\xa1\xd7\xeb\x51\xaf\xd7\xfd\x11\x21\xc4\x93\x00\
\x5e\x05\xd0\xbd\xde\x3f\xcc\xdf\x94\x36\x2a\x00\x48\x08\x91\x48\
\x29\xab\x52\xca\xcd\x5a\x6b\x18\xe3\xd0\x6c\xae\xe2\xec\x59\x09\
\xad\x0d\x26\xc6\xc6\x50\xa9\x94\x91\xa4\x09\x2a\x95\x0a\xaa\xd5\
\x2a\x88\x8a\xa1\xdf\x91\xda\x08\xa4\x54\x18\x6b\x8c\xa1\x54\x2e\
\x63\x6a\x6a\x0a\x37\xdf\xbc\x07\x9b\x37\x6f\x86\x35\xa6\x6f\x34\
\x06\x48\xa9\xd0\x68\x34\x50\xad\xd6\xfa\x00\x32\x38\x7f\xee\x3c\
\xaa\xd5\x1a\x94\x54\x20\x22\xa4\x69\xc9\xa2\x08\x02\x6d\xa8\x9e\
\x3f\xa0\x8d\x0a\x00\x05\x60\xba\xd1\xa8\xff\x54\xab\xd5\xda\x22\
\x84\x00\xd6\xb3\x79\x08\x51\xa4\x50\x2a\x97\x50\x1f\x19\x41\xb9\
\x52\xc1\x68\xa3\x8e\x5a\xad\x8e\x52\x5a\xc2\xe4\xc4\x04\x76\xef\
\xbe\x19\xc6\xe8\xc2\x13\x18\x6d\x60\x72\x62\x02\x3b\x77\xed\xc2\
\xd6\x6d\xdb\x50\x2e\x57\x50\x44\x16\x8b\xe4\x0f\x6b\x0d\xb2\x2c\
\xc3\xca\xca\x32\xce\x9f\x9f\xc3\xdc\xb9\x39\xac\xac\xac\x40\x1b\
\x8d\x6a\xb5\xea\x85\x90\xbf\x11\x42\x38\x8d\xbf\x33\x02\xaf\x29\
\x11\x00\x72\xce\x27\x83\x13\x42\x0a\x54\x2a\x15\x4c\x8c\x35\x30\
\x31\x3e\x51\x0c\x0a\x95\x52\x48\x29\xd1\x8f\xd7\x83\x48\xa0\x52\
\xad\xa0\x3e\x5a\xc7\xd2\xd2\x12\x16\x97\x16\x71\xe2\xc4\x09\x8c\
\x36\x1a\xd8\xba\x7d\x3b\xd2\xb4\x04\x6b\x0d\x9a\xcd\x55\x64\xbd\
\x1e\x3a\xdd\x0e\x3a\xed\x36\x3a\x9d\x2e\xce\x9f\x3f\x87\x83\x07\
\x5f\xc3\xf1\x63\x47\xb1\xb8\xb0\x80\x3c\xcf\x31\x35\x35\xa5\x4f\
\x9d\x3a\x33\xc7\xcc\x6d\x6c\xd0\x50\xf0\x86\x1d\x0d\x24\x22\x99\
\xa6\xa9\x51\x4a\xdd\x1f\xbc\x2f\x2b\xa9\xa0\x84\x80\x90\x02\xcc\
\x0c\x63\x8a\x9e\x9b\xe7\x1a\xd6\x5a\x38\xe7\xd0\xeb\xf6\x70\xe4\
\xc8\x61\x3c\xf7\xdc\x77\xf0\xf2\x4b\x2f\xe3\xb5\x57\x5f\xc3\xe1\
\x23\x47\xd1\x6e\xb7\xb1\x67\xcf\x2d\xd8\x7e\xd3\x4d\x90\x42\x20\
\x70\x80\x20\x81\x24\x4e\x50\x1f\xad\x63\x64\xa4\x0e\xa9\x14\x96\
\x96\x96\x70\xea\xf4\x69\x2c\x2e\x2d\x21\xcf\x73\x34\x9b\xcd\x68\
\xcf\x9e\x3d\xb5\xc5\xc5\xa5\x97\x42\x08\x17\x00\x6c\xb8\x9c\xc0\
\x8d\x0a\x00\x66\x66\xa3\xb5\xb1\x63\x63\x8d\xad\xd6\xf9\x5b\x4b\
\xe5\x14\xd3\xd3\x93\xd8\xb5\x73\x17\xf6\xec\xde\x85\xd9\x2d\xb3\
\x98\x98\x18\xc7\xd8\x68\x03\xe3\x13\x93\x7d\xd7\x0e\x38\x7e\xec\
\x18\xe6\xce\xce\xa1\xd3\xe9\x42\x1b\x03\x06\x10\x45\x31\xd2\x24\
\xc1\xcc\xcc\x26\x4c\x4f\x4f\x23\x4d\x53\xc4\x49\x02\x06\x23\xcf\
\x35\x3a\x9d\x36\x16\x16\x16\x70\xe1\xc2\x79\x2c\x2f\x2f\xa1\xdb\
\xed\xac\x83\xca\x39\xbb\xb9\xd7\xcb\xbe\x1c\x42\x38\x81\x42\x0d\
\x6c\x28\xda\xa8\x2a\x80\x01\xe4\xde\xfb\x93\x2b\x2b\xcd\xdf\xdb\
\xb6\x6d\xf6\x2e\x02\x76\x08\x92\x68\x77\xda\x98\xbf\x38\x8f\x56\
\xbb\x85\x24\x4e\x90\x24\x09\x6a\x5a\xc3\x18\x03\xa5\x14\xa6\xa6\
\xa6\xe0\xbc\x87\x20\x89\x72\xb5\x8a\xd1\xd1\x06\x26\x26\x26\x30\
\xb3\x79\x33\xac\xb3\xe8\x74\x3a\x00\x01\xce\x39\x18\x53\xa4\x89\
\x15\xb3\x86\x02\x00\x81\x52\xa9\x82\x91\x91\x51\x84\x00\x34\x1a\
\x0d\x23\xa5\xfa\xea\xc5\x8b\x8b\x1b\xb2\xf7\x03\x1b\x38\x93\xa5\
\x4f\xb1\x10\x62\x5b\xad\x56\x7b\xe8\xb6\x5b\x6f\xfe\xb9\xcd\x9b\
\x37\xdd\xbd\x6d\xeb\x16\x4c\x8c\x8f\x21\x8a\xe2\x22\x3d\x5c\x4a\
\x94\xcb\x65\x94\x4b\x15\x24\x69\x8a\xc0\x0c\x6b\x0c\x7a\xbd\x1c\
\xad\x76\x1b\xcd\xb5\x35\xac\xac\xac\xe0\xe2\xc5\x05\x8c\x8f\x8f\
\xe3\xce\x3b\xef\xc4\xea\xca\x0a\x96\x57\x96\xb1\xb2\xb2\x82\xb5\
\xb5\x16\x3a\x9d\x36\xb4\x36\xeb\xf5\x85\x6b\xb5\x1a\x94\x92\x2f\
\x3c\xff\xfc\x0b\xbf\xd9\xed\x76\x0f\x3a\xe7\x0e\x03\x58\xc0\x06\
\xb4\x03\x36\x3a\x00\x08\x40\x22\x84\xd8\x12\xc7\xf1\xbd\x52\xca\
\xff\x3e\x4d\xd3\x87\x07\x6e\x5f\xb9\x5c\x46\x12\x17\xd9\x40\x81\
\x0b\xd7\x8e\x39\xc0\xb9\x4b\x0b\x47\x0c\xe6\x03\x02\x97\x17\x96\
\xb8\x72\x3b\xa8\x0b\x6c\x8c\xf1\xad\x56\xeb\x4f\xd6\xd6\xd6\x7e\
\xd7\x18\xf3\x72\x08\x61\x1e\x85\xe8\xdf\x70\xcc\x07\x36\xae\x0a\
\x18\x10\x03\xc8\x43\x08\x67\x8c\x31\x26\x8a\x22\xef\x9c\x63\x6b\
\xed\x23\x79\x9e\xaf\x57\x0c\x93\xaa\x18\x1c\xb2\xd6\x42\xeb\xc2\
\x28\xb4\xd6\xae\x17\x97\xa4\xa2\x38\x04\x94\x52\x88\xa2\x08\x71\
\x1c\x23\x8e\x63\x28\xa5\xa0\xb5\xee\x1b\x93\x39\x7a\xbd\x9e\xee\
\x74\x3a\xff\xb1\xd7\xeb\xfd\xb1\x31\xe6\x45\x00\x8b\xd8\x80\xd1\
\xbf\x61\xda\xa8\x46\xe0\x95\xe4\x99\xb9\xc7\xcc\x2d\x66\x3e\xaf\
\x94\x9a\x8c\xa2\x68\x47\xbf\x5a\xc8\x3a\x73\x87\x57\x14\x1b\xee\
\xe1\xc3\xf7\x14\x43\xc8\x11\x92\x24\xe9\xc7\x01\xec\x00\x38\x9d\
\x56\xab\xf5\x6f\xbb\xdd\xee\x9f\x58\x6b\x87\x99\xbf\xa1\xe9\xad\
\x02\x00\x00\x08\x03\x10\x38\xe7\xce\xc7\x71\x7c\x19\x08\x8a\x51\
\xbe\xf5\x55\xc5\x00\x60\xbd\x92\x88\x94\x72\x9d\xf1\x03\x10\x08\
\x21\x60\xad\x85\x31\x06\x5a\xeb\xe5\xe5\xe5\xe5\x7f\xd3\xe9\x74\
\xbe\x68\xad\x7d\x19\xc0\x86\x9c\x04\x72\x35\x7a\x2b\x01\x00\x18\
\x02\x81\x31\xe6\x7c\xa9\x54\x1a\x9f\x9c\x18\xdf\x1c\x98\x15\x80\
\xf5\x14\xef\xe1\xa2\x8f\x42\x08\x28\xa5\xd6\xdb\x40\xf4\x13\x11\
\x46\x46\x46\x1c\x33\x9f\x3d\x71\xe2\xc4\xff\xdd\xe9\x74\x9e\x74\
\xce\x1d\x04\xd0\x44\xb1\xb2\xf8\x5b\x82\x36\xba\x0d\x70\x35\xb2\
\x21\x84\x0b\xd6\xda\xef\xcc\xcf\xcf\xe3\x03\xef\x7f\x14\x6f\x7f\
\xfb\xfd\x8f\x26\x49\x6a\x93\x24\x45\x92\xa4\x14\x45\x91\x50\x51\
\x24\xa2\xa8\x98\x19\x34\x52\xab\xa1\x36\x52\x0f\x69\x9a\x86\x38\
\x8e\xb9\xaf\x02\xd4\xb9\x73\xe7\x16\x3e\xf4\xa1\x0f\xfd\x5a\xb7\
\xdb\x7d\xda\x39\x77\x04\x40\x1b\x1b\x6c\xf6\xef\x5f\x47\x6f\x35\
\x09\x30\xa0\x75\x49\xf0\xea\x6b\x07\xcf\x6f\xdd\x32\x3b\xbe\x6d\
\xdb\xb6\xdd\xe5\x72\x59\x56\x2a\x15\x51\x1b\xa9\x53\xa3\x51\xf8\
\xff\x93\x53\x53\x98\x9c\x9c\x44\xa3\x31\x46\x95\x4a\x45\x24\x49\
\x22\x95\x52\xf2\xdc\xb9\x73\x17\x3e\xf8\xc1\x0f\xfe\xca\xd1\xa3\
\x47\x9f\xb6\xd6\x1e\x06\xd0\xc1\x5b\x8c\xf9\xc0\x5b\x17\x00\xc0\
\x10\x08\x5e\x7c\xe9\xe5\xf3\xdb\xb7\x6f\x9b\x9a\x9c\x98\xb8\x49\
\x48\x85\x28\x8e\x8a\x68\x5f\x7f\x52\x68\x1c\xc7\x00\x11\xf2\x7e\
\x92\xe8\xa9\x53\xa7\x2e\x7c\xec\x63\x1f\xfb\x95\x63\xc7\x8e\xbd\
\xa5\x99\x0f\x6c\xfc\x38\xc0\x7f\x09\x45\x42\x88\x19\xa5\xd4\x1d\
\x3b\x76\xec\x78\xfb\xcc\xcc\xcc\xae\x24\x49\x6a\x51\x14\x55\x2a\
\xe5\x72\x49\x45\x91\xec\x74\x3a\x99\xd6\x3a\xb3\xd6\x76\xd7\xd6\
\xd6\x2e\x1e\x3b\x76\xec\xc5\x2c\xcb\x5e\xb4\xd6\x1e\xc7\x5b\x98\
\xf9\xc0\xdf\x0e\x00\x00\x85\xad\x53\x53\x4a\x8d\x31\xf3\x08\x80\
\x84\x88\x22\x22\x52\x44\x44\xfd\xca\x5e\x36\x84\x60\x89\xa8\x07\
\x60\xd5\x7b\xdf\x44\x91\xeb\xff\x96\x65\x3e\xf0\xb7\x07\x00\x40\
\x91\x47\x38\x68\x74\x45\x1b\x30\x39\xa0\x08\xea\x0c\x4a\xbd\x6d\
\xd8\x00\xcf\x7f\x29\xfd\x7f\x10\xf8\xf3\x4c\xcc\x6e\xed\x5f\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x05\
\x00\x6f\xa6\x53\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x1e\
\x01\xea\x7e\xa7\
\x00\x47\
\x00\x6e\x00\x6f\x00\x6d\x00\x65\x00\x2d\x00\x4d\x00\x65\x00\x64\x00\x69\x00\x61\x00\x2d\x00\x50\x00\x6c\x00\x61\x00\x79\x00\x62\
\x00\x61\x00\x63\x00\x6b\x00\x2d\x00\x53\x00\x74\x00\x61\x00\x72\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x11\
\x0f\xe3\xd5\x67\
\x00\x64\
\x00\x6f\x00\x63\x00\x75\x00\x6d\x00\x65\x00\x6e\x00\x74\x00\x2d\x00\x73\x00\x61\x00\x76\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\
\x00\x09\
\x06\x98\x83\x27\
\x00\x63\
\x00\x6c\x00\x6f\x00\x73\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x11\
\x01\xa6\xc4\x87\
\x00\x64\
\x00\x6f\x00\x63\x00\x75\x00\x6d\x00\x65\x00\x6e\x00\x74\x00\x2d\x00\x6f\x00\x70\x00\x65\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
\
\x00\x0a\
\x05\xab\x56\x47\
\x00\x70\
\x00\x79\x00\x71\x00\x6f\x00\x64\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x14\
\x0d\x9a\x02\xe7\
\x00\x47\
\x00\x6e\x00\x6f\x00\x6d\x00\x65\x00\x2d\x00\x53\x00\x79\x00\x73\x00\x74\x00\x65\x00\x6d\x00\x2d\x00\x52\x00\x75\x00\x6e\x00\x2e\
\x00\x70\x00\x6e\x00\x67\
\x00\x10\
\x0c\xbc\x2e\x67\
\x00\x64\
\x00\x6f\x00\x63\x00\x75\x00\x6d\x00\x65\x00\x6e\x00\x74\x00\x2d\x00\x6e\x00\x65\x00\x77\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x14\
\x0b\xa9\xab\x27\
\x00\x64\
\x00\x6f\x00\x63\x00\x75\x00\x6d\x00\x65\x00\x6e\x00\x74\x00\x2d\x00\x73\x00\x61\x00\x76\x00\x65\x00\x2d\x00\x61\x00\x73\x00\x2e\
\x00\x70\x00\x6e\x00\x67\
\x00\x09\
\x06\xc7\x98\x67\
\x00\x61\
\x00\x62\x00\x6f\x00\x75\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x09\x00\x00\x00\x02\
\x00\x00\x00\x92\x00\x00\x00\x00\x00\x01\x00\x00\x15\xb4\
\x00\x00\x00\x10\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\xba\x00\x00\x00\x00\x00\x01\x00\x00\x1c\x49\
\x00\x00\x00\x7a\x00\x00\x00\x00\x00\x01\x00\x00\x09\x20\
\x00\x00\x01\x56\x00\x00\x00\x00\x00\x01\x00\x00\x86\x20\
\x00\x00\x01\x28\x00\x00\x00\x00\x00\x01\x00\x00\x7b\x61\
\x00\x00\x01\x02\x00\x00\x00\x00\x00\x01\x00\x00\x71\x4f\
\x00\x00\x00\xd4\x00\x00\x00\x00\x00\x01\x00\x00\x5d\x0f\
\x00\x00\x00\x52\x00\x00\x00\x00\x00\x01\x00\x00\x04\xbf\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
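# --- Hedged usage sketch (editor's addition, not part of the generated file) ---
# Once qInitResources() has run, the embedded images are reachable through
# Qt's resource system.  The ":/icons/..." prefix and the PyQt4 QtGui import
# are assumptions; the file names (e.g. "close.png", "document-save.png") are
# the ones encoded in qt_resource_name above.
def _resource_usage_sketch():
    from PyQt4 import QtGui            # assumed binding; the file itself only needs QtCore
    app = QtGui.QApplication([])       # a QApplication must exist before loading icons
    icon = QtGui.QIcon(":/icons/close.png")
    return not icon.isNull()           # True once the resource data is registered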
|
mwx1993/TACTIC
|
refs/heads/master
|
src/client/tactic_client_lib/scm/scm_impl.py
|
7
|
############################################################
#
# Copyright (c) 2012, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
__all__ = ['ScmImpl', 'ScmException']
import os, shutil
class ScmException(Exception):
pass
class ScmImpl(object):
def __init__(my, **kwargs):
my.kwargs = kwargs
my.user = kwargs.get('user')
my.password = kwargs.get('password')
        my.tag = kwargs.get('tag')
my.branch = kwargs.get('branch')
my.trunk = kwargs.get('trunk')
my.root = kwargs.get('root')
my.sync_dir = kwargs.get("sync_dir")
my.log = []
    def set_root(my, root):
        '''set the root of the server depot. Each scm will specify its
        own root, e.g. SVN uses URLs'''
        my.root = root
def set_branch(my, branch):
'''set the current branch that is being worked on'''
my.branch = branch
def set_sync_dir(my, sync_dir):
'''set the absolute base directory of the sync (or workspace or
sandbox)'''
my.sync_dir = sync_dir
def get_log(my):
return my.log
def checkout(my, repo_dir, sync_dir, depth=None):
'''Method to check out some root from the repository to a destination
directory
@params
repo_dir: directory in the repo to checkout relative to the root
sync_dir: the directory to check these files out to
        depth: optional checkout depth passed to the scm (e.g. "empty" to check out only the directory entry)
'''
pass
    def commit(my, sync_path, description=""):
        '''Method to check in the files at the given sync path
        @params:
        sync_path: path in the sync directory to commit
        description: commit message describing the change
        '''
pass
#
# Higher level functions
#
def deliver_file(my, src_path, repo_path):
repo_dir = os.path.dirname(repo_path)
sync_path = "%s/%s" % (my.sync_dir, repo_path)
sync_dir = os.path.dirname(sync_path)
# Trick to checkout a single file. Not sure if this works
my.checkout(repo_dir, sync_dir, depth="empty")
try:
my.export(repo_path, sync_path)
except Exception, e:
print "WARNING: ", e
exists = False
else:
exists = os.path.exists( sync_path )
if exists:
return
# create a dummy file
shutil.copy(src_path, sync_path)
# if it doesn't exist, add it
if not exists:
print "--> add"
my.add(sync_path)
else:
print "--> update"
my.update(sync_path)
my.commit(sync_path, "this is a test")
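# --- Hedged sketch (editor's addition, not part of TACTIC) ---
# deliver_file() above is a template method: it calls checkout(), export(),
# add(), update() and commit() hooks that a concrete scm subclass is expected
# to provide.  The stub below only illustrates that contract; the bodies are
# assumptions, not TACTIC behaviour.
class _StubScm(ScmImpl):
    def checkout(my, repo_dir, sync_dir, depth=None):
        my.log.append("checkout %s -> %s (depth=%s)" % (repo_dir, sync_dir, depth))
    def export(my, repo_path, sync_path):
        my.log.append("export %s -> %s" % (repo_path, sync_path))
    def add(my, sync_path):
        my.log.append("add %s" % sync_path)
    def update(my, sync_path):
        my.log.append("update %s" % sync_path)
    def commit(my, sync_path, description=""):
        my.log.append("commit %s: %s" % (sync_path, description))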
|
singlebrook/AWS-ElasticBeanstalk-CLI
|
refs/heads/master
|
eb/macosx/python2.7/lib/aws/requests/packages/charade/euctwprober.py
|
2993
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCTWDistributionAnalysis
from .mbcssm import EUCTWSMModel
class EUCTWProber(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(EUCTWSMModel)
self._mDistributionAnalyzer = EUCTWDistributionAnalysis()
self.reset()
def get_charset_name(self):
return "EUC-TW"
|
sjdv1982/seamless
|
refs/heads/master
|
seamless/highlevel/library/include.py
|
1
|
from copy import deepcopy
from inspect import Signature, Parameter
from collections import OrderedDict
class IncludedLibraryContainer:
def __init__(self, ctx, path):
assert isinstance(ctx, Context)
self._ctx = ctx
self._path = path
def __dir__(self):
ctx = self._ctx
libs = ctx._get_libs(self._path)
attrs = set([p[0] for p in libs])
return attrs
def __getattr__(self, attr):
attr2 = (attr,)
ctx = self._ctx
libs = ctx._get_libs(self._path)
if attr2 in libs:
lib = libs[attr2].copy()
lib["path"] = self._path + attr2
return IncludedLibrary(
ctx=ctx,
**lib
)
attrs = set([p[0] for p in libs])
if attr in attrs:
return IncludedLibraryContainer(
self._ctx,
self._path + attr2
)
else:
raise AttributeError(attr)
class IncludedLibrary:
def __init__(self, ctx, path, graph, constructor, params, **kwargs):
self._ctx = ctx
self._path = path
self._graph = graph
self._constructor = constructor
self._params = OrderedDict(params)
identifier = ".".join(self._path)
cached_compile(self._constructor, identifier) # just to validate
def __call__(self, **kwargs):
kwargs2 = kwargs.copy()
params = list(self._params.items())
for n in range(len(params)):
k,v = params[n]
if k in kwargs2:
continue
default = v.get("default")
if default is not None:
kwargs2[k] = default
arguments = {}
for argname, argvalue in kwargs2.items():
par = self._params[argname]
arguments[argname] = parse_argument(argname, argvalue, par)
libinstance = LibInstance(self._ctx, libpath=self._path, arguments=arguments)
return libinstance
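# NOTE: the imports below are kept at the bottom of the module, presumably to
# avoid circular imports between the highlevel modules at load time.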
from ...core.cached_compile import cached_compile
from .libinstance import LibInstance
from .argument import parse_argument
from ..Context import Context
|
mlufei/depot_tools
|
refs/heads/master
|
third_party/logilab/astroid/bases.py
|
58
|
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
"""This module contains base classes and functions for the nodes and some
inference utils.
"""
__docformat__ = "restructuredtext en"
import sys
from contextlib import contextmanager
from logilab.common.decorators import cachedproperty
from astroid.exceptions import (InferenceError, AstroidError, NotFoundError,
UnresolvableName, UseInferenceDefault)
if sys.version_info >= (3, 0):
BUILTINS = 'builtins'
else:
BUILTINS = '__builtin__'
class Proxy(object):
"""a simple proxy object"""
_proxied = None # proxied object may be set by class or by instance
def __init__(self, proxied=None):
if proxied is not None:
self._proxied = proxied
def __getattr__(self, name):
if name == '_proxied':
return getattr(self.__class__, '_proxied')
if name in self.__dict__:
return self.__dict__[name]
return getattr(self._proxied, name)
def infer(self, context=None):
yield self
# Inference ##################################################################
MISSING = object()
class InferenceContext(object):
__slots__ = ('path', 'callcontext', 'boundnode', 'infered')
def __init__(self,
path=None, callcontext=None, boundnode=None, infered=None):
if path is None:
self.path = frozenset()
else:
self.path = path
self.callcontext = callcontext
self.boundnode = boundnode
if infered is None:
self.infered = {}
else:
self.infered = infered
def push(self, key):
# This returns a NEW context with the same attributes, but a new key
# added to `path`. The intention is that it's only passed to callees
# and then destroyed; otherwise scope() may not work correctly.
# The cache will be shared, since it's the same exact dict.
if key in self.path:
# End the containing generator
raise StopIteration
return InferenceContext(
self.path.union([key]),
self.callcontext,
self.boundnode,
self.infered,
)
@contextmanager
def scope(self, callcontext=MISSING, boundnode=MISSING):
try:
orig = self.callcontext, self.boundnode
if callcontext is not MISSING:
self.callcontext = callcontext
if boundnode is not MISSING:
self.boundnode = boundnode
yield
finally:
self.callcontext, self.boundnode = orig
def cache_generator(self, key, generator):
results = []
for result in generator:
results.append(result)
yield result
self.infered[key] = tuple(results)
return
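# --- Hedged illustration (editor's addition, not part of astroid) ---
# push() is the recursion guard described in its comment: it returns a *new*
# context whose path includes the key, and raises StopIteration -- ending the
# calling generator -- when the same key is pushed twice along one path.
def _demo_push_guard():
    ctx = InferenceContext()
    child = ctx.push(('some-node', 'some-name'))    # fresh context, shared caches
    try:
        child.push(('some-node', 'some-name'))      # same key again
    except StopIteration:
        return True                                 # recursion was cut off
    return False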
def _infer_stmts(stmts, context, frame=None, lookupname=None):
"""return an iterator on statements inferred by each statement in <stmts>
"""
stmt = None
infered = False
if context is None:
context = InferenceContext()
for stmt in stmts:
if stmt is YES:
yield stmt
infered = True
continue
kw = {}
infered_name = stmt._infer_name(frame, lookupname)
if infered_name is not None:
# only returns not None if .infer() accepts a lookupname kwarg
kw['lookupname'] = infered_name
try:
for infered in stmt.infer(context, **kw):
yield infered
infered = True
except UnresolvableName:
continue
except InferenceError:
yield YES
infered = True
if not infered:
raise InferenceError(str(stmt))
# special inference objects (e.g. may be returned as nodes by .infer()) #######
class _Yes(object):
"""a yes object"""
def __repr__(self):
return 'YES'
def __getattribute__(self, name):
if name == 'next':
raise AttributeError('next method should not be called')
if name.startswith('__') and name.endswith('__'):
            # to avoid inspection problems
return super(_Yes, self).__getattribute__(name)
return self
def __call__(self, *args, **kwargs):
return self
YES = _Yes()
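# --- Hedged illustration (editor's addition, not part of astroid) ---
# YES stands for "could not infer": it absorbs attribute access and calls, so
# inference code can propagate it without special-casing unknown values.
def _demo_yes():
    return YES.some_attribute is YES and YES("any", "args") is YES    # True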
class Instance(Proxy):
"""a special node representing a class instance"""
def getattr(self, name, context=None, lookupclass=True):
try:
values = self._proxied.instance_attr(name, context)
except NotFoundError:
if name == '__class__':
return [self._proxied]
if lookupclass:
# class attributes not available through the instance
# unless they are explicitly defined
if name in ('__name__', '__bases__', '__mro__', '__subclasses__'):
return self._proxied.local_attr(name)
return self._proxied.getattr(name, context)
raise NotFoundError(name)
# since we've no context information, return matching class members as
# well
if lookupclass:
try:
return values + self._proxied.getattr(name, context)
except NotFoundError:
pass
return values
def igetattr(self, name, context=None):
"""inferred getattr"""
if not context:
context = InferenceContext()
try:
# avoid recursively inferring the same attr on the same class
new_context = context.push((self._proxied, name))
# XXX frame should be self._proxied, or not ?
get_attr = self.getattr(name, new_context, lookupclass=False)
return _infer_stmts(
self._wrap_attr(get_attr, new_context),
new_context,
frame=self,
)
except NotFoundError:
try:
                # fall back to the class' igetattr since it has some logic to
                # handle descriptors
return self._wrap_attr(self._proxied.igetattr(name, context),
context)
except NotFoundError:
raise InferenceError(name)
def _wrap_attr(self, attrs, context=None):
"""wrap bound methods of attrs in a InstanceMethod proxies"""
for attr in attrs:
if isinstance(attr, UnboundMethod):
if BUILTINS + '.property' in attr.decoratornames():
for infered in attr.infer_call_result(self, context):
yield infered
else:
yield BoundMethod(attr, self)
else:
yield attr
def infer_call_result(self, caller, context=None):
"""infer what a class instance is returning when called"""
infered = False
for node in self._proxied.igetattr('__call__', context):
if node is YES:
continue
for res in node.infer_call_result(caller, context):
infered = True
yield res
if not infered:
raise InferenceError()
def __repr__(self):
return '<Instance of %s.%s at 0x%s>' % (self._proxied.root().name,
self._proxied.name,
id(self))
def __str__(self):
return 'Instance of %s.%s' % (self._proxied.root().name,
self._proxied.name)
def callable(self):
try:
self._proxied.getattr('__call__')
return True
except NotFoundError:
return False
def pytype(self):
return self._proxied.qname()
def display_type(self):
return 'Instance of'
class UnboundMethod(Proxy):
"""a special node representing a method not bound to an instance"""
def __repr__(self):
frame = self._proxied.parent.frame()
        return '<%s %s of %s at 0x%s>' % (self.__class__.__name__,
self._proxied.name,
frame.qname(), id(self))
def is_bound(self):
return False
def getattr(self, name, context=None):
if name == 'im_func':
return [self._proxied]
return super(UnboundMethod, self).getattr(name, context)
def igetattr(self, name, context=None):
if name == 'im_func':
return iter((self._proxied,))
return super(UnboundMethod, self).igetattr(name, context)
def infer_call_result(self, caller, context):
# If we're unbound method __new__ of builtin object, the result is an
# instance of the class given as first argument.
if (self._proxied.name == '__new__' and
self._proxied.parent.frame().qname() == '%s.object' % BUILTINS):
infer = caller.args[0].infer() if caller.args else []
return ((x is YES and x or Instance(x)) for x in infer)
return self._proxied.infer_call_result(caller, context)
class BoundMethod(UnboundMethod):
"""a special node representing a method bound to an instance"""
def __init__(self, proxy, bound):
UnboundMethod.__init__(self, proxy)
self.bound = bound
def is_bound(self):
return True
def infer_call_result(self, caller, context):
with context.scope(boundnode=self.bound):
for infered in self._proxied.infer_call_result(caller, context):
yield infered
class Generator(Instance):
"""a special node representing a generator.
Proxied class is set once for all in raw_building.
"""
def callable(self):
return False
def pytype(self):
return '%s.generator' % BUILTINS
def display_type(self):
return 'Generator'
def __repr__(self):
return '<Generator(%s) l.%s at 0x%s>' % (self._proxied.name, self.lineno, id(self))
def __str__(self):
return 'Generator(%s)' % (self._proxied.name)
# decorators ##################################################################
def path_wrapper(func):
"""return the given infer function wrapped to handle the path"""
def wrapped(node, context=None, _func=func, **kwargs):
"""wrapper function handling context"""
if context is None:
context = InferenceContext()
context = context.push((node, kwargs.get('lookupname')))
yielded = set()
for res in _func(node, context, **kwargs):
# unproxy only true instance, not const, tuple, dict...
if res.__class__ is Instance:
ares = res._proxied
else:
ares = res
if not ares in yielded:
yield res
yielded.add(ares)
return wrapped
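# --- Hedged illustration (editor's addition, not part of astroid) ---
# path_wrapper() pushes the node onto the context path and de-duplicates what
# the wrapped generator yields; the inner function here is a stand-in, not a
# real astroid inference method.
def _demo_path_wrapper():
    @path_wrapper
    def _fake_infer(node, context=None):
        yield node
        yield node                                  # duplicate, dropped by the wrapper
    return list(_fake_infer("some-node"))           # -> ["some-node"]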
def yes_if_nothing_infered(func):
def wrapper(*args, **kwargs):
infered = False
for node in func(*args, **kwargs):
infered = True
yield node
if not infered:
yield YES
return wrapper
def raise_if_nothing_infered(func):
def wrapper(*args, **kwargs):
infered = False
for node in func(*args, **kwargs):
infered = True
yield node
if not infered:
raise InferenceError()
return wrapper
# Node ######################################################################
class NodeNG(object):
"""Base Class for all Astroid node classes.
It represents a node of the new abstract syntax tree.
"""
is_statement = False
optional_assign = False # True for For (and for Comprehension if py <3.0)
is_function = False # True for Function nodes
# attributes below are set by the builder module or by raw factories
lineno = None
fromlineno = None
tolineno = None
col_offset = None
# parent node in the tree
parent = None
# attributes containing child node(s) redefined in most concrete classes:
_astroid_fields = ()
# instance specific inference function infer(node, context)
_explicit_inference = None
def infer(self, context=None, **kwargs):
"""main interface to the interface system, return a generator on infered
values.
If the instance has some explicit inference function set, it will be
called instead of the default interface.
"""
if self._explicit_inference is not None:
# explicit_inference is not bound, give it self explicitly
try:
return self._explicit_inference(self, context, **kwargs)
except UseInferenceDefault:
pass
if not context:
return self._infer(context, **kwargs)
key = (self, kwargs.get('lookupname'), context.callcontext, context.boundnode)
if key in context.infered:
return iter(context.infered[key])
return context.cache_generator(key, self._infer(context, **kwargs))
def _repr_name(self):
"""return self.name or self.attrname or '' for nice representation"""
return getattr(self, 'name', getattr(self, 'attrname', ''))
def __str__(self):
return '%s(%s)' % (self.__class__.__name__, self._repr_name())
def __repr__(self):
return '<%s(%s) l.%s [%s] at 0x%x>' % (self.__class__.__name__,
self._repr_name(),
self.fromlineno,
self.root().name,
id(self))
def accept(self, visitor):
func = getattr(visitor, "visit_" + self.__class__.__name__.lower())
return func(self)
def get_children(self):
for field in self._astroid_fields:
attr = getattr(self, field)
if attr is None:
continue
if isinstance(attr, (list, tuple)):
for elt in attr:
yield elt
else:
yield attr
def last_child(self):
"""an optimized version of list(get_children())[-1]"""
for field in self._astroid_fields[::-1]:
attr = getattr(self, field)
            if not attr: # None or empty list / tuple
continue
if attr.__class__ in (list, tuple):
return attr[-1]
else:
return attr
return None
def parent_of(self, node):
"""return true if i'm a parent of the given node"""
parent = node.parent
while parent is not None:
if self is parent:
return True
parent = parent.parent
return False
def statement(self):
"""return the first parent node marked as statement node"""
if self.is_statement:
return self
return self.parent.statement()
def frame(self):
"""return the first parent frame node (i.e. Module, Function or Class)
"""
return self.parent.frame()
def scope(self):
"""return the first node defining a new scope (i.e. Module, Function,
Class, Lambda but also GenExpr)
"""
return self.parent.scope()
def root(self):
"""return the root node of the tree, (i.e. a Module)"""
if self.parent:
return self.parent.root()
return self
def child_sequence(self, child):
"""search for the right sequence where the child lies in"""
for field in self._astroid_fields:
node_or_sequence = getattr(self, field)
if node_or_sequence is child:
return [node_or_sequence]
# /!\ compiler.ast Nodes have an __iter__ walking over child nodes
if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence:
return node_or_sequence
else:
msg = 'Could not find %s in %s\'s children'
raise AstroidError(msg % (repr(child), repr(self)))
def locate_child(self, child):
"""return a 2-uple (child attribute name, sequence or node)"""
for field in self._astroid_fields:
node_or_sequence = getattr(self, field)
# /!\ compiler.ast Nodes have an __iter__ walking over child nodes
if child is node_or_sequence:
return field, child
if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence:
return field, node_or_sequence
msg = 'Could not find %s in %s\'s children'
raise AstroidError(msg % (repr(child), repr(self)))
# FIXME : should we merge child_sequence and locate_child ? locate_child
# is only used in are_exclusive, child_sequence one time in pylint.
def next_sibling(self):
"""return the next sibling statement"""
return self.parent.next_sibling()
def previous_sibling(self):
"""return the previous sibling statement"""
return self.parent.previous_sibling()
def nearest(self, nodes):
"""return the node which is the nearest before this one in the
given list of nodes
"""
myroot = self.root()
mylineno = self.fromlineno
nearest = None, 0
for node in nodes:
assert node.root() is myroot, \
'nodes %s and %s are not from the same module' % (self, node)
lineno = node.fromlineno
if node.fromlineno > mylineno:
break
if lineno > nearest[1]:
nearest = node, lineno
# FIXME: raise an exception if nearest is None ?
return nearest[0]
# these are lazy because they're relatively expensive to compute for every
# single node, and they rarely get looked at
@cachedproperty
def fromlineno(self):
if self.lineno is None:
return self._fixed_source_line()
else:
return self.lineno
@cachedproperty
def tolineno(self):
if not self._astroid_fields:
# can't have children
lastchild = None
else:
lastchild = self.last_child()
if lastchild is None:
return self.fromlineno
else:
return lastchild.tolineno
# TODO / FIXME:
assert self.fromlineno is not None, self
assert self.tolineno is not None, self
def _fixed_source_line(self):
"""return the line number where the given node appears
we need this method since not all nodes have the lineno attribute
correctly set...
"""
line = self.lineno
_node = self
try:
while line is None:
_node = next(_node.get_children())
line = _node.lineno
except StopIteration:
_node = self.parent
while _node and line is None:
line = _node.lineno
_node = _node.parent
return line
def block_range(self, lineno):
"""handle block line numbers range for non block opening statements
"""
return lineno, self.tolineno
def set_local(self, name, stmt):
"""delegate to a scoped parent handling a locals dictionary"""
self.parent.set_local(name, stmt)
def nodes_of_class(self, klass, skip_klass=None):
"""return an iterator on nodes which are instance of the given class(es)
klass may be a class object or a tuple of class objects
"""
if isinstance(self, klass):
yield self
for child_node in self.get_children():
if skip_klass is not None and isinstance(child_node, skip_klass):
continue
for matching in child_node.nodes_of_class(klass, skip_klass):
yield matching
def _infer_name(self, frame, name):
# overridden for From, Import, Global, TryExcept and Arguments
return None
def _infer(self, context=None):
"""we don't know how to resolve a statement by default"""
# this method is overridden by most concrete classes
raise InferenceError(self.__class__.__name__)
def infered(self):
        '''return the list of infered values for simpler inference usage'''
return list(self.infer())
def instanciate_class(self):
"""instanciate a node if it is a Class node, else return self"""
return self
def has_base(self, node):
return False
def callable(self):
return False
def eq(self, value):
return False
def as_string(self):
from astroid.as_string import to_code
return to_code(self)
def repr_tree(self, ids=False):
from astroid.as_string import dump
return dump(self)
class Statement(NodeNG):
"""Statement node adding a few attributes"""
is_statement = True
def next_sibling(self):
"""return the next sibling statement"""
stmts = self.parent.child_sequence(self)
index = stmts.index(self)
try:
return stmts[index +1]
except IndexError:
pass
def previous_sibling(self):
"""return the previous sibling statement"""
stmts = self.parent.child_sequence(self)
index = stmts.index(self)
if index >= 1:
return stmts[index -1]
|
michalliu/OpenWrt-Firefly-Libraries
|
refs/heads/master
|
staging_dir/host/lib/python3.4/test/test_os.py
|
8
|
# As a test suite for the os module, this is woefully inadequate, but this
# does add tests for a few functions which have been determined to be more
# portable than they had been thought to be.
import os
import errno
import unittest
import warnings
import sys
import signal
import subprocess
import time
import shutil
from test import support
import contextlib
import mmap
import platform
import re
import uuid
import asyncore
import asynchat
import socket
import itertools
import stat
import locale
import codecs
import decimal
import fractions
import pickle
import sysconfig
try:
import threading
except ImportError:
threading = None
try:
import resource
except ImportError:
resource = None
try:
import fcntl
except ImportError:
fcntl = None
from test.script_helper import assert_python_ok
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
os.stat_float_times(True)
st = os.stat(__file__)
stat_supports_subsecond = (
# check if float and int timestamps are different
(st.st_atime != st[7])
or (st.st_mtime != st[8])
or (st.st_ctime != st[9]))
# Detect whether we're on a Linux system that uses the (now outdated
# and unmaintained) linuxthreads threading library. There's an issue
# when combining linuxthreads with a failed execv call: see
# http://bugs.python.org/issue4970.
if hasattr(sys, 'thread_info') and sys.thread_info.version:
USING_LINUXTHREADS = sys.thread_info.version.startswith("linuxthreads")
else:
USING_LINUXTHREADS = False
# Issue #14110: Some tests fail on FreeBSD if the user is in the wheel group.
HAVE_WHEEL_GROUP = sys.platform.startswith('freebsd') and os.getgid() == 0
# Tests creating TESTFN
class FileTests(unittest.TestCase):
def setUp(self):
if os.path.exists(support.TESTFN):
os.unlink(support.TESTFN)
tearDown = setUp
def test_access(self):
f = os.open(support.TESTFN, os.O_CREAT|os.O_RDWR)
os.close(f)
self.assertTrue(os.access(support.TESTFN, os.W_OK))
def test_closerange(self):
first = os.open(support.TESTFN, os.O_CREAT|os.O_RDWR)
# We must allocate two consecutive file descriptors, otherwise
# it will mess up other file descriptors (perhaps even the three
# standard ones).
second = os.dup(first)
try:
retries = 0
while second != first + 1:
os.close(first)
retries += 1
if retries > 10:
# XXX test skipped
self.skipTest("couldn't allocate two consecutive fds")
first, second = second, os.dup(second)
finally:
os.close(second)
# close a fd that is open, and one that isn't
os.closerange(first, first + 2)
self.assertRaises(OSError, os.write, first, b"a")
@support.cpython_only
def test_rename(self):
path = support.TESTFN
old = sys.getrefcount(path)
self.assertRaises(TypeError, os.rename, path, 0)
new = sys.getrefcount(path)
self.assertEqual(old, new)
def test_read(self):
with open(support.TESTFN, "w+b") as fobj:
fobj.write(b"spam")
fobj.flush()
fd = fobj.fileno()
os.lseek(fd, 0, 0)
s = os.read(fd, 4)
self.assertEqual(type(s), bytes)
self.assertEqual(s, b"spam")
def test_write(self):
# os.write() accepts bytes- and buffer-like objects but not strings
fd = os.open(support.TESTFN, os.O_CREAT | os.O_WRONLY)
self.assertRaises(TypeError, os.write, fd, "beans")
os.write(fd, b"bacon\n")
os.write(fd, bytearray(b"eggs\n"))
os.write(fd, memoryview(b"spam\n"))
os.close(fd)
with open(support.TESTFN, "rb") as fobj:
self.assertEqual(fobj.read().splitlines(),
[b"bacon", b"eggs", b"spam"])
def write_windows_console(self, *args):
retcode = subprocess.call(args,
# use a new console to not flood the test output
creationflags=subprocess.CREATE_NEW_CONSOLE,
# use a shell to hide the console window (SW_HIDE)
shell=True)
self.assertEqual(retcode, 0)
@unittest.skipUnless(sys.platform == 'win32',
'test specific to the Windows console')
def test_write_windows_console(self):
# Issue #11395: the Windows console returns an error (12: not enough
# space error) on writing into stdout if stdout mode is binary and the
# length is greater than 66,000 bytes (or less, depending on heap
# usage).
code = "print('x' * 100000)"
self.write_windows_console(sys.executable, "-c", code)
self.write_windows_console(sys.executable, "-u", "-c", code)
def fdopen_helper(self, *args):
fd = os.open(support.TESTFN, os.O_RDONLY)
f = os.fdopen(fd, *args)
f.close()
def test_fdopen(self):
fd = os.open(support.TESTFN, os.O_CREAT|os.O_RDWR)
os.close(fd)
self.fdopen_helper()
self.fdopen_helper('r')
self.fdopen_helper('r', 100)
def test_replace(self):
TESTFN2 = support.TESTFN + ".2"
with open(support.TESTFN, 'w') as f:
f.write("1")
with open(TESTFN2, 'w') as f:
f.write("2")
self.addCleanup(os.unlink, TESTFN2)
os.replace(support.TESTFN, TESTFN2)
self.assertRaises(FileNotFoundError, os.stat, support.TESTFN)
with open(TESTFN2, 'r') as f:
self.assertEqual(f.read(), "1")
# Test attributes on return values from os.*stat* family.
class StatAttributeTests(unittest.TestCase):
def setUp(self):
os.mkdir(support.TESTFN)
self.fname = os.path.join(support.TESTFN, "f1")
f = open(self.fname, 'wb')
f.write(b"ABC")
f.close()
def tearDown(self):
os.unlink(self.fname)
os.rmdir(support.TESTFN)
@unittest.skipUnless(hasattr(os, 'stat'), 'test needs os.stat()')
def check_stat_attributes(self, fname):
result = os.stat(fname)
# Make sure direct access works
self.assertEqual(result[stat.ST_SIZE], 3)
self.assertEqual(result.st_size, 3)
# Make sure all the attributes are there
members = dir(result)
for name in dir(stat):
if name[:3] == 'ST_':
attr = name.lower()
if name.endswith("TIME"):
def trunc(x): return int(x)
else:
def trunc(x): return x
self.assertEqual(trunc(getattr(result, attr)),
result[getattr(stat, name)])
self.assertIn(attr, members)
# Make sure that the st_?time and st_?time_ns fields roughly agree
# (they should always agree up to around tens-of-microseconds)
for name in 'st_atime st_mtime st_ctime'.split():
floaty = int(getattr(result, name) * 100000)
nanosecondy = getattr(result, name + "_ns") // 10000
self.assertAlmostEqual(floaty, nanosecondy, delta=2)
try:
result[200]
self.fail("No exception raised")
except IndexError:
pass
# Make sure that assignment fails
try:
result.st_mode = 1
self.fail("No exception raised")
except AttributeError:
pass
try:
result.st_rdev = 1
self.fail("No exception raised")
except (AttributeError, TypeError):
pass
try:
result.parrot = 1
self.fail("No exception raised")
except AttributeError:
pass
# Use the stat_result constructor with a too-short tuple.
try:
result2 = os.stat_result((10,))
self.fail("No exception raised")
except TypeError:
pass
# Use the constructor with a too-long tuple.
try:
result2 = os.stat_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14))
except TypeError:
pass
def test_stat_attributes(self):
self.check_stat_attributes(self.fname)
def test_stat_attributes_bytes(self):
try:
fname = self.fname.encode(sys.getfilesystemencoding())
except UnicodeEncodeError:
self.skipTest("cannot encode %a for the filesystem" % self.fname)
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
self.check_stat_attributes(fname)
def test_stat_result_pickle(self):
result = os.stat(self.fname)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
p = pickle.dumps(result, proto)
self.assertIn(b'stat_result', p)
if proto < 4:
self.assertIn(b'cos\nstat_result\n', p)
unpickled = pickle.loads(p)
self.assertEqual(result, unpickled)
@unittest.skipUnless(hasattr(os, 'statvfs'), 'test needs os.statvfs()')
def test_statvfs_attributes(self):
try:
result = os.statvfs(self.fname)
except OSError as e:
# On AtheOS, glibc always returns ENOSYS
if e.errno == errno.ENOSYS:
self.skipTest('os.statvfs() failed with ENOSYS')
# Make sure direct access works
self.assertEqual(result.f_bfree, result[3])
# Make sure all the attributes are there.
members = ('bsize', 'frsize', 'blocks', 'bfree', 'bavail', 'files',
'ffree', 'favail', 'flag', 'namemax')
for value, member in enumerate(members):
self.assertEqual(getattr(result, 'f_' + member), result[value])
# Make sure that assignment really fails
try:
result.f_bfree = 1
self.fail("No exception raised")
except AttributeError:
pass
try:
result.parrot = 1
self.fail("No exception raised")
except AttributeError:
pass
# Use the constructor with a too-short tuple.
try:
result2 = os.statvfs_result((10,))
self.fail("No exception raised")
except TypeError:
pass
# Use the constructor with a too-long tuple.
try:
result2 = os.statvfs_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14))
except TypeError:
pass
@unittest.skipUnless(hasattr(os, 'statvfs'),
"need os.statvfs()")
def test_statvfs_result_pickle(self):
try:
result = os.statvfs(self.fname)
except OSError as e:
# On AtheOS, glibc always returns ENOSYS
if e.errno == errno.ENOSYS:
self.skipTest('os.statvfs() failed with ENOSYS')
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
p = pickle.dumps(result, proto)
self.assertIn(b'statvfs_result', p)
if proto < 4:
self.assertIn(b'cos\nstatvfs_result\n', p)
unpickled = pickle.loads(p)
self.assertEqual(result, unpickled)
def test_utime_dir(self):
delta = 1000000
st = os.stat(support.TESTFN)
# round to int, because some systems may support sub-second
# time stamps in stat, but not in utime.
os.utime(support.TESTFN, (st.st_atime, int(st.st_mtime-delta)))
st2 = os.stat(support.TESTFN)
self.assertEqual(st2.st_mtime, int(st.st_mtime-delta))
def _test_utime(self, filename, attr, utime, delta):
# Issue #13327 removed the requirement to pass None as the
# second argument. Check that the previous methods of passing
# a time tuple or None work in addition to no argument.
st0 = os.stat(filename)
# Doesn't set anything new, but sets the time tuple way
utime(filename, (attr(st0, "st_atime"), attr(st0, "st_mtime")))
# Setting the time to the time you just read, then reading again,
# should always return exactly the same times.
st1 = os.stat(filename)
self.assertEqual(attr(st0, "st_mtime"), attr(st1, "st_mtime"))
self.assertEqual(attr(st0, "st_atime"), attr(st1, "st_atime"))
# Set to the current time in the old explicit way.
os.utime(filename, None)
st2 = os.stat(support.TESTFN)
# Set to the current time in the new way
os.utime(filename)
st3 = os.stat(filename)
self.assertAlmostEqual(attr(st2, "st_mtime"), attr(st3, "st_mtime"), delta=delta)
def test_utime(self):
def utime(file, times):
return os.utime(file, times)
self._test_utime(self.fname, getattr, utime, 10)
self._test_utime(support.TESTFN, getattr, utime, 10)
def _test_utime_ns(self, set_times_ns, test_dir=True):
def getattr_ns(o, attr):
return getattr(o, attr + "_ns")
ten_s = 10 * 1000 * 1000 * 1000
self._test_utime(self.fname, getattr_ns, set_times_ns, ten_s)
if test_dir:
self._test_utime(support.TESTFN, getattr_ns, set_times_ns, ten_s)
def test_utime_ns(self):
def utime_ns(file, times):
return os.utime(file, ns=times)
self._test_utime_ns(utime_ns)
requires_utime_dir_fd = unittest.skipUnless(
os.utime in os.supports_dir_fd,
"dir_fd support for utime required for this test.")
requires_utime_fd = unittest.skipUnless(
os.utime in os.supports_fd,
"fd support for utime required for this test.")
requires_utime_nofollow_symlinks = unittest.skipUnless(
os.utime in os.supports_follow_symlinks,
"follow_symlinks support for utime required for this test.")
@requires_utime_nofollow_symlinks
def test_lutimes_ns(self):
def lutimes_ns(file, times):
return os.utime(file, ns=times, follow_symlinks=False)
self._test_utime_ns(lutimes_ns)
@requires_utime_fd
def test_futimes_ns(self):
def futimes_ns(file, times):
with open(file, "wb") as f:
os.utime(f.fileno(), ns=times)
self._test_utime_ns(futimes_ns, test_dir=False)
def _utime_invalid_arguments(self, name, arg):
with self.assertRaises(ValueError):
getattr(os, name)(arg, (5, 5), ns=(5, 5))
def test_utime_invalid_arguments(self):
self._utime_invalid_arguments('utime', self.fname)
@unittest.skipUnless(stat_supports_subsecond,
"os.stat() doesn't has a subsecond resolution")
def _test_utime_subsecond(self, set_time_func):
asec, amsec = 1, 901
atime = asec + amsec * 1e-3
msec, mmsec = 2, 901
mtime = msec + mmsec * 1e-3
filename = self.fname
os.utime(filename, (0, 0))
set_time_func(filename, atime, mtime)
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
os.stat_float_times(True)
st = os.stat(filename)
self.assertAlmostEqual(st.st_atime, atime, places=3)
self.assertAlmostEqual(st.st_mtime, mtime, places=3)
def test_utime_subsecond(self):
def set_time(filename, atime, mtime):
os.utime(filename, (atime, mtime))
self._test_utime_subsecond(set_time)
@requires_utime_fd
def test_futimes_subsecond(self):
def set_time(filename, atime, mtime):
with open(filename, "wb") as f:
os.utime(f.fileno(), times=(atime, mtime))
self._test_utime_subsecond(set_time)
@requires_utime_fd
def test_futimens_subsecond(self):
def set_time(filename, atime, mtime):
with open(filename, "wb") as f:
os.utime(f.fileno(), times=(atime, mtime))
self._test_utime_subsecond(set_time)
@requires_utime_dir_fd
def test_futimesat_subsecond(self):
def set_time(filename, atime, mtime):
dirname = os.path.dirname(filename)
dirfd = os.open(dirname, os.O_RDONLY)
try:
os.utime(os.path.basename(filename), dir_fd=dirfd,
times=(atime, mtime))
finally:
os.close(dirfd)
self._test_utime_subsecond(set_time)
@requires_utime_nofollow_symlinks
def test_lutimes_subsecond(self):
def set_time(filename, atime, mtime):
os.utime(filename, (atime, mtime), follow_symlinks=False)
self._test_utime_subsecond(set_time)
@requires_utime_dir_fd
def test_utimensat_subsecond(self):
def set_time(filename, atime, mtime):
dirname = os.path.dirname(filename)
dirfd = os.open(dirname, os.O_RDONLY)
try:
os.utime(os.path.basename(filename), dir_fd=dirfd,
times=(atime, mtime))
finally:
os.close(dirfd)
self._test_utime_subsecond(set_time)
# Restrict tests to Win32, since there is no guarantee other
# systems support centiseconds
def get_file_system(path):
if sys.platform == 'win32':
root = os.path.splitdrive(os.path.abspath(path))[0] + '\\'
import ctypes
kernel32 = ctypes.windll.kernel32
buf = ctypes.create_unicode_buffer("", 100)
if kernel32.GetVolumeInformationW(root, None, 0, None, None, None, buf, len(buf)):
return buf.value
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
@unittest.skipUnless(get_file_system(support.TESTFN) == "NTFS",
"requires NTFS")
def test_1565150(self):
t1 = 1159195039.25
os.utime(self.fname, (t1, t1))
self.assertEqual(os.stat(self.fname).st_mtime, t1)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
@unittest.skipUnless(get_file_system(support.TESTFN) == "NTFS",
"requires NTFS")
def test_large_time(self):
t1 = 5000000000 # some day in 2128
os.utime(self.fname, (t1, t1))
self.assertEqual(os.stat(self.fname).st_mtime, t1)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
def test_1686475(self):
# Verify that an open file can be stat'ed
try:
os.stat(r"c:\pagefile.sys")
except FileNotFoundError:
self.skipTest(r'c:\pagefile.sys does not exist')
except OSError as e:
self.fail("Could not stat pagefile.sys")
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
@unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()")
def test_15261(self):
# Verify that stat'ing a closed fd does not cause crash
r, w = os.pipe()
try:
os.stat(r) # should not raise error
finally:
os.close(r)
os.close(w)
with self.assertRaises(OSError) as ctx:
os.stat(r)
self.assertEqual(ctx.exception.errno, errno.EBADF)
from test import mapping_tests
class EnvironTests(mapping_tests.BasicTestMappingProtocol):
"""check that os.environ object conform to mapping protocol"""
type2test = None
def setUp(self):
self.__save = dict(os.environ)
if os.supports_bytes_environ:
self.__saveb = dict(os.environb)
for key, value in self._reference().items():
os.environ[key] = value
def tearDown(self):
os.environ.clear()
os.environ.update(self.__save)
if os.supports_bytes_environ:
os.environb.clear()
os.environb.update(self.__saveb)
def _reference(self):
return {"KEY1":"VALUE1", "KEY2":"VALUE2", "KEY3":"VALUE3"}
def _empty_mapping(self):
os.environ.clear()
return os.environ
# Bug 1110478
@unittest.skipUnless(os.path.exists('/bin/sh'), 'requires /bin/sh')
def test_update2(self):
os.environ.clear()
os.environ.update(HELLO="World")
with os.popen("/bin/sh -c 'echo $HELLO'") as popen:
value = popen.read().strip()
self.assertEqual(value, "World")
@unittest.skipUnless(os.path.exists('/bin/sh'), 'requires /bin/sh')
def test_os_popen_iter(self):
with os.popen(
"/bin/sh -c 'echo \"line1\nline2\nline3\"'") as popen:
it = iter(popen)
self.assertEqual(next(it), "line1\n")
self.assertEqual(next(it), "line2\n")
self.assertEqual(next(it), "line3\n")
self.assertRaises(StopIteration, next, it)
# Verify environ keys and values from the OS are of the
# correct str type.
def test_keyvalue_types(self):
for key, val in os.environ.items():
self.assertEqual(type(key), str)
self.assertEqual(type(val), str)
def test_items(self):
for key, value in self._reference().items():
self.assertEqual(os.environ.get(key), value)
# Issue 7310
def test___repr__(self):
"""Check that the repr() of os.environ looks like environ({...})."""
env = os.environ
self.assertEqual(repr(env), 'environ({{{}}})'.format(', '.join(
'{!r}: {!r}'.format(key, value)
for key, value in env.items())))
def test_get_exec_path(self):
defpath_list = os.defpath.split(os.pathsep)
test_path = ['/monty', '/python', '', '/flying/circus']
test_env = {'PATH': os.pathsep.join(test_path)}
saved_environ = os.environ
try:
os.environ = dict(test_env)
# Test that defaulting to os.environ works.
self.assertSequenceEqual(test_path, os.get_exec_path())
self.assertSequenceEqual(test_path, os.get_exec_path(env=None))
finally:
os.environ = saved_environ
# No PATH environment variable
self.assertSequenceEqual(defpath_list, os.get_exec_path({}))
# Empty PATH environment variable
self.assertSequenceEqual(('',), os.get_exec_path({'PATH':''}))
# Supplied PATH environment variable
self.assertSequenceEqual(test_path, os.get_exec_path(test_env))
if os.supports_bytes_environ:
# env cannot contain 'PATH' and b'PATH' keys
try:
# ignore BytesWarning warning
with warnings.catch_warnings(record=True):
mixed_env = {'PATH': '1', b'PATH': b'2'}
except BytesWarning:
# mixed_env cannot be created with python -bb
pass
else:
self.assertRaises(ValueError, os.get_exec_path, mixed_env)
# bytes key and/or value
self.assertSequenceEqual(os.get_exec_path({b'PATH': b'abc'}),
['abc'])
self.assertSequenceEqual(os.get_exec_path({b'PATH': 'abc'}),
['abc'])
self.assertSequenceEqual(os.get_exec_path({'PATH': b'abc'}),
['abc'])
@unittest.skipUnless(os.supports_bytes_environ,
"os.environb required for this test.")
def test_environb(self):
# os.environ -> os.environb
value = 'euro\u20ac'
try:
value_bytes = value.encode(sys.getfilesystemencoding(),
'surrogateescape')
except UnicodeEncodeError:
msg = "U+20AC character is not encodable to %s" % (
sys.getfilesystemencoding(),)
self.skipTest(msg)
os.environ['unicode'] = value
self.assertEqual(os.environ['unicode'], value)
self.assertEqual(os.environb[b'unicode'], value_bytes)
# os.environb -> os.environ
value = b'\xff'
os.environb[b'bytes'] = value
self.assertEqual(os.environb[b'bytes'], value)
value_str = value.decode(sys.getfilesystemencoding(), 'surrogateescape')
self.assertEqual(os.environ['bytes'], value_str)
# On FreeBSD < 7 and OS X < 10.6, unsetenv() doesn't return a value (issue
# #13415).
@support.requires_freebsd_version(7)
@support.requires_mac_ver(10, 6)
def test_unset_error(self):
if sys.platform == "win32":
# an environment variable is limited to 32,767 characters
key = 'x' * 50000
self.assertRaises(ValueError, os.environ.__delitem__, key)
else:
# "=" is not allowed in a variable name
key = 'key='
self.assertRaises(OSError, os.environ.__delitem__, key)
def test_key_type(self):
missing = 'missingkey'
self.assertNotIn(missing, os.environ)
with self.assertRaises(KeyError) as cm:
os.environ[missing]
self.assertIs(cm.exception.args[0], missing)
self.assertTrue(cm.exception.__suppress_context__)
with self.assertRaises(KeyError) as cm:
del os.environ[missing]
self.assertIs(cm.exception.args[0], missing)
self.assertTrue(cm.exception.__suppress_context__)
class WalkTests(unittest.TestCase):
"""Tests for os.walk()."""
def setUp(self):
import os
from os.path import join
# Build:
# TESTFN/
# TEST1/ a file kid and two directory kids
# tmp1
# SUB1/ a file kid and a directory kid
# tmp2
# SUB11/ no kids
# SUB2/ a file kid and a dirsymlink kid
# tmp3
# link/ a symlink to TESTFN.2
# broken_link
# TEST2/
# tmp4 a lone file
walk_path = join(support.TESTFN, "TEST1")
sub1_path = join(walk_path, "SUB1")
sub11_path = join(sub1_path, "SUB11")
sub2_path = join(walk_path, "SUB2")
tmp1_path = join(walk_path, "tmp1")
tmp2_path = join(sub1_path, "tmp2")
tmp3_path = join(sub2_path, "tmp3")
link_path = join(sub2_path, "link")
t2_path = join(support.TESTFN, "TEST2")
tmp4_path = join(support.TESTFN, "TEST2", "tmp4")
link_path = join(sub2_path, "link")
broken_link_path = join(sub2_path, "broken_link")
# Create stuff.
os.makedirs(sub11_path)
os.makedirs(sub2_path)
os.makedirs(t2_path)
for path in tmp1_path, tmp2_path, tmp3_path, tmp4_path:
f = open(path, "w")
f.write("I'm " + path + " and proud of it. Blame test_os.\n")
f.close()
if support.can_symlink():
os.symlink(os.path.abspath(t2_path), link_path)
os.symlink('broken', broken_link_path, True)
sub2_tree = (sub2_path, ["link"], ["broken_link", "tmp3"])
else:
sub2_tree = (sub2_path, [], ["tmp3"])
# Walk top-down.
all = list(os.walk(walk_path))
self.assertEqual(len(all), 4)
# We can't know which order SUB1 and SUB2 will appear in.
# Not flipped: TESTFN, SUB1, SUB11, SUB2
# flipped: TESTFN, SUB2, SUB1, SUB11
flipped = all[0][1][0] != "SUB1"
all[0][1].sort()
all[3 - 2 * flipped][-1].sort()
self.assertEqual(all[0], (walk_path, ["SUB1", "SUB2"], ["tmp1"]))
self.assertEqual(all[1 + flipped], (sub1_path, ["SUB11"], ["tmp2"]))
self.assertEqual(all[2 + flipped], (sub11_path, [], []))
self.assertEqual(all[3 - 2 * flipped], sub2_tree)
# Prune the search.
all = []
for root, dirs, files in os.walk(walk_path):
all.append((root, dirs, files))
# Don't descend into SUB1.
if 'SUB1' in dirs:
# Note that this also mutates the dirs we appended to all!
dirs.remove('SUB1')
self.assertEqual(len(all), 2)
self.assertEqual(all[0], (walk_path, ["SUB2"], ["tmp1"]))
all[1][-1].sort()
self.assertEqual(all[1], sub2_tree)
# Walk bottom-up.
all = list(os.walk(walk_path, topdown=False))
self.assertEqual(len(all), 4)
# We can't know which order SUB1 and SUB2 will appear in.
# Not flipped: SUB11, SUB1, SUB2, TESTFN
# flipped: SUB2, SUB11, SUB1, TESTFN
flipped = all[3][1][0] != "SUB1"
all[3][1].sort()
all[2 - 2 * flipped][-1].sort()
self.assertEqual(all[3], (walk_path, ["SUB1", "SUB2"], ["tmp1"]))
self.assertEqual(all[flipped], (sub11_path, [], []))
self.assertEqual(all[flipped + 1], (sub1_path, ["SUB11"], ["tmp2"]))
self.assertEqual(all[2 - 2 * flipped], sub2_tree)
if support.can_symlink():
# Walk, following symlinks.
for root, dirs, files in os.walk(walk_path, followlinks=True):
if root == link_path:
self.assertEqual(dirs, [])
self.assertEqual(files, ["tmp4"])
break
else:
self.fail("Didn't follow symlink with followlinks=True")
def tearDown(self):
# Tear everything down. This is a decent use for bottom-up on
# Windows, which doesn't have a recursive delete command. The
# (not so) subtlety is that rmdir will fail unless the dir's
# kids are removed first, so bottom up is essential.
for root, dirs, files in os.walk(support.TESTFN, topdown=False):
for name in files:
os.remove(os.path.join(root, name))
for name in dirs:
dirname = os.path.join(root, name)
if not os.path.islink(dirname):
os.rmdir(dirname)
else:
os.remove(dirname)
os.rmdir(support.TESTFN)
@unittest.skipUnless(hasattr(os, 'fwalk'), "Test needs os.fwalk()")
class FwalkTests(WalkTests):
"""Tests for os.fwalk()."""
def _compare_to_walk(self, walk_kwargs, fwalk_kwargs):
"""
compare with walk() results.
"""
walk_kwargs = walk_kwargs.copy()
fwalk_kwargs = fwalk_kwargs.copy()
for topdown, follow_symlinks in itertools.product((True, False), repeat=2):
walk_kwargs.update(topdown=topdown, followlinks=follow_symlinks)
fwalk_kwargs.update(topdown=topdown, follow_symlinks=follow_symlinks)
expected = {}
for root, dirs, files in os.walk(**walk_kwargs):
expected[root] = (set(dirs), set(files))
for root, dirs, files, rootfd in os.fwalk(**fwalk_kwargs):
self.assertIn(root, expected)
self.assertEqual(expected[root], (set(dirs), set(files)))
def test_compare_to_walk(self):
kwargs = {'top': support.TESTFN}
self._compare_to_walk(kwargs, kwargs)
def test_dir_fd(self):
try:
fd = os.open(".", os.O_RDONLY)
walk_kwargs = {'top': support.TESTFN}
fwalk_kwargs = walk_kwargs.copy()
fwalk_kwargs['dir_fd'] = fd
self._compare_to_walk(walk_kwargs, fwalk_kwargs)
finally:
os.close(fd)
def test_yields_correct_dir_fd(self):
# check returned file descriptors
for topdown, follow_symlinks in itertools.product((True, False), repeat=2):
args = support.TESTFN, topdown, None
for root, dirs, files, rootfd in os.fwalk(*args, follow_symlinks=follow_symlinks):
# check that the FD is valid
os.fstat(rootfd)
# redundant check
os.stat(rootfd)
# check that listdir() returns consistent information
self.assertEqual(set(os.listdir(rootfd)), set(dirs) | set(files))
def test_fd_leak(self):
# Since we're opening a lot of FDs, we must be careful to avoid leaks:
# we both check that calling fwalk() a large number of times doesn't
# yield EMFILE, and that the minimum allocated FD hasn't changed.
minfd = os.dup(1)
os.close(minfd)
for i in range(256):
for x in os.fwalk(support.TESTFN):
pass
newfd = os.dup(1)
self.addCleanup(os.close, newfd)
self.assertEqual(newfd, minfd)
def tearDown(self):
# cleanup
for root, dirs, files, rootfd in os.fwalk(support.TESTFN, topdown=False):
for name in files:
os.unlink(name, dir_fd=rootfd)
for name in dirs:
st = os.stat(name, dir_fd=rootfd, follow_symlinks=False)
if stat.S_ISDIR(st.st_mode):
os.rmdir(name, dir_fd=rootfd)
else:
os.unlink(name, dir_fd=rootfd)
os.rmdir(support.TESTFN)
class MakedirTests(unittest.TestCase):
def setUp(self):
os.mkdir(support.TESTFN)
def test_makedir(self):
base = support.TESTFN
path = os.path.join(base, 'dir1', 'dir2', 'dir3')
os.makedirs(path) # Should work
path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4')
os.makedirs(path)
# Try paths with a '.' in them
self.assertRaises(OSError, os.makedirs, os.curdir)
path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4', 'dir5', os.curdir)
os.makedirs(path)
path = os.path.join(base, 'dir1', os.curdir, 'dir2', 'dir3', 'dir4',
'dir5', 'dir6')
os.makedirs(path)
def test_exist_ok_existing_directory(self):
path = os.path.join(support.TESTFN, 'dir1')
mode = 0o777
old_mask = os.umask(0o022)
os.makedirs(path, mode)
self.assertRaises(OSError, os.makedirs, path, mode)
self.assertRaises(OSError, os.makedirs, path, mode, exist_ok=False)
os.makedirs(path, 0o776, exist_ok=True)
os.makedirs(path, mode=mode, exist_ok=True)
os.umask(old_mask)
@unittest.skipUnless(hasattr(os, 'chown'), 'test needs os.chown')
def test_chown_uid_gid_arguments_must_be_index(self):
stat = os.stat(support.TESTFN)
uid = stat.st_uid
gid = stat.st_gid
for value in (-1.0, -1j, decimal.Decimal(-1), fractions.Fraction(-2, 2)):
self.assertRaises(TypeError, os.chown, support.TESTFN, value, gid)
self.assertRaises(TypeError, os.chown, support.TESTFN, uid, value)
self.assertIsNone(os.chown(support.TESTFN, uid, gid))
self.assertIsNone(os.chown(support.TESTFN, -1, -1))
def test_exist_ok_s_isgid_directory(self):
path = os.path.join(support.TESTFN, 'dir1')
S_ISGID = stat.S_ISGID
mode = 0o777
old_mask = os.umask(0o022)
try:
existing_testfn_mode = stat.S_IMODE(
os.lstat(support.TESTFN).st_mode)
try:
os.chmod(support.TESTFN, existing_testfn_mode | S_ISGID)
except PermissionError:
raise unittest.SkipTest('Cannot set S_ISGID for dir.')
if (os.lstat(support.TESTFN).st_mode & S_ISGID != S_ISGID):
raise unittest.SkipTest('No support for S_ISGID dir mode.')
# The os should apply S_ISGID from the parent dir for us, but
# this test need not depend on that behavior. Be explicit.
os.makedirs(path, mode | S_ISGID)
# http://bugs.python.org/issue14992
# Should not fail when the bit is already set.
os.makedirs(path, mode, exist_ok=True)
# remove the bit.
os.chmod(path, stat.S_IMODE(os.lstat(path).st_mode) & ~S_ISGID)
# May work even when the bit is not already set when demanded.
os.makedirs(path, mode | S_ISGID, exist_ok=True)
finally:
os.umask(old_mask)
def test_exist_ok_existing_regular_file(self):
base = support.TESTFN
path = os.path.join(support.TESTFN, 'dir1')
f = open(path, 'w')
f.write('abc')
f.close()
self.assertRaises(OSError, os.makedirs, path)
self.assertRaises(OSError, os.makedirs, path, exist_ok=False)
self.assertRaises(OSError, os.makedirs, path, exist_ok=True)
os.remove(path)
def tearDown(self):
path = os.path.join(support.TESTFN, 'dir1', 'dir2', 'dir3',
'dir4', 'dir5', 'dir6')
# If the tests failed, the bottom-most directory ('../dir6')
        # may not have been created, so we look for the deepest directory
# that exists.
while not os.path.exists(path) and path != support.TESTFN:
path = os.path.dirname(path)
os.removedirs(path)
class RemoveDirsTests(unittest.TestCase):
def setUp(self):
os.makedirs(support.TESTFN)
def tearDown(self):
support.rmtree(support.TESTFN)
def test_remove_all(self):
dira = os.path.join(support.TESTFN, 'dira')
os.mkdir(dira)
dirb = os.path.join(dira, 'dirb')
os.mkdir(dirb)
os.removedirs(dirb)
self.assertFalse(os.path.exists(dirb))
self.assertFalse(os.path.exists(dira))
self.assertFalse(os.path.exists(support.TESTFN))
def test_remove_partial(self):
dira = os.path.join(support.TESTFN, 'dira')
os.mkdir(dira)
dirb = os.path.join(dira, 'dirb')
os.mkdir(dirb)
with open(os.path.join(dira, 'file.txt'), 'w') as f:
f.write('text')
os.removedirs(dirb)
self.assertFalse(os.path.exists(dirb))
self.assertTrue(os.path.exists(dira))
self.assertTrue(os.path.exists(support.TESTFN))
def test_remove_nothing(self):
dira = os.path.join(support.TESTFN, 'dira')
os.mkdir(dira)
dirb = os.path.join(dira, 'dirb')
os.mkdir(dirb)
with open(os.path.join(dirb, 'file.txt'), 'w') as f:
f.write('text')
with self.assertRaises(OSError):
os.removedirs(dirb)
self.assertTrue(os.path.exists(dirb))
self.assertTrue(os.path.exists(dira))
self.assertTrue(os.path.exists(support.TESTFN))
class DevNullTests(unittest.TestCase):
def test_devnull(self):
with open(os.devnull, 'wb') as f:
f.write(b'hello')
f.close()
with open(os.devnull, 'rb') as f:
self.assertEqual(f.read(), b'')
class URandomTests(unittest.TestCase):
def test_urandom_length(self):
self.assertEqual(len(os.urandom(0)), 0)
self.assertEqual(len(os.urandom(1)), 1)
self.assertEqual(len(os.urandom(10)), 10)
self.assertEqual(len(os.urandom(100)), 100)
self.assertEqual(len(os.urandom(1000)), 1000)
def test_urandom_value(self):
data1 = os.urandom(16)
data2 = os.urandom(16)
self.assertNotEqual(data1, data2)
def get_urandom_subprocess(self, count):
code = '\n'.join((
'import os, sys',
'data = os.urandom(%s)' % count,
'sys.stdout.buffer.write(data)',
'sys.stdout.buffer.flush()'))
out = assert_python_ok('-c', code)
stdout = out[1]
self.assertEqual(len(stdout), 16)
return stdout
def test_urandom_subprocess(self):
data1 = self.get_urandom_subprocess(16)
data2 = self.get_urandom_subprocess(16)
self.assertNotEqual(data1, data2)
HAVE_GETENTROPY = (sysconfig.get_config_var('HAVE_GETENTROPY') == 1)
@unittest.skipIf(HAVE_GETENTROPY,
"getentropy() does not use a file descriptor")
class URandomFDTests(unittest.TestCase):
@unittest.skipUnless(resource, "test requires the resource module")
def test_urandom_failure(self):
        # Check urandom() failing when it is not able to open /dev/urandom.
# We spawn a new process to make the test more robust (if getrlimit()
# failed to restore the file descriptor limit after this, the whole
# test suite would crash; this actually happened on the OS X Tiger
# buildbot).
code = """if 1:
import errno
import os
import resource
soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (1, hard_limit))
try:
os.urandom(16)
except OSError as e:
assert e.errno == errno.EMFILE, e.errno
else:
raise AssertionError("OSError not raised")
"""
assert_python_ok('-c', code)
def test_urandom_fd_closed(self):
# Issue #21207: urandom() should reopen its fd to /dev/urandom if
# closed.
code = """if 1:
import os
import sys
os.urandom(4)
os.closerange(3, 256)
sys.stdout.buffer.write(os.urandom(4))
"""
rc, out, err = assert_python_ok('-Sc', code)
def test_urandom_fd_reopened(self):
# Issue #21207: urandom() should detect its fd to /dev/urandom
# changed to something else, and reopen it.
with open(support.TESTFN, 'wb') as f:
f.write(b"x" * 256)
self.addCleanup(os.unlink, support.TESTFN)
code = """if 1:
import os
import sys
os.urandom(4)
for fd in range(3, 256):
try:
os.close(fd)
except OSError:
pass
else:
# Found the urandom fd (XXX hopefully)
break
os.closerange(3, 256)
with open({TESTFN!r}, 'rb') as f:
os.dup2(f.fileno(), fd)
sys.stdout.buffer.write(os.urandom(4))
sys.stdout.buffer.write(os.urandom(4))
""".format(TESTFN=support.TESTFN)
rc, out, err = assert_python_ok('-Sc', code)
self.assertEqual(len(out), 8)
self.assertNotEqual(out[0:4], out[4:8])
rc, out2, err2 = assert_python_ok('-Sc', code)
self.assertEqual(len(out2), 8)
self.assertNotEqual(out2, out)
@contextlib.contextmanager
def _execvpe_mockup(defpath=None):
"""
Stubs out execv and execve functions when used as context manager.
Records exec calls. The mock execv and execve functions always raise an
exception as they would normally never return.
"""
# A list of tuples containing (function name, first arg, args)
# of calls to execv or execve that have been made.
calls = []
def mock_execv(name, *args):
calls.append(('execv', name, args))
raise RuntimeError("execv called")
def mock_execve(name, *args):
calls.append(('execve', name, args))
raise OSError(errno.ENOTDIR, "execve called")
try:
orig_execv = os.execv
orig_execve = os.execve
orig_defpath = os.defpath
os.execv = mock_execv
os.execve = mock_execve
if defpath is not None:
os.defpath = defpath
yield calls
finally:
os.execv = orig_execv
os.execve = orig_execve
os.defpath = orig_defpath
class ExecTests(unittest.TestCase):
@unittest.skipIf(USING_LINUXTHREADS,
"avoid triggering a linuxthreads bug: see issue #4970")
def test_execvpe_with_bad_program(self):
self.assertRaises(OSError, os.execvpe, 'no such app-',
['no such app-'], None)
def test_execvpe_with_bad_arglist(self):
self.assertRaises(ValueError, os.execvpe, 'notepad', [], None)
@unittest.skipUnless(hasattr(os, '_execvpe'),
"No internal os._execvpe function to test.")
def _test_internal_execvpe(self, test_type):
program_path = os.sep + 'absolutepath'
if test_type is bytes:
program = b'executable'
fullpath = os.path.join(os.fsencode(program_path), program)
native_fullpath = fullpath
arguments = [b'progname', 'arg1', 'arg2']
else:
program = 'executable'
arguments = ['progname', 'arg1', 'arg2']
fullpath = os.path.join(program_path, program)
if os.name != "nt":
native_fullpath = os.fsencode(fullpath)
else:
native_fullpath = fullpath
env = {'spam': 'beans'}
# test os._execvpe() with an absolute path
with _execvpe_mockup() as calls:
self.assertRaises(RuntimeError,
os._execvpe, fullpath, arguments)
self.assertEqual(len(calls), 1)
self.assertEqual(calls[0], ('execv', fullpath, (arguments,)))
# test os._execvpe() with a relative path:
# os.get_exec_path() returns defpath
with _execvpe_mockup(defpath=program_path) as calls:
self.assertRaises(OSError,
os._execvpe, program, arguments, env=env)
self.assertEqual(len(calls), 1)
self.assertSequenceEqual(calls[0],
('execve', native_fullpath, (arguments, env)))
# test os._execvpe() with a relative path:
# os.get_exec_path() reads the 'PATH' variable
with _execvpe_mockup() as calls:
env_path = env.copy()
if test_type is bytes:
env_path[b'PATH'] = program_path
else:
env_path['PATH'] = program_path
self.assertRaises(OSError,
os._execvpe, program, arguments, env=env_path)
self.assertEqual(len(calls), 1)
self.assertSequenceEqual(calls[0],
('execve', native_fullpath, (arguments, env_path)))
def test_internal_execvpe_str(self):
self._test_internal_execvpe(str)
if os.name != "nt":
self._test_internal_execvpe(bytes)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32ErrorTests(unittest.TestCase):
def test_rename(self):
self.assertRaises(OSError, os.rename, support.TESTFN, support.TESTFN+".bak")
def test_remove(self):
self.assertRaises(OSError, os.remove, support.TESTFN)
def test_chdir(self):
self.assertRaises(OSError, os.chdir, support.TESTFN)
def test_mkdir(self):
f = open(support.TESTFN, "w")
try:
self.assertRaises(OSError, os.mkdir, support.TESTFN)
finally:
f.close()
os.unlink(support.TESTFN)
def test_utime(self):
self.assertRaises(OSError, os.utime, support.TESTFN, None)
def test_chmod(self):
self.assertRaises(OSError, os.chmod, support.TESTFN, 0)
class TestInvalidFD(unittest.TestCase):
singles = ["fchdir", "dup", "fdopen", "fdatasync", "fstat",
"fstatvfs", "fsync", "tcgetpgrp", "ttyname"]
#singles.append("close")
    #We omit close because it doesn't raise an exception on some platforms
def get_single(f):
def helper(self):
if hasattr(os, f):
self.check(getattr(os, f))
return helper
for f in singles:
locals()["test_"+f] = get_single(f)
def check(self, f, *args):
try:
f(support.make_bad_fd(), *args)
except OSError as e:
self.assertEqual(e.errno, errno.EBADF)
else:
self.fail("%r didn't raise a OSError with a bad file descriptor"
% f)
@unittest.skipUnless(hasattr(os, 'isatty'), 'test needs os.isatty()')
def test_isatty(self):
self.assertEqual(os.isatty(support.make_bad_fd()), False)
@unittest.skipUnless(hasattr(os, 'closerange'), 'test needs os.closerange()')
def test_closerange(self):
fd = support.make_bad_fd()
# Make sure none of the descriptors we are about to close are
# currently valid (issue 6542).
for i in range(10):
try: os.fstat(fd+i)
except OSError:
pass
else:
break
if i < 2:
raise unittest.SkipTest(
"Unable to acquire a range of invalid file descriptors")
self.assertEqual(os.closerange(fd, fd + i-1), None)
@unittest.skipUnless(hasattr(os, 'dup2'), 'test needs os.dup2()')
def test_dup2(self):
self.check(os.dup2, 20)
@unittest.skipUnless(hasattr(os, 'fchmod'), 'test needs os.fchmod()')
def test_fchmod(self):
self.check(os.fchmod, 0)
@unittest.skipUnless(hasattr(os, 'fchown'), 'test needs os.fchown()')
def test_fchown(self):
self.check(os.fchown, -1, -1)
@unittest.skipUnless(hasattr(os, 'fpathconf'), 'test needs os.fpathconf()')
def test_fpathconf(self):
self.check(os.pathconf, "PC_NAME_MAX")
self.check(os.fpathconf, "PC_NAME_MAX")
@unittest.skipUnless(hasattr(os, 'ftruncate'), 'test needs os.ftruncate()')
def test_ftruncate(self):
self.check(os.truncate, 0)
self.check(os.ftruncate, 0)
@unittest.skipUnless(hasattr(os, 'lseek'), 'test needs os.lseek()')
def test_lseek(self):
self.check(os.lseek, 0, 0)
@unittest.skipUnless(hasattr(os, 'read'), 'test needs os.read()')
def test_read(self):
self.check(os.read, 1)
@unittest.skipUnless(hasattr(os, 'readv'), 'test needs os.readv()')
def test_readv(self):
buf = bytearray(10)
self.check(os.readv, [buf])
@unittest.skipUnless(hasattr(os, 'tcsetpgrp'), 'test needs os.tcsetpgrp()')
    def test_tcsetpgrp(self):
self.check(os.tcsetpgrp, 0)
@unittest.skipUnless(hasattr(os, 'write'), 'test needs os.write()')
def test_write(self):
self.check(os.write, b" ")
@unittest.skipUnless(hasattr(os, 'writev'), 'test needs os.writev()')
def test_writev(self):
self.check(os.writev, [b'abc'])
class LinkTests(unittest.TestCase):
def setUp(self):
self.file1 = support.TESTFN
self.file2 = os.path.join(support.TESTFN + "2")
def tearDown(self):
for file in (self.file1, self.file2):
if os.path.exists(file):
os.unlink(file)
def _test_link(self, file1, file2):
with open(file1, "w") as f1:
f1.write("test")
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
os.link(file1, file2)
with open(file1, "r") as f1, open(file2, "r") as f2:
self.assertTrue(os.path.sameopenfile(f1.fileno(), f2.fileno()))
def test_link(self):
self._test_link(self.file1, self.file2)
def test_link_bytes(self):
self._test_link(bytes(self.file1, sys.getfilesystemencoding()),
bytes(self.file2, sys.getfilesystemencoding()))
def test_unicode_name(self):
try:
os.fsencode("\xf1")
except UnicodeError:
raise unittest.SkipTest("Unable to encode for this platform.")
self.file1 += "\xf1"
self.file2 = self.file1 + "2"
self._test_link(self.file1, self.file2)
@unittest.skipIf(sys.platform == "win32", "Posix specific tests")
class PosixUidGidTests(unittest.TestCase):
@unittest.skipUnless(hasattr(os, 'setuid'), 'test needs os.setuid()')
def test_setuid(self):
if os.getuid() != 0:
self.assertRaises(OSError, os.setuid, 0)
self.assertRaises(OverflowError, os.setuid, 1<<32)
@unittest.skipUnless(hasattr(os, 'setgid'), 'test needs os.setgid()')
def test_setgid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
self.assertRaises(OSError, os.setgid, 0)
self.assertRaises(OverflowError, os.setgid, 1<<32)
@unittest.skipUnless(hasattr(os, 'seteuid'), 'test needs os.seteuid()')
def test_seteuid(self):
if os.getuid() != 0:
self.assertRaises(OSError, os.seteuid, 0)
self.assertRaises(OverflowError, os.seteuid, 1<<32)
@unittest.skipUnless(hasattr(os, 'setegid'), 'test needs os.setegid()')
def test_setegid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
self.assertRaises(OSError, os.setegid, 0)
self.assertRaises(OverflowError, os.setegid, 1<<32)
@unittest.skipUnless(hasattr(os, 'setreuid'), 'test needs os.setreuid()')
def test_setreuid(self):
if os.getuid() != 0:
self.assertRaises(OSError, os.setreuid, 0, 0)
self.assertRaises(OverflowError, os.setreuid, 1<<32, 0)
self.assertRaises(OverflowError, os.setreuid, 0, 1<<32)
@unittest.skipUnless(hasattr(os, 'setreuid'), 'test needs os.setreuid()')
def test_setreuid_neg1(self):
# Needs to accept -1. We run this in a subprocess to avoid
# altering the test runner's process state (issue8045).
subprocess.check_call([
sys.executable, '-c',
'import os,sys;os.setreuid(-1,-1);sys.exit(0)'])
@unittest.skipUnless(hasattr(os, 'setregid'), 'test needs os.setregid()')
def test_setregid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
self.assertRaises(OSError, os.setregid, 0, 0)
self.assertRaises(OverflowError, os.setregid, 1<<32, 0)
self.assertRaises(OverflowError, os.setregid, 0, 1<<32)
@unittest.skipUnless(hasattr(os, 'setregid'), 'test needs os.setregid()')
def test_setregid_neg1(self):
# Needs to accept -1. We run this in a subprocess to avoid
# altering the test runner's process state (issue8045).
subprocess.check_call([
sys.executable, '-c',
'import os,sys;os.setregid(-1,-1);sys.exit(0)'])
@unittest.skipIf(sys.platform == "win32", "Posix specific tests")
class Pep383Tests(unittest.TestCase):
def setUp(self):
if support.TESTFN_UNENCODABLE:
self.dir = support.TESTFN_UNENCODABLE
elif support.TESTFN_NONASCII:
self.dir = support.TESTFN_NONASCII
else:
self.dir = support.TESTFN
self.bdir = os.fsencode(self.dir)
bytesfn = []
def add_filename(fn):
try:
fn = os.fsencode(fn)
except UnicodeEncodeError:
return
bytesfn.append(fn)
add_filename(support.TESTFN_UNICODE)
if support.TESTFN_UNENCODABLE:
add_filename(support.TESTFN_UNENCODABLE)
if support.TESTFN_NONASCII:
add_filename(support.TESTFN_NONASCII)
if not bytesfn:
self.skipTest("couldn't create any non-ascii filename")
self.unicodefn = set()
os.mkdir(self.dir)
try:
for fn in bytesfn:
support.create_empty_file(os.path.join(self.bdir, fn))
fn = os.fsdecode(fn)
if fn in self.unicodefn:
raise ValueError("duplicate filename")
self.unicodefn.add(fn)
except:
shutil.rmtree(self.dir)
raise
def tearDown(self):
shutil.rmtree(self.dir)
def test_listdir(self):
expected = self.unicodefn
found = set(os.listdir(self.dir))
self.assertEqual(found, expected)
# test listdir without arguments
current_directory = os.getcwd()
try:
os.chdir(os.sep)
self.assertEqual(set(os.listdir()), set(os.listdir(os.sep)))
finally:
os.chdir(current_directory)
def test_open(self):
for fn in self.unicodefn:
f = open(os.path.join(self.dir, fn), 'rb')
f.close()
@unittest.skipUnless(hasattr(os, 'statvfs'),
"need os.statvfs()")
def test_statvfs(self):
# issue #9645
for fn in self.unicodefn:
# should not fail with file not found error
fullname = os.path.join(self.dir, fn)
os.statvfs(fullname)
def test_stat(self):
for fn in self.unicodefn:
os.stat(os.path.join(self.dir, fn))
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32KillTests(unittest.TestCase):
def _kill(self, sig):
# Start sys.executable as a subprocess and communicate from the
# subprocess to the parent that the interpreter is ready. When it
# becomes ready, send *sig* via os.kill to the subprocess and check
# that the return code is equal to *sig*.
import ctypes
from ctypes import wintypes
import msvcrt
# Since we can't access the contents of the process' stdout until the
# process has exited, use PeekNamedPipe to see what's inside stdout
# without waiting. This is done so we can tell that the interpreter
# is started and running at a point where it could handle a signal.
PeekNamedPipe = ctypes.windll.kernel32.PeekNamedPipe
PeekNamedPipe.restype = wintypes.BOOL
PeekNamedPipe.argtypes = (wintypes.HANDLE, # Pipe handle
ctypes.POINTER(ctypes.c_char), # stdout buf
wintypes.DWORD, # Buffer size
ctypes.POINTER(wintypes.DWORD), # bytes read
ctypes.POINTER(wintypes.DWORD), # bytes avail
ctypes.POINTER(wintypes.DWORD)) # bytes left
msg = "running"
proc = subprocess.Popen([sys.executable, "-c",
"import sys;"
"sys.stdout.write('{}');"
"sys.stdout.flush();"
"input()".format(msg)],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE)
self.addCleanup(proc.stdout.close)
self.addCleanup(proc.stderr.close)
self.addCleanup(proc.stdin.close)
count, max = 0, 100
while count < max and proc.poll() is None:
# Create a string buffer to store the result of stdout from the pipe
buf = ctypes.create_string_buffer(len(msg))
# Obtain the text currently in proc.stdout
# Bytes read/avail/left are left as NULL and unused
rslt = PeekNamedPipe(msvcrt.get_osfhandle(proc.stdout.fileno()),
buf, ctypes.sizeof(buf), None, None, None)
self.assertNotEqual(rslt, 0, "PeekNamedPipe failed")
if buf.value:
self.assertEqual(msg, buf.value.decode())
break
time.sleep(0.1)
count += 1
else:
self.fail("Did not receive communication from the subprocess")
os.kill(proc.pid, sig)
self.assertEqual(proc.wait(), sig)
def test_kill_sigterm(self):
# SIGTERM doesn't mean anything special, but make sure it works
self._kill(signal.SIGTERM)
def test_kill_int(self):
# os.kill on Windows can take an int which gets set as the exit code
self._kill(100)
def _kill_with_event(self, event, name):
tagname = "test_os_%s" % uuid.uuid1()
m = mmap.mmap(-1, 1, tagname)
m[0] = 0
# Run a script which has console control handling enabled.
proc = subprocess.Popen([sys.executable,
os.path.join(os.path.dirname(__file__),
"win_console_handler.py"), tagname],
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
        # Let the interpreter start up before we send signals. See #3137.
count, max = 0, 100
while count < max and proc.poll() is None:
if m[0] == 1:
break
time.sleep(0.1)
count += 1
else:
# Forcefully kill the process if we weren't able to signal it.
os.kill(proc.pid, signal.SIGINT)
self.fail("Subprocess didn't finish initialization")
os.kill(proc.pid, event)
# proc.send_signal(event) could also be done here.
# Allow time for the signal to be passed and the process to exit.
time.sleep(0.5)
if not proc.poll():
# Forcefully kill the process if we weren't able to signal it.
os.kill(proc.pid, signal.SIGINT)
self.fail("subprocess did not stop on {}".format(name))
@unittest.skip("subprocesses aren't inheriting CTRL+C property")
def test_CTRL_C_EVENT(self):
from ctypes import wintypes
import ctypes
# Make a NULL value by creating a pointer with no argument.
NULL = ctypes.POINTER(ctypes.c_int)()
SetConsoleCtrlHandler = ctypes.windll.kernel32.SetConsoleCtrlHandler
SetConsoleCtrlHandler.argtypes = (ctypes.POINTER(ctypes.c_int),
wintypes.BOOL)
SetConsoleCtrlHandler.restype = wintypes.BOOL
# Calling this with NULL and FALSE causes the calling process to
# handle CTRL+C, rather than ignore it. This property is inherited
# by subprocesses.
SetConsoleCtrlHandler(NULL, 0)
self._kill_with_event(signal.CTRL_C_EVENT, "CTRL_C_EVENT")
def test_CTRL_BREAK_EVENT(self):
self._kill_with_event(signal.CTRL_BREAK_EVENT, "CTRL_BREAK_EVENT")
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32ListdirTests(unittest.TestCase):
"""Test listdir on Windows."""
def setUp(self):
self.created_paths = []
for i in range(2):
dir_name = 'SUB%d' % i
dir_path = os.path.join(support.TESTFN, dir_name)
file_name = 'FILE%d' % i
file_path = os.path.join(support.TESTFN, file_name)
os.makedirs(dir_path)
with open(file_path, 'w') as f:
f.write("I'm %s and proud of it. Blame test_os.\n" % file_path)
self.created_paths.extend([dir_name, file_name])
self.created_paths.sort()
def tearDown(self):
shutil.rmtree(support.TESTFN)
def test_listdir_no_extended_path(self):
"""Test when the path is not an "extended" path."""
# unicode
self.assertEqual(
sorted(os.listdir(support.TESTFN)),
self.created_paths)
# bytes
self.assertEqual(
sorted(os.listdir(os.fsencode(support.TESTFN))),
[os.fsencode(path) for path in self.created_paths])
def test_listdir_extended_path(self):
"""Test when the path starts with '\\\\?\\'."""
# See: http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx#maxpath
# unicode
path = '\\\\?\\' + os.path.abspath(support.TESTFN)
self.assertEqual(
sorted(os.listdir(path)),
self.created_paths)
# bytes
path = b'\\\\?\\' + os.fsencode(os.path.abspath(support.TESTFN))
self.assertEqual(
sorted(os.listdir(path)),
[os.fsencode(path) for path in self.created_paths])
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
@support.skip_unless_symlink
class Win32SymlinkTests(unittest.TestCase):
filelink = 'filelinktest'
filelink_target = os.path.abspath(__file__)
dirlink = 'dirlinktest'
dirlink_target = os.path.dirname(filelink_target)
missing_link = 'missing link'
def setUp(self):
assert os.path.exists(self.dirlink_target)
assert os.path.exists(self.filelink_target)
assert not os.path.exists(self.dirlink)
assert not os.path.exists(self.filelink)
assert not os.path.exists(self.missing_link)
def tearDown(self):
if os.path.exists(self.filelink):
os.remove(self.filelink)
if os.path.exists(self.dirlink):
os.rmdir(self.dirlink)
if os.path.lexists(self.missing_link):
os.remove(self.missing_link)
def test_directory_link(self):
os.symlink(self.dirlink_target, self.dirlink)
self.assertTrue(os.path.exists(self.dirlink))
self.assertTrue(os.path.isdir(self.dirlink))
self.assertTrue(os.path.islink(self.dirlink))
self.check_stat(self.dirlink, self.dirlink_target)
def test_file_link(self):
os.symlink(self.filelink_target, self.filelink)
self.assertTrue(os.path.exists(self.filelink))
self.assertTrue(os.path.isfile(self.filelink))
self.assertTrue(os.path.islink(self.filelink))
self.check_stat(self.filelink, self.filelink_target)
def _create_missing_dir_link(self):
'Create a "directory" link to a non-existent target'
linkname = self.missing_link
if os.path.lexists(linkname):
os.remove(linkname)
target = r'c:\\target does not exist.29r3c740'
assert not os.path.exists(target)
target_is_dir = True
os.symlink(target, linkname, target_is_dir)
def test_remove_directory_link_to_missing_target(self):
self._create_missing_dir_link()
# For compatibility with Unix, os.remove will check the
# directory status and call RemoveDirectory if the symlink
# was created with target_is_dir==True.
os.remove(self.missing_link)
@unittest.skip("currently fails; consider for improvement")
def test_isdir_on_directory_link_to_missing_target(self):
self._create_missing_dir_link()
# consider having isdir return true for directory links
self.assertTrue(os.path.isdir(self.missing_link))
@unittest.skip("currently fails; consider for improvement")
def test_rmdir_on_directory_link_to_missing_target(self):
self._create_missing_dir_link()
# consider allowing rmdir to remove directory links
os.rmdir(self.missing_link)
def check_stat(self, link, target):
self.assertEqual(os.stat(link), os.stat(target))
self.assertNotEqual(os.lstat(link), os.stat(link))
bytes_link = os.fsencode(link)
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
self.assertEqual(os.stat(bytes_link), os.stat(target))
self.assertNotEqual(os.lstat(bytes_link), os.stat(bytes_link))
def test_12084(self):
level1 = os.path.abspath(support.TESTFN)
level2 = os.path.join(level1, "level2")
level3 = os.path.join(level2, "level3")
try:
os.mkdir(level1)
os.mkdir(level2)
os.mkdir(level3)
file1 = os.path.abspath(os.path.join(level1, "file1"))
with open(file1, "w") as f:
f.write("file1")
orig_dir = os.getcwd()
try:
os.chdir(level2)
link = os.path.join(level2, "link")
os.symlink(os.path.relpath(file1), "link")
self.assertIn("link", os.listdir(os.getcwd()))
# Check os.stat calls from the same dir as the link
self.assertEqual(os.stat(file1), os.stat("link"))
# Check os.stat calls from a dir below the link
os.chdir(level1)
self.assertEqual(os.stat(file1),
os.stat(os.path.relpath(link)))
# Check os.stat calls from a dir above the link
os.chdir(level3)
self.assertEqual(os.stat(file1),
os.stat(os.path.relpath(link)))
finally:
os.chdir(orig_dir)
except OSError as err:
self.fail(err)
finally:
os.remove(file1)
shutil.rmtree(level1)
@support.skip_unless_symlink
class NonLocalSymlinkTests(unittest.TestCase):
def setUp(self):
"""
Create this structure:
base
\___ some_dir
"""
os.makedirs('base/some_dir')
def tearDown(self):
shutil.rmtree('base')
def test_directory_link_nonlocal(self):
"""
The symlink target should resolve relative to the link, not relative
to the current directory.
Then, link base/some_link -> base/some_dir and ensure that some_link
is resolved as a directory.
In issue13772, it was discovered that directory detection failed if
the symlink target was not specified relative to the current
directory, which was a defect in the implementation.
"""
src = os.path.join('base', 'some_link')
os.symlink('some_dir', src)
assert os.path.isdir(src)
class FSEncodingTests(unittest.TestCase):
def test_nop(self):
self.assertEqual(os.fsencode(b'abc\xff'), b'abc\xff')
self.assertEqual(os.fsdecode('abc\u0141'), 'abc\u0141')
def test_identity(self):
# assert fsdecode(fsencode(x)) == x
for fn in ('unicode\u0141', 'latin\xe9', 'ascii'):
try:
bytesfn = os.fsencode(fn)
except UnicodeEncodeError:
continue
self.assertEqual(os.fsdecode(bytesfn), fn)
class DeviceEncodingTests(unittest.TestCase):
def test_bad_fd(self):
# Return None when an fd doesn't actually exist.
self.assertIsNone(os.device_encoding(123456))
@unittest.skipUnless(os.isatty(0) and (sys.platform.startswith('win') or
(hasattr(locale, 'nl_langinfo') and hasattr(locale, 'CODESET'))),
'test requires a tty and either Windows or nl_langinfo(CODESET)')
def test_device_encoding(self):
encoding = os.device_encoding(0)
self.assertIsNotNone(encoding)
self.assertTrue(codecs.lookup(encoding))
class PidTests(unittest.TestCase):
@unittest.skipUnless(hasattr(os, 'getppid'), "test needs os.getppid")
def test_getppid(self):
p = subprocess.Popen([sys.executable, '-c',
'import os; print(os.getppid())'],
stdout=subprocess.PIPE)
stdout, _ = p.communicate()
# We are the parent of our subprocess
self.assertEqual(int(stdout), os.getpid())
# The introduction of this TestCase caused at least two different errors on
# *nix buildbots. Temporarily skip this to let the buildbots move along.
@unittest.skip("Skip due to platform/environment differences on *NIX buildbots")
@unittest.skipUnless(hasattr(os, 'getlogin'), "test needs os.getlogin")
class LoginTests(unittest.TestCase):
def test_getlogin(self):
user_name = os.getlogin()
self.assertNotEqual(len(user_name), 0)
@unittest.skipUnless(hasattr(os, 'getpriority') and hasattr(os, 'setpriority'),
"needs os.getpriority and os.setpriority")
class ProgramPriorityTests(unittest.TestCase):
"""Tests for os.getpriority() and os.setpriority()."""
def test_set_get_priority(self):
base = os.getpriority(os.PRIO_PROCESS, os.getpid())
os.setpriority(os.PRIO_PROCESS, os.getpid(), base + 1)
try:
new_prio = os.getpriority(os.PRIO_PROCESS, os.getpid())
if base >= 19 and new_prio <= 19:
raise unittest.SkipTest(
"unable to reliably test setpriority at current nice level of %s" % base)
else:
self.assertEqual(new_prio, base + 1)
finally:
try:
os.setpriority(os.PRIO_PROCESS, os.getpid(), base)
except OSError as err:
if err.errno != errno.EACCES:
raise
if threading is not None:
class SendfileTestServer(asyncore.dispatcher, threading.Thread):
class Handler(asynchat.async_chat):
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
self.in_buffer = []
self.closed = False
self.push(b"220 ready\r\n")
def handle_read(self):
data = self.recv(4096)
self.in_buffer.append(data)
def get_data(self):
return b''.join(self.in_buffer)
def handle_close(self):
self.close()
self.closed = True
def handle_error(self):
raise
def __init__(self, address):
threading.Thread.__init__(self)
asyncore.dispatcher.__init__(self)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.bind(address)
self.listen(5)
self.host, self.port = self.socket.getsockname()[:2]
self.handler_instance = None
self._active = False
self._active_lock = threading.Lock()
# --- public API
@property
def running(self):
return self._active
def start(self):
assert not self.running
self.__flag = threading.Event()
threading.Thread.start(self)
self.__flag.wait()
def stop(self):
assert self.running
self._active = False
self.join()
def wait(self):
# wait for handler connection to be closed, then stop the server
while not getattr(self.handler_instance, "closed", False):
time.sleep(0.001)
self.stop()
# --- internals
def run(self):
self._active = True
self.__flag.set()
while self._active and asyncore.socket_map:
self._active_lock.acquire()
asyncore.loop(timeout=0.001, count=1)
self._active_lock.release()
asyncore.close_all()
def handle_accept(self):
conn, addr = self.accept()
self.handler_instance = self.Handler(conn)
def handle_connect(self):
self.close()
handle_read = handle_connect
def writable(self):
return 0
def handle_error(self):
raise
@unittest.skipUnless(threading is not None, "test needs threading module")
@unittest.skipUnless(hasattr(os, 'sendfile'), "test needs os.sendfile()")
class TestSendfile(unittest.TestCase):
DATA = b"12345abcde" * 16 * 1024 # 160 KB
SUPPORT_HEADERS_TRAILERS = not sys.platform.startswith("linux") and \
not sys.platform.startswith("solaris") and \
not sys.platform.startswith("sunos")
requires_headers_trailers = unittest.skipUnless(SUPPORT_HEADERS_TRAILERS,
'requires headers and trailers support')
@classmethod
def setUpClass(cls):
with open(support.TESTFN, "wb") as f:
f.write(cls.DATA)
@classmethod
def tearDownClass(cls):
support.unlink(support.TESTFN)
def setUp(self):
self.server = SendfileTestServer((support.HOST, 0))
self.server.start()
self.client = socket.socket()
self.client.connect((self.server.host, self.server.port))
self.client.settimeout(1)
# synchronize by waiting for "220 ready" response
self.client.recv(1024)
self.sockno = self.client.fileno()
self.file = open(support.TESTFN, 'rb')
self.fileno = self.file.fileno()
def tearDown(self):
self.file.close()
self.client.close()
if self.server.running:
self.server.stop()
def sendfile_wrapper(self, sock, file, offset, nbytes, headers=[], trailers=[]):
"""A higher level wrapper representing how an application is
supposed to use sendfile().
"""
while 1:
try:
if self.SUPPORT_HEADERS_TRAILERS:
return os.sendfile(sock, file, offset, nbytes, headers,
trailers)
else:
return os.sendfile(sock, file, offset, nbytes)
except OSError as err:
if err.errno == errno.ECONNRESET:
# disconnected
raise
elif err.errno in (errno.EAGAIN, errno.EBUSY):
                    # we have to retry sending the data
continue
else:
raise
def test_send_whole_file(self):
# normal send
total_sent = 0
offset = 0
nbytes = 4096
while total_sent < len(self.DATA):
sent = self.sendfile_wrapper(self.sockno, self.fileno, offset, nbytes)
if sent == 0:
break
offset += sent
total_sent += sent
self.assertTrue(sent <= nbytes)
self.assertEqual(offset, total_sent)
self.assertEqual(total_sent, len(self.DATA))
self.client.shutdown(socket.SHUT_RDWR)
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
self.assertEqual(len(data), len(self.DATA))
self.assertEqual(data, self.DATA)
def test_send_at_certain_offset(self):
# start sending a file at a certain offset
total_sent = 0
offset = len(self.DATA) // 2
must_send = len(self.DATA) - offset
nbytes = 4096
while total_sent < must_send:
sent = self.sendfile_wrapper(self.sockno, self.fileno, offset, nbytes)
if sent == 0:
break
offset += sent
total_sent += sent
self.assertTrue(sent <= nbytes)
self.client.shutdown(socket.SHUT_RDWR)
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
expected = self.DATA[len(self.DATA) // 2:]
self.assertEqual(total_sent, len(expected))
self.assertEqual(len(data), len(expected))
self.assertEqual(data, expected)
def test_offset_overflow(self):
# specify an offset > file size
offset = len(self.DATA) + 4096
try:
sent = os.sendfile(self.sockno, self.fileno, offset, 4096)
except OSError as e:
# Solaris can raise EINVAL if offset >= file length, ignore.
if e.errno != errno.EINVAL:
raise
else:
self.assertEqual(sent, 0)
self.client.shutdown(socket.SHUT_RDWR)
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
self.assertEqual(data, b'')
def test_invalid_offset(self):
with self.assertRaises(OSError) as cm:
os.sendfile(self.sockno, self.fileno, -1, 4096)
self.assertEqual(cm.exception.errno, errno.EINVAL)
# --- headers / trailers tests
@requires_headers_trailers
def test_headers(self):
total_sent = 0
sent = os.sendfile(self.sockno, self.fileno, 0, 4096,
headers=[b"x" * 512])
total_sent += sent
offset = 4096
nbytes = 4096
while 1:
sent = self.sendfile_wrapper(self.sockno, self.fileno,
offset, nbytes)
if sent == 0:
break
total_sent += sent
offset += sent
expected_data = b"x" * 512 + self.DATA
self.assertEqual(total_sent, len(expected_data))
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
self.assertEqual(hash(data), hash(expected_data))
@requires_headers_trailers
def test_trailers(self):
TESTFN2 = support.TESTFN + "2"
file_data = b"abcdef"
with open(TESTFN2, 'wb') as f:
f.write(file_data)
        with open(TESTFN2, 'rb') as f:
self.addCleanup(os.remove, TESTFN2)
os.sendfile(self.sockno, f.fileno(), 0, len(file_data),
trailers=[b"1234"])
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
self.assertEqual(data, b"abcdef1234")
@requires_headers_trailers
@unittest.skipUnless(hasattr(os, 'SF_NODISKIO'),
'test needs os.SF_NODISKIO')
def test_flags(self):
try:
os.sendfile(self.sockno, self.fileno, 0, 4096,
flags=os.SF_NODISKIO)
except OSError as err:
if err.errno not in (errno.EBUSY, errno.EAGAIN):
raise
def supports_extended_attributes():
if not hasattr(os, "setxattr"):
return False
try:
with open(support.TESTFN, "wb") as fp:
try:
os.setxattr(fp.fileno(), b"user.test", b"")
except OSError:
return False
finally:
support.unlink(support.TESTFN)
# Kernels < 2.6.39 don't respect setxattr flags.
kernel_version = platform.release()
m = re.match("2.6.(\d{1,2})", kernel_version)
return m is None or int(m.group(1)) >= 39
@unittest.skipUnless(supports_extended_attributes(),
"no non-broken extended attribute support")
class ExtendedAttributeTests(unittest.TestCase):
def tearDown(self):
support.unlink(support.TESTFN)
def _check_xattrs_str(self, s, getxattr, setxattr, removexattr, listxattr, **kwargs):
fn = support.TESTFN
open(fn, "wb").close()
with self.assertRaises(OSError) as cm:
getxattr(fn, s("user.test"), **kwargs)
self.assertEqual(cm.exception.errno, errno.ENODATA)
init_xattr = listxattr(fn)
self.assertIsInstance(init_xattr, list)
setxattr(fn, s("user.test"), b"", **kwargs)
xattr = set(init_xattr)
xattr.add("user.test")
self.assertEqual(set(listxattr(fn)), xattr)
self.assertEqual(getxattr(fn, b"user.test", **kwargs), b"")
setxattr(fn, s("user.test"), b"hello", os.XATTR_REPLACE, **kwargs)
self.assertEqual(getxattr(fn, b"user.test", **kwargs), b"hello")
with self.assertRaises(OSError) as cm:
setxattr(fn, s("user.test"), b"bye", os.XATTR_CREATE, **kwargs)
self.assertEqual(cm.exception.errno, errno.EEXIST)
with self.assertRaises(OSError) as cm:
setxattr(fn, s("user.test2"), b"bye", os.XATTR_REPLACE, **kwargs)
self.assertEqual(cm.exception.errno, errno.ENODATA)
setxattr(fn, s("user.test2"), b"foo", os.XATTR_CREATE, **kwargs)
xattr.add("user.test2")
self.assertEqual(set(listxattr(fn)), xattr)
removexattr(fn, s("user.test"), **kwargs)
with self.assertRaises(OSError) as cm:
getxattr(fn, s("user.test"), **kwargs)
self.assertEqual(cm.exception.errno, errno.ENODATA)
xattr.remove("user.test")
self.assertEqual(set(listxattr(fn)), xattr)
self.assertEqual(getxattr(fn, s("user.test2"), **kwargs), b"foo")
setxattr(fn, s("user.test"), b"a"*1024, **kwargs)
self.assertEqual(getxattr(fn, s("user.test"), **kwargs), b"a"*1024)
removexattr(fn, s("user.test"), **kwargs)
many = sorted("user.test{}".format(i) for i in range(100))
for thing in many:
setxattr(fn, thing, b"x", **kwargs)
self.assertEqual(set(listxattr(fn)), set(init_xattr) | set(many))
def _check_xattrs(self, *args, **kwargs):
def make_bytes(s):
return bytes(s, "ascii")
self._check_xattrs_str(str, *args, **kwargs)
support.unlink(support.TESTFN)
self._check_xattrs_str(make_bytes, *args, **kwargs)
def test_simple(self):
self._check_xattrs(os.getxattr, os.setxattr, os.removexattr,
os.listxattr)
def test_lpath(self):
self._check_xattrs(os.getxattr, os.setxattr, os.removexattr,
os.listxattr, follow_symlinks=False)
def test_fds(self):
def getxattr(path, *args):
with open(path, "rb") as fp:
return os.getxattr(fp.fileno(), *args)
def setxattr(path, *args):
with open(path, "wb") as fp:
os.setxattr(fp.fileno(), *args)
def removexattr(path, *args):
with open(path, "wb") as fp:
os.removexattr(fp.fileno(), *args)
def listxattr(path, *args):
with open(path, "rb") as fp:
return os.listxattr(fp.fileno(), *args)
self._check_xattrs(getxattr, setxattr, removexattr, listxattr)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32DeprecatedBytesAPI(unittest.TestCase):
def test_deprecated(self):
import nt
filename = os.fsencode(support.TESTFN)
with warnings.catch_warnings():
warnings.simplefilter("error", DeprecationWarning)
for func, *args in (
(nt._getfullpathname, filename),
(nt._isdir, filename),
(os.access, filename, os.R_OK),
(os.chdir, filename),
(os.chmod, filename, 0o777),
(os.getcwdb,),
(os.link, filename, filename),
(os.listdir, filename),
(os.lstat, filename),
(os.mkdir, filename),
(os.open, filename, os.O_RDONLY),
(os.rename, filename, filename),
(os.rmdir, filename),
(os.startfile, filename),
(os.stat, filename),
(os.unlink, filename),
(os.utime, filename),
):
self.assertRaises(DeprecationWarning, func, *args)
@support.skip_unless_symlink
def test_symlink(self):
filename = os.fsencode(support.TESTFN)
with warnings.catch_warnings():
warnings.simplefilter("error", DeprecationWarning)
self.assertRaises(DeprecationWarning,
os.symlink, filename, filename)
@unittest.skipUnless(hasattr(os, 'get_terminal_size'), "requires os.get_terminal_size")
class TermsizeTests(unittest.TestCase):
def test_does_not_crash(self):
"""Check if get_terminal_size() returns a meaningful value.
There's no easy portable way to actually check the size of the
terminal, so let's check if it returns something sensible instead.
"""
try:
size = os.get_terminal_size()
except OSError as e:
if sys.platform == "win32" or e.errno in (errno.EINVAL, errno.ENOTTY):
# Under win32 a generic OSError can be thrown if the
# handle cannot be retrieved
self.skipTest("failed to query terminal size")
raise
self.assertGreaterEqual(size.columns, 0)
self.assertGreaterEqual(size.lines, 0)
def test_stty_match(self):
"""Check if stty returns the same results
stty actually tests stdin, so get_terminal_size is invoked on
stdin explicitly. If stty succeeded, then get_terminal_size()
should work too.
"""
try:
size = subprocess.check_output(['stty', 'size']).decode().split()
except (FileNotFoundError, subprocess.CalledProcessError):
self.skipTest("stty invocation failed")
expected = (int(size[1]), int(size[0])) # reversed order
try:
actual = os.get_terminal_size(sys.__stdin__.fileno())
except OSError as e:
if sys.platform == "win32" or e.errno in (errno.EINVAL, errno.ENOTTY):
# Under win32 a generic OSError can be thrown if the
# handle cannot be retrieved
self.skipTest("failed to query terminal size")
raise
self.assertEqual(expected, actual)
class OSErrorTests(unittest.TestCase):
def setUp(self):
class Str(str):
pass
self.bytes_filenames = []
self.unicode_filenames = []
if support.TESTFN_UNENCODABLE is not None:
decoded = support.TESTFN_UNENCODABLE
else:
decoded = support.TESTFN
self.unicode_filenames.append(decoded)
self.unicode_filenames.append(Str(decoded))
if support.TESTFN_UNDECODABLE is not None:
encoded = support.TESTFN_UNDECODABLE
else:
encoded = os.fsencode(support.TESTFN)
self.bytes_filenames.append(encoded)
self.bytes_filenames.append(memoryview(encoded))
self.filenames = self.bytes_filenames + self.unicode_filenames
def test_oserror_filename(self):
funcs = [
(self.filenames, os.chdir,),
(self.filenames, os.chmod, 0o777),
(self.filenames, os.lstat,),
(self.filenames, os.open, os.O_RDONLY),
(self.filenames, os.rmdir,),
(self.filenames, os.stat,),
(self.filenames, os.unlink,),
]
if sys.platform == "win32":
funcs.extend((
(self.bytes_filenames, os.rename, b"dst"),
(self.bytes_filenames, os.replace, b"dst"),
(self.unicode_filenames, os.rename, "dst"),
(self.unicode_filenames, os.replace, "dst"),
# Issue #16414: Don't test undecodable names with listdir()
# because of a Windows bug.
#
                # With the ANSI code page 932, os.listdir(b'\xe7') returns an
# empty list (instead of failing), whereas os.listdir(b'\xff')
# raises a FileNotFoundError. It looks like a Windows bug:
# b'\xe7' directory does not exist, FindFirstFileA(b'\xe7')
# fails with ERROR_FILE_NOT_FOUND (2), instead of
# ERROR_PATH_NOT_FOUND (3).
(self.unicode_filenames, os.listdir,),
))
else:
funcs.extend((
(self.filenames, os.listdir,),
(self.filenames, os.rename, "dst"),
(self.filenames, os.replace, "dst"),
))
if hasattr(os, "chown"):
funcs.append((self.filenames, os.chown, 0, 0))
if hasattr(os, "lchown"):
funcs.append((self.filenames, os.lchown, 0, 0))
if hasattr(os, "truncate"):
funcs.append((self.filenames, os.truncate, 0))
if hasattr(os, "chflags"):
funcs.append((self.filenames, os.chflags, 0))
if hasattr(os, "lchflags"):
funcs.append((self.filenames, os.lchflags, 0))
if hasattr(os, "chroot"):
funcs.append((self.filenames, os.chroot,))
if hasattr(os, "link"):
if sys.platform == "win32":
funcs.append((self.bytes_filenames, os.link, b"dst"))
funcs.append((self.unicode_filenames, os.link, "dst"))
else:
funcs.append((self.filenames, os.link, "dst"))
if hasattr(os, "listxattr"):
funcs.extend((
(self.filenames, os.listxattr,),
(self.filenames, os.getxattr, "user.test"),
(self.filenames, os.setxattr, "user.test", b'user'),
(self.filenames, os.removexattr, "user.test"),
))
if hasattr(os, "lchmod"):
funcs.append((self.filenames, os.lchmod, 0o777))
if hasattr(os, "readlink"):
if sys.platform == "win32":
funcs.append((self.unicode_filenames, os.readlink,))
else:
funcs.append((self.filenames, os.readlink,))
for filenames, func, *func_args in funcs:
for name in filenames:
try:
func(name, *func_args)
except OSError as err:
self.assertIs(err.filename, name)
else:
self.fail("No exception thrown by {}".format(func))
class CPUCountTests(unittest.TestCase):
def test_cpu_count(self):
cpus = os.cpu_count()
if cpus is not None:
self.assertIsInstance(cpus, int)
self.assertGreater(cpus, 0)
else:
self.skipTest("Could not determine the number of CPUs")
class FDInheritanceTests(unittest.TestCase):
def test_get_set_inheritable(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(os.get_inheritable(fd), False)
os.set_inheritable(fd, True)
self.assertEqual(os.get_inheritable(fd), True)
@unittest.skipIf(fcntl is None, "need fcntl")
def test_get_inheritable_cloexec(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(os.get_inheritable(fd), False)
# clear FD_CLOEXEC flag
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
flags &= ~fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)
self.assertEqual(os.get_inheritable(fd), True)
@unittest.skipIf(fcntl is None, "need fcntl")
def test_set_inheritable_cloexec(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(fcntl.fcntl(fd, fcntl.F_GETFD) & fcntl.FD_CLOEXEC,
fcntl.FD_CLOEXEC)
os.set_inheritable(fd, True)
self.assertEqual(fcntl.fcntl(fd, fcntl.F_GETFD) & fcntl.FD_CLOEXEC,
0)
def test_open(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(os.get_inheritable(fd), False)
@unittest.skipUnless(hasattr(os, 'pipe'), "need os.pipe()")
def test_pipe(self):
rfd, wfd = os.pipe()
self.addCleanup(os.close, rfd)
self.addCleanup(os.close, wfd)
self.assertEqual(os.get_inheritable(rfd), False)
self.assertEqual(os.get_inheritable(wfd), False)
def test_dup(self):
fd1 = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd1)
fd2 = os.dup(fd1)
self.addCleanup(os.close, fd2)
self.assertEqual(os.get_inheritable(fd2), False)
@unittest.skipUnless(hasattr(os, 'dup2'), "need os.dup2()")
def test_dup2(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
# inheritable by default
fd2 = os.open(__file__, os.O_RDONLY)
try:
os.dup2(fd, fd2)
self.assertEqual(os.get_inheritable(fd2), True)
finally:
os.close(fd2)
# force non-inheritable
fd3 = os.open(__file__, os.O_RDONLY)
try:
os.dup2(fd, fd3, inheritable=False)
self.assertEqual(os.get_inheritable(fd3), False)
finally:
os.close(fd3)
@unittest.skipUnless(hasattr(os, 'openpty'), "need os.openpty()")
def test_openpty(self):
master_fd, slave_fd = os.openpty()
self.addCleanup(os.close, master_fd)
self.addCleanup(os.close, slave_fd)
self.assertEqual(os.get_inheritable(master_fd), False)
self.assertEqual(os.get_inheritable(slave_fd), False)
@support.reap_threads
def test_main():
support.run_unittest(
FileTests,
StatAttributeTests,
EnvironTests,
WalkTests,
FwalkTests,
MakedirTests,
DevNullTests,
URandomTests,
ExecTests,
Win32ErrorTests,
TestInvalidFD,
PosixUidGidTests,
Pep383Tests,
Win32KillTests,
Win32ListdirTests,
Win32SymlinkTests,
NonLocalSymlinkTests,
FSEncodingTests,
DeviceEncodingTests,
PidTests,
LoginTests,
LinkTests,
TestSendfile,
ProgramPriorityTests,
ExtendedAttributeTests,
Win32DeprecatedBytesAPI,
TermsizeTests,
OSErrorTests,
RemoveDirsTests,
CPUCountTests,
FDInheritanceTests,
)
if __name__ == "__main__":
test_main()
|
ingokegel/intellij-community
|
refs/heads/master
|
python/testData/refactoring/move/moveUnreferencedFunctionToUnimportableModule/after/src/src.py
|
12133432
| |
tzaffi/git-in-practice-repo
|
refs/heads/master
|
book/lib/python2.7/site-packages/django/contrib/gis/db/backends/postgis/__init__.py
|
12133432
| |
jgoclawski/django
|
refs/heads/master
|
tests/ordering/__init__.py
|
12133432
| |
cpausmit/IntelROCCS
|
refs/heads/master
|
CUADRnT/src/python/cuadrnt/data_analysis/rankings/generic.py
|
4
|
#!/usr/bin/env python2.7
"""
File : generic.py
Author : Bjorn Barrefors <bjorn dot peter dot barrefors AT cern dot ch>
Description: Generic class for all ranking algorithms
"""
# system modules
import logging
import datetime
import json
import math
import numpy as np
from sklearn.externals import joblib
# package modules
from cuadrnt.data_management.tools.sites import SiteManager
from cuadrnt.data_management.tools.datasets import DatasetManager
from cuadrnt.data_management.tools.popularity import PopularityManager
from cuadrnt.data_management.core.storage import StorageManager
class GenericRanking(object):
"""
Generic Ranking class
"""
def __init__(self, config=dict()):
self.logger = logging.getLogger(__name__)
self.config = config
self.sites = SiteManager(self.config)
self.datasets = DatasetManager(self.config)
self.popularity = PopularityManager(self.config)
self.storage = StorageManager(self.config)
self.max_replicas = int(config['rocker_board']['max_replicas'])
self.name = 'generic'
self.data_path = self.config['paths']['data']
self.data_tiers = config['tools']['valid_tiers'].split(',')
self.preprocessed_data = dict()
self.clf_trend = dict()
self.clf_avg = dict()
def predict_trend(self, features, data_tier):
"""
Predict trend based on features
"""
prediction = self.clf_trend[data_tier].predict(features)
return prediction[0]
def predict_avg(self, features, data_tier):
"""
        Predict average based on features
"""
prediction = self.clf_avg[data_tier].predict(features)
return prediction[0]
def train(self):
"""
Training classifier and regressor
"""
for data_tier in self.data_tiers:
fd = open(self.data_path + '/training_data_' + data_tier + '.json', 'r')
self.preprocessed_data[data_tier] = json.load(fd)
fd.close()
tot = len(self.preprocessed_data[data_tier]['features'])
p = int(math.ceil(tot*0.8))
training_features = np.array(self.preprocessed_data[data_tier]['features'][:p])
trend_training_classifications = np.array(self.preprocessed_data[data_tier]['trend_classifications'][:p])
avg_training_classifications = np.array(self.preprocessed_data[data_tier]['avg_classifications'][:p])
t1 = datetime.datetime.utcnow()
self.clf_trend[data_tier].fit(training_features, trend_training_classifications)
self.clf_avg[data_tier].fit(training_features, avg_training_classifications)
t2 = datetime.datetime.utcnow()
td = t2 - t1
self.logger.info('Training %s for data tier %s took %s', self.name, data_tier, str(td))
joblib.dump(self.clf_trend[data_tier], self.data_path + '/' + self.name + '_trend_' + data_tier + '.pkl')
joblib.dump(self.clf_avg[data_tier], self.data_path + '/' + self.name + '_avg_' + data_tier + '.pkl')
def test(self):
"""
Test accuracy/score of classifier and regressor
"""
for data_tier in self.data_tiers:
tot = len(self.preprocessed_data[data_tier]['features'])
            # use the same split point as train() so the test set is the held-out ~20%
            p = int(math.ceil(tot*0.8))
test_features = np.array(self.preprocessed_data[data_tier]['features'][p:])
trend_test_classifications = np.array(self.preprocessed_data[data_tier]['trend_classifications'][p:])
avg_test_classifications = np.array(self.preprocessed_data[data_tier]['avg_classifications'][p:])
accuracy_trend = self.clf_trend[data_tier].score(test_features, trend_test_classifications)
accuracy_avg = self.clf_avg[data_tier].score(test_features, avg_test_classifications)
self.logger.info('The accuracy of %s trend classifier for data tier %s is %.3f', self.name, data_tier, accuracy_trend)
self.logger.info('The accuracy of %s avg regressor for data tier %s is %.3f', self.name, data_tier, accuracy_avg)
|
sander76/home-assistant
|
refs/heads/dev
|
homeassistant/components/wolflink/sensor.py
|
5
|
"""The Wolf SmartSet sensors."""
from wolf_smartset.models import (
HoursParameter,
ListItemParameter,
Parameter,
PercentageParameter,
Pressure,
SimpleParameter,
Temperature,
)
from homeassistant.components.sensor import SensorEntity
from homeassistant.const import (
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TEMPERATURE,
PRESSURE_BAR,
TEMP_CELSIUS,
TIME_HOURS,
)
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import COORDINATOR, DEVICE_ID, DOMAIN, PARAMETERS, STATES
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up all entries for Wolf Platform."""
coordinator = hass.data[DOMAIN][config_entry.entry_id][COORDINATOR]
parameters = hass.data[DOMAIN][config_entry.entry_id][PARAMETERS]
device_id = hass.data[DOMAIN][config_entry.entry_id][DEVICE_ID]
entities = []
for parameter in parameters:
if isinstance(parameter, Temperature):
entities.append(WolfLinkTemperature(coordinator, parameter, device_id))
if isinstance(parameter, Pressure):
entities.append(WolfLinkPressure(coordinator, parameter, device_id))
if isinstance(parameter, PercentageParameter):
entities.append(WolfLinkPercentage(coordinator, parameter, device_id))
if isinstance(parameter, ListItemParameter):
entities.append(WolfLinkState(coordinator, parameter, device_id))
if isinstance(parameter, HoursParameter):
entities.append(WolfLinkHours(coordinator, parameter, device_id))
if isinstance(parameter, SimpleParameter):
entities.append(WolfLinkSensor(coordinator, parameter, device_id))
async_add_entities(entities, True)
class WolfLinkSensor(CoordinatorEntity, SensorEntity):
"""Base class for all Wolf entities."""
def __init__(self, coordinator, wolf_object: Parameter, device_id):
"""Initialize."""
super().__init__(coordinator)
self.wolf_object = wolf_object
self.device_id = device_id
self._state = None
@property
def name(self):
"""Return the name."""
return f"{self.wolf_object.name}"
@property
def state(self):
"""Return the state. Wolf Client is returning only changed values so we need to store old value here."""
if self.wolf_object.value_id in self.coordinator.data:
self._state = self.coordinator.data[self.wolf_object.value_id]
return self._state
@property
def extra_state_attributes(self):
"""Return the state attributes."""
return {
"parameter_id": self.wolf_object.parameter_id,
"value_id": self.wolf_object.value_id,
"parent": self.wolf_object.parent,
}
@property
def unique_id(self):
"""Return a unique_id for this entity."""
return f"{self.device_id}:{self.wolf_object.parameter_id}"
class WolfLinkHours(WolfLinkSensor):
"""Class for hour based entities."""
@property
def icon(self):
"""Icon to display in the front Aend."""
return "mdi:clock"
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return TIME_HOURS
class WolfLinkTemperature(WolfLinkSensor):
"""Class for temperature based entities."""
@property
def device_class(self):
"""Return the device_class."""
return DEVICE_CLASS_TEMPERATURE
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return TEMP_CELSIUS
class WolfLinkPressure(WolfLinkSensor):
"""Class for pressure based entities."""
@property
def device_class(self):
"""Return the device_class."""
return DEVICE_CLASS_PRESSURE
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return PRESSURE_BAR
class WolfLinkPercentage(WolfLinkSensor):
"""Class for percentage based entities."""
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self.wolf_object.unit
class WolfLinkState(WolfLinkSensor):
"""Class for entities which has defined list of state."""
@property
def device_class(self):
"""Return the device class."""
return "wolflink__state"
@property
def state(self):
"""Return the state converting with supported values."""
state = super().state
resolved_state = [
item for item in self.wolf_object.items if item.value == int(state)
]
if resolved_state:
resolved_name = resolved_state[0].name
return STATES.get(resolved_name, resolved_name)
return state
|
andreif/django
|
refs/heads/master
|
tests/urlpatterns_reverse/included_named_urls2.py
|
452
|
from django.conf.urls import url
from .views import empty_view
urlpatterns = [
url(r'^$', empty_view, name="named-url5"),
url(r'^extra/(?P<extra>\w+)/$', empty_view, name="named-url6"),
url(r'^(?P<one>[0-9]+)|(?P<two>[0-9]+)/$', empty_view),
]
|
throwable-one/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.3/tests/regressiontests/comment_tests/models.py
|
102
|
"""
Comments may be attached to any object. See the comment documentation for
more information.
"""
from django.db import models
from django.test import TestCase
class Author(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
def __str__(self):
return '%s %s' % (self.first_name, self.last_name)
class Article(models.Model):
author = models.ForeignKey(Author)
headline = models.CharField(max_length=100)
def __str__(self):
return self.headline
class Entry(models.Model):
title = models.CharField(max_length=250)
body = models.TextField()
pub_date = models.DateField()
enable_comments = models.BooleanField()
def __str__(self):
return self.title
class Book(models.Model):
dewey_decimal = models.DecimalField(primary_key = True, decimal_places=2, max_digits=5)
|
himmih/cluedo
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/lint.py
|
500
|
from __future__ import absolute_import, division, unicode_literals
from . import _base
from ..constants import cdataElements, rcdataElements, voidElements
from ..constants import spaceCharacters
spaceCharacters = "".join(spaceCharacters)
class LintError(Exception):
pass
class Filter(_base.Filter):
def __iter__(self):
open_elements = []
contentModelFlag = "PCDATA"
for token in _base.Filter.__iter__(self):
type = token["type"]
if type in ("StartTag", "EmptyTag"):
name = token["name"]
if contentModelFlag != "PCDATA":
raise LintError("StartTag not in PCDATA content model flag: %(tag)s" % {"tag": name})
if not isinstance(name, str):
raise LintError("Tag name is not a string: %(tag)r" % {"tag": name})
if not name:
raise LintError("Empty tag name")
if type == "StartTag" and name in voidElements:
raise LintError("Void element reported as StartTag token: %(tag)s" % {"tag": name})
elif type == "EmptyTag" and name not in voidElements:
raise LintError("Non-void element reported as EmptyTag token: %(tag)s" % {"tag": token["name"]})
if type == "StartTag":
open_elements.append(name)
for name, value in token["data"]:
if not isinstance(name, str):
raise LintError("Attribute name is not a string: %(name)r" % {"name": name})
if not name:
raise LintError("Empty attribute name")
if not isinstance(value, str):
raise LintError("Attribute value is not a string: %(value)r" % {"value": value})
if name in cdataElements:
contentModelFlag = "CDATA"
elif name in rcdataElements:
contentModelFlag = "RCDATA"
elif name == "plaintext":
contentModelFlag = "PLAINTEXT"
elif type == "EndTag":
name = token["name"]
if not isinstance(name, str):
raise LintError("Tag name is not a string: %(tag)r" % {"tag": name})
if not name:
raise LintError("Empty tag name")
if name in voidElements:
raise LintError("Void element reported as EndTag token: %(tag)s" % {"tag": name})
start_name = open_elements.pop()
if start_name != name:
raise LintError("EndTag (%(end)s) does not match StartTag (%(start)s)" % {"end": name, "start": start_name})
contentModelFlag = "PCDATA"
elif type == "Comment":
if contentModelFlag != "PCDATA":
raise LintError("Comment not in PCDATA content model flag")
elif type in ("Characters", "SpaceCharacters"):
data = token["data"]
if not isinstance(data, str):
raise LintError("Attribute name is not a string: %(name)r" % {"name": data})
if not data:
raise LintError("%(type)s token with empty data" % {"type": type})
if type == "SpaceCharacters":
data = data.strip(spaceCharacters)
if data:
raise LintError("Non-space character(s) found in SpaceCharacters token: %(token)r" % {"token": data})
elif type == "Doctype":
name = token["name"]
if contentModelFlag != "PCDATA":
raise LintError("Doctype not in PCDATA content model flag: %(name)s" % {"name": name})
if not isinstance(name, str):
raise LintError("Tag name is not a string: %(tag)r" % {"tag": name})
# XXX: what to do with token["data"] ?
elif type in ("ParseError", "SerializeError"):
pass
else:
raise LintError("Unknown token type: %(type)s" % {"type": type})
yield token
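# Editorial usage sketch (not part of the vendored module): the filter wraps
# any iterable of token dicts, yielding them unchanged when the stream is
# well formed and raising LintError otherwise. The hand-built stream below is
# illustrative and assumes Python 3, where `str` is the text type the checks
# above expect.
def _example_lint_usage():
    tokens = [
        {"type": "StartTag", "name": "p", "data": []},
        {"type": "Characters", "data": "hello"},
        {"type": "EndTag", "name": "p", "data": []},
    ]
    # Consuming the filter validates the stream as a side effect.
    return list(Filter(tokens))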
|
adityacs/ansible
|
refs/heads/devel
|
test/runner/lib/cover.py
|
43
|
"""Code coverage utilities."""
from __future__ import absolute_import, print_function
import os
import re
from lib.target import (
walk_module_targets,
)
from lib.util import (
display,
ApplicationError,
EnvironmentConfig,
run_command,
)
from lib.executor import (
Delegate,
install_command_requirements,
)
COVERAGE_DIR = 'test/results/coverage'
COVERAGE_FILE = os.path.join(COVERAGE_DIR, 'coverage')
def command_coverage_combine(args):
"""Patch paths in coverage files and merge into a single file.
:type args: CoverageConfig
"""
coverage = initialize_coverage(args)
modules = dict((t.module, t.path) for t in list(walk_module_targets()))
coverage_files = [os.path.join(COVERAGE_DIR, f) for f in os.listdir(COVERAGE_DIR)
if f.startswith('coverage') and f != 'coverage']
arc_data = {}
ansible_path = os.path.abspath('lib/ansible/') + '/'
root_path = os.getcwd() + '/'
counter = 0
for coverage_file in coverage_files:
counter += 1
display.info('[%4d/%4d] %s' % (counter, len(coverage_files), coverage_file), verbosity=2)
original = coverage.CoverageData()
if os.path.getsize(coverage_file) == 0:
display.warning('Empty coverage file: %s' % coverage_file)
continue
try:
original.read_file(coverage_file)
except Exception as ex: # pylint: disable=locally-disabled, broad-except
display.error(str(ex))
continue
for filename in original.measured_files():
arcs = set(original.arcs(filename))
if '/ansible_modlib.zip/ansible/' in filename:
new_name = re.sub('^.*/ansible_modlib.zip/ansible/', ansible_path, filename)
display.info('%s -> %s' % (filename, new_name), verbosity=3)
filename = new_name
elif '/ansible_module_' in filename:
module = re.sub('^.*/ansible_module_(?P<module>.*).py$', '\\g<module>', filename)
new_name = os.path.abspath(modules[module])
display.info('%s -> %s' % (filename, new_name), verbosity=3)
filename = new_name
elif filename.startswith('/root/ansible/'):
new_name = re.sub('^/.*?/ansible/', root_path, filename)
display.info('%s -> %s' % (filename, new_name), verbosity=3)
filename = new_name
if filename not in arc_data:
arc_data[filename] = set()
arc_data[filename].update(arcs)
updated = coverage.CoverageData()
for filename in arc_data:
if not os.path.isfile(filename):
display.warning('Invalid coverage path: %s' % filename)
continue
updated.add_arcs({filename: list(arc_data[filename])})
if not args.explain:
updated.write_file(COVERAGE_FILE)
def command_coverage_report(args):
"""
:type args: CoverageConfig
"""
command_coverage_combine(args)
run_command(args, ['coverage', 'report'])
def command_coverage_html(args):
"""
:type args: CoverageConfig
"""
command_coverage_combine(args)
run_command(args, ['coverage', 'html', '-d', 'test/results/reports/coverage'])
def command_coverage_xml(args):
"""
:type args: CoverageConfig
"""
command_coverage_combine(args)
run_command(args, ['coverage', 'xml', '-o', 'test/results/reports/coverage.xml'])
def command_coverage_erase(args):
"""
:type args: CoverageConfig
"""
initialize_coverage(args)
for name in os.listdir(COVERAGE_DIR):
if not name.startswith('coverage'):
continue
path = os.path.join(COVERAGE_DIR, name)
if not args.explain:
os.remove(path)
def initialize_coverage(args):
"""
:type args: CoverageConfig
:rtype: coverage
"""
if args.delegate:
raise Delegate()
if args.requirements:
install_command_requirements(args)
try:
import coverage
except ImportError:
coverage = None
if not coverage:
raise ApplicationError('You must install the "coverage" python module to use this command.')
return coverage
class CoverageConfig(EnvironmentConfig):
"""Configuration for the coverage command."""
def __init__(self, args):
"""
:type args: any
"""
super(CoverageConfig, self).__init__(args, 'coverage')
|
conjurinc/api-python
|
refs/heads/master
|
conjur/layer.py
|
1
|
#
# Copyright (C) 2014 Conjur Inc
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from conjur.util import urlescape, authzid
from conjur.exceptions import ConjurException
class Layer(object):
def __init__(self, api, id, attrs=None):
self.api = api
self.id = id
self._attrs = {} if attrs is None else attrs
def add_host(self, host):
hostid = authzid(host, 'role', with_account=False)
self.api.post(self._hosts_url(), data={'hostid': hostid})
def remove_host(self, host):
hostid = authzid(host, 'role')
self.api.delete(self._host_url(hostid))
def exists(self):
resp = self.api.get(self._url(), check_errors=False)
if resp.status_code == 200:
return True
if resp.status_code == 404:
return False
raise ConjurException("Request Failed: {0}".format(resp.status_code))
def _url(self):
return "{0}/layers/{1}".format(self.api.config.core_url,
urlescape(self.id))
def _hosts_url(self):
return "{0}/hosts".format(self._url())
def _host_url(self, host_id):
return "{0}/{1}".format(self._hosts_url(), urlescape(host_id))
def _fetch(self):
self._attrs = self.api.get(self._url()).json()
def __getattr__(self, item):
if self._attrs is None:
self._fetch()
try:
return self._attrs[item]
except KeyError:
raise AttributeError(item)
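# Editorial usage sketch (not from the Conjur documentation): Layer objects
# are normally created from an API client; `api` below stands for any object
# exposing the get/post/delete interface used above, and the layer id is a
# placeholder.
def _example_layer_usage(api):
    layer = Layer(api, 'jenkins/executors')
    if not layer.exists():
        raise ConjurException("layer jenkins/executors not found")
    return layer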
|
Zhongqilong/mykbengineer
|
refs/heads/master
|
kbe/src/lib/python/Lib/test/final_a.py
|
103
|
"""
Fodder for module finalization tests in test_module.
"""
import shutil
import test.final_b
x = 'a'
class C:
def __del__(self):
# Inspect module globals and builtins
print("x =", x)
print("final_b.x =", test.final_b.x)
print("shutil.rmtree =", getattr(shutil.rmtree, '__name__', None))
print("len =", getattr(len, '__name__', None))
c = C()
_underscored = C()
|
dbmi-pitt/DIKB-Micropublication
|
refs/heads/master
|
scripts/mp-scripts/Bio/SeqIO/InsdcIO.py
|
1
|
# Copyright 2007 by Peter Cock. All rights reserved.
#
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
from Bio.GenBank.Scanner import GenBankScanner, EmblScanner
from Bio.Alphabet import generic_protein
# NOTE
# ====
# The "brains" for parsing GenBank and EMBL files (and any
# other flat file variants from the INSDC in future) is in
# Bio.GenBank.Scanner (plus the _FeatureConsumer in Bio.GenBank)
#
# See also
# ========
# International Nucleotide Sequence Database Collaboration
# http://www.insdc.org/
#
# GenBank
# http://www.ncbi.nlm.nih.gov/Genbank/
#
# EMBL Nucleotide Sequence Database
# http://www.ebi.ac.uk/embl/
#
# DDBJ (DNA Data Bank of Japan)
# http://www.ddbj.nig.ac.jp/
def GenBankIterator(handle) :
"""Breaks up a Genbank file into SeqRecord objects
Every section from the LOCUS line to the terminating // becomes
a single SeqRecord with associated annotation and features.
Note that for genomes or chromosomes, there is typically only
one record."""
#This calls a generator function:
return GenBankScanner(debug=0).parse_records(handle)
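# Editorial usage sketch: iterating the records of a GenBank flat file with
# the parser above; the path argument is a placeholder supplied by the caller.
def _example_genbank_usage(path):
    handle = open(path)
    try:
        return [record.id for record in GenBankIterator(handle)]
    finally:
        handle.close()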
def EmblIterator(handle) :
"""Breaks up an EMBL file into SeqRecord objects
Every section from the LOCUS line to the terminating // becomes
a single SeqRecord with associated annotation and features.
Note that for genomes or chromosomes, there is typically only
one record."""
#This calls a generator function:
return EmblScanner(debug=0).parse_records(handle)
def GenBankCdsFeatureIterator(handle, alphabet=generic_protein) :
"""Breaks up a Genbank file into SeqRecord objects for each CDS feature
Every section from the LOCUS line to the terminating // can contain
    many CDS features. These are returned as SeqRecord objects with the
    stated amino acid translation sequence (if given).
"""
#This calls a generator function:
return GenBankScanner(debug=0).parse_cds_features(handle, alphabet)
def EmblCdsFeatureIterator(handle, alphabet=generic_protein) :
"""Breaks up a EMBL file into SeqRecord objects for each CDS feature
Every section from the LOCUS line to the terminating // can contain
    many CDS features. These are returned as SeqRecord objects with the
    stated amino acid translation sequence (if given).
"""
#This calls a generator function:
return EmblScanner(debug=0).parse_cds_features(handle, alphabet)
|
tzoiker/gensim
|
refs/heads/develop
|
gensim/test/test_doc2vec.py
|
4
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <radimrehurek@seznam.cz>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Automated tests for checking transformation algorithms (the models package).
"""
from __future__ import with_statement
import logging
import unittest
import os
import tempfile
from six.moves import zip as izip
from collections import namedtuple
from testfixtures import log_capture
import numpy as np
from gensim import utils, matutils
from gensim.models import doc2vec
module_path = os.path.dirname(__file__) # needed because sample data files are located in the same folder
datapath = lambda fname: os.path.join(module_path, 'test_data', fname)
class DocsLeeCorpus(object):
def __init__(self, string_tags=False):
self.string_tags = string_tags
def _tag(self, i):
return i if not self.string_tags else '_*%d' % i
def __iter__(self):
with open(datapath('lee_background.cor')) as f:
for i, line in enumerate(f):
yield doc2vec.TaggedDocument(utils.simple_preprocess(line), [self._tag(i)])
list_corpus = list(DocsLeeCorpus())
raw_sentences = [
['human', 'interface', 'computer'],
['survey', 'user', 'computer', 'system', 'response', 'time'],
['eps', 'user', 'interface', 'system'],
['system', 'human', 'system', 'eps'],
['user', 'response', 'time'],
['trees'],
['graph', 'trees'],
['graph', 'minors', 'trees'],
['graph', 'minors', 'survey']
]
sentences = [doc2vec.TaggedDocument(words, [i]) for i, words in enumerate(raw_sentences)]
def testfile():
# temporary data will be stored to this file
return os.path.join(tempfile.gettempdir(), 'gensim_doc2vec.tst')
class TestDoc2VecModel(unittest.TestCase):
def test_persistence(self):
"""Test storing/loading the entire model."""
model = doc2vec.Doc2Vec(DocsLeeCorpus(), min_count=1)
model.save(testfile())
self.models_equal(model, doc2vec.Doc2Vec.load(testfile()))
def test_load_mmap(self):
"""Test storing/loading the entire model."""
model = doc2vec.Doc2Vec(sentences, min_count=1)
# test storing the internal arrays into separate files
model.save(testfile(), sep_limit=0)
self.models_equal(model, doc2vec.Doc2Vec.load(testfile()))
# make sure mmaping the arrays back works, too
self.models_equal(model, doc2vec.Doc2Vec.load(testfile(), mmap='r'))
def test_int_doctags(self):
"""Test doc2vec doctag alternatives"""
corpus = DocsLeeCorpus()
model = doc2vec.Doc2Vec(min_count=1)
model.build_vocab(corpus)
self.assertEqual(len(model.docvecs.doctag_syn0), 300)
self.assertEqual(model.docvecs[0].shape, (300,))
self.assertRaises(KeyError, model.__getitem__, '_*0')
def test_missing_string_doctag(self):
"""Test doc2vec doctag alternatives"""
corpus = list(DocsLeeCorpus(True))
# force duplicated tags
corpus = corpus[0:10] + corpus
model = doc2vec.Doc2Vec(min_count=1)
model.build_vocab(corpus)
self.assertRaises(KeyError, model.docvecs.__getitem__, 'not_a_tag')
def test_string_doctags(self):
"""Test doc2vec doctag alternatives"""
corpus = list(DocsLeeCorpus(True))
# force duplicated tags
corpus = corpus[0:10] + corpus
model = doc2vec.Doc2Vec(min_count=1)
model.build_vocab(corpus)
self.assertEqual(len(model.docvecs.doctag_syn0), 300)
self.assertEqual(model.docvecs[0].shape, (300,))
self.assertEqual(model.docvecs['_*0'].shape, (300,))
self.assertTrue(all(model.docvecs['_*0'] == model.docvecs[0]))
self.assertTrue(max(d.offset for d in model.docvecs.doctags.values()) < len(model.docvecs.doctags))
self.assertTrue(max(model.docvecs._int_index(str_key) for str_key in model.docvecs.doctags.keys()) < len(model.docvecs.doctag_syn0))
# verify docvecs.most_similar() returns string doctags rather than indexes
self.assertEqual(model.docvecs.offset2doctag[0], model.docvecs.most_similar([model.docvecs[0]])[0][0])
def test_empty_errors(self):
# no input => "RuntimeError: you must first build vocabulary before training the model"
self.assertRaises(RuntimeError, doc2vec.Doc2Vec, [])
# input not empty, but rather completely filtered out
self.assertRaises(RuntimeError, doc2vec.Doc2Vec, list_corpus, min_count=10000)
def test_similarity_unseen_docs(self):
"""Test similarity of out of training sentences"""
rome_str = ['rome', 'italy']
car_str = ['car']
corpus = list(DocsLeeCorpus(True))
model = doc2vec.Doc2Vec(min_count=1)
model.build_vocab(corpus)
self.assertTrue(model.docvecs.similarity_unseen_docs(model, rome_str, rome_str) > model.docvecs.similarity_unseen_docs(model, rome_str, car_str))
def model_sanity(self, model):
"""Any non-trivial model on DocsLeeCorpus can pass these sanity checks"""
fire1 = 0 # doc 0 sydney fires
fire2 = 8 # doc 8 sydney fires
tennis1 = 6 # doc 6 tennis
# inferred vector should be top10 close to bulk-trained one
doc0_inferred = model.infer_vector(list(DocsLeeCorpus())[0].words)
sims_to_infer = model.docvecs.most_similar([doc0_inferred], topn=len(model.docvecs))
f_rank = [docid for docid, sim in sims_to_infer].index(fire1)
self.assertLess(f_rank, 10)
# fire2 should be top30 close to fire1
sims = model.docvecs.most_similar(fire1, topn=len(model.docvecs))
f2_rank = [docid for docid, sim in sims].index(fire2)
self.assertLess(f2_rank, 30)
# same sims should appear in lookup by vec as by index
doc0_vec = model.docvecs[fire1]
sims2 = model.docvecs.most_similar(positive=[doc0_vec], topn=21)
sims2 = [(id, sim) for id, sim in sims2 if id != fire1] # ignore the doc itself
sims = sims[:20]
self.assertEqual(list(zip(*sims))[0], list(zip(*sims2))[0]) # same doc ids
self.assertTrue(np.allclose(list(zip(*sims))[1], list(zip(*sims2))[1])) # close-enough dists
# tennis doc should be out-of-place among fire news
self.assertEqual(model.docvecs.doesnt_match([fire1, tennis1, fire2]), tennis1)
# fire docs should be closer than fire-tennis
self.assertTrue(model.docvecs.similarity(fire1, fire2) > model.docvecs.similarity(fire1, tennis1))
def test_training(self):
"""Test doc2vec training."""
corpus = DocsLeeCorpus()
model = doc2vec.Doc2Vec(size=100, min_count=2, iter=20)
model.build_vocab(corpus)
self.assertEqual(model.docvecs.doctag_syn0.shape, (300, 100))
model.train(corpus)
self.model_sanity(model)
# build vocab and train in one step; must be the same as above
model2 = doc2vec.Doc2Vec(corpus, size=100, min_count=2, iter=20)
self.models_equal(model, model2)
def test_dbow_hs(self):
"""Test DBOW doc2vec training."""
model = doc2vec.Doc2Vec(list_corpus, dm=0, hs=1, negative=0, min_count=2, iter=20)
self.model_sanity(model)
def test_dmm_hs(self):
"""Test DM/mean doc2vec training."""
model = doc2vec.Doc2Vec(list_corpus, dm=1, dm_mean=1, size=24, window=4, hs=1, negative=0,
alpha=0.05, min_count=2, iter=20)
self.model_sanity(model)
def test_dms_hs(self):
"""Test DM/sum doc2vec training."""
model = doc2vec.Doc2Vec(list_corpus, dm=1, dm_mean=0, size=24, window=4, hs=1, negative=0,
alpha=0.05, min_count=2, iter=20)
self.model_sanity(model)
def test_dmc_hs(self):
"""Test DM/concatenate doc2vec training."""
model = doc2vec.Doc2Vec(list_corpus, dm=1, dm_concat=1, size=24, window=4, hs=1, negative=0,
alpha=0.05, min_count=2, iter=20)
self.model_sanity(model)
def test_dbow_neg(self):
"""Test DBOW doc2vec training."""
model = doc2vec.Doc2Vec(list_corpus, dm=0, hs=0, negative=10, min_count=2, iter=20)
self.model_sanity(model)
def test_dmm_neg(self):
"""Test DM/mean doc2vec training."""
model = doc2vec.Doc2Vec(list_corpus, dm=1, dm_mean=1, size=24, window=4, hs=0, negative=10,
alpha=0.05, min_count=2, iter=20)
self.model_sanity(model)
def test_dms_neg(self):
"""Test DM/sum doc2vec training."""
model = doc2vec.Doc2Vec(list_corpus, dm=1, dm_mean=0, size=24, window=4, hs=0, negative=10,
alpha=0.05, min_count=2, iter=20)
self.model_sanity(model)
def test_dmc_neg(self):
"""Test DM/concatenate doc2vec training."""
model = doc2vec.Doc2Vec(list_corpus, dm=1, dm_concat=1, size=24, window=4, hs=0, negative=10,
alpha=0.05, min_count=2, iter=20)
self.model_sanity(model)
def test_parallel(self):
"""Test doc2vec parallel training."""
if doc2vec.FAST_VERSION < 0: # don't test the plain NumPy version for parallelism (too slow)
return
corpus = utils.RepeatCorpus(DocsLeeCorpus(), 10000)
for workers in [2, 4]:
model = doc2vec.Doc2Vec(corpus, workers=workers)
self.model_sanity(model)
def test_deterministic_hs(self):
"""Test doc2vec results identical with identical RNG seed."""
# hs
model = doc2vec.Doc2Vec(DocsLeeCorpus(), seed=42, workers=1)
model2 = doc2vec.Doc2Vec(DocsLeeCorpus(), seed=42, workers=1)
self.models_equal(model, model2)
def test_deterministic_neg(self):
"""Test doc2vec results identical with identical RNG seed."""
# neg
model = doc2vec.Doc2Vec(DocsLeeCorpus(), hs=0, negative=3, seed=42, workers=1)
model2 = doc2vec.Doc2Vec(DocsLeeCorpus(), hs=0, negative=3, seed=42, workers=1)
self.models_equal(model, model2)
def test_deterministic_dmc(self):
"""Test doc2vec results identical with identical RNG seed."""
# bigger, dmc
model = doc2vec.Doc2Vec(DocsLeeCorpus(), dm=1, dm_concat=1, size=24, window=4, hs=1, negative=3,
seed=42, workers=1)
model2 = doc2vec.Doc2Vec(DocsLeeCorpus(), dm=1, dm_concat=1, size=24, window=4, hs=1, negative=3,
seed=42, workers=1)
self.models_equal(model, model2)
def test_mixed_tag_types(self):
"""Ensure alternating int/string tags don't share indexes in doctag_syn0"""
mixed_tag_corpus = [doc2vec.TaggedDocument(words, [i, words[0]]) for i, words in enumerate(raw_sentences)]
model = doc2vec.Doc2Vec()
model.build_vocab(mixed_tag_corpus)
expected_length = len(sentences) + len(model.docvecs.doctags) # 9 sentences, 7 unique first tokens
self.assertEquals(len(model.docvecs.doctag_syn0), expected_length)
def models_equal(self, model, model2):
# check words/hidden-weights
self.assertEqual(len(model.vocab), len(model2.vocab))
self.assertTrue(np.allclose(model.syn0, model2.syn0))
if model.hs:
self.assertTrue(np.allclose(model.syn1, model2.syn1))
if model.negative:
self.assertTrue(np.allclose(model.syn1neg, model2.syn1neg))
# check docvecs
self.assertEqual(len(model.docvecs.doctags), len(model2.docvecs.doctags))
self.assertEqual(len(model.docvecs.offset2doctag), len(model2.docvecs.offset2doctag))
self.assertTrue(np.allclose(model.docvecs.doctag_syn0, model2.docvecs.doctag_syn0))
@log_capture()
def testBuildVocabWarning(self, l):
"""Test if logger warning is raised on non-ideal input to a doc2vec model"""
raw_sentences = ['human', 'machine']
sentences = [doc2vec.TaggedDocument(words, [i]) for i, words in enumerate(raw_sentences)]
model = doc2vec.Doc2Vec()
model.build_vocab(sentences)
warning = "Each 'words' should be a list of words (usually unicode strings)."
self.assertTrue(warning in str(l))
@log_capture()
def testTrainWarning(self, l):
"""Test if warning is raised if alpha rises during subsequent calls to train()"""
raw_sentences = [['human'],
['graph', 'trees']]
sentences = [doc2vec.TaggedDocument(words, [i]) for i, words in enumerate(raw_sentences)]
model = doc2vec.Doc2Vec(alpha=0.025, min_alpha=0.025, min_count=1, workers=8, size=5)
model.build_vocab(sentences)
for epoch in range(10):
model.train(sentences)
model.alpha -= 0.002
model.min_alpha = model.alpha
if epoch == 5:
model.alpha += 0.05
warning = "Effective 'alpha' higher than previous training cycles"
self.assertTrue(warning in str(l))
#endclass TestDoc2VecModel
if not hasattr(TestDoc2VecModel, 'assertLess'):
# workaround for python 2.6
def assertLess(self, a, b, msg=None):
self.assertTrue(a < b, msg="%s is not less than %s" % (a, b))
setattr(TestDoc2VecModel, 'assertLess', assertLess)
# following code is useful for reproducing paragraph-vectors paper sentiment experiments
class ConcatenatedDoc2Vec(object):
"""
Concatenation of multiple models for reproducing the Paragraph Vectors paper.
Models must have exactly-matching vocabulary and document IDs. (Models should
be trained separately; this wrapper just returns concatenated results.)
"""
def __init__(self, models):
self.models = models
if hasattr(models[0], 'docvecs'):
self.docvecs = ConcatenatedDocvecs([model.docvecs for model in models])
def __getitem__(self, token):
return np.concatenate([model[token] for model in self.models])
def infer_vector(self, document, alpha=0.1, min_alpha=0.0001, steps=5):
return np.concatenate([model.infer_vector(document, alpha, min_alpha, steps) for model in self.models])
def train(self, ignored):
pass # train subcomponents individually
class ConcatenatedDocvecs(object):
def __init__(self, models):
self.models = models
def __getitem__(self, token):
return np.concatenate([model[token] for model in self.models])
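# Editorial usage sketch: pairing a DBOW and a DM model and reading back the
# concatenated vector for document 0, in the spirit of the paragraph-vectors
# experiments mentioned above. The hyperparameters are illustrative, not the
# paper's settings; call it e.g. with the `list_corpus` defined earlier.
def _example_concatenated_doc2vec(corpus):
    dbow = doc2vec.Doc2Vec(corpus, dm=0, min_count=2)
    dmm = doc2vec.Doc2Vec(corpus, dm=1, dm_mean=1, min_count=2)
    combined = ConcatenatedDoc2Vec([dbow, dmm])
    return combined.docvecs[0]  # concatenation of both models' vectors for doc 0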
SentimentDocument = namedtuple('SentimentDocument', 'words tags split sentiment')
def read_su_sentiment_rotten_tomatoes(dirname, lowercase=True):
"""
Read and return documents from the Stanford Sentiment Treebank
corpus (Rotten Tomatoes reviews), from http://nlp.Stanford.edu/sentiment/
Initialize the corpus from a given directory, where
http://nlp.stanford.edu/~socherr/stanfordSentimentTreebank.zip
has been expanded. It's not too big, so compose entirely into memory.
"""
logging.info("loading corpus from %s" % dirname)
# many mangled chars in sentences (datasetSentences.txt)
chars_sst_mangled = ['à', 'á', 'â', 'ã', 'æ', 'ç', 'è', 'é', 'í',
'í', 'ï', 'ñ', 'ó', 'ô', 'ö', 'û', 'ü']
sentence_fixups = [(char.encode('utf-8').decode('latin1'), char) for char in chars_sst_mangled]
# more junk, and the replace necessary for sentence-phrase consistency
sentence_fixups.extend([
('Â', ''),
('\xa0', ' '),
('-LRB-', '('),
('-RRB-', ')'),
])
# only this junk in phrases (dictionary.txt)
phrase_fixups = [('\xa0', ' ')]
# sentence_id and split are only positive for the full sentences
# read sentences to temp {sentence -> (id,split) dict, to correlate with dictionary.txt
info_by_sentence = {}
with open(os.path.join(dirname, 'datasetSentences.txt'), 'r') as sentences:
with open(os.path.join(dirname, 'datasetSplit.txt'), 'r') as splits:
next(sentences) # legend
next(splits) # legend
for sentence_line, split_line in izip(sentences, splits):
(id, text) = sentence_line.split('\t')
id = int(id)
text = text.rstrip()
for junk, fix in sentence_fixups:
text = text.replace(junk, fix)
(id2, split_i) = split_line.split(',')
assert id == int(id2)
if text not in info_by_sentence: # discard duplicates
info_by_sentence[text] = (id, int(split_i))
# read all phrase text
phrases = [None] * 239232 # known size of phrases
with open(os.path.join(dirname, 'dictionary.txt'), 'r') as phrase_lines:
for line in phrase_lines:
(text, id) = line.split('|')
for junk, fix in phrase_fixups:
text = text.replace(junk, fix)
phrases[int(id)] = text.rstrip() # for 1st pass just string
SentimentPhrase = namedtuple('SentimentPhrase', SentimentDocument._fields + ('sentence_id',))
# add sentiment labels, correlate with sentences
with open(os.path.join(dirname, 'sentiment_labels.txt'), 'r') as sentiments:
next(sentiments) # legend
for line in sentiments:
(id, sentiment) = line.split('|')
id = int(id)
sentiment = float(sentiment)
text = phrases[id]
words = text.split()
if lowercase:
words = [word.lower() for word in words]
(sentence_id, split_i) = info_by_sentence.get(text, (None, 0))
split = [None, 'train', 'test', 'dev'][split_i]
phrases[id] = SentimentPhrase(words, [id], split, sentiment, sentence_id)
assert len([phrase for phrase in phrases if phrase.sentence_id is not None]) == len(info_by_sentence) # all
# counts don't match 8544, 2210, 1101 because 13 TRAIN and 1 DEV sentences are duplicates
assert len([phrase for phrase in phrases if phrase.split == 'train']) == 8531 # 'train'
assert len([phrase for phrase in phrases if phrase.split == 'test']) == 2210 # 'test'
assert len([phrase for phrase in phrases if phrase.split == 'dev']) == 1100 # 'dev'
logging.info("loaded corpus with %i sentences and %i phrases from %s",
len(info_by_sentence), len(phrases), dirname)
return phrases
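# Editorial usage sketch: selecting the training split returned above and
# fitting a Doc2Vec model on it. The directory argument is a placeholder for
# an expanded stanfordSentimentTreebank download; the model parameters are
# illustrative.
def _example_sentiment_training(dirname):
    phrases = read_su_sentiment_rotten_tomatoes(dirname)
    train_docs = [phrase for phrase in phrases if phrase.split == 'train']
    return doc2vec.Doc2Vec(train_docs, min_count=2)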
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
logging.info("using optimization %s", doc2vec.FAST_VERSION)
unittest.main()
|
gauribhoite/personfinder
|
refs/heads/master
|
env/site-packages/pygments/styles/emacs.py
|
135
|
# -*- coding: utf-8 -*-
"""
pygments.styles.emacs
~~~~~~~~~~~~~~~~~~~~~
A highlighting style for Pygments, inspired by Emacs.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class EmacsStyle(Style):
"""
The default style (inspired by Emacs 22).
"""
background_color = "#f8f8f8"
default_style = ""
styles = {
Whitespace: "#bbbbbb",
Comment: "italic #008800",
Comment.Preproc: "noitalic",
Comment.Special: "noitalic bold",
Keyword: "bold #AA22FF",
Keyword.Pseudo: "nobold",
Keyword.Type: "bold #00BB00",
Operator: "#666666",
Operator.Word: "bold #AA22FF",
Name.Builtin: "#AA22FF",
Name.Function: "#00A000",
Name.Class: "#0000FF",
Name.Namespace: "bold #0000FF",
Name.Exception: "bold #D2413A",
Name.Variable: "#B8860B",
Name.Constant: "#880000",
Name.Label: "#A0A000",
Name.Entity: "bold #999999",
Name.Attribute: "#BB4444",
Name.Tag: "bold #008000",
Name.Decorator: "#AA22FF",
String: "#BB4444",
String.Doc: "italic",
String.Interpol: "bold #BB6688",
String.Escape: "bold #BB6622",
String.Regex: "#BB6688",
String.Symbol: "#B8860B",
String.Other: "#008000",
Number: "#666666",
Generic.Heading: "bold #000080",
Generic.Subheading: "bold #800080",
Generic.Deleted: "#A00000",
Generic.Inserted: "#00A000",
Generic.Error: "#FF0000",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold #000080",
Generic.Output: "#888",
Generic.Traceback: "#04D",
Error: "border:#FF0000"
}
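# Editorial usage sketch (standard Pygments API): rendering a small Python
# snippet to HTML with this style; the code string is arbitrary.
def _example_emacs_style():
    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter
    return highlight("print('hi')", PythonLexer(), HtmlFormatter(style=EmacsStyle))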
|
Yusa95/numpy
|
refs/heads/master
|
numpy/core/tests/test_errstate.py
|
146
|
from __future__ import division, absolute_import, print_function
import platform
import numpy as np
from numpy.testing import TestCase, assert_, run_module_suite, dec
class TestErrstate(TestCase):
@dec.skipif(platform.machine() == "armv5tel", "See gh-413.")
def test_invalid(self):
with np.errstate(all='raise', under='ignore'):
a = -np.arange(3)
# This should work
with np.errstate(invalid='ignore'):
np.sqrt(a)
# While this should fail!
try:
np.sqrt(a)
except FloatingPointError:
pass
else:
self.fail("Did not raise an invalid error")
def test_divide(self):
with np.errstate(all='raise', under='ignore'):
a = -np.arange(3)
# This should work
with np.errstate(divide='ignore'):
a // 0
# While this should fail!
try:
a // 0
except FloatingPointError:
pass
else:
self.fail("Did not raise divide by zero error")
def test_errcall(self):
def foo(*args):
print(args)
olderrcall = np.geterrcall()
with np.errstate(call=foo):
assert_(np.geterrcall() is foo, 'call is not foo')
with np.errstate(call=None):
assert_(np.geterrcall() is None, 'call is not None')
assert_(np.geterrcall() is olderrcall, 'call is not olderrcall')
if __name__ == "__main__":
run_module_suite()
|
lancezlin/pylearn2
|
refs/heads/master
|
pylearn2/utils/image.py
|
39
|
"""
Utility functions for working with images.
"""
import logging
import numpy as np
plt = None
axes = None
from theano.compat.six.moves import xrange
from theano.compat.six import string_types
import warnings
try:
import matplotlib.pyplot as plt
import matplotlib.axes
except (RuntimeError, ImportError, TypeError) as matplotlib_exception:
warnings.warn("Unable to import matplotlib. Some features unavailable. "
"Original exception: " + str(matplotlib_exception))
import os
try:
from PIL import Image
except ImportError:
Image = None
from pylearn2.utils import string_utils as string
from pylearn2.utils.exc import reraise_as
from tempfile import mkstemp
from multiprocessing import Process
import subprocess
logger = logging.getLogger(__name__)
def ensure_Image():
"""Makes sure Image has been imported from PIL"""
global Image
if Image is None:
raise RuntimeError("You are trying to use PIL-dependent functionality"
" but don't have PIL installed.")
def imview(*args, **kwargs):
"""
A matplotlib-based image viewer command,
wrapping `matplotlib.pyplot.imshow` but behaving more
sensibly.
Parameters
----------
figure : TODO
TODO: write parameters section using decorators to inherit
the matplotlib docstring
Notes
-----
Parameters are identical to `matplotlib.pyplot.imshow`
but this behaves somewhat differently:
* By default, it creates a new figure (unless a
      `figure` keyword argument is supplied).
* It modifies the axes of that figure to use the
full frame, without ticks or tick labels.
* It turns on `nearest` interpolation by default
(i.e., it does not antialias pixel data). This
can be overridden with the `interpolation`
argument as in `imshow`.
All other arguments and keyword arguments are passed
    on to `imshow`.
"""
if 'figure' not in kwargs:
f = plt.figure()
else:
f = kwargs['figure']
new_ax = matplotlib.axes.Axes(f,
[0, 0, 1, 1],
xticks=[],
yticks=[],
frame_on=False)
f.delaxes(f.gca())
f.add_axes(new_ax)
if len(args) < 5 and 'interpolation' not in kwargs:
kwargs['interpolation'] = 'nearest'
plt.imshow(*args, **kwargs)
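# Editorial usage sketch: viewing a random grayscale array with the wrapper
# above; requires a working matplotlib backend.
def _example_imview():
    imview(np.random.rand(32, 32))
    plt.show()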
def imview_async(*args, **kwargs):
"""
A version of `imview` that forks a separate process and
immediately shows the image.
Parameters
----------
window_title : str
TODO: writeme with decorators to inherit the other imviews'
docstrings
Notes
-----
Supports the `window_title` keyword argument to cope with
the title always being 'Figure 1'.
Returns the `multiprocessing.Process` handle.
"""
if 'figure' in kwargs:
raise ValueError("passing a figure argument not supported")
def fork_image_viewer():
f = plt.figure()
kwargs['figure'] = f
imview(*args, **kwargs)
if 'window_title' in kwargs:
f.set_window_title(kwargs['window_title'])
plt.show()
p = Process(None, fork_image_viewer)
p.start()
return p
def show(image):
"""
.. todo::
WRITEME
Parameters
----------
image : PIL Image object or ndarray
If ndarray, integer formats are assumed to use 0-255
and float formats are assumed to use 0-1
"""
viewer_command = string.preprocess('${PYLEARN2_VIEWER_COMMAND}')
if viewer_command == 'inline':
return imview(image)
if hasattr(image, '__array__'):
# do some shape checking because PIL just raises a tuple indexing error
# that doesn't make it very clear what the problem is
if len(image.shape) < 2 or len(image.shape) > 3:
raise ValueError('image must have either 2 or 3 dimensions but its'
' shape is ' + str(image.shape))
# The below is a temporary workaround that prevents us from crashing
# 3rd party image viewers such as eog by writing out overly large
# images.
# In the long run we should determine if this is a bug in PIL when
# producing
# such images or a bug in eog and determine a proper fix.
# Since this is hopefully just a short term workaround the
# constants below are not included in the interface to the
# function, so that 3rd party code won't start passing them.
max_height = 4096
max_width = 4096
# Display separate warnings for each direction, since it's
# common to crop only one.
if image.shape[0] > max_height:
image = image[0:max_height, :, :]
warnings.warn("Cropping image to smaller height to avoid crashing "
"the viewer program.")
        if image.shape[1] > max_width:
image = image[:, 0:max_width, :]
warnings.warn("Cropping the image to a smaller width to avoid "
"crashing the viewer program.")
# This ends the workaround
if image.dtype == 'int8':
image = np.cast['uint8'](image)
elif str(image.dtype).startswith('float'):
# don't use *=, we don't want to modify the input array
image = image * 255.
image = np.cast['uint8'](image)
# PIL is too stupid to handle single-channel arrays
if len(image.shape) == 3 and image.shape[2] == 1:
image = image[:, :, 0]
try:
ensure_Image()
image = Image.fromarray(image)
except TypeError:
reraise_as(TypeError("PIL issued TypeError on ndarray of shape " +
str(image.shape) + " and dtype " +
str(image.dtype)))
# Create a temporary file with the suffix '.png'.
fd, name = mkstemp(suffix='.png')
os.close(fd)
# Note:
# Although we can use tempfile.NamedTemporaryFile() to create
# a temporary file, the function should be used with care.
#
# In Python earlier than 2.7, a temporary file created by the
# function will be deleted just after the file is closed.
# We can re-use the name of the temporary file, but there is an
# instant where a file with the name does not exist in the file
# system before we re-use the name. This may cause a race
# condition.
#
# In Python 2.7 or later, tempfile.NamedTemporaryFile() has
# the 'delete' argument which can control whether a temporary
# file will be automatically deleted or not. With the argument,
# the above race condition can be avoided.
#
image.save(name)
if os.name == 'nt':
subprocess.Popen(viewer_command + ' ' + name + ' && del ' + name,
shell=True)
else:
subprocess.Popen(viewer_command + ' ' + name + ' ; rm ' + name,
shell=True)
def pil_from_ndarray(ndarray):
"""
Converts an ndarray to a PIL image.
Parameters
----------
ndarray : ndarray
An ndarray containing an image.
Returns
-------
pil : PIL Image
A PIL Image containing the image.
"""
try:
if ndarray.dtype == 'float32' or ndarray.dtype == 'float64':
assert ndarray.min() >= 0.0
assert ndarray.max() <= 1.0
ndarray = np.cast['uint8'](ndarray * 255)
if len(ndarray.shape) == 3 and ndarray.shape[2] == 1:
ndarray = ndarray[:, :, 0]
ensure_Image()
rval = Image.fromarray(ndarray)
return rval
except Exception as e:
logger.exception('original exception: ')
logger.exception(e)
logger.exception('ndarray.dtype: {0}'.format(ndarray.dtype))
logger.exception('ndarray.shape: {0}'.format(ndarray.shape))
raise
assert False
def ndarray_from_pil(pil, dtype='uint8'):
"""
Converts a PIL Image to an ndarray.
Parameters
----------
pil : PIL Image
An image represented as a PIL Image object
dtype : str
The dtype of ndarray to create
Returns
-------
ndarray : ndarray
The image as an ndarray.
"""
rval = np.asarray(pil)
if dtype != rval.dtype:
rval = np.cast[dtype](rval)
if str(dtype).startswith('float'):
rval /= 255.
if len(rval.shape) == 2:
rval = rval.reshape(rval.shape[0], rval.shape[1], 1)
return rval
def rescale(image, shape):
"""
Scales image to be no larger than shape. PIL might give you
unexpected results beyond that.
Parameters
----------
image : WRITEME
shape : WRITEME
Returns
-------
WRITEME
"""
assert len(image.shape) == 3 # rows, cols, channels
assert len(shape) == 2 # rows, cols
i = pil_from_ndarray(image)
ensure_Image()
i.thumbnail([shape[1], shape[0]], Image.ANTIALIAS)
rval = ndarray_from_pil(i, dtype=image.dtype)
return rval
resize = rescale
def fit_inside(image, shape):
"""
Scales image down to fit inside shape preserves proportions of image
Parameters
----------
image : WRITEME
shape : WRITEME
Returns
-------
WRITEME
"""
assert len(image.shape) == 3 # rows, cols, channels
assert len(shape) == 2 # rows, cols
if image.shape[0] <= shape[0] and image.shape[1] <= shape[1]:
return image.copy()
row_ratio = float(image.shape[0]) / float(shape[0])
col_ratio = float(image.shape[1]) / float(shape[1])
if row_ratio > col_ratio:
target_shape = [shape[0], min(image.shape[1] / row_ratio, shape[1])]
else:
target_shape = [min(image.shape[0] / col_ratio, shape[0]), shape[1]]
assert target_shape[0] <= shape[0]
assert target_shape[1] <= shape[1]
assert target_shape[0] == shape[0] or target_shape[1] == shape[1]
rval = rescale(image, target_shape)
return rval
def letterbox(image, shape):
"""
Pads image with black letterboxing to bring image.shape up to shape
Parameters
----------
image : WRITEME
shape : WRITEME
Returns
-------
WRITEME
"""
assert len(image.shape) == 3 # rows, cols, channels
assert len(shape) == 2 # rows, cols
assert image.shape[0] <= shape[0]
assert image.shape[1] <= shape[1]
if image.shape[0] == shape[0] and image.shape[1] == shape[1]:
return image.copy()
rval = np.zeros((shape[0], shape[1], image.shape[2]), dtype=image.dtype)
rstart = (shape[0] - image.shape[0]) / 2
cstart = (shape[1] - image.shape[1]) / 2
rend = rstart + image.shape[0]
cend = cstart + image.shape[1]
rval[rstart:rend, cstart:cend] = image
return rval
def make_letterboxed_thumbnail(image, shape):
"""
Scales image down to shape. Preserves proportions of image, introduces
black letterboxing if necessary.
Parameters
----------
image : WRITEME
shape : WRITEME
Returns
-------
WRITEME
"""
assert len(image.shape) == 3
assert len(shape) == 2
shrunk = fit_inside(image, shape)
letterboxed = letterbox(shrunk, shape)
return letterboxed
def load(filepath, rescale_image=True, dtype='float64'):
"""
Load an image from a file.
Parameters
----------
filepath : str
Path to the image file to load
rescale_image : bool
Default value: True
If True, returned images have pixel values in [0, 1]. Otherwise,
values are in [0, 255].
dtype: str
The dtype to use for the returned value
Returns
-------
img : numpy ndarray
An array containing the image that was in the file.
"""
assert isinstance(filepath, string_types)
if not rescale_image and dtype == 'uint8':
ensure_Image()
rval = np.asarray(Image.open(filepath))
assert rval.dtype == 'uint8'
return rval
s = 1.0
if rescale_image:
s = 255.
try:
ensure_Image()
rval = Image.open(filepath)
except Exception:
reraise_as(Exception("Could not open " + filepath))
numpy_rval = np.array(rval)
msg = ("Tried to load an image, got an array with %d"
" dimensions. Expected 2 or 3."
"This may indicate a mildly corrupted image file. Try "
"converting it to a different image format with a different "
"editor like gimp or imagemagic. Sometimes these programs are "
"more robust to minor corruption than PIL and will emit a "
"correctly formatted image in the new format.")
if numpy_rval.ndim not in [2, 3]:
logger.error(dir(rval))
logger.error(rval)
logger.error(rval.size)
rval.show()
raise AssertionError(msg % numpy_rval.ndim)
rval = numpy_rval
rval = np.cast[dtype](rval) / s
if rval.ndim == 2:
rval = rval.reshape(rval.shape[0], rval.shape[1], 1)
if rval.ndim != 3:
raise AssertionError("Something went wrong opening " +
filepath + '. Resulting shape is ' +
str(rval.shape) +
" (it's meant to have 3 dimensions by now)")
return rval
def save(filepath, ndarray):
"""
Saves an image to a file.
Parameters
----------
filepath : str
The path to write the file to.
ndarray : ndarray
An array containing the image to be saved.
"""
pil_from_ndarray(ndarray).save(filepath)
def scale_to_unit_interval(ndar, eps=1e-8):
"""
Scales all values in the ndarray ndar to be between 0 and 1
Parameters
----------
ndar : WRITEME
eps : WRITEME
Returns
-------
WRITEME
"""
ndar = ndar.copy()
ndar -= ndar.min()
ndar *= 1.0 / (ndar.max() + eps)
return ndar
def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0),
scale_rows_to_unit_interval=True,
output_pixel_vals=True):
"""
Transform an array with one flattened image per row, into an array in
which images are reshaped and layed out like tiles on a floor.
This function is useful for visualizing datasets whose rows are images,
and also columns of matrices for transforming those rows
(such as the first layer of a neural net).
Parameters
----------
    X : numpy.ndarray
2-d ndarray or 4 tuple of 2-d ndarrays or None for channels,
in which every row is a flattened image.
    img_shape : 2-tuple of ints
The first component is the height of each image,
the second component is the width.
tile_shape : 2-tuple of ints
The number of images to tile in (row, columns) form.
scale_rows_to_unit_interval : bool
        Whether or not the values need to be scaled to [0, 1] before being plotted.
output_pixel_vals : bool
Whether or not the output should be pixel values (int8) or floats.
Returns
-------
y : 2d-ndarray
The return value has the same dtype as X, and is suitable for
viewing as an image with PIL.Image.fromarray.
"""
assert len(img_shape) == 2
assert len(tile_shape) == 2
assert len(tile_spacing) == 2
# The expression below can be re-written in a more C style as
# follows :
#
# out_shape = [0,0]
# out_shape[0] = (img_shape[0]+tile_spacing[0])*tile_shape[0] -
# tile_spacing[0]
# out_shape[1] = (img_shape[1]+tile_spacing[1])*tile_shape[1] -
# tile_spacing[1]
out_shape = [(ishp + tsp) * tshp - tsp for ishp, tshp, tsp
in zip(img_shape, tile_shape, tile_spacing)]
if isinstance(X, tuple):
assert len(X) == 4
# Create an output np ndarray to store the image
if output_pixel_vals:
out_array = np.zeros((out_shape[0], out_shape[1], 4),
dtype='uint8')
else:
out_array = np.zeros((out_shape[0], out_shape[1], 4),
dtype=X.dtype)
# colors default to 0, alpha defaults to 1 (opaque)
if output_pixel_vals:
channel_defaults = [0, 0, 0, 255]
else:
channel_defaults = [0., 0., 0., 1.]
for i in xrange(4):
if X[i] is None:
# if channel is None, fill it with zeros of the correct
# dtype
dt = out_array.dtype
if output_pixel_vals:
dt = 'uint8'
out_array[:, :, i] = np.zeros(out_shape, dtype=dt) + \
channel_defaults[i]
else:
# use a recurrent call to compute the channel and store it
# in the output
out_array[:, :, i] = tile_raster_images(
X[i], img_shape, tile_shape, tile_spacing,
scale_rows_to_unit_interval, output_pixel_vals)
return out_array
else:
# if we are dealing with only one channel
H, W = img_shape
Hs, Ws = tile_spacing
# generate a matrix to store the output
dt = X.dtype
if output_pixel_vals:
dt = 'uint8'
out_array = np.zeros(out_shape, dtype=dt)
for tile_row in xrange(tile_shape[0]):
for tile_col in xrange(tile_shape[1]):
if tile_row * tile_shape[1] + tile_col < X.shape[0]:
this_x = X[tile_row * tile_shape[1] + tile_col]
if scale_rows_to_unit_interval:
# if we should scale values to be between 0 and 1
# do this by calling the `scale_to_unit_interval`
# function
this_img = scale_to_unit_interval(
this_x.reshape(img_shape))
else:
this_img = this_x.reshape(img_shape)
# add the slice to the corresponding position in the
# output array
c = 1
if output_pixel_vals:
c = 255
out_array[
tile_row * (H + Hs): tile_row * (H + Hs) + H,
tile_col * (W + Ws): tile_col * (W + Ws) + W
] = this_img * c
return out_array
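# Editorial usage sketch: tiling 100 flattened 8x8 images into a 10x10 grid
# and writing the mosaic out with the helpers above. The data is random and
# the output path is a placeholder; saving requires PIL.
def _example_tile_raster_images():
    flat = np.random.rand(100, 64)  # one flattened 8x8 image per row
    mosaic = tile_raster_images(flat, img_shape=(8, 8), tile_shape=(10, 10),
                                tile_spacing=(1, 1))
    save("mosaic.png", mosaic)  # uint8 mosaic, saved as a grayscale PNG
    return mosaic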
if __name__ == '__main__':
black = np.zeros((50, 50, 3), dtype='uint8')
red = black.copy()
red[:, :, 0] = 255
green = black.copy()
green[:, :, 1] = 255
show(black)
show(green)
show(red)
|
liuqr/edx-xiaodun
|
refs/heads/master
|
common/lib/xmodule/xmodule/seq_module.py
|
6
|
import json
import logging
from lxml import etree
from xblock.fields import Integer, Scope
from xblock.fragment import Fragment
from pkg_resources import resource_string
from .exceptions import NotFoundError
from .fields import Date
from .mako_module import MakoModuleDescriptor
from .progress import Progress
from .x_module import XModule
from .xml_module import XmlDescriptor
log = logging.getLogger(__name__)
# HACK: This shouldn't be hard-coded to two types
# OBSOLETE: This obsoletes 'type'
class_priority = ['video', 'problem']
class SequenceFields(object):
has_children = True
# NOTE: Position is 1-indexed. This is silly, but there are now student
# positions saved on prod, so it's not easy to fix.
position = Integer(help="Last tab viewed in this sequence", scope=Scope.user_state)
due = Date(help="Date that this problem is due by", scope=Scope.settings)
extended_due = Date(
help="Date that this problem is due by for a particular student. This "
"can be set by an instructor, and will override the global due "
"date if it is set to a date that is later than the global due "
"date.",
default=None,
scope=Scope.user_state,
)
class SequenceModule(SequenceFields, XModule):
''' Layout module which lays out content in a temporal sequence
'''
js = {'coffee': [resource_string(__name__,
'js/src/sequence/display.coffee')],
'js': [resource_string(__name__, 'js/src/sequence/display/jquery.sequence.js')]}
css = {'scss': [resource_string(__name__, 'css/sequence/display.scss')]}
js_module_name = "Sequence"
def __init__(self, *args, **kwargs):
super(SequenceModule, self).__init__(*args, **kwargs)
# if position is specified in system, then use that instead
if getattr(self.system, 'position', None) is not None:
self.position = int(self.system.position)
def get_progress(self):
''' Return the total progress, adding total done and total available.
(assumes that each submodule uses the same "units" for progress.)
'''
# TODO: Cache progress or children array?
children = self.get_children()
progresses = [child.get_progress() for child in children]
progress = reduce(Progress.add_counts, progresses, None)
return progress
def handle_ajax(self, dispatch, data): # TODO: bounds checking
''' get = request.POST instance '''
if dispatch == 'goto_position':
self.position = int(data['position'])
return json.dumps({'success': True})
raise NotFoundError('Unexpected dispatch type')
def student_view(self, context):
# If we're rendering this sequence, but no position is set yet,
# default the position to the first element
if self.position is None:
self.position = 1
## Returns a set of all types of all sub-children
contents = []
fragment = Fragment()
for child in self.get_display_items():
progress = child.get_progress()
rendered_child = child.render('student_view', context)
fragment.add_frag_resources(rendered_child)
titles = child.get_content_titles()
print titles
childinfo = {
'content': rendered_child.content,
'title': "\n".join(titles),
'page_title': titles[0] if titles else '',
'progress_status': Progress.to_js_status_str(progress),
'progress_detail': Progress.to_js_detail_str(progress),
'type': child.get_icon_class(),
'id': child.id,
}
if childinfo['title'] == '':
childinfo['title'] = child.display_name_with_default
contents.append(childinfo)
params = {'items': contents,
'element_id': self.location.html_id(),
'item_id': self.id,
'position': self.position,
'tag': self.location.category,
'ajax_url': self.system.ajax_url,
}
fragment.add_content(self.system.render_template('seq_module.html', params))
return fragment
def mobi_student_view(self, context):
# If we're rendering this sequence, but no position is set yet,
# default the position to the first element
if self.position is None:
self.position = 1
## Returns a set of all types of all sub-children
contents = []
fragment = Fragment()
for child in self.get_display_items():
progress = child.get_progress()
rendered_child = child.render('mobi_student_view', context)
fragment.add_frag_resources(rendered_child)
childinfo = {
'content': rendered_child.content,
'title': "\n".join(
grand_child.display_name
for grand_child in child.get_children()
if grand_child.display_name is not None
),
'progress_status': Progress.to_js_status_str(progress),
'progress_detail': Progress.to_js_detail_str(progress),
'type': child.get_icon_class(),
'id': child.id,
}
if childinfo['title'] == '':
childinfo['title'] = child.display_name_with_default
contents.append(childinfo)
params = {'items': contents,
'element_id': self.location.html_id(),
'item_id': self.id,
'position': self.position,
'tag': self.location.category,
'ajax_url': self.system.ajax_url,
}
fragment.add_content(self.system.render_template('wechat/mobi_seq_module.html', params))
return fragment
def get_icon_class(self):
child_classes = set(child.get_icon_class()
for child in self.get_children())
new_class = 'other'
for c in class_priority:
if c in child_classes:
new_class = c
return new_class
class SequenceDescriptor(SequenceFields, MakoModuleDescriptor, XmlDescriptor):
mako_template = 'widgets/sequence-edit.html'
module_class = SequenceModule
js = {'coffee': [resource_string(__name__, 'js/src/sequence/edit.coffee')]}
js_module_name = "SequenceDescriptor"
@classmethod
def definition_from_xml(cls, xml_object, system):
children = []
for child in xml_object:
try:
child_block = system.process_xml(etree.tostring(child, encoding='unicode'))
children.append(child_block.scope_ids.usage_id)
except Exception as e:
log.exception("Unable to load child when parsing Sequence. Continuing...")
if system.error_tracker is not None:
system.error_tracker(u"ERROR: {0}".format(e))
continue
return {}, children
def definition_to_xml(self, resource_fs):
xml_object = etree.Element('sequential')
for child in self.get_children():
self.runtime.add_block_as_child_node(child, xml_object)
return xml_object
|