repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
henningpohl/Arduino
|
refs/heads/master
|
arduino-core/src/processing/app/i18n/python/requests/packages/urllib3/packages/six.py
|
2374
|
"""Utilities for writing code that runs on Python 2 and 3"""
#Copyright (c) 2010-2011 Benjamin Peterson
#Permission is hereby granted, free of charge, to any person obtaining a copy of
#this software and associated documentation files (the "Software"), to deal in
#the Software without restriction, including without limitation the rights to
#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
#the Software, and to permit persons to whom the Software is furnished to do so,
#subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.2.0"  # Revision 41c74fef2ded

# True when running under Python 3; every compatibility branch below keys
# off this flag.
PY3 = sys.version_info[0] == 3

if PY3:
    string_types = (str,)
    integer_types = (int,)
    class_types = (type,)
    text_type = str
    binary_type = bytes

    MAXSIZE = sys.maxsize
else:
    string_types = (basestring,)
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str

    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t): probe the
        # platform by asking len() to report a value that only fits in 64 bits.
        class X(object):
            def __len__(self):
                return 1 << 31

        try:
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result)
# This is a bit ugly, but it avoids running this again.
delattr(tp, self.name)
return result
class MovedModule(_LazyDescr):
    """Lazy reference to a module that was renamed between Python 2 and 3."""

    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            # On Python 3 the module lives under its new name, which
            # defaults to the attribute name itself.
            self.mod = name if new is None else new
        else:
            self.mod = old

    def _resolve(self):
        return _import_module(self.mod)
class MovedAttribute(_LazyDescr):
    """Lazy reference to an attribute that moved between modules or names."""

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            # Fall back: explicit new name, then old name, then the
            # attribute name itself.
            if new_attr is None:
                new_attr = name if old_attr is None else old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            self.attr = name if old_attr is None else old_attr

    def _resolve(self):
        module = _import_module(self.mod)
        return getattr(module, self.attr)
class _MovedItems(types.ModuleType):
    """Lazy loading of moved objects"""


# Table of everything reachable as six.moves.<name>; each entry is a lazy
# descriptor that imports its target on first use.
_moved_attributes = [
    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
    MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
    MovedAttribute("reduce", "__builtin__", "functools"),
    MovedAttribute("StringIO", "StringIO", "io"),
    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
    MovedModule("html_parser", "HTMLParser", "html.parser"),
    MovedModule("http_client", "httplib", "http.client"),
    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
    MovedModule("cPickle", "cPickle", "pickle"),
    MovedModule("queue", "Queue"),
    MovedModule("reprlib", "repr"),
    MovedModule("socketserver", "SocketServer"),
    MovedModule("tkinter", "Tkinter"),
    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
    MovedModule("tkinter_colorchooser", "tkColorChooser",
                "tkinter.colorchooser"),
    MovedModule("tkinter_commondialog", "tkCommonDialog",
                "tkinter.commondialog"),
    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                "tkinter.simpledialog"),
    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
    MovedModule("winreg", "_winreg"),
]

# Install every moved item as a lazy descriptor on the stand-in module class.
for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
del attr

# Expose the stand-in as the "<package>.moves" pseudo-module so that
# "from six.moves import x" works.
moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
def add_move(move):
    """Add an item to six.moves."""
    setattr(_MovedItems, move.name, move)


def remove_move(name):
    """Remove item from six.moves."""
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        # The descriptor may already have resolved itself onto the moves
        # instance; fall back to deleting it there.
        try:
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))
# Attribute-name indirection: introspection attribute names differ between
# Python 2 and 3; the accessor helpers below look these up dynamically.
if PY3:
    _meth_func, _meth_self = "__func__", "__self__"
    _func_code, _func_defaults = "__code__", "__defaults__"
    _iterkeys, _itervalues, _iteritems = "keys", "values", "items"
else:
    _meth_func, _meth_self = "im_func", "im_self"
    _func_code, _func_defaults = "func_code", "func_defaults"
    _iterkeys, _itervalues, _iteritems = "iterkeys", "itervalues", "iteritems"
try:
    # Python 2.6+/3: the builtin next() exists; reuse it directly.
    advance_iterator = next
except NameError:
    # Older Python 2: fall back to calling the iterator's .next() method.
    def advance_iterator(it):
        return it.next()
next = advance_iterator
if PY3:
    def get_unbound_function(unbound):
        # Python 3 has no unbound methods; the function is passed through.
        return unbound

    Iterator = object

    def callable(obj):
        # Re-create the Python 2 builtin removed in early Python 3 releases.
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    class Iterator(object):
        # Bridge class: lets code define __next__ and still work under
        # Python 2's .next() protocol.
        def next(self):
            return type(self).__next__(self)

    callable = callable

_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")

# Accessors for the version-dependent attribute names defined above.
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
def iterkeys(d):
    """Return an iterator over the keys of a dictionary."""
    keys_method = getattr(d, _iterkeys)
    return iter(keys_method())


def itervalues(d):
    """Return an iterator over the values of a dictionary."""
    values_method = getattr(d, _itervalues)
    return iter(values_method())


def iteritems(d):
    """Return an iterator over the (key, value) pairs of a dictionary."""
    items_method = getattr(d, _iteritems)
    return iter(items_method())
if PY3:
    def b(s):
        # latin-1 maps code points 0-255 straight to bytes.
        return s.encode("latin-1")

    def u(s):
        return s

    if sys.version_info[1] <= 1:
        def int2byte(i):
            return bytes((i,))
    else:
        # This is about 2x faster than the implementation above on 3.2+
        int2byte = operator.methodcaller("to_bytes", 1, "big")

    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
else:
    def b(s):
        return s

    def u(s):
        return unicode(s, "unicode_escape")

    int2byte = chr

    import StringIO
    StringIO = BytesIO = StringIO.StringIO

_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
if PY3:
    import builtins
    # Python 3: exec and print are real functions; reuse them directly.
    exec_ = getattr(builtins, "exec")

    def reraise(tp, value, tb=None):
        # Re-raise *value*, attaching traceback *tb* unless it is already
        # the traceback carried by the exception.
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

    print_ = getattr(builtins, "print")
    del builtins
else:
    def exec_(code, globs=None, locs=None):
        """Execute code in a namespace."""
        if globs is None:
            # Default to the caller's globals/locals via frame inspection.
            frame = sys._getframe(1)
            globs = frame.f_globals
            if locs is None:
                locs = frame.f_locals
            del frame
        elif locs is None:
            locs = globs
        # The exec statement is wrapped in a string so this file still
        # parses under Python 3.
        exec("""exec code in globs, locs""")

    # The three-argument raise is a syntax error on Python 3, so it is
    # compiled indirectly through exec_.
    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")

    def print_(*args, **kwargs):
        """The new-style print function."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            fp.write(data)

        # If any argument (or sep/end) is unicode, the separators must be
        # unicode too so implicit str+unicode mixing doesn't blow up.
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)

_add_doc(reraise, """Reraise an exception.""")
def with_metaclass(meta, base=object):
    """Create a base class with a metaclass.

    Returns a throwaway class named ``NewBase`` constructed by *meta*, so
    that classes deriving from the result get *meta* as their metaclass on
    both Python 2 and Python 3.
    """
    return meta("NewBase", (base,), {})
|
tboyce021/home-assistant
|
refs/heads/dev
|
homeassistant/components/zha/device_trigger.py
|
15
|
"""Provides device automations for ZHA devices that emit events."""
import voluptuous as vol
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.components.device_automation.exceptions import (
InvalidDeviceAutomationConfig,
)
from homeassistant.components.homeassistant.triggers import event as event_trigger
from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_PLATFORM, CONF_TYPE
from . import DOMAIN
from .core.helpers import async_get_zha_device
# Configuration / event-payload keys used by this trigger platform.
CONF_SUBTYPE = "subtype"
DEVICE = "device"
DEVICE_IEEE = "device_ieee"
ZHA_EVENT = "zha_event"

# A ZHA device trigger is identified by a (type, subtype) string pair.
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
    {vol.Required(CONF_TYPE): str, vol.Required(CONF_SUBTYPE): str}
)
async def async_validate_trigger_config(hass, config):
    """Validate config."""
    config = TRIGGER_SCHEMA(config)
    if "zha" not in hass.config.components:
        # ZHA is not set up; schema validation above is all we can do.
        return config
    try:
        zha_device = await async_get_zha_device(hass, config[CONF_DEVICE_ID])
    except (KeyError, AttributeError) as err:
        raise InvalidDeviceAutomationConfig from err
    requested = (config[CONF_TYPE], config[CONF_SUBTYPE])
    supported = zha_device.device_automation_triggers
    if supported is None or requested not in supported:
        raise InvalidDeviceAutomationConfig
    return config
async def async_attach_trigger(hass, config, action, automation_info):
    """Listen for state changes based on configuration."""
    key = (config[CONF_TYPE], config[CONF_SUBTYPE])
    try:
        zha_device = await async_get_zha_device(hass, config[CONF_DEVICE_ID])
    except (KeyError, AttributeError):
        return None
    if key not in zha_device.device_automation_triggers:
        return None

    # Build the event-trigger config: device IEEE plus the device-specific
    # event data for this (type, subtype) pair.
    event_data = {DEVICE_IEEE: str(zha_device.ieee)}
    event_data.update(zha_device.device_automation_triggers[key])
    event_config = event_trigger.TRIGGER_SCHEMA(
        {
            event_trigger.CONF_PLATFORM: "event",
            event_trigger.CONF_EVENT_TYPE: ZHA_EVENT,
            event_trigger.CONF_EVENT_DATA: event_data,
        }
    )
    return await event_trigger.async_attach_trigger(
        hass, event_config, action, automation_info, platform_type="device"
    )
async def async_get_triggers(hass, device_id):
    """List device triggers.

    Make sure the device supports device automations and
    if it does return the trigger list.
    """
    zha_device = await async_get_zha_device(hass, device_id)
    if not zha_device.device_automation_triggers:
        return
    # Trigger keys are (type, subtype) pairs; iterate the mapping directly
    # (no need for .keys()) and build the list with a comprehension.
    return [
        {
            CONF_DEVICE_ID: device_id,
            CONF_DOMAIN: DOMAIN,
            CONF_PLATFORM: DEVICE,
            CONF_TYPE: trigger,
            CONF_SUBTYPE: subtype,
        }
        for trigger, subtype in zha_device.device_automation_triggers
    ]
|
antontsv/r2d2
|
refs/heads/master
|
sensor_showcase.py
|
1
|
#!/usr/bin/env python
import argparse
from motion_detector import *
from led import *
from distance_detector import *

# Command-line flags: each demo can be enabled individually, or all at once
# with -a/--all.
parser = argparse.ArgumentParser(description='Sensor showcase')
parser.add_argument('-a','--all', action="store_true", help='Show all demos')
parser.add_argument('-m','--motion', action="store_true", help='Motion sensor demo')
parser.add_argument('-l','--led', action="store_true", help=u'Led demo')
parser.add_argument('-d','--distance', action="store_true", help=u'Distance measurement demo')
args = parser.parse_args()
def header(title='Some demo'):
    """Print a framed section title plus the standard quit hint.

    Uses single-argument, parenthesized print calls so the function works
    under both Python 2 (print statement with parentheses) and Python 3;
    the original print statements were Python-2-only.
    """
    char = '*'
    frame = char * (len(title) + 4)
    print(frame)
    print("%s %s %s" % (char, title, char))
    print(frame)
    print("Press CTRL+C to quit at any time")
# How long each demo section runs, in seconds.
DEMO_RUNTIME_SEC = 30

# NOTE(review): `time` is never imported explicitly here -- it is assumed to
# leak in through one of the star imports above (motion_detector / led /
# distance_detector).  Confirm; an explicit `import time` would be safer.

if args.all or args.motion:
    # --- PIR motion sensor demo: poll twice a second and count detections.
    header("Motion sensor demo")
    print "Will be moving to next demo in %d seconds" % DEMO_RUNTIME_SEC
    print "Wave or walk in front of connected PIR sensor. You should see message once motion is detected."
    start = time.time()
    md = MotionDetector()
    i=0
    while time.time() - start < DEMO_RUNTIME_SEC:
        if md.any_movement():
            i+=1
            print "Motion Detected - %d" % i
        print '...zzz'
        time.sleep(0.5)

if args.all or args.led:
    # --- LED demo: alternate shining and sleeping until the time is up.
    header("LED light demo")
    print "Will be moving to next demo in %d seconds" % DEMO_RUNTIME_SEC
    start = time.time()
    led = Light()
    LIGHT_TIME_SEC = 5
    SLEEP_TIME_SEC = 3
    while time.time() - start < DEMO_RUNTIME_SEC:
        print "Shining for %d seconds..." % LIGHT_TIME_SEC
        led.shine_for(LIGHT_TIME_SEC)
        print '...zzz'
        time.sleep(SLEEP_TIME_SEC)

if args.all or args.distance:
    # --- Distance demo: sample for INTERVAL_SEC, then report the summed
    # positive readings alongside the counts of good/failed measurements.
    header("Distance detector demo")
    print "Will be moving to next demo in %d seconds" % DEMO_RUNTIME_SEC
    detector = DistanceDetector()
    start = time.time()
    INTERVAL_SEC = 3
    while time.time() - start < DEMO_RUNTIME_SEC:
        pos = 0
        false_count = 0
        collective = 0
        current_measure_start=time.time()
        while time.time() - current_measure_start < INTERVAL_SEC:
            reading = detector.measure()
            if reading > 0:
                pos += 1
                collective += reading
            else:
                false_count += 1
        # NOTE(review): collective/pos is integer division under Python 2 if
        # measure() returns ints -- presumably it returns floats; confirm.
        print "Distance measured over interval of %d sec: %f cm (pos: %d, false: %d)" % (INTERVAL_SEC, collective/pos if pos > 0 else -1, pos, false_count)
|
Intel-Corporation/tensorflow
|
refs/heads/master
|
tensorflow/python/debug/examples/debug_errors.py
|
13
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Example of debugging TensorFlow runtime errors using tfdbg."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
import numpy as np
import tensorflow as tf
from tensorflow.python import debug as tf_debug
def main(_):
    """Build a small TF1 graph and run it so tfdbg can inspect the failure.

    The positional argument is the argv list supplied by tf.app.run; unused.
    Which run fails (or succeeds) is selected by the --error flag.
    """
    sess = tf.Session()

    # Construct the TensorFlow network.
    ph_float = tf.placeholder(tf.float32, name="ph_float")
    x = tf.transpose(ph_float, name="x")
    v = tf.Variable(np.array([[-2.0], [-3.0], [6.0]], dtype=np.float32), name="v")
    m = tf.constant(
        np.array([[0.0, 1.0, 2.0], [-4.0, -1.0, 0.0]]),
        dtype=tf.float32,
        name="m")
    y = tf.matmul(m, x, name="y")
    z = tf.matmul(m, v, name="z")

    if FLAGS.debug:
        # Wrap the session so every run() drops into the tfdbg CLI.
        sess = tf_debug.LocalCLIDebugWrapperSession(sess, ui_type=FLAGS.ui_type)

    if FLAGS.error == "shape_mismatch":
        # Feeding a 3x1 value that is transposed to 1x3 cannot be matmul'd
        # with the 2x3 constant -- triggers a runtime shape error.
        print(sess.run(y, feed_dict={ph_float: np.array([[0.0], [1.0], [2.0]])}))
    elif FLAGS.error == "uninitialized_variable":
        # v is never initialized, so running z fails.
        print(sess.run(z))
    elif FLAGS.error == "no_error":
        print(sess.run(y, feed_dict={ph_float: np.array([[0.0, 1.0, 2.0]])}))
    else:
        raise ValueError("Unrecognized error type: " + FLAGS.error)
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # Register a parser for "bool"-typed flags so --debug accepts
    # "true"/"false" strings.
    parser.register("type", "bool", lambda v: v.lower() == "true")
    parser.add_argument(
        "--error",
        type=str,
        default="shape_mismatch",
        help="""\
Type of the error to generate (shape_mismatch | uninitialized_variable |
no_error).\
""")
    parser.add_argument(
        "--ui_type",
        type=str,
        default="curses",
        help="Command-line user interface type (curses | readline)")
    parser.add_argument(
        "--debug",
        type="bool",
        nargs="?",
        const=True,
        default=False,
        help="Use debugger to track down bad values during training")
    # Unrecognized args are forwarded to tf.app.run untouched.
    FLAGS, unparsed = parser.parse_known_args()
    with tf.Graph().as_default():
        tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
|
Acehaidrey/incubator-airflow
|
refs/heads/master
|
tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py
|
4
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import json
import unittest
from unittest.mock import patch
from airflow import DAG
from airflow.models import Connection
from airflow.providers.cncf.kubernetes.operators.spark_kubernetes import SparkKubernetesOperator
from airflow.utils import db, timezone
TEST_VALID_APPLICATION_YAML = """
apiVersion: "sparkoperator.k8s.io/v1beta2"
kind: SparkApplication
metadata:
name: spark-pi
namespace: default
spec:
type: Scala
mode: cluster
image: "gcr.io/spark-operator/spark:v2.4.5"
imagePullPolicy: Always
mainClass: org.apache.spark.examples.SparkPi
mainApplicationFile: "local:///opt/spark/examples/jars/spark-examples_2.11-2.4.5.jar"
sparkVersion: "2.4.5"
restartPolicy:
type: Never
volumes:
- name: "test-volume"
hostPath:
path: "/tmp"
type: Directory
driver:
cores: 1
coreLimit: "1200m"
memory: "512m"
labels:
version: 2.4.5
serviceAccount: spark
volumeMounts:
- name: "test-volume"
mountPath: "/tmp"
executor:
cores: 1
instances: 1
memory: "512m"
labels:
version: 2.4.5
volumeMounts:
- name: "test-volume"
mountPath: "/tmp"
"""
TEST_VALID_APPLICATION_JSON = """
{
"apiVersion":"sparkoperator.k8s.io/v1beta2",
"kind":"SparkApplication",
"metadata":{
"name":"spark-pi",
"namespace":"default"
},
"spec":{
"type":"Scala",
"mode":"cluster",
"image":"gcr.io/spark-operator/spark:v2.4.5",
"imagePullPolicy":"Always",
"mainClass":"org.apache.spark.examples.SparkPi",
"mainApplicationFile":"local:///opt/spark/examples/jars/spark-examples_2.11-2.4.5.jar",
"sparkVersion":"2.4.5",
"restartPolicy":{
"type":"Never"
},
"volumes":[
{
"name":"test-volume",
"hostPath":{
"path":"/tmp",
"type":"Directory"
}
}
],
"driver":{
"cores":1,
"coreLimit":"1200m",
"memory":"512m",
"labels":{
"version":"2.4.5"
},
"serviceAccount":"spark",
"volumeMounts":[
{
"name":"test-volume",
"mountPath":"/tmp"
}
]
},
"executor":{
"cores":1,
"instances":1,
"memory":"512m",
"labels":{
"version":"2.4.5"
},
"volumeMounts":[
{
"name":"test-volume",
"mountPath":"/tmp"
}
]
}
}
}
"""
TEST_APPLICATION_DICT = {
'apiVersion': 'sparkoperator.k8s.io/v1beta2',
'kind': 'SparkApplication',
'metadata': {'name': 'spark-pi', 'namespace': 'default'},
'spec': {
'driver': {
'coreLimit': '1200m',
'cores': 1,
'labels': {'version': '2.4.5'},
'memory': '512m',
'serviceAccount': 'spark',
'volumeMounts': [{'mountPath': '/tmp', 'name': 'test-volume'}],
},
'executor': {
'cores': 1,
'instances': 1,
'labels': {'version': '2.4.5'},
'memory': '512m',
'volumeMounts': [{'mountPath': '/tmp', 'name': 'test-volume'}],
},
'image': 'gcr.io/spark-operator/spark:v2.4.5',
'imagePullPolicy': 'Always',
'mainApplicationFile': 'local:///opt/spark/examples/jars/spark-examples_2.11-2.4.5.jar',
'mainClass': 'org.apache.spark.examples.SparkPi',
'mode': 'cluster',
'restartPolicy': {'type': 'Never'},
'sparkVersion': '2.4.5',
'type': 'Scala',
'volumes': [{'hostPath': {'path': '/tmp', 'type': 'Directory'}, 'name': 'test-volume'}],
},
}
@patch('airflow.providers.cncf.kubernetes.hooks.kubernetes.KubernetesHook.get_conn')
class TestSparkKubernetesOperator(unittest.TestCase):
    """Checks that SparkKubernetesOperator submits the expected custom object."""

    def setUp(self):
        # Two kubernetes connections: one bare, one carrying a namespace in
        # its extras.
        db.merge_conn(
            Connection(conn_id='kubernetes_default_kube_config', conn_type='kubernetes', extra=json.dumps({}))
        )
        db.merge_conn(
            Connection(
                conn_id='kubernetes_with_namespace',
                conn_type='kubernetes',
                extra=json.dumps({'extra__kubernetes__namespace': 'mock_namespace'}),
            )
        )
        default_args = {'owner': 'airflow', 'start_date': timezone.datetime(2020, 2, 1)}
        self.dag = DAG('test_dag_id', default_args=default_args)

    def _run_operator(self, application_file, conn_id, **extra_kwargs):
        # Build the operator and execute it once; callers assert against the
        # patched kubernetes client afterwards.
        op = SparkKubernetesOperator(
            application_file=application_file,
            dag=self.dag,
            kubernetes_conn_id=conn_id,
            task_id='test_task_id',
            **extra_kwargs,
        )
        op.execute(None)

    @patch('kubernetes.client.apis.custom_objects_api.CustomObjectsApi.create_namespaced_custom_object')
    def test_create_application_from_yaml(self, mock_create_namespaced_crd, mock_kubernetes_hook):
        self._run_operator(TEST_VALID_APPLICATION_YAML, 'kubernetes_default_kube_config')
        mock_kubernetes_hook.assert_called_once_with()
        mock_create_namespaced_crd.assert_called_with(
            body=TEST_APPLICATION_DICT,
            group='sparkoperator.k8s.io',
            namespace='default',
            plural='sparkapplications',
            version='v1beta2',
        )

    @patch('kubernetes.client.apis.custom_objects_api.CustomObjectsApi.create_namespaced_custom_object')
    def test_create_application_from_json(self, mock_create_namespaced_crd, mock_kubernetes_hook):
        self._run_operator(TEST_VALID_APPLICATION_JSON, 'kubernetes_default_kube_config')
        mock_kubernetes_hook.assert_called_once_with()
        mock_create_namespaced_crd.assert_called_with(
            body=TEST_APPLICATION_DICT,
            group='sparkoperator.k8s.io',
            namespace='default',
            plural='sparkapplications',
            version='v1beta2',
        )

    @patch('kubernetes.client.apis.custom_objects_api.CustomObjectsApi.create_namespaced_custom_object')
    def test_namespace_from_operator(self, mock_create_namespaced_crd, mock_kubernetes_hook):
        # An explicit operator namespace must win over the connection's.
        self._run_operator(
            TEST_VALID_APPLICATION_JSON, 'kubernetes_with_namespace', namespace='operator_namespace'
        )
        mock_kubernetes_hook.assert_called_once_with()
        mock_create_namespaced_crd.assert_called_with(
            body=TEST_APPLICATION_DICT,
            group='sparkoperator.k8s.io',
            namespace='operator_namespace',
            plural='sparkapplications',
            version='v1beta2',
        )

    @patch('kubernetes.client.apis.custom_objects_api.CustomObjectsApi.create_namespaced_custom_object')
    def test_namespace_from_connection(self, mock_create_namespaced_crd, mock_kubernetes_hook):
        # Without an operator namespace, the connection's extras supply it.
        self._run_operator(TEST_VALID_APPLICATION_JSON, 'kubernetes_with_namespace')
        mock_kubernetes_hook.assert_called_once_with()
        mock_create_namespaced_crd.assert_called_with(
            body=TEST_APPLICATION_DICT,
            group='sparkoperator.k8s.io',
            namespace='mock_namespace',
            plural='sparkapplications',
            version='v1beta2',
        )
|
odoobgorg/odoo
|
refs/heads/9.0
|
openerp/report/render/rml2txt/utils.py
|
48
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import copy
import re
import reportlab
import reportlab.lib.units
from openerp.tools.safe_eval import safe_eval as eval
_regex = re.compile('\[\[(.+?)\]\]')
def _child_get(node, self=None, tagname=None):
    """Yield the children of *node*, honouring rml_loop/rml_except/rml_tag.

    When *self* carries a localcontext, 'rml_loop' repeats a node once per
    context produced by evaluating its expression, 'rml_except' skips nodes
    whose expression raises, and 'rml_tag' rewrites a node's tag/attributes
    from an evaluated (tag, attrs) pair.  With *tagname* set, only matching
    tags are yielded.
    """
    for n in node:
        if self and self.localcontext and n.get('rml_loop', False):
            # Loop node: evaluate the expression to get an iterable of
            # context dicts and yield the node once per context.
            oldctx = self.localcontext
            for ctx in eval(n.get('rml_loop'),{}, self.localcontext):
                self.localcontext.update(ctx)
                if (tagname is None) or (n.tag==tagname):
                    if n.get('rml_except', False):
                        try:
                            eval(n.get('rml_except'), {}, self.localcontext)
                        except Exception:
                            # Expression failed: skip this iteration.
                            continue
                    if n.get('rml_tag'):
                        try:
                            # Rewrite tag and attributes on a shallow copy.
                            (tag,attr) = eval(n.get('rml_tag'),{}, self.localcontext)
                            n2 = copy.copy(n)
                            n2.tag = tag
                            n2.attrib.update(attr)
                            yield n2
                        except Exception:
                            yield n
                    else:
                        yield n
            # Restore the context that was active before the loop.
            self.localcontext = oldctx
            continue
        if self and self.localcontext and n.get('rml_except', False):
            try:
                eval(n.get('rml_except'), {}, self.localcontext)
            except Exception:
                continue
        if (tagname is None) or (n.tag==tagname):
            yield n
def _process_text(self, txt):
    """Expand the [[ ... ]] expressions in *txt* against self.localcontext.

    Literal fragments are passed through the optional 'translate' hook;
    expression fragments are evaluated and appended when they produce a
    string (ints/floats are stringified first).
    """
    if not self.localcontext:
        return txt
    if not txt:
        return ''
    rendered = ''
    # _regex has one capturing group, so split() alternates literal text
    # (even indices) with captured expressions (odd indices).
    for idx, piece in enumerate(_regex.split(txt)):
        if idx % 2 == 0:
            rendered += self.localcontext.get('translate', lambda x: x)(piece)
        else:
            try:
                value = eval(piece, self.localcontext)
            except Exception:
                value = ''
            if isinstance(value, (int, float)):
                value = str(value)
            if isinstance(value, basestring):
                rendered += value
    return rendered
def text_get(node):
    """Concatenate the direct children's text content of *node*.

    Fixes two defects in the original: ``getchildren()`` is deprecated and
    removed in Python 3.9 (iterating the element is the supported API), and
    a child whose ``.text`` is None no longer raises TypeError -- it simply
    contributes nothing.
    """
    return ''.join(child.text or '' for child in node)
# (pattern, multiplier) pairs mapping "<number><suffix>" size strings to
# reportlab points; the bare-number pattern has multiplier 1.
units = [
    (re.compile('^(-?[0-9\.]+)\s*in$'), reportlab.lib.units.inch),
    (re.compile('^(-?[0-9\.]+)\s*cm$'), reportlab.lib.units.cm),
    (re.compile('^(-?[0-9\.]+)\s*mm$'), reportlab.lib.units.mm),
    (re.compile('^(-?[0-9\.]+)\s*$'), 1)
]


def unit_get(size):
    """Convert a size string such as '2cm' to points; False if unparseable."""
    if not size:
        return False
    for pattern, factor in units:
        match = pattern.search(size, 0)
        if match:
            return factor * float(match.group(1))
    return False
def tuple_int_get(node, attr_name, default=None):
    """Parse a comma-separated node attribute into a list of ints.

    Returns *default* when the attribute is missing or empty.
    """
    raw = node.get(attr_name)
    if not raw:
        return default
    return [int(part) for part in raw.split(',')]
def bool_get(value):
    """Truthy when *value* stringifies to "1" or lowercases to "yes"."""
    if str(value) == "1":
        return True
    return value.lower() == 'yes'
def attr_get(node, attrs, dict=None):
    """Collect converted attribute values from *node*.

    Names in *attrs* are converted with unit_get(); *dict* maps additional
    attribute names to a conversion kind ('str', 'bool', 'int' or 'unit').
    The parameter is named ``dict`` (shadowing the builtin) for backward
    compatibility with keyword callers.
    """
    if dict is None:
        dict = {}
    res = {}
    for name in attrs:
        if node.get(name):
            res[name] = unit_get(node.get(name))
    # Dispatch table replacing the original if/elif chain; unknown kinds
    # are silently ignored, exactly as before.
    converters = {
        'str': str,
        'bool': bool_get,
        'int': int,
        'unit': unit_get,
    }
    for key, kind in dict.items():
        value = node.get(key)
        if value:
            convert = converters.get(kind)
            if convert is not None:
                res[key] = convert(value)
    return res
|
bubichain/blockchain
|
refs/heads/master
|
src/3rd/src/jsoncpp/scons-2.1.0/engine/SCons/Tool/m4.py
|
21
|
"""SCons.Tool.m4
Tool-specific initialization for m4.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/m4.py 5357 2011/09/09 21:31:03 bdeegan"
import SCons.Action
import SCons.Builder
import SCons.Util
def generate(env):
    """Add Builders and construction variables for m4 to an Environment."""
    m4_action = SCons.Action.Action('$M4COM', '$M4COMSTR')
    env['BUILDERS']['M4'] = SCons.Builder.Builder(action=m4_action, src_suffix='.m4')

    # .m4 files might include other files, and it would be pretty hard
    # to write a scanner for it, so let's just cd to the dir of the m4
    # file and run from there.
    # The src_suffix setup is like so: file.c.m4 -> file.c,
    # file.cpp.m4 -> file.cpp etc.
    env['M4'] = 'm4'
    env['M4FLAGS'] = SCons.Util.CLVar('-E')
    env['M4COM'] = 'cd ${SOURCE.rsrcdir} && $M4 $M4FLAGS < ${SOURCE.file} > ${TARGET.abspath}'
def exists(env):
    """Return whatever the Environment's tool detection reports for m4."""
    return env.Detect('m4')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
anshulsharmanyu/twitter_plot
|
refs/heads/master
|
Twitter Map Cloud Assignment/googleMapsTweet/urls.py
|
1
|
from django.conf.urls import url
from . import views

# Route the app root to the index view and /post/ to the tweet-posting view.
urlpatterns = [
    url(r'^$', views.Index, name='index'),
    url(r'^post/$', views.Post, name='post')
]
|
synergeticsedx/deployment-wipro
|
refs/heads/oxa/master.fic
|
lms/djangoapps/commerce/api/v0/urls.py
|
72
|
""" API v0 URLs. """
from django.conf.urls import patterns, url, include

from commerce.api.v0 import views

# Basket endpoints: creation at the collection root, order retrieval per
# basket id.
# NOTE(review): `patterns()` was removed in Django 1.10 -- this module
# assumes an older Django release; confirm against the project's pin.
BASKET_URLS = patterns(
    '',
    url(r'^$', views.BasketsView.as_view(), name='create'),
    url(r'^(?P<basket_id>[\w]+)/order/$', views.BasketOrderView.as_view(), name='retrieve_order'),
)

urlpatterns = patterns(
    '',
    url(r'^baskets/', include(BASKET_URLS, namespace='baskets')),
)
|
nicpottier/pool
|
refs/heads/master
|
pool/public/views.py
|
1
|
from django.shortcuts import render
from pool.league.models import Season, Player, Match, Team, AVG_GAMES
def index(request):
    """Render the season overview: players by average, matches, team summary."""
    current_season = Season.objects.all().order_by('-start_date').first()

    # get all matches for this season, grouped by player
    season_players = list(Player.objects.filter(scores__match__season=current_season).distinct())
    for season_player in season_players:
        season_player.set_season(current_season)

    # get all teams for this season
    season_teams = list(Team.season_summary(current_season))

    season_players.sort(key=lambda p: p.avg, reverse=True)
    season_matches = Match.objects.filter(season=current_season).order_by('date')

    context = dict(
        season=current_season,
        players=season_players,
        matches=season_matches,
        teams=season_teams,
        avg_count=AVG_GAMES,
    )
    return render(request, 'index.html', context)
def player(request, player_id):
    """Render the detail page for a single player."""
    subject = Player.objects.get(id=player_id)
    context = {
        'player': subject,
        'games': subject.get_games(),
        'weeks': subject.get_week_averages(),
    }
    return render(request, 'player.html', context)
|
browseinfo/odoo_saas3_nicolas
|
refs/heads/master
|
openerp/osv/expression.py
|
6
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" Domain expression processing
The main duty of this module is to compile a domain expression into a
SQL query. A lot of things should be documented here, but as a first
step in the right direction, some tests in test_osv_expression.yml
might give you some additional information.
For legacy reasons, a domain uses an inconsistent two-levels abstract
syntax (domains are regular Python data structures). At the first
level, a domain is an expression made of terms (sometimes called
leaves) and (domain) operators used in prefix notation. The available
operators at this level are '!', '&', and '|'. '!' is a unary 'not',
'&' is a binary 'and', and '|' is a binary 'or'. For instance, here
is a possible domain. (<term> stands for an arbitrary term, more on
this later.)::
['&', '!', <term1>, '|', <term2>, <term3>]
It is equivalent to this pseudo code using infix notation::
(not <term1>) and (<term2> or <term3>)
The second level of syntax deals with the term representation. A term
is a triple of the form (left, operator, right). That is, a term uses
an infix notation, and the available operators, and possible left and
right operands differ with those of the previous level. Here is a
possible term::
('company_id.name', '=', 'OpenERP')
The left and right operand don't have the same possible values. The
left operand is field name (related to the model for which the domain
applies). Actually, the field name can use the dot-notation to
traverse relationships. The right operand is a Python value whose
type should match the used operator and field type. In the above
example, a string is used because the name field of a company has type
string, and because we use the '=' operator. When appropriate, a 'in'
operator can be used, and thus the right operand should be a list.
Note: the non-uniform syntax could have been more uniform, but this
would hide an important limitation of the domain syntax. Say that the
term representation was ['=', 'company_id.name', 'OpenERP']. Used in a
complete domain, this would look like::
['!', ['=', 'company_id.name', 'OpenERP']]
and you would be tempted to believe something like this would be
possible::
['!', ['=', 'company_id.name', ['&', ..., ...]]]
That is, a domain could be a valid operand. But this is not the
case. A domain is really limited to a two-level nature, and can not
take a recursive form: a domain is not a valid second-level operand.
Unaccent - Accent-insensitive search
OpenERP will use the SQL function 'unaccent' when available for the
'ilike' and 'not ilike' operators, and enabled in the configuration.
Normally the 'unaccent' function is obtained from `the PostgreSQL
'unaccent' contrib module
<http://developer.postgresql.org/pgdocs/postgres/unaccent.html>`_.
.. todo: The following explanation should be moved in some external
installation guide
The steps to install the module might differ on specific PostgreSQL
    versions. We give here some instructions for PostgreSQL 9.x on an
    Ubuntu system.
Ubuntu doesn't come yet with PostgreSQL 9.x, so an alternative package
source is used. We use Martin Pitt's PPA available at
`ppa:pitti/postgresql
<https://launchpad.net/~pitti/+archive/postgresql>`_.
.. code-block:: sh
> sudo add-apt-repository ppa:pitti/postgresql
> sudo apt-get update
Once the package list is up-to-date, you have to install PostgreSQL
9.0 and its contrib modules.
.. code-block:: sh
> sudo apt-get install postgresql-9.0 postgresql-contrib-9.0
When you want to enable unaccent on some database:
.. code-block:: sh
> psql9 <database> -f /usr/share/postgresql/9.0/contrib/unaccent.sql
Here :program:`psql9` is an alias for the newly installed PostgreSQL
9.0 tool, together with the correct port if necessary (for instance if
PostgreSQL 8.4 is running on 5432). (Other aliases can be used for
createdb and dropdb.)
.. code-block:: sh
> alias psql9='/usr/lib/postgresql/9.0/bin/psql -p 5433'
You can check unaccent is working:
.. code-block:: sh
> psql9 <database> -c"select unaccent('hélène')"
Finally, to instruct OpenERP to really use the unaccent function, you have to
start the server specifying the ``--unaccent`` flag.
"""
import logging
import traceback
import openerp.modules
from openerp.osv import fields
from openerp.osv.orm import MAGIC_COLUMNS
import openerp.tools as tools
# Domain operators (prefix notation, see module docstring).
NOT_OPERATOR = '!'
OR_OPERATOR = '|'
AND_OPERATOR = '&'
DOMAIN_OPERATORS = (NOT_OPERATOR, OR_OPERATOR, AND_OPERATOR)
# List of available term operators. It is also possible to use the '<>'
# operator, which is strictly the same as '!='; the latter should be preferred
# for consistency. This list doesn't contain '<>' as it is simplified to '!='
# by the normalize_leaf() function (so later part of the code deals with
# only one representation).
# Internals (i.e. not available to the user) 'inselect' and 'not inselect'
# operators are also used. In this case their right operand has the form (subselect, params).
TERM_OPERATORS = ('=', '!=', '<=', '<', '>', '>=', '=?', '=like', '=ilike',
                  'like', 'not like', 'ilike', 'not ilike', 'in', 'not in',
                  'child_of')
# A subset of the above operators, with a 'negative' semantic. When the
# expressions 'in NEGATIVE_TERM_OPERATORS' or 'not in NEGATIVE_TERM_OPERATORS' are used in the code
# below, this doesn't necessarily mean that any of those NEGATIVE_TERM_OPERATORS is
# legal in the processed term.
NEGATIVE_TERM_OPERATORS = ('!=', 'not like', 'not ilike', 'not in')
# Always-true / always-false leaves and the single-leaf domains wrapping them;
# used as neutral/absorbing elements and as dummy substitution leaves.
TRUE_LEAF = (1, '=', 1)
FALSE_LEAF = (0, '=', 1)
TRUE_DOMAIN = [TRUE_LEAF]
FALSE_DOMAIN = [FALSE_LEAF]
_logger = logging.getLogger(__name__)
# --------------------------------------------------
# Generic domain manipulation
# --------------------------------------------------
def normalize_domain(domain):
    """Return an equivalent domain where every implicit '&' operator has been
    made explicit. Normalized domains are complete prefix-notation expressions
    and can therefore be safely concatenated/combined with one another.

    :param domain: a list or tuple of domain operators and terms
    :return: the normalized domain (a list); an empty domain becomes TRUE_DOMAIN
    """
    assert isinstance(domain, (list, tuple)), "Domains to normalize must have a 'domain' form: a list or tuple of domain components"
    if not domain:
        return TRUE_DOMAIN
    arity = {NOT_OPERATOR: 1, AND_OPERATOR: 2, OR_OPERATOR: 2}
    normalized = []
    missing = 1  # number of operands still expected by the operators seen so far
    for token in domain:
        if missing == 0:
            # extra operand (e.g. [A, B]): prepend an implicit '&'
            normalized.insert(0, AND_OPERATOR)
            missing = 1
        normalized.append(token)
        if isinstance(token, (list, tuple)):
            # a domain term satisfies one pending operand
            missing -= 1
        else:
            missing += arity.get(token, 0) - 1
    assert missing == 0, 'This domain is syntactically not correct: %s' % (domain)
    return normalized
def combine(operator, unit, zero, domains):
    """Return a new normalized domain combining all ``domains`` with the
    n-ary domain ``operator``. The given domains must be normalized.

    :param operator: domain operator used to combine (AND_OPERATOR or OR_OPERATOR)
    :param unit: identity element of the domains "set" with regard to the
                 operation performed by ``operator``: combining ``unit`` with
                 any domain ``x`` yields ``x`` (e.g. TRUE_DOMAIN for AND).
    :param zero: absorbing element of the domains "set" with regard to the
                 operation performed by ``operator``: combining ``zero`` with
                 any domain yields ``zero`` (e.g. TRUE_DOMAIN for OR).
    :param domains: a list of normalized domains.
    :return: a single normalized domain.
    """
    result = []
    count = 0
    for domain in domains:
        if domain == unit:
            continue            # identity element: contributes nothing
        if domain == zero:
            return zero         # absorbing element: short-circuit
        if domain:
            result += domain
            count += 1
    result = [operator] * (count - 1) + result
    # Bug fix: an empty ``domains`` list, or one containing only ``unit``
    # components, used to return [] (the match-all domain). That is wrong for
    # OR, whose empty combination must be its identity FALSE_DOMAIN; return
    # ``unit`` instead (semantically unchanged for AND, where unit is
    # TRUE_DOMAIN, i.e. equivalent to the empty domain).
    return result or unit
def AND(domains):
    """Return a single normalized domain equivalent to the conjunction
    (logical AND) of all the normalized domains in ``domains``."""
    return combine(AND_OPERATOR, unit=TRUE_DOMAIN, zero=FALSE_DOMAIN, domains=domains)
def OR(domains):
    """Return a single normalized domain equivalent to the disjunction
    (logical OR) of all the normalized domains in ``domains``."""
    return combine(OR_OPERATOR, unit=FALSE_DOMAIN, zero=TRUE_DOMAIN, domains=domains)
def distribute_not(domain):
    """ Distribute any '!' domain operators found inside a normalized domain.
    Because we don't use SQL semantic for processing a 'left not in right'
    query (i.e. our 'not in' is not simply translated to a SQL 'not in'),
    it means that a '! left in right' can not be simply processed
    by __leaf_to_sql by first emitting code for 'left in right' then wrapping
    the result with 'not (...)', as it would result in a 'not in' at the SQL
    level.
    This function is thus responsible for pushing any '!' domain operators
    inside the terms themselves. For example::
        ['!','&',('user_id','=',4),('partner_id','in',[1,2])]
    will be turned into:
        ['|',('user_id','!=',4),('partner_id','not in',[1,2])]
    """
    def negate(leaf):
        """Negates and returns a single domain leaf term,
        using the opposite operator if possible"""
        left, operator, right = leaf
        # operators with a direct opposite form
        mapping = {
            '<': '>=',
            '>': '<=',
            '<=': '>',
            '>=': '<',
            '=': '!=',
            '!=': '=',
        }
        if operator in ('in', 'like', 'ilike'):
            operator = 'not ' + operator
            return [(left, operator, right)]
        if operator in ('not in', 'not like', 'not ilike'):
            # strip the leading 'not ' prefix (4 characters)
            operator = operator[4:]
            return [(left, operator, right)]
        if operator in mapping:
            operator = mapping[operator]
            return [(left, operator, right)]
        # no opposite operator exists (e.g. 'child_of', '=?'): keep an
        # explicit '!' in front of the unchanged leaf
        return [NOT_OPERATOR, (left, operator, right)]
    def distribute_negate(domain):
        """Negate the domain ``subtree`` rooted at domain[0],
        leaving the rest of the domain intact, and return
        (negated_subtree, untouched_domain_rest)

        Assumes ``domain`` is normalized and starts with a leaf, '&' or '|'
        (a leading '!' has already been consumed by the caller).
        """
        if is_leaf(domain[0]):
            return negate(domain[0]), domain[1:]
        if domain[0] == AND_OPERATOR:
            # De Morgan: !(A & B) == !A | !B; negate both operand subtrees
            done1, todo1 = distribute_negate(domain[1:])
            done2, todo2 = distribute_negate(todo1)
            return [OR_OPERATOR] + done1 + done2, todo2
        if domain[0] == OR_OPERATOR:
            # De Morgan: !(A | B) == !A & !B
            done1, todo1 = distribute_negate(domain[1:])
            done2, todo2 = distribute_negate(todo1)
            return [AND_OPERATOR] + done1 + done2, todo2
    if not domain:
        return []
    if domain[0] != NOT_OPERATOR:
        # keep the token as-is and recurse on the remainder of the domain
        return [domain[0]] + distribute_not(domain[1:])
    if domain[0] == NOT_OPERATOR:
        # consume the '!': negate the subtree it applies to, then continue
        # distributing over the untouched rest of the domain
        done, todo = distribute_negate(domain[1:])
        return done + distribute_not(todo)
# --------------------------------------------------
# Generic leaf manipulation
# --------------------------------------------------
def _quote(to_quote):
if '"' not in to_quote:
return '"%s"' % to_quote
return to_quote
def generate_table_alias(src_table_alias, joined_tables=None):
    """ Generate a standard table alias name. An alias is generated as following:
        - the base is the source table name (that can already be an alias)
        - then, each joined table is added in the alias using a 'link field name'
          that is used to render unique aliases for a given path
        - returns a tuple composed of the alias, and the full table alias to be
          added in a from condition with quoting done
        Examples:
        - src_table_alias='res_users', joined_tables=[]:
            alias = ('res_users','"res_users"')
        - src_table_alias='res_users', joined_tables=[('res_partner', 'parent_id')]
            alias = ('res_users__parent_id', '"res_partner" as "res_users__parent_id"')

        :param str src_table_alias: source table name (or already-built alias)
        :param list joined_tables: list of (dst_table_name, link_field_name)
            tuples; defaults to no joins
        :return tuple: (table_alias, alias statement for from clause with quotes added)
    """
    # Fixed: the default used to be a shared mutable ``[]`` literal (classic
    # mutable-default-argument pitfall); ``None`` keeps behaviour identical.
    if joined_tables is None:
        joined_tables = []
    alias = src_table_alias
    if not joined_tables:
        return '%s' % alias, '%s' % _quote(alias)
    # chain each link field name onto the alias with a '__' separator
    for link in joined_tables:
        alias += '__' + link[1]
    # PostgreSQL truncates identifiers beyond its default 63-byte limit, which
    # would silently break alias matching
    assert len(alias) < 64, 'Table alias name %s is longer than the 64 characters size accepted by default in postgresql.' % alias
    return '%s' % alias, '%s as %s' % (_quote(joined_tables[-1][0]), _quote(alias))
def get_alias_from_query(from_query):
    """Split a from-clause fragment into its (table name, alias) pair, with
    quoting removed from both.

    :param str from_query: either '"res_partner"' or
                           '"res_partner" as "res_users__partner_id"'
    :return tuple: (real table name, alias); both are the table name itself
                   when no ' as ' alias is present
    """
    parts = from_query.split(' as ')
    table_name = parts[0].replace('"', '')
    if len(parts) > 1:
        return table_name, parts[1].replace('"', '')
    return table_name, table_name
def normalize_leaf(element):
    """Return ``element`` with its operator rewritten into canonical form
    (lowercase, '<>' folded into '!=', boolean right-hand sides forced to
    '='/'!=', list right-hand sides forced to 'in'/'not in'), simplifying
    later processing. Non-leaf elements are returned untouched."""
    if not is_leaf(element):
        return element
    left, original, right = element
    operator = original.lower()
    if operator == '<>':
        operator = '!='
    if operator in ('in', 'not in') and isinstance(right, bool):
        _logger.warning("The domain term '%s' should use the '=' or '!=' operator." % ((left, original, right),))
        operator = '=' if operator == 'in' else '!='
    if operator in ('=', '!=') and isinstance(right, (list, tuple)):
        _logger.warning("The domain term '%s' should use the 'in' or 'not in' operator." % ((left, original, right),))
        operator = 'in' if operator == '=' else 'not in'
    return left, operator, right
def is_operator(element):
    """ Test whether an object is a valid domain operator ('!', '&' or '|'). """
    if not isinstance(element, basestring):
        return False
    return element in DOMAIN_OPERATORS
def is_leaf(element, internal=False):
    """ Test whether an object is a valid domain term:
        - is a list or tuple
        - with 3 elements
        - whose second element is a valid operator
        - whose first element is a non-empty string (or the whole element is
          one of the special TRUE_LEAF / FALSE_LEAF constants)

        :param tuple element: a leaf in form (left, operator, right)
        :param boolean internal: also allow the internal 'inselect' /
            'not inselect' operators. This should be always left to False.

        Note: OLD TODO change the share wizard to use this function.
    """
    allowed_ops = TERM_OPERATORS + ('<>',)
    if internal:
        allowed_ops += ('inselect', 'not inselect')
    return (isinstance(element, (tuple, list))
            and len(element) == 3
            and element[1] in allowed_ops
            and ((isinstance(element[0], basestring) and element[0])
                 or element in (TRUE_LEAF, FALSE_LEAF)))
# --------------------------------------------------
# SQL utils
# --------------------------------------------------
def select_from_where(cr, select_field, from_table, where_field, where_ids, where_operator):
    """Return the list of ``select_field`` values of the ``from_table`` rows
    whose ``where_field`` matches ``where_ids`` through ``where_operator``.

    NOTE(review): field and table identifiers are interpolated directly into
    the SQL string; they are assumed to be trusted internal names coming from
    model definitions, never user input. Only the values are passed as query
    parameters.
    """
    # todo: merge into parent query as sub-query
    res = []
    if where_ids:
        if where_operator in ['<', '>', '>=', '<=']:
            # comparison operators: only the first id is used as the bound
            cr.execute('SELECT "%s" FROM "%s" WHERE "%s" %s %%s' % \
                       (select_field, from_table, where_field, where_operator),
                       (where_ids[0],)) # TODO shouldn't this be min/max(where_ids) ?
            res = [r[0] for r in cr.fetchall()]
        else: # TODO where_operator is supposed to be 'in'? It is called with child_of...
            # chunk the ids to stay below the cursor's maximum IN-clause size
            for i in range(0, len(where_ids), cr.IN_MAX):
                subids = where_ids[i:i + cr.IN_MAX]
                cr.execute('SELECT "%s" FROM "%s" WHERE "%s" IN %%s' % \
                           (select_field, from_table, where_field), (tuple(subids),))
                res.extend([r[0] for r in cr.fetchall()])
    return res
def select_distinct_from_where_not_null(cr, select_field, from_table):
    """Return the distinct, non-NULL values of ``select_field`` found in
    ``from_table``. Identifiers are assumed to be trusted internal names."""
    query = 'SELECT distinct("%s") FROM "%s" where "%s" is not null' % (select_field, from_table, select_field)
    cr.execute(query)
    return [row[0] for row in cr.fetchall()]
def get_unaccent_wrapper(cr):
    """Return a callable wrapping an SQL expression in ``unaccent(...)`` when
    the registry of the cursor's database has unaccent support enabled;
    otherwise return the identity function."""
    registry = openerp.modules.registry.RegistryManager.get(cr.dbname)
    if registry.has_unaccent:
        return lambda expr: "unaccent(%s)" % (expr,)
    return lambda expr: expr
# --------------------------------------------------
# ExtendedLeaf class for managing leafs and contexts
# -------------------------------------------------
class ExtendedLeaf(object):
    """ Class wrapping a domain leaf, and giving some services and management
        features on it. In particular it manages join contexts to be able to
        construct queries through multiple models.
    """
    # --------------------------------------------------
    # Join / Context manipulation
    #   running examples:
    #   - res_users.name, like, foo: name is on res_partner, not on res_users
    #   - res_partner.bank_ids.name, like, foo: bank_ids is a one2many with _auto_join
    #   - res_partner.state_id.name, like, foo: state_id is a many2one with _auto_join
    # A join:
    #   - link between src_table and dst_table, using src_field and dst_field
    #       i.e.: inherits: res_users.partner_id = res_partner.id
    #       i.e.: one2many: res_partner.id = res_partner_bank.partner_id
    #       i.e.: many2one: res_partner.state_id = res_country_state.id
    #   - done in the context of a field
    #       i.e.: inherits: 'partner_id'
    #       i.e.: one2many: 'bank_ids'
    #       i.e.: many2one: 'state_id'
    #   - table names use aliases: initial table followed by the context field
    #     names, joined using a '__'
    #       i.e.: inherits: res_partner as res_users__partner_id
    #       i.e.: one2many: res_partner_bank as res_partner__bank_ids
    #       i.e.: many2one: res_country_state as res_partner__state_id
    #   - join condition use aliases
    #       i.e.: inherits: res_users.partner_id = res_users__partner_id.id
    #       i.e.: one2many: res_partner.id = res_partner__bank_ids.partner_id
    #       i.e.: many2one: res_partner.state_id = res_partner__state_id.id
    # Variables explanation:
    #   - src_table: working table before the join
    #       -> res_users, res_partner, res_partner
    #   - dst_table: working table after the join
    #       -> res_partner, res_partner_bank, res_country_state
    #   - src_table_link_name: field name used to link the src table, not
    #     necessarily a field (because 'id' is not a field instance)
    #       i.e.: inherits: 'partner_id', found in the inherits of the current table
    #       i.e.: one2many: 'id', not a field
    #       i.e.: many2one: 'state_id', the current field name
    #   - dst_table_link_name: field name used to link the dst table, not
    #     necessarily a field (because 'id' is not a field instance)
    #       i.e.: inherits: 'id', not a field
    #       i.e.: one2many: 'partner_id', _fields_id of the current field
    #       i.e.: many2one: 'id', not a field
    #   - context_field_name: field name used as a context to make the alias
    #       i.e.: inherits: 'partner_id': found in the inherits of the current table
    #       i.e.: one2many: 'bank_ids': current field name
    #       i.e.: many2one: 'state_id': current field name
    # --------------------------------------------------
    def __init__(self, leaf, model, join_context=None):
        """ Initialize the ExtendedLeaf

            :attr [string, tuple] leaf: operator or tuple-formatted domain
                expression
            :attr obj model: current working model
            :attr list _models: list of chained models, updated when
                adding joins
            :attr list join_context: list of join contexts. This is a list of
                tuples like ``(lhs, table, lhs_col, col, link)``

                where

                lhs
                    source (left hand) model
                model
                    destination (right hand) model
                lhs_col
                    source model column for join condition
                col
                    destination model column for join condition
                link
                    link column between source and destination model
                    that is not necessarily (but generally) a real column used
                    in the condition (i.e. in many2one); this link is used to
                    compute aliases
        """
        assert model, 'Invalid leaf creation without table'
        self.join_context = join_context or []
        self.leaf = leaf
        # normalize the leaf's operator
        self.normalize_leaf()
        # set working variables; handle the context stack and previous tables
        self.model = model
        self._models = []
        for item in self.join_context:
            self._models.append(item[0])
        self._models.append(model)
        # check validity
        self.check_leaf()
    def __str__(self):
        # debugging representation: leaf, current table and join context chain
        return '<osv.ExtendedLeaf: %s on %s (ctx: %s)>' % (str(self.leaf), self.model._table, ','.join(self._get_context_debug()))
    def generate_alias(self):
        """Return the table alias for the current model, derived from the
        initial table name and the chain of join-context link fields."""
        links = [(context[1]._table, context[4]) for context in self.join_context]
        alias, alias_statement = generate_table_alias(self._models[0]._table, links)
        return alias
    def add_join_context(self, model, lhs_col, table_col, link):
        """ See above comments for more details. A join context is a tuple like:
                ``(lhs, model, lhs_col, col, link)``

            After adding the join, the model of the current leaf is updated.
        """
        self.join_context.append((self.model, model, lhs_col, table_col, link))
        self._models.append(model)
        self.model = model
    def get_join_conditions(self):
        """Return the list of quoted SQL join conditions, one per join
        context, using the aliases built from the context link fields."""
        conditions = []
        alias = self._models[0]._table
        for context in self.join_context:
            previous_alias = alias
            alias += '__' + context[4]
            conditions.append('"%s"."%s"="%s"."%s"' % (previous_alias, context[2], alias, context[3]))
        return conditions
    def get_tables(self):
        """Return the set of quoted 'table as alias' statements needed by the
        from clause, one per join context."""
        tables = set()
        links = []
        for context in self.join_context:
            links.append((context[1]._table, context[4]))
            alias, alias_statement = generate_table_alias(self._models[0]._table, links)
            tables.add(alias_statement)
        return tables
    def _get_context_debug(self):
        """Return human-readable join-condition strings, for debugging only."""
        names = ['"%s"."%s"="%s"."%s" (%s)' % (item[0]._table, item[2], item[1]._table, item[3], item[4]) for item in self.join_context]
        return names
    # --------------------------------------------------
    # Leaf manipulation
    # --------------------------------------------------
    def check_leaf(self):
        """ Leaf validity rules:
            - a valid leaf is an operator or a leaf
            - a valid leaf has a field objects unless
                - it is not a tuple
                - it is an inherited field
                - left is id, operator is 'child_of'
                - left is in MAGIC_COLUMNS
        """
        if not is_operator(self.leaf) and not is_leaf(self.leaf, True):
            raise ValueError("Invalid leaf %s" % str(self.leaf))
    def is_operator(self):
        """Return True when the wrapped leaf is a domain operator."""
        return is_operator(self.leaf)
    def is_true_leaf(self):
        """Return True when the wrapped leaf is the always-true TRUE_LEAF."""
        return self.leaf == TRUE_LEAF
    def is_false_leaf(self):
        """Return True when the wrapped leaf is the always-false FALSE_LEAF."""
        return self.leaf == FALSE_LEAF
    def is_leaf(self, internal=False):
        """Return True when the wrapped leaf is a domain term; see
        module-level is_leaf() for the meaning of ``internal``."""
        return is_leaf(self.leaf, internal=internal)
    def normalize_leaf(self):
        """Canonicalize the wrapped leaf's operator in place (see
        module-level normalize_leaf())."""
        self.leaf = normalize_leaf(self.leaf)
        return True
def create_substitution_leaf(leaf, new_elements, new_model=None):
    """Build a new ExtendedLeaf carrying ``new_elements`` (and optionally a
    different working model) while sharing a copy of ``leaf``'s join context.
    Used to insert equivalent leaves into the processing stack."""
    model = new_model if new_model is not None else leaf.model
    context_copy = [tuple(ctx) for ctx in leaf.join_context]
    return ExtendedLeaf(new_elements, model, join_context=context_copy)
class expression(object):
""" Parse a domain expression
Use a real polish notation
Leafs are still in a ('foo', '=', 'bar') format
For more info: http://christophe-simonis-at-tiny.blogspot.com/2008/08/new-new-domain-notation.html
"""
    def __init__(self, cr, uid, exp, table, context):
        """ Initialize expression object and automatically parse the expression
            right after initialization.

            :param cr: database cursor
            :param uid: current user id
            :param exp: expression (using domain ('foo', '=', 'bar' format))
            :param table: root model
            :param context: context dict, forwarded to parsing and sub-searches

            :attr list result: list that will hold the result of the parsing
                as a list of ExtendedLeaf
            :attr list joins: list of join conditions, such as
                (res_country_state."id" = res_partner."state_id")
            :attr root_model: base model for the query
            :attr list expression: the domain expression, that will be normalized
                and prepared
        """
        self._unaccent = get_unaccent_wrapper(cr)
        self.joins = []
        self.root_model = table
        # normalize and prepare the expression for parsing
        self.expression = distribute_not(normalize_domain(exp))
        # parse the domain expression
        self.parse(cr, uid, context=context)
# ----------------------------------------
# Leafs management
# ----------------------------------------
def get_tables(self):
""" Returns the list of tables for SQL queries, like select from ... """
tables = []
for leaf in self.result:
for table in leaf.get_tables():
if table not in tables:
tables.append(table)
table_name = _quote(self.root_model._table)
if table_name not in tables:
tables.append(table_name)
return tables
# ----------------------------------------
# Parsing
# ----------------------------------------
def parse(self, cr, uid, context):
""" Transform the leaves of the expression
The principle is to pop elements from a leaf stack one at a time.
Each leaf is processed. The processing is a if/elif list of various
cases that appear in the leafs (many2one, function fields, ...).
Two things can happen as a processing result:
- the leaf has been modified and/or new leafs have to be introduced
in the expression; they are pushed into the leaf stack, to be
processed right after
- the leaf is added to the result
Some internal var explanation:
:var obj working_model: model object, model containing the field
(the name provided in the left operand)
:var list field_path: left operand seen as a path (foo.bar -> [foo, bar])
:var obj relational_model: relational model of a field (field._obj)
ex: res_partner.bank_ids -> res.partner.bank
"""
def to_ids(value, relational_model, context=None, limit=None):
""" Normalize a single id or name, or a list of those, into a list of ids
:param {int,long,basestring,list,tuple} value:
if int, long -> return [value]
if basestring, convert it into a list of basestrings, then
if list of basestring ->
perform a name_search on relational_model for each name
return the list of related ids
"""
names = []
if isinstance(value, basestring):
names = [value]
elif value and isinstance(value, (tuple, list)) and all(isinstance(item, basestring) for item in value):
names = value
elif isinstance(value, (int, long)):
return [value]
if names:
name_get_list = [name_get[0] for name in names for name_get in relational_model.name_search(cr, uid, name, [], 'ilike', context=context, limit=limit)]
return list(set(name_get_list))
return list(value)
def child_of_domain(left, ids, left_model, parent=None, prefix='', context=None):
""" Return a domain implementing the child_of operator for [(left,child_of,ids)],
either as a range using the parent_left/right tree lookup fields
(when available), or as an expanded [(left,in,child_ids)] """
if left_model._parent_store and (not left_model.pool._init):
# TODO: Improve where joins are implemented for many with '.', replace by:
# doms += ['&',(prefix+'.parent_left','<',o.parent_right),(prefix+'.parent_left','>=',o.parent_left)]
doms = []
for o in left_model.browse(cr, uid, ids, context=context):
if doms:
doms.insert(0, OR_OPERATOR)
doms += [AND_OPERATOR, ('parent_left', '<', o.parent_right), ('parent_left', '>=', o.parent_left)]
if prefix:
return [(left, 'in', left_model.search(cr, uid, doms, context=context))]
return doms
else:
def recursive_children(ids, model, parent_field):
if not ids:
return []
ids2 = model.search(cr, uid, [(parent_field, 'in', ids)], context=context)
return ids + recursive_children(ids2, model, parent_field)
return [(left, 'in', recursive_children(ids, left_model, parent or left_model._parent_name))]
def pop():
""" Pop a leaf to process. """
return self.stack.pop()
def push(leaf):
""" Push a leaf to be processed right after. """
self.stack.append(leaf)
def push_result(leaf):
""" Push a leaf to the results. This leaf has been fully processed
and validated. """
self.result.append(leaf)
self.result = []
self.stack = [ExtendedLeaf(leaf, self.root_model) for leaf in self.expression]
# process from right to left; expression is from left to right
self.stack.reverse()
while self.stack:
# Get the next leaf to process
leaf = pop()
# Get working variables
working_model = leaf.model
if leaf.is_operator():
left, operator, right = leaf.leaf, None, None
elif leaf.is_true_leaf() or leaf.is_false_leaf():
# because we consider left as a string
left, operator, right = ('%s' % leaf.leaf[0], leaf.leaf[1], leaf.leaf[2])
else:
left, operator, right = leaf.leaf
field_path = left.split('.', 1)
field = working_model._columns.get(field_path[0])
if field and field._obj:
relational_model = working_model.pool[field._obj]
else:
relational_model = None
# ----------------------------------------
# SIMPLE CASE
# 1. leaf is an operator
# 2. leaf is a true/false leaf
# -> add directly to result
# ----------------------------------------
if leaf.is_operator() or leaf.is_true_leaf() or leaf.is_false_leaf():
push_result(leaf)
# ----------------------------------------
# FIELD NOT FOUND
# -> from inherits'd fields -> work on the related model, and add
# a join condition
# -> ('id', 'child_of', '..') -> use a 'to_ids'
# -> but is one on the _log_access special fields, add directly to
# result
# TODO: make these fields explicitly available in self.columns instead!
# -> else: crash
# ----------------------------------------
elif not field and field_path[0] in working_model._inherit_fields:
# comments about inherits'd fields
# { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
# field_column_obj, origina_parent_model), ... }
next_model = working_model.pool[working_model._inherit_fields[field_path[0]][0]]
leaf.add_join_context(next_model, working_model._inherits[next_model._name], 'id', working_model._inherits[next_model._name])
push(leaf)
elif left == 'id' and operator == 'child_of':
ids2 = to_ids(right, working_model, context)
dom = child_of_domain(left, ids2, working_model)
for dom_leaf in reversed(dom):
new_leaf = create_substitution_leaf(leaf, dom_leaf, working_model)
push(new_leaf)
elif not field and field_path[0] in MAGIC_COLUMNS:
push_result(leaf)
elif not field:
raise ValueError("Invalid field %r in leaf %r" % (left, str(leaf)))
# ----------------------------------------
# PATH SPOTTED
# -> many2one or one2many with _auto_join:
# - add a join, then jump into linked field: field.remaining on
# src_table is replaced by remaining on dst_table, and set for re-evaluation
# - if a domain is defined on the field, add it into evaluation
# on the relational table
# -> many2one, many2many, one2many: replace by an equivalent computed
# domain, given by recursively searching on the remaining of the path
# -> note: hack about fields.property should not be necessary anymore
# as after transforming the field, it will go through this loop once again
# ----------------------------------------
elif len(field_path) > 1 and field._type == 'many2one' and field._auto_join:
# res_partner.state_id = res_partner__state_id.id
leaf.add_join_context(relational_model, field_path[0], 'id', field_path[0])
push(create_substitution_leaf(leaf, (field_path[1], operator, right), relational_model))
elif len(field_path) > 1 and field._type == 'one2many' and field._auto_join:
# res_partner.id = res_partner__bank_ids.partner_id
leaf.add_join_context(relational_model, 'id', field._fields_id, field_path[0])
domain = field._domain(working_model) if callable(field._domain) else field._domain
push(create_substitution_leaf(leaf, (field_path[1], operator, right), relational_model))
if domain:
domain = normalize_domain(domain)
for elem in reversed(domain):
push(create_substitution_leaf(leaf, elem, relational_model))
push(create_substitution_leaf(leaf, AND_OPERATOR, relational_model))
elif len(field_path) > 1 and field._auto_join:
raise NotImplementedError('_auto_join attribute not supported on many2many field %s' % left)
elif len(field_path) > 1 and field._type == 'many2one':
right_ids = relational_model.search(cr, uid, [(field_path[1], operator, right)], context=context)
leaf.leaf = (field_path[0], 'in', right_ids)
push(leaf)
# Making search easier when there is a left operand as field.o2m or field.m2m
elif len(field_path) > 1 and field._type in ['many2many', 'one2many']:
right_ids = relational_model.search(cr, uid, [(field_path[1], operator, right)], context=context)
table_ids = working_model.search(cr, uid, [(field_path[0], 'in', right_ids)], context=dict(context, active_test=False))
leaf.leaf = ('id', 'in', table_ids)
push(leaf)
# -------------------------------------------------
# FUNCTION FIELD
# -> not stored: error if no _fnct_search, otherwise handle the result domain
# -> stored: management done in the remaining of parsing
# -------------------------------------------------
elif isinstance(field, fields.function) and not field.store and not field._fnct_search:
# this is a function field that is not stored
# the function field doesn't provide a search function and doesn't store
# values in the database, so we must ignore it : we generate a dummy leaf
leaf.leaf = TRUE_LEAF
_logger.error(
"The field '%s' (%s) can not be searched: non-stored "
"function field without fnct_search",
field.string, left)
# avoid compiling stack trace if not needed
if _logger.isEnabledFor(logging.DEBUG):
_logger.debug(''.join(traceback.format_stack()))
push(leaf)
elif isinstance(field, fields.function) and not field.store:
# this is a function field that is not stored
fct_domain = field.search(cr, uid, working_model, left, [leaf.leaf], context=context)
if not fct_domain:
leaf.leaf = TRUE_LEAF
push(leaf)
else:
# we assume that the expression is valid
# we create a dummy leaf for forcing the parsing of the resulting expression
for domain_element in reversed(fct_domain):
push(create_substitution_leaf(leaf, domain_element, working_model))
# self.push(create_substitution_leaf(leaf, TRUE_LEAF, working_model))
# self.push(create_substitution_leaf(leaf, AND_OPERATOR, working_model))
# -------------------------------------------------
# RELATIONAL FIELDS
# -------------------------------------------------
# Applying recursivity on field(one2many)
elif field._type == 'one2many' and operator == 'child_of':
ids2 = to_ids(right, relational_model, context)
if field._obj != working_model._name:
dom = child_of_domain(left, ids2, relational_model, prefix=field._obj)
else:
dom = child_of_domain('id', ids2, working_model, parent=left)
for dom_leaf in reversed(dom):
push(create_substitution_leaf(leaf, dom_leaf, working_model))
elif field._type == 'one2many':
call_null = True
if right is not False:
if isinstance(right, basestring):
ids2 = [x[0] for x in relational_model.name_search(cr, uid, right, [], operator, context=context, limit=None)]
if ids2:
operator = 'in'
else:
if not isinstance(right, list):
ids2 = [right]
else:
ids2 = right
if not ids2:
if operator in ['like', 'ilike', 'in', '=']:
#no result found with given search criteria
call_null = False
push(create_substitution_leaf(leaf, FALSE_LEAF, working_model))
else:
ids2 = select_from_where(cr, field._fields_id, relational_model._table, 'id', ids2, operator)
if ids2:
call_null = False
o2m_op = 'not in' if operator in NEGATIVE_TERM_OPERATORS else 'in'
push(create_substitution_leaf(leaf, ('id', o2m_op, ids2), working_model))
if call_null:
o2m_op = 'in' if operator in NEGATIVE_TERM_OPERATORS else 'not in'
push(create_substitution_leaf(leaf, ('id', o2m_op, select_distinct_from_where_not_null(cr, field._fields_id, relational_model._table)), working_model))
elif field._type == 'many2many':
rel_table, rel_id1, rel_id2 = field._sql_names(working_model)
#FIXME
if operator == 'child_of':
def _rec_convert(ids):
if relational_model == working_model:
return ids
return select_from_where(cr, rel_id1, rel_table, rel_id2, ids, operator)
ids2 = to_ids(right, relational_model, context)
dom = child_of_domain('id', ids2, relational_model)
ids2 = relational_model.search(cr, uid, dom, context=context)
push(create_substitution_leaf(leaf, ('id', 'in', _rec_convert(ids2)), working_model))
else:
call_null_m2m = True
if right is not False:
if isinstance(right, basestring):
res_ids = [x[0] for x in relational_model.name_search(cr, uid, right, [], operator, context=context)]
if res_ids:
operator = 'in'
else:
if not isinstance(right, list):
res_ids = [right]
else:
res_ids = right
if not res_ids:
if operator in ['like', 'ilike', 'in', '=']:
#no result found with given search criteria
call_null_m2m = False
push(create_substitution_leaf(leaf, FALSE_LEAF, working_model))
else:
operator = 'in' # operator changed because ids are directly related to main object
else:
call_null_m2m = False
m2m_op = 'not in' if operator in NEGATIVE_TERM_OPERATORS else 'in'
push(create_substitution_leaf(leaf, ('id', m2m_op, select_from_where(cr, rel_id1, rel_table, rel_id2, res_ids, operator) or [0]), working_model))
if call_null_m2m:
m2m_op = 'in' if operator in NEGATIVE_TERM_OPERATORS else 'not in'
push(create_substitution_leaf(leaf, ('id', m2m_op, select_distinct_from_where_not_null(cr, rel_id1, rel_table)), working_model))
elif field._type == 'many2one':
if operator == 'child_of':
ids2 = to_ids(right, relational_model, context)
if field._obj != working_model._name:
dom = child_of_domain(left, ids2, relational_model, prefix=field._obj)
else:
dom = child_of_domain('id', ids2, working_model, parent=left)
for dom_leaf in reversed(dom):
push(create_substitution_leaf(leaf, dom_leaf, working_model))
else:
def _get_expression(relational_model, cr, uid, left, right, operator, context=None):
if context is None:
context = {}
c = context.copy()
c['active_test'] = False
#Special treatment to ill-formed domains
operator = (operator in ['<', '>', '<=', '>=']) and 'in' or operator
dict_op = {'not in': '!=', 'in': '=', '=': 'in', '!=': 'not in'}
if isinstance(right, tuple):
right = list(right)
if (not isinstance(right, list)) and operator in ['not in', 'in']:
operator = dict_op[operator]
elif isinstance(right, list) and operator in ['!=', '=']: # for domain (FIELD,'=',['value1','value2'])
operator = dict_op[operator]
res_ids = [x[0] for x in relational_model.name_search(cr, uid, right, [], operator, limit=None, context=c)]
if operator in NEGATIVE_TERM_OPERATORS:
res_ids.append(False) # TODO this should not be appended if False was in 'right'
return left, 'in', res_ids
# resolve string-based m2o criterion into IDs
if isinstance(right, basestring) or \
right and isinstance(right, (tuple, list)) and all(isinstance(item, basestring) for item in right):
push(create_substitution_leaf(leaf, _get_expression(relational_model, cr, uid, left, right, operator, context=context), working_model))
else:
# right == [] or right == False and all other cases are handled by __leaf_to_sql()
push_result(leaf)
# -------------------------------------------------
# OTHER FIELDS
# -> datetime fields: manage time part of the datetime
# field when it is not there
# -> manage translatable fields
# -------------------------------------------------
else:
if field._type == 'datetime' and right and len(right) == 10:
if operator in ('>', '<='):
right += ' 23:59:59'
else:
right += ' 00:00:00'
push(create_substitution_leaf(leaf, (left, operator, right), working_model))
elif field.translate and right:
need_wildcard = operator in ('like', 'ilike', 'not like', 'not ilike')
sql_operator = {'=like': 'like', '=ilike': 'ilike'}.get(operator, operator)
if need_wildcard:
right = '%%%s%%' % right
inselect_operator = 'inselect'
if sql_operator in NEGATIVE_TERM_OPERATORS:
# negate operator (fix lp:1071710)
sql_operator = sql_operator[4:] if sql_operator[:3] == 'not' else '='
inselect_operator = 'not inselect'
unaccent = self._unaccent if sql_operator.endswith('like') else lambda x: x
instr = unaccent('%s')
if sql_operator == 'in':
# params will be flatten by to_sql() => expand the placeholders
instr = '(%s)' % ', '.join(['%s'] * len(right))
subselect = """WITH temp_irt_current (id, name) as (
SELECT ct.id, coalesce(it.value,ct.{quote_left})
FROM {current_table} ct
LEFT JOIN ir_translation it ON (it.name = %s and
it.lang = %s and
it.type = %s and
it.res_id = ct.id and
it.value != '')
)
SELECT id FROM temp_irt_current WHERE {name} {operator} {right} order by name
""".format(current_table=working_model._table, quote_left=_quote(left), name=unaccent('name'),
operator=sql_operator, right=instr)
params = (
working_model._name + ',' + left,
context.get('lang') or 'en_US',
'model',
right,
)
push(create_substitution_leaf(leaf, ('id', inselect_operator, (subselect, params)), working_model))
else:
push_result(leaf)
# ----------------------------------------
# END OF PARSING FULL DOMAIN
# -> generate joins
# ----------------------------------------
joins = set()
for leaf in self.result:
joins |= set(leaf.get_join_conditions())
self.joins = list(joins)
def __leaf_to_sql(self, eleaf):
    """Translate one internal leaf into an SQL condition fragment.

    :param eleaf: extended leaf wrapping the owning model, the table
        alias information and the raw ``(left, operator, right)`` term
    :return: tuple ``(query, params)`` where ``query`` is an SQL boolean
        expression containing ``%s`` placeholders and ``params`` holds
        the corresponding parameter values (always a list for strings)
    """
    model = eleaf.model
    leaf = eleaf.leaf
    left, operator, right = leaf

    # final sanity checks - should never fail
    assert operator in (TERM_OPERATORS + ('inselect', 'not inselect')), \
        "Invalid operator %r in domain term %r" % (operator, leaf)
    assert leaf in (TRUE_LEAF, FALSE_LEAF) or left in model._all_columns \
        or left in MAGIC_COLUMNS, "Invalid field %r in domain term %r" % (left, leaf)

    table_alias = '"%s"' % (eleaf.generate_alias())

    if leaf == TRUE_LEAF:
        query = 'TRUE'
        params = []

    elif leaf == FALSE_LEAF:
        query = 'FALSE'
        params = []

    elif operator == 'inselect':
        # right is a pair (subquery_text, subquery_params): the subquery
        # text is injected verbatim into the IN (...) clause.
        query = '(%s."%s" in (%s))' % (table_alias, left, right[0])
        params = right[1]

    elif operator == 'not inselect':
        query = '(%s."%s" not in (%s))' % (table_alias, left, right[0])
        params = right[1]

    elif operator in ['in', 'not in']:
        # Two cases: right is a boolean or a list. The boolean case is an
        # abuse and handled for backward compatibility.
        if isinstance(right, bool):
            _logger.warning("The domain term '%s' should use the '=' or '!=' operator." % (leaf,))
            if operator == 'in':
                # (x, 'in', True) -> x IS NOT NULL; (x, 'in', False) -> x IS NULL
                r = 'NOT NULL' if right else 'NULL'
            else:
                r = 'NULL' if right else 'NOT NULL'
            query = '(%s."%s" IS %s)' % (table_alias, left, r)
            params = []
        elif isinstance(right, (list, tuple)):
            params = list(right)
            check_nulls = False
            # Strip False values (meaning "no value"); remember they were
            # present so a NULL test can be OR-ed / AND-ed in below.
            # Iterate in reverse so deletion by index stays valid.
            for i in range(len(params))[::-1]:
                if params[i] == False:
                    check_nulls = True
                    del params[i]

            if params:
                if left == 'id':
                    instr = ','.join(['%s'] * len(params))
                else:
                    # use the column's symbol_set to format/convert values
                    ss = model._columns[left]._symbol_set
                    instr = ','.join([ss[0]] * len(params))
                    params = map(ss[1], params)
                query = '(%s."%s" %s (%s))' % (table_alias, left, operator, instr)
            else:
                # The case for (left, 'in', []) or (left, 'not in', []).
                query = 'FALSE' if operator == 'in' else 'TRUE'

            if check_nulls and operator == 'in':
                query = '(%s OR %s."%s" IS NULL)' % (query, table_alias, left)
            elif not check_nulls and operator == 'not in':
                query = '(%s OR %s."%s" IS NULL)' % (query, table_alias, left)
            elif check_nulls and operator == 'not in':
                query = '(%s AND %s."%s" IS NOT NULL)' % (query, table_alias, left)  # needed only for TRUE.
        else:  # Must not happen
            raise ValueError("Invalid domain term %r" % (leaf,))

    elif right == False and (left in model._columns) and model._columns[left]._type == "boolean" and (operator == '='):
        # (bool_field, '=', False) matches both stored false and NULL
        query = '(%s."%s" IS NULL or %s."%s" = false )' % (table_alias, left, table_alias, left)
        params = []

    elif (right is False or right is None) and (operator == '='):
        query = '%s."%s" IS NULL ' % (table_alias, left)
        params = []

    elif right == False and (left in model._columns) and model._columns[left]._type == "boolean" and (operator == '!='):
        query = '(%s."%s" IS NOT NULL and %s."%s" != false)' % (table_alias, left, table_alias, left)
        params = []

    elif (right is False or right is None) and (operator == '!='):
        query = '%s."%s" IS NOT NULL' % (table_alias, left)
        params = []

    elif operator == '=?':
        if right is False or right is None:
            # '=?' is a short-circuit that makes the term TRUE if right is None or False
            query = 'TRUE'
            params = []
        else:
            # '=?' behaves like '=' in other cases
            query, params = self.__leaf_to_sql(
                create_substitution_leaf(eleaf, (left, '=', right), model))

    elif left == 'id':
        query = '%s.id %s %%s' % (table_alias, operator)
        params = right

    else:
        # Generic scalar comparison, with special handling for LIKE-style
        # operators (wildcards, text cast, optional unaccent wrapping).
        need_wildcard = operator in ('like', 'ilike', 'not like', 'not ilike')
        sql_operator = {'=like': 'like', '=ilike': 'ilike'}.get(operator, operator)
        cast = '::text' if sql_operator.endswith('like') else ''

        if left in model._columns:
            format = need_wildcard and '%s' or model._columns[left]._symbol_set[0]
            unaccent = self._unaccent if sql_operator.endswith('like') else lambda x: x
            column = '%s.%s' % (table_alias, _quote(left))
            query = '(%s%s %s %s)' % (unaccent(column), cast, sql_operator, unaccent(format))
        elif left in MAGIC_COLUMNS:
            query = "(%s.\"%s\"%s %s %%s)" % (table_alias, left, cast, sql_operator)
            params = right
        else:  # Must not happen
            raise ValueError("Invalid field %r in domain term %r" % (left, leaf))

        add_null = False
        if need_wildcard:
            # normalize the pattern operand to a utf-8 str (Python 2 code:
            # 'unicode'/'basestring' are the py2 builtins)
            if isinstance(right, str):
                str_utf8 = right
            elif isinstance(right, unicode):
                str_utf8 = right.encode('utf-8')
            else:
                str_utf8 = str(right)
            params = '%%%s%%' % str_utf8
            # an empty pattern should also match records with NULL values
            add_null = not str_utf8
        elif left in model._columns:
            params = model._columns[left]._symbol_set[1](right)

        if add_null:
            query = '(%s OR %s."%s" IS NULL)' % (query, table_alias, left)

    if isinstance(params, basestring):
        params = [params]
    return query, params
def to_sql(self):
    """Assemble the full SQL WHERE clause for the parsed domain.

    Evaluates the polish-notation term list right-to-left with an
    operand stack, prepends the join conditions, and returns a
    ``(query, params)`` pair ready for execution.

    Note: ``self.result`` is reversed in place as part of the
    evaluation (same as the historical behaviour).
    """
    operand_stack = []
    parameters = []

    # Walk the domain from right to left (stack-based evaluation).
    self.result.reverse()
    for element in self.result:
        if element.is_leaf(internal=True):
            sql, args = self.__leaf_to_sql(element)
            # prepend so parameters come out in left-to-right order
            parameters.insert(0, args)
            operand_stack.append(sql)
        elif element.leaf == NOT_OPERATOR:
            operand_stack.append('(NOT (%s))' % (operand_stack.pop(),))
        else:
            sql_connectors = {AND_OPERATOR: ' AND ', OR_OPERATOR: ' OR '}
            first_operand = operand_stack.pop()
            second_operand = operand_stack.pop()
            operand_stack.append('(%s %s %s)' % (
                first_operand, sql_connectors[element.leaf], second_operand,))

    # A well-formed domain reduces to exactly one SQL expression.
    assert len(operand_stack) == 1
    query = operand_stack[0]

    join_clause = ' AND '.join(self.joins)
    if join_clause:
        query = '(%s) AND %s' % (join_clause, query)

    return query, tools.flatten(parameters)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
chafique-delli/OpenUpgrade
|
refs/heads/master
|
addons/web_kanban_gauge/__openerp__.py
|
428
|
# OpenERP/Odoo addon manifest describing the "Gauge Widget for Kanban"
# web module (justgage-based gauge rendering in kanban views).
{
    'name': 'Gauge Widget for Kanban',
    # 'Hidden' keeps the module out of the user-visible Apps list
    'category': 'Hidden',
    'description': """
This widget allows to display gauges using justgage library.
""",
    'version': '1.0',
    'depends': ['web_kanban'],
    'data' : [
        'views/web_kanban_gauge.xml',
    ],
    'qweb': [
    ],
    # installed automatically as soon as all dependencies are installed
    'auto_install': True,
}
|
z-fork/scrapy
|
refs/heads/master
|
scrapy/xlib/tx/endpoints.py
|
164
|
# -*- test-case-name: twisted.internet.test.test_endpoints -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Implementations of L{IStreamServerEndpoint} and L{IStreamClientEndpoint} that
wrap the L{IReactorTCP}, L{IReactorSSL}, and L{IReactorUNIX} interfaces.
This also implements an extensible mini-language for describing endpoints,
parsed by the L{clientFromString} and L{serverFromString} functions.
@since: 10.1
"""
from __future__ import division, absolute_import
import os
import socket
from zope.interface import implementer, directlyProvides
import warnings
from twisted.internet import interfaces, defer, error, fdesc, threads
from twisted.internet.protocol import (
ClientFactory, Protocol, ProcessProtocol, Factory)
from twisted.internet.interfaces import IStreamServerEndpointStringParser
from twisted.internet.interfaces import IStreamClientEndpointStringParser
from twisted.python.filepath import FilePath
from twisted.python.failure import Failure
from twisted.python import log
from twisted.python.components import proxyForInterface
from twisted.plugin import IPlugin, getPlugins
from twisted.internet import stdio
from .interfaces import IFileDescriptorReceiver
__all__ = ["TCP4ClientEndpoint", "SSL4ServerEndpoint"]
class _WrappingProtocol(Protocol):
    """
    Wrap another protocol in order to notify my user when a connection has
    been made.
    """

    def __init__(self, connectedDeferred, wrappedProtocol):
        """
        @param connectedDeferred: The L{Deferred} that will callback
            with the C{wrappedProtocol} when it is connected.

        @param wrappedProtocol: An L{IProtocol} provider that will be
            connected.
        """
        self._connectedDeferred = connectedDeferred
        self._wrappedProtocol = wrappedProtocol

        # Mirror optional interfaces of the wrapped protocol onto this
        # wrapper, so the transport can detect them (e.g. half-close and
        # file-descriptor passing support).
        for iface in [interfaces.IHalfCloseableProtocol,
                      IFileDescriptorReceiver]:
            if iface.providedBy(self._wrappedProtocol):
                directlyProvides(self, iface)

    def logPrefix(self):
        """
        Transparently pass through the wrapped protocol's log prefix.
        """
        if interfaces.ILoggingContext.providedBy(self._wrappedProtocol):
            return self._wrappedProtocol.logPrefix()
        return self._wrappedProtocol.__class__.__name__

    def connectionMade(self):
        """
        Connect the C{self._wrappedProtocol} to our C{self.transport} and
        callback C{self._connectedDeferred} with the C{self._wrappedProtocol}
        """
        # Order matters: the wrapped protocol must be connected before the
        # user's Deferred fires with it.
        self._wrappedProtocol.makeConnection(self.transport)
        self._connectedDeferred.callback(self._wrappedProtocol)

    def dataReceived(self, data):
        """
        Proxy C{dataReceived} calls to our C{self._wrappedProtocol}
        """
        return self._wrappedProtocol.dataReceived(data)

    def fileDescriptorReceived(self, descriptor):
        """
        Proxy C{fileDescriptorReceived} calls to our C{self._wrappedProtocol}
        """
        return self._wrappedProtocol.fileDescriptorReceived(descriptor)

    def connectionLost(self, reason):
        """
        Proxy C{connectionLost} calls to our C{self._wrappedProtocol}
        """
        return self._wrappedProtocol.connectionLost(reason)

    def readConnectionLost(self):
        """
        Proxy L{IHalfCloseableProtocol.readConnectionLost} to our
        C{self._wrappedProtocol}
        """
        self._wrappedProtocol.readConnectionLost()

    def writeConnectionLost(self):
        """
        Proxy L{IHalfCloseableProtocol.writeConnectionLost} to our
        C{self._wrappedProtocol}
        """
        self._wrappedProtocol.writeConnectionLost()
class _WrappingFactory(ClientFactory):
    """
    Wrap a factory in order to wrap the protocols it builds.

    @ivar _wrappedFactory: A provider of I{IProtocolFactory} whose buildProtocol
        method will be called and whose resulting protocol will be wrapped.

    @ivar _onConnection: A L{Deferred} that fires when the protocol is
        connected

    @ivar _connector: A L{connector <twisted.internet.interfaces.IConnector>}
        that is managing the current or previous connection attempt.
    """
    protocol = _WrappingProtocol

    def __init__(self, wrappedFactory):
        """
        @param wrappedFactory: A provider of I{IProtocolFactory} whose
            buildProtocol method will be called and whose resulting protocol
            will be wrapped.
        """
        self._wrappedFactory = wrappedFactory
        self._onConnection = defer.Deferred(canceller=self._canceller)

    def startedConnecting(self, connector):
        """
        A connection attempt was started. Remember the connector which started
        said attempt, for use later.
        """
        self._connector = connector

    def _canceller(self, deferred):
        """
        The outgoing connection attempt was cancelled. Fail that L{Deferred}
        with an L{error.ConnectingCancelledError}.

        @param deferred: The L{Deferred <defer.Deferred>} that was cancelled;
            should be the same as C{self._onConnection}.
        @type deferred: L{Deferred <defer.Deferred>}

        @note: This relies on startedConnecting having been called, so it may
            seem as though there's a race condition where C{_connector} may not
            have been set. However, using public APIs, this condition is
            impossible to catch, because a connection API
            (C{connectTCP}/C{SSL}/C{UNIX}) is always invoked before a
            L{_WrappingFactory}'s L{Deferred <defer.Deferred>} is returned to
            C{connect()}'s caller.

        @return: C{None}
        """
        deferred.errback(
            error.ConnectingCancelledError(
                self._connector.getDestination()))
        self._connector.stopConnecting()

    def doStart(self):
        """
        Start notifications are passed straight through to the wrapped factory.
        """
        self._wrappedFactory.doStart()

    def doStop(self):
        """
        Stop notifications are passed straight through to the wrapped factory.
        """
        self._wrappedFactory.doStop()

    def buildProtocol(self, addr):
        """
        Proxy C{buildProtocol} to our C{self._wrappedFactory} or errback
        the C{self._onConnection} L{Deferred}.

        @return: An instance of L{_WrappingProtocol} or C{None}
        """
        try:
            proto = self._wrappedFactory.buildProtocol(addr)
        # NOTE(review): bare 'except' captures the in-flight exception into
        # the Deferred via errback() with no argument; it also swallows
        # BaseException (e.g. KeyboardInterrupt) -- presumably deliberate
        # upstream Twisted behaviour, confirm before narrowing.
        except:
            self._onConnection.errback()
        else:
            return self.protocol(self._onConnection, proto)

    def clientConnectionFailed(self, connector, reason):
        """
        Errback the C{self._onConnection} L{Deferred} when the
        client connection fails.
        """
        if not self._onConnection.called:
            self._onConnection.errback(reason)
@implementer(interfaces.ITransport)
class _ProcessEndpointTransport(proxyForInterface(
        interfaces.IProcessTransport, '_process')):
    """
    An L{ITransport} provider for the L{IProtocol} instance passed to the
    process endpoint.

    @ivar _process: An active process transport which will be used by write
        methods on this object to write data to a child process.
    @type _process: L{interfaces.IProcessTransport} provider
    """

    def write(self, data):
        """
        Write to the child process's standard input.

        @param data: The data to write on stdin.
        """
        # fd 0 is the child's stdin
        self._process.writeToChild(0, data)

    def writeSequence(self, data):
        """
        Write a list of strings to child process's stdin.

        @param data: The list of chunks to write on stdin.
        """
        for chunk in data:
            self._process.writeToChild(0, chunk)
@implementer(interfaces.IStreamServerEndpoint)
class _TCPServerEndpoint(object):
    """
    A TCP server endpoint interface
    """

    def __init__(self, reactor, port, backlog, interface):
        """
        @param reactor: An L{IReactorTCP} provider.

        @param port: The port number used for listening
        @type port: int

        @param backlog: Size of the listen queue
        @type backlog: int

        @param interface: The hostname to bind to
        @type interface: str
        """
        self._reactor = reactor
        self._port = port
        self._backlog = backlog
        self._interface = interface

    def listen(self, protocolFactory):
        """
        Implement L{IStreamServerEndpoint.listen} to listen on a TCP
        socket
        """
        # defer.execute turns a synchronous listenTCP failure
        # (e.g. CannotListenError) into a failed Deferred.
        return defer.execute(self._reactor.listenTCP,
                             self._port,
                             protocolFactory,
                             backlog=self._backlog,
                             interface=self._interface)
class TCP4ServerEndpoint(_TCPServerEndpoint):
    """
    Implements TCP server endpoint with an IPv4 configuration
    """
    def __init__(self, reactor, port, backlog=50, interface=''):
        """
        @param reactor: An L{IReactorTCP} provider.

        @param port: The port number used for listening
        @type port: int

        @param backlog: Size of the listen queue
        @type backlog: int

        @param interface: The hostname to bind to, defaults to '' (all)
        @type interface: str
        """
        _TCPServerEndpoint.__init__(self, reactor, port, backlog, interface)
class TCP6ServerEndpoint(_TCPServerEndpoint):
    """
    Implements TCP server endpoint with an IPv6 configuration
    """
    def __init__(self, reactor, port, backlog=50, interface='::'):
        """
        @param reactor: An L{IReactorTCP} provider.

        @param port: The port number used for listening
        @type port: int

        @param backlog: Size of the listen queue
        @type backlog: int

        @param interface: The hostname to bind to, defaults to '::' (all
            IPv6 interfaces)
        @type interface: str
        """
        _TCPServerEndpoint.__init__(self, reactor, port, backlog, interface)
@implementer(interfaces.IStreamClientEndpoint)
class TCP4ClientEndpoint(object):
    """
    TCP client endpoint with an IPv4 configuration.
    """

    def __init__(self, reactor, host, port, timeout=30, bindAddress=None):
        """
        @param reactor: An L{IReactorTCP} provider

        @param host: A hostname, used when connecting
        @type host: str

        @param port: The port number, used when connecting
        @type port: int

        @param timeout: The number of seconds to wait before assuming the
            connection has failed.
        @type timeout: int

        @param bindAddress: A (host, port) tuple of local address to bind to,
            or None.
        @type bindAddress: tuple
        """
        self._reactor = reactor
        self._host = host
        self._port = port
        self._timeout = timeout
        self._bindAddress = bindAddress

    def connect(self, protocolFactory):
        """
        Implement L{IStreamClientEndpoint.connect} to connect via TCP.
        """
        try:
            wf = _WrappingFactory(protocolFactory)
            self._reactor.connectTCP(
                self._host, self._port, wf,
                timeout=self._timeout, bindAddress=self._bindAddress)
            # The wrapping factory's Deferred fires with the connected
            # (wrapped) protocol.
            return wf._onConnection
        # bare except: any synchronous failure becomes a failed Deferred
        # carrying the current exception.
        except:
            return defer.fail()
@implementer(interfaces.IStreamServerEndpoint)
class SSL4ServerEndpoint(object):
    """
    SSL secured TCP server endpoint with an IPv4 configuration.
    """

    def __init__(self, reactor, port, sslContextFactory,
                 backlog=50, interface=''):
        """
        @param reactor: An L{IReactorSSL} provider.

        @param port: The port number used for listening
        @type port: int

        @param sslContextFactory: An instance of
            L{twisted.internet.ssl.ContextFactory}.

        @param backlog: Size of the listen queue
        @type backlog: int

        @param interface: The hostname to bind to, defaults to '' (all)
        @type interface: str
        """
        self._reactor = reactor
        self._port = port
        self._sslContextFactory = sslContextFactory
        self._backlog = backlog
        self._interface = interface

    def listen(self, protocolFactory):
        """
        Implement L{IStreamServerEndpoint.listen} to listen for SSL on a
        TCP socket.
        """
        # defer.execute turns a synchronous listenSSL failure into a
        # failed Deferred.
        return defer.execute(self._reactor.listenSSL, self._port,
                             protocolFactory,
                             contextFactory=self._sslContextFactory,
                             backlog=self._backlog,
                             interface=self._interface)
@implementer(interfaces.IStreamClientEndpoint)
class SSL4ClientEndpoint(object):
    """
    SSL secured TCP client endpoint with an IPv4 configuration
    """

    def __init__(self, reactor, host, port, sslContextFactory,
                 timeout=30, bindAddress=None):
        """
        @param reactor: An L{IReactorSSL} provider.

        @param host: A hostname, used when connecting
        @type host: str

        @param port: The port number, used when connecting
        @type port: int

        @param sslContextFactory: SSL Configuration information as an instance
            of L{twisted.internet.ssl.ContextFactory}.

        @param timeout: Number of seconds to wait before assuming the
            connection has failed.
        @type timeout: int

        @param bindAddress: A (host, port) tuple of local address to bind to,
            or None.
        @type bindAddress: tuple
        """
        self._reactor = reactor
        self._host = host
        self._port = port
        self._sslContextFactory = sslContextFactory
        self._timeout = timeout
        self._bindAddress = bindAddress

    def connect(self, protocolFactory):
        """
        Implement L{IStreamClientEndpoint.connect} to connect with SSL over
        TCP.
        """
        try:
            wf = _WrappingFactory(protocolFactory)
            self._reactor.connectSSL(
                self._host, self._port, wf, self._sslContextFactory,
                timeout=self._timeout, bindAddress=self._bindAddress)
            return wf._onConnection
        # bare except: synchronous failures become a failed Deferred.
        except:
            return defer.fail()
@implementer(interfaces.IStreamServerEndpoint)
class UNIXServerEndpoint(object):
    """
    UnixSocket server endpoint.
    """

    def __init__(self, reactor, address, backlog=50, mode=0o666, wantPID=0):
        """
        @param reactor: An L{IReactorUNIX} provider.

        @param address: The path to the Unix socket file, used when listening

        @param backlog: number of connections to allow in backlog.

        @param mode: mode to set on the unix socket. This parameter is
            deprecated. Permissions should be set on the directory which
            contains the UNIX socket.

        @param wantPID: If True, create a pidfile for the socket.
        """
        self._reactor = reactor
        self._address = address
        self._backlog = backlog
        self._mode = mode
        self._wantPID = wantPID

    def listen(self, protocolFactory):
        """
        Implement L{IStreamServerEndpoint.listen} to listen on a UNIX socket.
        """
        # defer.execute turns a synchronous listenUNIX failure into a
        # failed Deferred.
        return defer.execute(self._reactor.listenUNIX, self._address,
                             protocolFactory,
                             backlog=self._backlog,
                             mode=self._mode,
                             wantPID=self._wantPID)
@implementer(interfaces.IStreamClientEndpoint)
class UNIXClientEndpoint(object):
    """
    UnixSocket client endpoint.
    """

    def __init__(self, reactor, path, timeout=30, checkPID=0):
        """
        @param reactor: An L{IReactorUNIX} provider.

        @param path: The path to the Unix socket file, used when connecting
        @type path: str

        @param timeout: Number of seconds to wait before assuming the
            connection has failed.
        @type timeout: int

        @param checkPID: If True, check for a pid file to verify that a server
            is listening.
        @type checkPID: bool
        """
        self._reactor = reactor
        self._path = path
        self._timeout = timeout
        self._checkPID = checkPID

    def connect(self, protocolFactory):
        """
        Implement L{IStreamClientEndpoint.connect} to connect via a
        UNIX Socket
        """
        try:
            wf = _WrappingFactory(protocolFactory)
            self._reactor.connectUNIX(
                self._path, wf,
                timeout=self._timeout,
                checkPID=self._checkPID)
            return wf._onConnection
        # bare except: synchronous failures become a failed Deferred.
        except:
            return defer.fail()
@implementer(interfaces.IStreamServerEndpoint)
class AdoptedStreamServerEndpoint(object):
    """
    An endpoint for listening on a file descriptor initialized outside of
    Twisted.

    @ivar _used: A C{bool} indicating whether this endpoint has been used to
        listen with a factory yet. C{True} if so.
    """
    # Hooks kept as attributes so tests can substitute them.
    _close = os.close
    _setNonBlocking = staticmethod(fdesc.setNonBlocking)

    def __init__(self, reactor, fileno, addressFamily):
        """
        @param reactor: An L{IReactorSocket} provider.

        @param fileno: An integer file descriptor corresponding to a listening
            I{SOCK_STREAM} socket.

        @param addressFamily: The address family of the socket given by
            C{fileno}.
        """
        self.reactor = reactor
        self.fileno = fileno
        self.addressFamily = addressFamily
        self._used = False

    def listen(self, factory):
        """
        Implement L{IStreamServerEndpoint.listen} to start listening on, and
        then close, C{self.fileno}.
        """
        # one-shot: the adopted descriptor is closed after adoption, so a
        # second listen() must fail.
        if self._used:
            return defer.fail(error.AlreadyListened())
        self._used = True

        try:
            self._setNonBlocking(self.fileno)
            port = self.reactor.adoptStreamPort(
                self.fileno, self.addressFamily, factory)
            # the reactor duplicated the fd; release our copy
            self._close(self.fileno)
        # bare except: synchronous failures become a failed Deferred.
        except:
            return defer.fail()
        return defer.succeed(port)
def _parseTCP(factory, port, interface="", backlog=50):
"""
Internal parser function for L{_parseServer} to convert the string
arguments for a TCP(IPv4) stream endpoint into the structured arguments.
@param factory: the protocol factory being parsed, or C{None}. (This was a
leftover argument from when this code was in C{strports}, and is now
mostly None and unused.)
@type factory: L{IProtocolFactory} or C{NoneType}
@param port: the integer port number to bind
@type port: C{str}
@param interface: the interface IP to listen on
@param backlog: the length of the listen queue
@type backlog: C{str}
@return: a 2-tuple of (args, kwargs), describing the parameters to
L{IReactorTCP.listenTCP} (or, modulo argument 2, the factory, arguments
to L{TCP4ServerEndpoint}.
"""
return (int(port), factory), {'interface': interface,
'backlog': int(backlog)}
def _parseUNIX(factory, address, mode='666', backlog=50, lockfile=True):
"""
Internal parser function for L{_parseServer} to convert the string
arguments for a UNIX (AF_UNIX/SOCK_STREAM) stream endpoint into the
structured arguments.
@param factory: the protocol factory being parsed, or C{None}. (This was a
leftover argument from when this code was in C{strports}, and is now
mostly None and unused.)
@type factory: L{IProtocolFactory} or C{NoneType}
@param address: the pathname of the unix socket
@type address: C{str}
@param backlog: the length of the listen queue
@type backlog: C{str}
@param lockfile: A string '0' or '1', mapping to True and False
respectively. See the C{wantPID} argument to C{listenUNIX}
@return: a 2-tuple of (args, kwargs), describing the parameters to
L{IReactorTCP.listenUNIX} (or, modulo argument 2, the factory,
arguments to L{UNIXServerEndpoint}.
"""
return (
(address, factory),
{'mode': int(mode, 8), 'backlog': int(backlog),
'wantPID': bool(int(lockfile))})
def _parseSSL(factory, port, privateKey="server.pem", certKey=None,
              sslmethod=None, interface='', backlog=50):
    """
    Internal parser function for L{_parseServer} to convert the string
    arguments for an SSL (over TCP/IPv4) stream endpoint into the structured
    arguments.

    @param factory: the protocol factory being parsed, or C{None}. (This was a
        leftover argument from when this code was in C{strports}, and is now
        mostly None and unused.)
    @type factory: L{IProtocolFactory} or C{NoneType}

    @param port: the integer port number to bind
    @type port: C{str}

    @param interface: the interface IP to listen on

    @param backlog: the length of the listen queue
    @type backlog: C{str}

    @param privateKey: The file name of a PEM format private key file.
    @type privateKey: C{str}

    @param certKey: The file name of a PEM format certificate file.
    @type certKey: C{str}

    @param sslmethod: The string name of an SSL method, based on the name of a
        constant in C{OpenSSL.SSL}. Must be one of: "SSLv23_METHOD",
        "SSLv2_METHOD", "SSLv3_METHOD", "TLSv1_METHOD".
    @type sslmethod: C{str}

    @return: a 2-tuple of (args, kwargs), describing the parameters to
        L{IReactorSSL.listenSSL} (or, modulo argument 2, the factory, arguments
        to L{SSL4ServerEndpoint}.
    """
    # local import: twisted.internet.ssl requires pyOpenSSL, which may not
    # be installed; only pay that cost when an 'ssl:' description is used.
    from twisted.internet import ssl
    if certKey is None:
        # a single combined PEM file holds both key and certificate
        certKey = privateKey
    kw = {}
    if sslmethod is not None:
        kw['method'] = getattr(ssl.SSL, sslmethod)
    else:
        kw['method'] = ssl.SSL.SSLv23_METHOD
    certPEM = FilePath(certKey).getContent()
    keyPEM = FilePath(privateKey).getContent()
    # concatenate so loadPEM can extract both the certificate and the key
    privateCertificate = ssl.PrivateCertificate.loadPEM(certPEM + keyPEM)
    cf = ssl.CertificateOptions(
        privateKey=privateCertificate.privateKey.original,
        certificate=privateCertificate.original,
        **kw
    )
    return ((int(port), factory, cf),
            {'interface': interface, 'backlog': int(backlog)})
@implementer(IPlugin, IStreamServerEndpointStringParser)
class _StandardIOParser(object):
    """
    Stream server endpoint string parser for the Standard I/O type.

    @ivar prefix: See L{IStreamClientEndpointStringParser.prefix}.
    """
    prefix = "stdio"

    def _parseServer(self, reactor):
        """
        Internal parser function for L{_parseServer} to convert the string
        arguments into structured arguments for the L{StandardIOEndpoint}

        @param reactor: Reactor for the endpoint
        """
        # NOTE(review): StandardIOEndpoint is not defined in this excerpt;
        # presumably defined elsewhere in this module -- confirm.
        return StandardIOEndpoint(reactor)

    def parseStreamServer(self, reactor, *args, **kwargs):
        # Redirects to another function (self._parseServer), tricks zope.interface
        # into believing the interface is correctly implemented.
        return self._parseServer(reactor)
@implementer(IPlugin, IStreamServerEndpointStringParser)
class _TCP6ServerParser(object):
    """
    Stream server endpoint string parser for the TCP6ServerEndpoint type.

    @ivar prefix: See L{IStreamClientEndpointStringParser.prefix}.
    """
    prefix = "tcp6"  # Used in _parseServer to identify the plugin with the endpoint type

    def _parseServer(self, reactor, port, backlog=50, interface='::'):
        """
        Internal parser function for L{_parseServer} to convert the string
        arguments into structured arguments for the L{TCP6ServerEndpoint}

        @param reactor: An L{IReactorTCP} provider.

        @param port: The port number used for listening
        @type port: int

        @param backlog: Size of the listen queue
        @type backlog: int

        @param interface: The hostname to bind to
        @type interface: str
        """
        # string arguments from the description must be coerced to ints
        port = int(port)
        backlog = int(backlog)
        return TCP6ServerEndpoint(reactor, port, backlog, interface)

    def parseStreamServer(self, reactor, *args, **kwargs):
        # Redirects to another function (self._parseServer), tricks zope.interface
        # into believing the interface is correctly implemented.
        return self._parseServer(reactor, *args, **kwargs)
# Registry mapping built-in strports prefixes to their server-side
# argument parser functions (plugins handle other prefixes).
_serverParsers = {"tcp": _parseTCP,
                  "unix": _parseUNIX,
                  "ssl": _parseSSL,
                  }

# Token kinds yielded by _tokenize: _OP for ':'/'=' separators,
# _STRING for literal text between them.
_OP, _STRING = range(2)
def _tokenize(description):
    """
    Tokenize a strports string and yield each token.

    @param description: a string as described by L{serverFromString} or
        L{clientFromString}.

    @return: an iterable of 2-tuples of (L{_OP} or L{_STRING}, string). Tuples
        starting with L{_OP} will contain a second element of either ':' (i.e.
        'next parameter') or '=' (i.e. 'assign parameter value'). For example,
        the string 'hello:greet\=ing=world' would result in a generator
        yielding these values::

            _STRING, 'hello'
            _OP, ':'
            _STRING, 'greet=ing'
            _OP, '='
            _STRING, 'world'
    """
    buffered = ''
    # Which separators are currently meaningful: after '=' only ':' ends
    # the value, so a bare '=' inside a value is taken literally.
    active_ops = ':='
    follow_ops = {':': ':=', '=': ':'}
    chars = iter(description)
    for ch in chars:
        if ch in active_ops:
            yield _STRING, buffered
            yield _OP, ch
            buffered = ''
            active_ops = follow_ops[ch]
        elif ch == '\\':
            # backslash escapes the next character, whatever it is
            buffered += next(chars)
        else:
            buffered += ch
    yield _STRING, buffered
def _parse(description):
    """
    Convert a description string into a list of positional and keyword
    parameters, using logic vaguely like what Python does.

    @param description: a string as described by L{serverFromString} or
        L{clientFromString}.

    @return: a 2-tuple of C{(args, kwargs)}, where 'args' is a list of all
        ':'-separated C{str}s not containing an '=' and 'kwargs' is a map of
        all C{str}s which do contain an '='. For example, the result of
        C{_parse('a:b:d=1:c')} would be C{(['a', 'b', 'c'], {'d': '1'})}.
    """
    positional, keyword = [], {}

    def flush(accumulated):
        # one string -> positional argument; a (name, value) pair -> keyword
        if len(accumulated) == 1:
            positional.append(accumulated[0])
        else:
            keyword[accumulated[0]] = accumulated[1]

    accumulated = ()
    for kind, text in _tokenize(description):
        if kind is _STRING:
            accumulated += (text,)
        elif text == ':':
            # ':' terminates the current argument; '=' merely joins
            # name and value, which the tuple accumulation handles.
            flush(accumulated)
            accumulated = ()
    flush(accumulated)
    return positional, keyword
# Mappings from description "names" to endpoint constructors.
_endpointServerFactories = {
    'TCP': TCP4ServerEndpoint,
    'SSL': SSL4ServerEndpoint,
    'UNIX': UNIXServerEndpoint,
    }
_endpointClientFactories = {
    'TCP': TCP4ClientEndpoint,
    'SSL': SSL4ClientEndpoint,
    'UNIX': UNIXClientEndpoint,
    }
# Sentinel distinguishing "no default supplied" from an explicit None default
# in _parseServer / _serverFromStringLegacy.
_NO_DEFAULT = object()
def _parseServer(description, factory, default=None):
    """
    Parse a strports description into a 3-tuple of (name-or-plugin,
    arguments, keyword arguments).

    @param description: A description in the format explained by
        L{serverFromString}.
    @type description: C{str}

    @param factory: A 'factory' argument; this is left-over from
        twisted.application.strports, it's not really used.
    @type factory: L{IProtocolFactory} or L{None}

    @param default: Deprecated argument, specifying the default parser mode to
        use for unqualified description strings (those which do not have a ':'
        and prefix).
    @type default: C{str} or C{NoneType}

    @return: a 3-tuple of (plugin or name, arguments, keyword arguments)
    """
    args, kw = _parse(description)
    # A description with no type prefix (e.g. just a port number) is
    # deprecated; fall back to 'tcp' unless the caller passed _NO_DEFAULT
    # to forbid any default.
    if not args or (len(args) == 1 and not kw):
        deprecationMessage = (
            "Unqualified strport description passed to 'service'."
            "Use qualified endpoint descriptions; for example, 'tcp:%s'."
            % (description,))
        if default is None:
            default = 'tcp'
            warnings.warn(
                deprecationMessage, category=DeprecationWarning, stacklevel=4)
        elif default is _NO_DEFAULT:
            raise ValueError(deprecationMessage)
        # If the default has been otherwise specified, the user has already
        # been warned.
        args[0:0] = [default]
    endpointType = args[0]
    parser = _serverParsers.get(endpointType)
    if parser is None:
        # If the required parser is not found in _serverParsers, check if
        # a plugin exists for the endpointType
        for plugin in getPlugins(IStreamServerEndpointStringParser):
            if plugin.prefix == endpointType:
                return (plugin, args[1:], kw)
        raise ValueError("Unknown endpoint type: '%s'" % (endpointType,))
    return (endpointType.upper(),) + parser(factory, *args[1:], **kw)
def _serverFromStringLegacy(reactor, description, default):
    """
    Underlying implementation of L{serverFromString} which avoids exposing the
    deprecated 'default' argument to anything but L{strports.service}.
    """
    nameOrPlugin, args, kw = _parseServer(description, None, default)
    if type(nameOrPlugin) is not str:
        # _parseServer returned a plugin object; let it build the endpoint.
        plugin = nameOrPlugin
        return plugin.parseStreamServer(reactor, *args, **kw)
    else:
        name = nameOrPlugin
        # Chop out the factory.
        args = args[:1] + args[2:]
        return _endpointServerFactories[name](reactor, *args, **kw)
def serverFromString(reactor, description):
    """
    Construct a stream server endpoint from an endpoint description string.

    The format for server endpoint descriptions is a simple string.  It is a
    prefix naming the type of endpoint, then a colon, then the arguments for
    that endpoint.

    For example, you can call it like this to create an endpoint that will
    listen on TCP port 80::

        serverFromString(reactor, "tcp:80")

    Additional arguments may be specified as keywords, separated with colons.
    For example, you can specify the interface for a TCP server endpoint to
    bind to like this::

        serverFromString(reactor, "tcp:80:interface=127.0.0.1")

    SSL server endpoints may be specified with the 'ssl' prefix, and the
    private key and certificate files may be specified by the C{privateKey}
    and C{certKey} arguments::

        serverFromString(reactor, "ssl:443:privateKey=key.pem:certKey=crt.pem")

    If a private key file name (C{privateKey}) isn't provided, a "server.pem"
    file is assumed to exist which contains the private key.  If the
    certificate file name (C{certKey}) isn't provided, the private key file is
    assumed to contain the certificate as well.

    You may escape colons in arguments with a backslash, which you will need
    to use if you want to specify a full pathname argument on Windows::

        serverFromString(reactor,
            "ssl:443:privateKey=C\\:/key.pem:certKey=C\\:/cert.pem")

    finally, the 'unix' prefix may be used to specify a filesystem UNIX socket,
    optionally with a 'mode' argument to specify the mode of the socket file
    created by C{listen}::

        serverFromString(reactor, "unix:/var/run/finger")
        serverFromString(reactor, "unix:/var/run/finger:mode=660")

    This function is also extensible; new endpoint types may be registered as
    L{IStreamServerEndpointStringParser} plugins.  See that interface for more
    information.

    @param reactor: The server endpoint will be constructed with this reactor.

    @param description: The strports description to parse.

    @return: A new endpoint which can be used to listen with the parameters
        given by C{description}.

    @rtype: L{IStreamServerEndpoint<twisted.internet.interfaces.IStreamServerEndpoint>}

    @raise ValueError: when the 'description' string cannot be parsed.

    @since: 10.2
    """
    # _NO_DEFAULT: unqualified descriptions are an error here, not a warning.
    return _serverFromStringLegacy(reactor, description, _NO_DEFAULT)
def quoteStringArgument(argument):
    """
    Quote an argument for use in L{serverFromString} and L{clientFromString}
    descriptions.

    Arguments in a description are separated with colons and colons are
    escaped with backslashes, so a literal value containing either character
    must be escaped before interpolation.  For example, rather than::

        serverFromString("ssl:443:privateKey=%s" % (myPathName,))

    which breaks on Windows-style paths, prefer constructing the endpoint
    type directly (here L{SSL4ServerEndpoint}); but when you must build a
    description string -- e.g. a tool that edits configuration files holding
    strports descriptions -- write::

        serverFromString("ssl:443:privateKey=%s" %
                         (quoteStringArgument(myPathName),))

    @param argument: The part of the endpoint description string you want to
        pass through.
    @type argument: C{str}

    @return: The quoted argument.
    @rtype: C{str}
    """
    # Backslashes first, so the escape character itself survives; then colons.
    escaped = argument.replace('\\', '\\\\')
    return escaped.replace(':', '\\:')
def _parseClientTCP(*args, **kwargs):
"""
Perform any argument value coercion necessary for TCP client parameters.
Valid positional arguments to this function are host and port.
Valid keyword arguments to this function are all L{IReactorTCP.connectTCP}
arguments.
@return: The coerced values as a C{dict}.
"""
if len(args) == 2:
kwargs['port'] = int(args[1])
kwargs['host'] = args[0]
elif len(args) == 1:
if 'host' in kwargs:
kwargs['port'] = int(args[0])
else:
kwargs['host'] = args[0]
try:
kwargs['port'] = int(kwargs['port'])
except KeyError:
pass
try:
kwargs['timeout'] = int(kwargs['timeout'])
except KeyError:
pass
return kwargs
def _loadCAsFromDir(directoryPath):
    """
    Load certificate-authority certificate objects in a given directory.

    @param directoryPath: a L{FilePath} pointing at a directory to load .pem
        files from.

    @return: a C{list} of L{OpenSSL.crypto.X509} objects.
        NOTE(review): under Python 3 C{dict.values()} is a view, not a list
        -- confirm callers only iterate the result.
    """
    from twisted.internet import ssl
    # Keyed by certificate digest so duplicate certificates collapse to one.
    caCerts = {}
    for child in directoryPath.children():
        # Only consider files with a .pem extension (case-insensitive).
        if not child.basename().split('.')[-1].lower() == 'pem':
            continue
        try:
            data = child.getContent()
        except IOError:
            # Permission denied, corrupt disk, we don't care.
            continue
        try:
            theCert = ssl.Certificate.loadPEM(data)
        except ssl.SSL.Error:
            # Duplicate certificate, invalid certificate, etc.  We don't care.
            pass
        else:
            caCerts[theCert.digest()] = theCert.original
    return caCerts.values()
def _parseClientSSL(*args, **kwargs):
    """
    Perform any argument value coercion necessary for SSL client parameters.

    Valid keyword arguments to this function are all L{IReactorSSL.connectSSL}
    arguments except for C{contextFactory}.  Instead, C{certKey} (the path
    name of the certificate file) and C{privateKey} (the path name of the
    private key associated with the certificate) are accepted and used to
    construct a context factory.

    Valid positional arguments to this function are host and port.

    @param caCertsDir: The one parameter which is not part of
        L{IReactorSSL.connectSSL}'s signature, this is a path name used to
        construct a list of certificate authority certificates.  The directory
        will be scanned for files ending in C{.pem}, all of which will be
        considered valid certificate authorities for this connection.
    @type caCertsDir: C{str}

    @return: The coerced values as a C{dict}.
    """
    from twisted.internet import ssl
    # Reuse the TCP parser for host/port/timeout coercion.
    kwargs = _parseClientTCP(*args, **kwargs)
    certKey = kwargs.pop('certKey', None)
    privateKey = kwargs.pop('privateKey', None)
    caCertsDir = kwargs.pop('caCertsDir', None)
    if certKey is not None:
        certx509 = ssl.Certificate.loadPEM(
            FilePath(certKey).getContent()).original
    else:
        certx509 = None
    if privateKey is not None:
        privateKey = ssl.PrivateCertificate.loadPEM(
            FilePath(privateKey).getContent()).privateKey.original
    else:
        privateKey = None
    if caCertsDir is not None:
        # Peer verification is enabled only when a CA directory is supplied.
        verify = True
        caCerts = _loadCAsFromDir(FilePath(caCertsDir))
    else:
        verify = False
        caCerts = None
    kwargs['sslContextFactory'] = ssl.CertificateOptions(
        method=ssl.SSL.SSLv23_METHOD,
        certificate=certx509,
        privateKey=privateKey,
        verify=verify,
        caCerts=caCerts
    )
    return kwargs
def _parseClientUNIX(*args, **kwargs):
"""
Perform any argument value coercion necessary for UNIX client parameters.
Valid keyword arguments to this function are all L{IReactorUNIX.connectUNIX}
keyword arguments except for C{checkPID}. Instead, C{lockfile} is accepted
and has the same meaning. Also C{path} is used instead of C{address}.
Valid positional arguments to this function are C{path}.
@return: The coerced values as a C{dict}.
"""
if len(args) == 1:
kwargs['path'] = args[0]
try:
kwargs['checkPID'] = bool(int(kwargs.pop('lockfile')))
except KeyError:
pass
try:
kwargs['timeout'] = int(kwargs['timeout'])
except KeyError:
pass
return kwargs
# Maps upper-cased client endpoint-type names to their coercion functions.
_clientParsers = {
    'TCP': _parseClientTCP,
    'SSL': _parseClientSSL,
    'UNIX': _parseClientUNIX,
    }
def clientFromString(reactor, description):
    """
    Construct a client endpoint from a description string.

    Client description strings are much like server description strings,
    although they take all of their arguments as keywords, aside from host and
    port.

    You can create a TCP client endpoint with the 'host' and 'port' arguments,
    like so::

        clientFromString(reactor, "tcp:host=www.example.com:port=80")

    or, without specifying host and port keywords::

        clientFromString(reactor, "tcp:www.example.com:80")

    Or you can specify only one or the other, as in the following 2 examples::

        clientFromString(reactor, "tcp:host=www.example.com:80")
        clientFromString(reactor, "tcp:www.example.com:port=80")

    or an SSL client endpoint with those arguments, plus the arguments used by
    the server SSL, for a client certificate::

        clientFromString(reactor, "ssl:web.example.com:443:"
                                  "privateKey=foo.pem:certKey=foo.pem")

    to specify your certificate trust roots, you can identify a directory with
    PEM files in it with the C{caCertsDir} argument::

        clientFromString(reactor, "ssl:host=web.example.com:port=443:"
                                  "caCertsDir=/etc/ssl/certs")

    You can create a UNIX client endpoint with the 'path' argument and
    optional 'lockfile' and 'timeout' arguments::

        clientFromString(reactor, "unix:path=/var/foo/bar:lockfile=1:timeout=9")

    or, with the path as a positional argument with or without optional
    arguments as in the following 2 examples::

        clientFromString(reactor, "unix:/var/foo/bar")
        clientFromString(reactor, "unix:/var/foo/bar:lockfile=1:timeout=9")

    This function is also extensible; new endpoint types may be registered as
    L{IStreamClientEndpointStringParser} plugins.  See that interface for more
    information.

    @param reactor: The client endpoint will be constructed with this reactor.

    @param description: The strports description to parse.

    @return: A new endpoint which can be used to connect with the parameters
        given by C{description}.

    @rtype: L{IStreamClientEndpoint<twisted.internet.interfaces.IStreamClientEndpoint>}

    @since: 10.2
    """
    args, kwargs = _parse(description)
    # Keep the original-case name for the error message; match on upper case.
    aname = args.pop(0)
    name = aname.upper()
    # Plugins get first crack at the endpoint type.
    for plugin in getPlugins(IStreamClientEndpointStringParser):
        if plugin.prefix.upper() == name:
            return plugin.parseStreamClient(*args, **kwargs)
    if name not in _clientParsers:
        raise ValueError("Unknown endpoint type: %r" % (aname,))
    kwargs = _clientParsers[name](*args, **kwargs)
    return _endpointClientFactories[name](reactor, **kwargs)
def connectProtocol(endpoint, protocol):
    """
    Connect a protocol instance to an endpoint.

    This allows using a client endpoint without having to create a factory.

    @param endpoint: A client endpoint to connect to.

    @param protocol: A protocol instance.

    @return: The result of calling C{connect} on the endpoint, i.e. a
        L{Deferred} that will fire with the protocol when connected, or an
        appropriate error.
    """
    class _SingleUseFactory(Factory):
        # Always hand back the one pre-built protocol instance.
        def buildProtocol(self, addr):
            return protocol

    return endpoint.connect(_SingleUseFactory())
|
jrumball/PyKoans
|
refs/heads/master
|
python 3/runner/mountain.py
|
15
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import sys
from . import path_to_enlightenment
from .sensei import Sensei
from .writeln_decorator import WritelnDecorator
class Mountain:
    """Drive the koans test suite through the custom Sensei runner."""

    def __init__(self):
        # Wrap stdout so the runner can emit whole lines conveniently.
        self.stream = WritelnDecorator(sys.stdout)
        self.tests = path_to_enlightenment.koans()
        self.lesson = Sensei(self.stream)

    def walk_the_path(self, args=None):
        "Run the koans tests with a custom runner output."
        if args is not None and len(args) >= 2:
            # A second command-line argument selects a single koan module.
            suite_name = "koans." + args[1]
            self.tests = unittest.TestLoader().loadTestsFromName(suite_name)
        self.tests(self.lesson)
        self.lesson.learn()
        return self.lesson
|
JBARisk/QtStuff
|
refs/heads/master
|
build/lib/QtStuff/images.py
|
2
| null |
rafaeltomesouza/frontend-class1
|
refs/heads/master
|
aula2/a12/linkedin/client/.gradle/yarn/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
|
2779
|
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypsh output module
gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
import code
import sys
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_DIRNAME',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
]
generator_default_variables = {
}
# Each identity variable expands to itself, i.e. '<(NAME)', so the processed
# .gyp data keeps these references visible for inspection in the shell.
for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v
def GenerateOutput(target_list, target_dicts, data, params):
  """Drop into an interactive Python shell with the processed GYP data
  (target_list, target_dicts, data) preloaded as local variables.

  Args:
    target_list: list of fully-qualified target names.
    target_dicts: dict mapping target names to their processed settings.
    data: dict of processed .gyp file contents, keyed by path.
    params: generator parameters (unused here).
  """
  # Named 'interact_locals' rather than 'locals' to avoid shadowing the
  # built-in locals() function.
  interact_locals = {
        'target_list': target_list,
        'target_dicts': target_dicts,
        'data': data,
  }

  # Use a banner that looks like the stock Python one and like what
  # code.interact uses by default, but tack on something to indicate what
  # locals are available, and identify gypsh.
  banner = ('Python %s on %s\nlocals.keys() = %s\ngypsh'
            % (sys.version, sys.platform, repr(sorted(interact_locals.keys()))))

  code.interact(banner, local=interact_locals)
|
cfg2015/EPT-2015-2
|
refs/heads/master
|
addons/mrp/product.py
|
131
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class product_template(osv.osv):
    # Extends product.template with manufacturing counters (BoMs and MOs)
    # and manufacturing-related configuration fields.
    _inherit = "product.template"
    def _bom_orders_count(self, cr, uid, ids, field_name, arg, context=None):
        # Functional-field computer: number of BoMs attached to each template.
        Bom = self.pool('mrp.bom')
        res = {}
        for product_tmpl_id in ids:
            nb = Bom.search_count(cr, uid, [('product_tmpl_id', '=', product_tmpl_id)], context=context)
            res[product_tmpl_id] = {
                'bom_count': nb,
            }
        return res
    def _bom_orders_count_mo(self, cr, uid, ids, name, arg, context=None):
        # Functional-field computer: manufacturing orders summed over all
        # variants of each template.
        res = {}
        for product_tmpl_id in self.browse(cr, uid, ids):
            res[product_tmpl_id.id] = sum([p.mo_count for p in product_tmpl_id.product_variant_ids])
        return res
    _columns = {
        'bom_ids': fields.one2many('mrp.bom', 'product_tmpl_id','Bill of Materials'),
        'bom_count': fields.function(_bom_orders_count, string='# Bill of Material', type='integer', multi="_bom_order_count"),
        'mo_count': fields.function(_bom_orders_count_mo, string='# Manufacturing Orders', type='integer'),
        'produce_delay': fields.float('Manufacturing Lead Time', help="Average delay in days to produce this product. In the case of multi-level BOM, the manufacturing lead times of the components will be added."),
        'track_production': fields.boolean('Track Manufacturing Lots', help="Forces to specify a Serial Number for all moves containing this product and generated by a Manufacturing Order"),
    }
    _defaults = {
        'produce_delay': 1,
    }
    def action_view_mos(self, cr, uid, ids, context=None):
        # Open the Manufacturing Orders list filtered to this template's
        # products; with a single product, preset it as the default.
        # NOTE(review): _get_products and _get_act_window_dict are defined
        # elsewhere on product.template -- assumed helpers, confirm.
        products = self._get_products(cr, uid, ids, context=context)
        result = self._get_act_window_dict(cr, uid, 'mrp.act_product_mrp_production', context=context)
        if len(ids) == 1 and len(products) == 1:
            result['context'] = "{'default_product_id': " + str(products[0]) + ", 'search_default_product_id': " + str(products[0]) + "}"
        else:
            result['domain'] = "[('product_id','in',[" + ','.join(map(str, products)) + "])]"
            result['context'] = "{}"
        return result
class product_product(osv.osv):
    # Extends product.product (the variant model) with a manufacturing-order
    # counter and a BoM list action.
    _inherit = "product.product"
    def _bom_orders_count(self, cr, uid, ids, field_name, arg, context=None):
        # Functional-field computer: manufacturing orders per variant.
        Production = self.pool('mrp.production')
        res = {}
        for product_id in ids:
            res[product_id] = Production.search_count(cr,uid, [('product_id', '=', product_id)], context=context)
        return res
    _columns = {
        'mo_count': fields.function(_bom_orders_count, string='# Manufacturing Orders', type='integer'),
    }
    def action_view_bom(self, cr, uid, ids, context=None):
        # Open the BoM list showing BoMs specific to these variants as well
        # as template-level BoMs (those with no product_id).
        tmpl_obj = self.pool.get("product.template")
        products = set()
        for product in self.browse(cr, uid, ids, context=context):
            products.add(product.product_tmpl_id.id)
        result = tmpl_obj._get_act_window_dict(cr, uid, 'mrp.product_open_bom', context=context)
        # bom specific to this variant or global to template
        domain = [
            '|',
                ('product_id', 'in', ids),
                '&',
                    ('product_id', '=', False),
                    ('product_tmpl_id', 'in', list(products)),
        ]
        result['context'] = "{}"
        result['domain'] = str(domain)
        return result
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
zenoss/ZenPacks.community.PrinterToner
|
refs/heads/master
|
ZenPacks/community/PrinterToner/modeler/plugins/community/snmp/__init__.py
|
1165
|
# __init__.py
|
dkodnik/Ant
|
refs/heads/master
|
addons/mail/mail_message_subtype.py
|
60
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import osv
from openerp.osv import fields
class mail_message_subtype(osv.osv):
    """ Class holding subtype definition for messages. Subtypes allow to tune
        the follower subscription, allowing only some subtypes to be pushed
        on the Wall. """
    _name = 'mail.message.subtype'
    _description = 'Message subtypes'
    # Purely declarative model: all behavior lives in the field definitions.
    _columns = {
        'name': fields.char('Message Type', required=True, translate=True,
            help='Message subtype gives a more precise type on the message, '\
                 'especially for system notifications. For example, it can be '\
                 'a notification related to a new record (New), or to a stage '\
                 'change in a process (Stage change). Message subtypes allow to '\
                 'precisely tune the notifications the user want to receive on its wall.'),
        'description': fields.text('Description', translate=True,
            help='Description that will be added in the message posted for this '\
                 'subtype. If void, the name will be added instead.'),
        'parent_id': fields.many2one('mail.message.subtype', string='Parent',
            ondelete='set null',
            help='Parent subtype, used for automatic subscription.'),
        'relation_field': fields.char('Relation field',
            help='Field used to link the related model to the subtype model when '\
                 'using automatic subscription on a related document. The field '\
                 'is used to compute getattr(related_document.relation_field).'),
        'res_model': fields.char('Model',
            help="Model the subtype applies to. If False, this subtype applies to all models."),
        'default': fields.boolean('Default',
            help="Activated by default when subscribing."),
    }
    # New subtypes are subscribed-to by default.
    _defaults = {
        'default': True,
    }
|
tumbl3w33d/ansible
|
refs/heads/devel
|
test/units/modules/remote_management/oneview/test_oneview_fcoe_network_info.py
|
21
|
# Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from units.compat import unittest
from oneview_module_loader import FcoeNetworkInfoModule
from hpe_test_utils import FactsParamsTestCase
ERROR_MSG = 'Fake message error'
# Module params for the "list everything" code path (no name filter).
PARAMS_GET_ALL = dict(
    config='config.json',
    name=None
)
# Module params for the "fetch one network by name" code path.
PARAMS_GET_BY_NAME = dict(
    config='config.json',
    name="Test FCoE Networks"
)
# Canned OneView API response shared by both tests.
PRESENT_NETWORKS = [{
    "name": "Test FCoE Networks",
    "uri": "/rest/fcoe-networks/c6bf9af9-48e7-4236-b08a-77684dc258a5"
}]
class FcoeNetworkInfoSpec(unittest.TestCase,
                          FactsParamsTestCase
                          ):
    # Verifies the oneview_fcoe_network_info module against a mocked OneView
    # client: both the get-all and get-by-name paths should report
    # changed=False and pass the client's networks straight through.
    def setUp(self):
        # configure_mocks/configure_client_mock come from the test helpers
        # and install mock_ov_client / mock_ansible_module on self.
        self.configure_mocks(self, FcoeNetworkInfoModule)
        self.fcoe_networks = self.mock_ov_client.fcoe_networks
        FactsParamsTestCase.configure_client_mock(self, self.fcoe_networks)
    def test_should_get_all_fcoe_network(self):
        self.fcoe_networks.get_all.return_value = PRESENT_NETWORKS
        self.mock_ansible_module.params = PARAMS_GET_ALL
        FcoeNetworkInfoModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            fcoe_networks=PRESENT_NETWORKS
        )
    def test_should_get_fcoe_network_by_name(self):
        self.fcoe_networks.get_by.return_value = PRESENT_NETWORKS
        self.mock_ansible_module.params = PARAMS_GET_BY_NAME
        FcoeNetworkInfoModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            fcoe_networks=PRESENT_NETWORKS
        )
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
oasiswork/odoo
|
refs/heads/8.0
|
addons/analytic_user_function/__openerp__.py
|
260
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Jobs on Contracts',
'version': '1.0',
'category': 'Sales Management',
'description': """
This module allows you to define what is the default function of a specific user on a given account.
====================================================================================================
This is mostly used when a user encodes his timesheet: the values are retrieved
and the fields are auto-filled. But the possibility to change these values is
still available.
Obviously if no data has been recorded for the current account, the default
value is given as usual by the employee data so that this module is perfectly
compatible with older configurations.
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/employees',
'depends': ['hr_timesheet_sheet'],
'data': ['analytic_user_function_view.xml', 'security/ir.model.access.csv'],
'demo': [],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
closers-2015/Closers-2015_BOTS-_Final_Byte_Project
|
refs/heads/master
|
frontend/bots/urls.py
|
1
|
from django.conf.urls import url
from .views import IndexView
# Only the index route is active; the CRUD/category routes below are kept
# commented out as a reference for future views.
urlpatterns = [
    url(r'^$', IndexView.as_view(), name='index'),
    # url(r'^create$', PostCreateView.as_view(), name='create'),
    # url(r'^read/(?P<slug>[\w-]+)$', PostDetailView.as_view(), name='detail'),
    # url(r'^update/(?P<slug>[\w-]+)$', PostUpdateView.as_view(), name='update'),
    # url(r'^delete/(?P<slug>[\w-]+)$', PostDeleteView.as_view(), name='delete'),
    # url(r'^category$', CategoryIndexView.as_view(), name='index-category'),
    # url(r'^section/(?P<slug>[\w-]+)$', CategoryDetailView.as_view(), name='detail-category'),
]
|
betoesquivel/fil2014
|
refs/heads/master
|
filenv/lib/python2.7/site-packages/django/contrib/formtools/tests/wizard/namedwizardtests/urls.py
|
320
|
from django.conf.urls import patterns, url
from django.contrib.formtools.tests.wizard.namedwizardtests.forms import (
SessionContactWizard, CookieContactWizard, Page1, Page2, Page3, Page4)
def get_named_session_wizard():
    # Build a session-backed four-step contact wizard view for the
    # 'nwiz_session' URL names below.
    return SessionContactWizard.as_view(
        [('form1', Page1), ('form2', Page2), ('form3', Page3), ('form4', Page4)],
        url_name='nwiz_session',
        done_step_name='nwiz_session_done'
    )
def get_named_cookie_wizard():
    # Build a cookie-backed four-step contact wizard view for the
    # 'nwiz_cookie' URL names below.
    return CookieContactWizard.as_view(
        [('form1', Page1), ('form2', Page2), ('form3', Page3), ('form4', Page4)],
        url_name='nwiz_cookie',
        done_step_name='nwiz_cookie_done'
    )
# Each wizard gets a stepped URL (captures the step name) and a start URL.
urlpatterns = patterns('',
    url(r'^nwiz_session/(?P<step>.+)/$', get_named_session_wizard(), name='nwiz_session'),
    url(r'^nwiz_session/$', get_named_session_wizard(), name='nwiz_session_start'),
    url(r'^nwiz_cookie/(?P<step>.+)/$', get_named_cookie_wizard(), name='nwiz_cookie'),
    url(r'^nwiz_cookie/$', get_named_cookie_wizard(), name='nwiz_cookie_start'),
)
|
mffrench/fabric
|
refs/heads/v1.0.0-alpha2-elixir
|
bddtests/msp/msp_principal_pb2.py
|
5
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: msp/msp_principal.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='msp/msp_principal.proto',
package='common',
syntax='proto3',
serialized_pb=_b('\n\x17msp/msp_principal.proto\x12\x06\x63ommon\"\xa9\x01\n\x0cMSPPrincipal\x12\x45\n\x18principal_classification\x18\x01 \x01(\x0e\x32#.common.MSPPrincipal.Classification\x12\x11\n\tprincipal\x18\x02 \x01(\x0c\"?\n\x0e\x43lassification\x12\x08\n\x04ROLE\x10\x00\x12\x15\n\x11ORGANIZATION_UNIT\x10\x01\x12\x0c\n\x08IDENTITY\x10\x02\"q\n\x10OrganizationUnit\x12\x16\n\x0emsp_identifier\x18\x01 \x01(\t\x12&\n\x1eorganizational_unit_identifier\x18\x02 \x01(\t\x12\x1d\n\x15\x63\x65rtifiers_identifier\x18\x03 \x01(\x0c\"r\n\x07MSPRole\x12\x16\n\x0emsp_identifier\x18\x01 \x01(\t\x12)\n\x04role\x18\x02 \x01(\x0e\x32\x1b.common.MSPRole.MSPRoleType\"$\n\x0bMSPRoleType\x12\n\n\x06MEMBER\x10\x00\x12\t\n\x05\x41\x44MIN\x10\x01\x42P\n$org.hyperledger.fabric.protos.commonZ(github.com/hyperledger/fabric/protos/mspb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_MSPPRINCIPAL_CLASSIFICATION = _descriptor.EnumDescriptor(
name='Classification',
full_name='common.MSPPrincipal.Classification',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='ROLE', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ORGANIZATION_UNIT', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='IDENTITY', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=142,
serialized_end=205,
)
_sym_db.RegisterEnumDescriptor(_MSPPRINCIPAL_CLASSIFICATION)
_MSPROLE_MSPROLETYPE = _descriptor.EnumDescriptor(
name='MSPRoleType',
full_name='common.MSPRole.MSPRoleType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='MEMBER', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ADMIN', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=400,
serialized_end=436,
)
_sym_db.RegisterEnumDescriptor(_MSPROLE_MSPROLETYPE)
_MSPPRINCIPAL = _descriptor.Descriptor(
name='MSPPrincipal',
full_name='common.MSPPrincipal',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='principal_classification', full_name='common.MSPPrincipal.principal_classification', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='principal', full_name='common.MSPPrincipal.principal', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_MSPPRINCIPAL_CLASSIFICATION,
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=36,
serialized_end=205,
)
_ORGANIZATIONUNIT = _descriptor.Descriptor(
name='OrganizationUnit',
full_name='common.OrganizationUnit',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='msp_identifier', full_name='common.OrganizationUnit.msp_identifier', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='organizational_unit_identifier', full_name='common.OrganizationUnit.organizational_unit_identifier', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='certifiers_identifier', full_name='common.OrganizationUnit.certifiers_identifier', index=2,
number=3, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=207,
serialized_end=320,
)
_MSPROLE = _descriptor.Descriptor(
name='MSPRole',
full_name='common.MSPRole',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='msp_identifier', full_name='common.MSPRole.msp_identifier', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='role', full_name='common.MSPRole.role', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_MSPROLE_MSPROLETYPE,
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=322,
serialized_end=436,
)
# protoc-generated wiring (see the @@protoc_insertion_point markers below):
# link enum fields to their enum descriptors and register message classes.
# Do not edit by hand; regenerate from the .proto instead.
_MSPPRINCIPAL.fields_by_name['principal_classification'].enum_type = _MSPPRINCIPAL_CLASSIFICATION
_MSPPRINCIPAL_CLASSIFICATION.containing_type = _MSPPRINCIPAL
_MSPROLE.fields_by_name['role'].enum_type = _MSPROLE_MSPROLETYPE
_MSPROLE_MSPROLETYPE.containing_type = _MSPROLE
# Expose the message descriptors on the file descriptor by name.
DESCRIPTOR.message_types_by_name['MSPPrincipal'] = _MSPPRINCIPAL
DESCRIPTOR.message_types_by_name['OrganizationUnit'] = _ORGANIZATIONUNIT
DESCRIPTOR.message_types_by_name['MSPRole'] = _MSPROLE
# Build the concrete message classes from their descriptors and register
# them with the default symbol database.
MSPPrincipal = _reflection.GeneratedProtocolMessageType('MSPPrincipal', (_message.Message,), dict(
  DESCRIPTOR = _MSPPRINCIPAL,
  __module__ = 'msp.msp_principal_pb2'
  # @@protoc_insertion_point(class_scope:common.MSPPrincipal)
  ))
_sym_db.RegisterMessage(MSPPrincipal)
OrganizationUnit = _reflection.GeneratedProtocolMessageType('OrganizationUnit', (_message.Message,), dict(
  DESCRIPTOR = _ORGANIZATIONUNIT,
  __module__ = 'msp.msp_principal_pb2'
  # @@protoc_insertion_point(class_scope:common.OrganizationUnit)
  ))
_sym_db.RegisterMessage(OrganizationUnit)
MSPRole = _reflection.GeneratedProtocolMessageType('MSPRole', (_message.Message,), dict(
  DESCRIPTOR = _MSPROLE,
  __module__ = 'msp.msp_principal_pb2'
  # @@protoc_insertion_point(class_scope:common.MSPRole)
  ))
_sym_db.RegisterMessage(MSPRole)
# File-level options (java_package / go_package) serialized by protoc.
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n$org.hyperledger.fabric.protos.commonZ(github.com/hyperledger/fabric/protos/msp'))
try:
  # THESE ELEMENTS WILL BE DEPRECATED.
  # Please use the generated *_pb2_grpc.py files instead.
  import grpc
  from grpc.framework.common import cardinality
  from grpc.framework.interfaces.face import utilities as face_utilities
  from grpc.beta import implementations as beta_implementations
  from grpc.beta import interfaces as beta_interfaces
except ImportError:
  pass
# @@protoc_insertion_point(module_scope)
|
toastedcornflakes/scikit-learn
|
refs/heads/master
|
sklearn/manifold/tests/test_spectral_embedding.py
|
37
|
from nose.tools import assert_true
from nose.tools import assert_equal
from scipy.sparse import csr_matrix
from scipy.sparse import csc_matrix
from scipy.sparse import coo_matrix
from scipy.linalg import eigh
import numpy as np
from numpy.testing import assert_array_almost_equal
from numpy.testing import assert_array_equal
from nose.tools import assert_raises
from nose.plugins.skip import SkipTest
from sklearn.manifold.spectral_embedding_ import SpectralEmbedding
from sklearn.manifold.spectral_embedding_ import _graph_is_connected
from sklearn.manifold.spectral_embedding_ import _graph_connected_component
from sklearn.manifold import spectral_embedding
from sklearn.metrics.pairwise import rbf_kernel
from sklearn.metrics import normalized_mutual_info_score
from sklearn.cluster import KMeans
from sklearn.datasets.samples_generator import make_blobs
from sklearn.utils.graph import graph_laplacian
from sklearn.utils.extmath import _deterministic_vector_sign_flip
# Non-centered, sparse cluster centers (3 clusters in 5 dimensions) used to
# build the blob dataset shared by the tests below.
centers = np.array([
    [0.0, 5.0, 0.0, 0.0, 0.0],
    [0.0, 0.0, 4.0, 0.0, 0.0],
    [1.0, 0.0, 0.0, 5.0, 1.0],
])
n_samples = 1000
n_clusters, n_features = centers.shape
# Shared sample matrix S and its ground-truth labels, fixed by random_state=42
# so every test sees the same data.
S, true_labels = make_blobs(n_samples=n_samples, centers=centers,
                            cluster_std=1., random_state=42)
def _check_with_col_sign_flipping(A, B, tol=0.0):
""" Check array A and B are equal with possible sign flipping on
each columns"""
sign = True
for column_idx in range(A.shape[1]):
sign = sign and ((((A[:, column_idx] -
B[:, column_idx]) ** 2).mean() <= tol ** 2) or
(((A[:, column_idx] +
B[:, column_idx]) ** 2).mean() <= tol ** 2))
if not sign:
return False
return True
def test_sparse_graph_connected_component():
    # Build a sparse affinity matrix with four known connected components
    # (given by `boundaries` over a random permutation) and check that
    # _graph_connected_component recovers exactly each component.
    rng = np.random.RandomState(42)
    n_samples = 300
    boundaries = [0, 42, 121, 200, n_samples]
    p = rng.permutation(n_samples)
    connections = []
    for start, stop in zip(boundaries[:-1], boundaries[1:]):
        group = p[start:stop]
        # Connect all elements within the group at least once via an
        # arbitrary path that spans the group.
        for i in range(len(group) - 1):
            connections.append((group[i], group[i + 1]))
        # Add some more random connections within the group
        min_idx, max_idx = 0, len(group) - 1
        n_random_connections = 1000
        source = rng.randint(min_idx, max_idx, size=n_random_connections)
        target = rng.randint(min_idx, max_idx, size=n_random_connections)
        connections.extend(zip(group[source], group[target]))
    # Build a symmetric affinity matrix
    row_idx, column_idx = tuple(np.array(connections).T)
    data = rng.uniform(.1, 42, size=len(connections))
    affinity = coo_matrix((data, (row_idx, column_idx)))
    affinity = 0.5 * (affinity + affinity.T)
    for start, stop in zip(boundaries[:-1], boundaries[1:]):
        component_1 = _graph_connected_component(affinity, p[start])
        component_size = stop - start
        assert_equal(component_1.sum(), component_size)
        # We should retrieve the same component mask by starting by both ends
        # of the group
        component_2 = _graph_connected_component(affinity, p[stop - 1])
        assert_equal(component_2.sum(), component_size)
        assert_array_equal(component_1, component_2)
def test_spectral_embedding_two_components(seed=36):
    # Test spectral embedding with two components: build a block-diagonal
    # affinity (two disconnected cliques), verify the connected-component
    # helper on it, then bridge the blocks and check that a 1-D spectral
    # embedding separates the two groups perfectly.
    random_state = np.random.RandomState(seed)
    n_sample = 100
    affinity = np.zeros(shape=[n_sample * 2, n_sample * 2])
    # first component
    affinity[0:n_sample,
             0:n_sample] = np.abs(random_state.randn(n_sample, n_sample)) + 2
    # second component
    affinity[n_sample::,
             n_sample::] = np.abs(random_state.randn(n_sample, n_sample)) + 2
    # Test of internal _graph_connected_component before connection
    component = _graph_connected_component(affinity, 0)
    assert_true(component[:n_sample].all())
    assert_true(not component[n_sample:].any())
    component = _graph_connected_component(affinity, -1)
    assert_true(not component[:n_sample].any())
    assert_true(component[n_sample:].all())
    # connection: add a single symmetric edge between the two blocks and
    # zero the diagonal before re-symmetrizing.
    affinity[0, n_sample + 1] = 1
    affinity[n_sample + 1, 0] = 1
    affinity.flat[::2 * n_sample + 1] = 0
    affinity = 0.5 * (affinity + affinity.T)
    true_label = np.zeros(shape=2 * n_sample)
    true_label[0:n_sample] = 1
    se_precomp = SpectralEmbedding(n_components=1, affinity="precomputed",
                                   random_state=np.random.RandomState(seed))
    embedded_coordinate = se_precomp.fit_transform(affinity)
    # Some numpy versions are touchy with types
    embedded_coordinate = \
        se_precomp.fit_transform(affinity.astype(np.float32))
    # thresholding on the first components using 0.
    label_ = np.array(embedded_coordinate.ravel() < 0, dtype="float")
    assert_equal(normalized_mutual_info_score(true_label, label_), 1.0)
def test_spectral_embedding_precomputed_affinity(seed=36):
    # Test spectral embedding with precomputed kernel: feeding the RBF
    # kernel of S as "precomputed" must give the same affinity matrix and
    # (up to column sign) the same embedding as affinity="rbf" on S itself.
    gamma = 1.0
    se_precomp = SpectralEmbedding(n_components=2, affinity="precomputed",
                                   random_state=np.random.RandomState(seed))
    se_rbf = SpectralEmbedding(n_components=2, affinity="rbf",
                               gamma=gamma,
                               random_state=np.random.RandomState(seed))
    embed_precomp = se_precomp.fit_transform(rbf_kernel(S, gamma=gamma))
    embed_rbf = se_rbf.fit_transform(S)
    assert_array_almost_equal(
        se_precomp.affinity_matrix_, se_rbf.affinity_matrix_)
    assert_true(_check_with_col_sign_flipping(embed_precomp, embed_rbf, 0.05))
def test_spectral_embedding_callable_affinity(seed=36):
    # Test spectral embedding with callable affinity: a lambda computing
    # the RBF kernel must match the built-in affinity="rbf" (same affinity
    # matrix, same embedding up to column sign).
    gamma = 0.9
    kern = rbf_kernel(S, gamma=gamma)
    se_callable = SpectralEmbedding(n_components=2,
                                    affinity=(
                                        lambda x: rbf_kernel(x, gamma=gamma)),
                                    gamma=gamma,
                                    random_state=np.random.RandomState(seed))
    se_rbf = SpectralEmbedding(n_components=2, affinity="rbf",
                               gamma=gamma,
                               random_state=np.random.RandomState(seed))
    embed_rbf = se_rbf.fit_transform(S)
    embed_callable = se_callable.fit_transform(S)
    assert_array_almost_equal(
        se_callable.affinity_matrix_, se_rbf.affinity_matrix_)
    assert_array_almost_equal(kern, se_rbf.affinity_matrix_)
    assert_true(
        _check_with_col_sign_flipping(embed_rbf, embed_callable, 0.05))
def test_spectral_embedding_amg_solver(seed=36):
    # Test spectral embedding with amg solver: the AMG eigensolver must
    # agree (up to column sign) with the arpack reference solver.
    # Skipped when the optional pyamg dependency is missing.
    try:
        from pyamg import smoothed_aggregation_solver
    except ImportError:
        raise SkipTest("pyamg not available.")
    se_amg = SpectralEmbedding(n_components=2, affinity="nearest_neighbors",
                               eigen_solver="amg", n_neighbors=5,
                               random_state=np.random.RandomState(seed))
    se_arpack = SpectralEmbedding(n_components=2, affinity="nearest_neighbors",
                                  eigen_solver="arpack", n_neighbors=5,
                                  random_state=np.random.RandomState(seed))
    embed_amg = se_amg.fit_transform(S)
    embed_arpack = se_arpack.fit_transform(S)
    assert_true(_check_with_col_sign_flipping(embed_amg, embed_arpack, 0.05))
def test_pipeline_spectral_clustering(seed=36):
    # Test using pipeline to do spectral clustering: KMeans on top of either
    # an RBF or a nearest-neighbors spectral embedding must recover the true
    # blob labels (NMI == 1.0 to 2 decimals).
    random_state = np.random.RandomState(seed)
    se_rbf = SpectralEmbedding(n_components=n_clusters,
                               affinity="rbf",
                               random_state=random_state)
    se_knn = SpectralEmbedding(n_components=n_clusters,
                               affinity="nearest_neighbors",
                               n_neighbors=5,
                               random_state=random_state)
    for se in [se_rbf, se_knn]:
        km = KMeans(n_clusters=n_clusters, random_state=random_state)
        km.fit(se.fit_transform(S))
        assert_array_almost_equal(
            normalized_mutual_info_score(
                km.labels_,
                true_labels), 1.0, 2)
def test_spectral_embedding_unknown_eigensolver(seed=36):
    # Test that SpectralClustering fails with an unknown eigensolver
    rng = np.random.RandomState(seed)
    embedder = SpectralEmbedding(n_components=1,
                                 affinity="precomputed",
                                 random_state=rng,
                                 eigen_solver="<unknown>")
    assert_raises(ValueError, embedder.fit, S)
def test_spectral_embedding_unknown_affinity(seed=36):
    # Test that SpectralClustering fails with an unknown affinity type
    rng = np.random.RandomState(seed)
    embedder = SpectralEmbedding(n_components=1,
                                 affinity="<unknown>",
                                 random_state=rng)
    assert_raises(ValueError, embedder.fit, S)
def test_connectivity(seed=36):
    # Test that graph connectivity test works as expected.
    # Each graph is checked in dense, CSR and CSC representations.
    disconnected = np.array([[1, 0, 0, 0, 0],
                             [0, 1, 1, 0, 0],
                             [0, 1, 1, 1, 0],
                             [0, 0, 1, 1, 1],
                             [0, 0, 0, 1, 1]])
    connected = np.array([[1, 1, 0, 0, 0],
                          [1, 1, 1, 0, 0],
                          [0, 1, 1, 1, 0],
                          [0, 0, 1, 1, 1],
                          [0, 0, 0, 1, 1]])
    for graph, expected in ((disconnected, False), (connected, True)):
        for convert in ((lambda g: g), csr_matrix, csc_matrix):
            assert_equal(_graph_is_connected(convert(graph)), expected)
def test_spectral_embedding_deterministic():
    # Test that Spectral Embedding is deterministic: two runs on the very
    # same affinity matrix must produce identical embeddings.
    rng = np.random.RandomState(36)
    affinity = rbf_kernel(rng.randn(10, 30))
    first_run = spectral_embedding(affinity)
    second_run = spectral_embedding(affinity)
    assert_array_almost_equal(first_run, second_run)
def test_spectral_embedding_unnormalized():
    # Test that spectral_embedding is also processing unnormalized laplacian
    # correctly: compare against a manual dense eigendecomposition of the
    # unnormalized graph Laplacian (with deterministic sign flipping).
    random_state = np.random.RandomState(36)
    data = random_state.randn(10, 30)
    sims = rbf_kernel(data)
    n_components = 8
    embedding_1 = spectral_embedding(sims,
                                     norm_laplacian=False,
                                     n_components=n_components,
                                     drop_first=False)
    # Verify using manual computation with dense eigh
    laplacian, dd = graph_laplacian(sims, normed=False, return_diag=True)
    _, diffusion_map = eigh(laplacian)
    embedding_2 = diffusion_map.T[:n_components] * dd
    embedding_2 = _deterministic_vector_sign_flip(embedding_2).T
    assert_array_almost_equal(embedding_1, embedding_2)
|
ESS-LLP/frappe
|
refs/heads/develop
|
frappe/desk/report_dump.py
|
11
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
from six.moves import range
import frappe
import json
import copy
@frappe.whitelist()
def get_data(doctypes, last_modified):
	"""Dump report data for the requested doctypes.

	:param doctypes: JSON-encoded list of doctype keys (a key may carry a
		``[...]`` suffix, which is stripped to get the table name).
	:param last_modified: JSON-encoded map of doctype key -> timestamp; when
		a key is present only rows modified after that timestamp are listed
		and link compression is skipped for it (partial dump).
	:return: dict mapping doctype -> {data, columns, last_modified, ...}.
	"""
	# Collect the column/condition configuration for every doctype from the
	# installed apps' dump_report_map hooks.
	data_map = {}
	for dump_report_map in frappe.get_hooks().dump_report_map:
		data_map.update(frappe.get_attr(dump_report_map))
	out = {}
	doctypes = json.loads(doctypes)
	last_modified = json.loads(last_modified)
	for d in doctypes:
		args = copy.deepcopy(data_map[d])
		# Strip an optional "[...]" suffix from the key to get the doctype.
		dt = d.find("[") != -1 and d[:d.find("[")] or d
		out[dt] = {}
		# When the config supplies a custom "from" clause, the modified
		# column lives on the aliased "item" table.
		if args.get("from"):
			modified_table = "item."
		else:
			modified_table = ""
		conditions = order_by = ""
		table = args.get("from") or ("`tab%s`" % dt)
		if d in last_modified:
			if not args.get("conditions"):
				args['conditions'] = []
			# NOTE(review): last_modified[d] is string-concatenated into the
			# SQL condition below; confirm it is sanitized upstream or switch
			# to a parameterized condition.
			args['conditions'].append(modified_table + "modified > '" + last_modified[d] + "'")
			out[dt]["modified_names"] = frappe.db.sql_list("""select %sname from %s
				where %smodified > %s""" % (modified_table, table, modified_table, "%s"), last_modified[d])
		if args.get("force_index"):
			conditions = " force index (%s) " % args["force_index"]
		if args.get("conditions"):
			conditions += " where " + " and ".join(args["conditions"])
		if args.get("order_by"):
			order_by = " order by " + args["order_by"]
		out[dt]["data"] = [list(t) for t in frappe.db.sql("""select %s from %s %s %s""" \
			% (",".join(args["columns"]), table, conditions, order_by))]
		# last modified
		modified_table = table
		if "," in table:
			modified_table = " ".join(table.split(",")[0].split(" ")[:-1])
		tmp = frappe.db.sql("""select `modified`
			from %s order by modified desc limit 1""" % modified_table)
		out[dt]["last_modified"] = tmp and tmp[0][0] or ""
		# Column labels: keep only the alias part of "expr as alias".
		out[dt]["columns"] = list(map(lambda c: c.split(" as ")[-1], args["columns"]))
		if args.get("links"):
			out[dt]["links"] = args["links"]
	# Second pass: replace link values by row indices into the linked
	# doctype's data (compression), dropping links that cannot be resolved.
	for d in out:
		unused_links = []
		# only compress full dumps (not partial)
		if out[d].get("links") and (d not in last_modified):
			for link_key in out[d]["links"]:
				link = out[d]["links"][link_key]
				if link[0] in out and (link[0] not in last_modified):
					# make a map of link ids
					# to index
					link_map = {}
					doctype_data = out[link[0]]
					col_idx = doctype_data["columns"].index(link[1])
					for row_idx in range(len(doctype_data["data"])):
						row = doctype_data["data"][row_idx]
						link_map[row[col_idx]] = row_idx
					for row in out[d]["data"]:
						columns = list(out[d]["columns"])
						if link_key in columns:
							col_idx = columns.index(link_key)
							# replace by id
							if row[col_idx]:
								row[col_idx] = link_map.get(row[col_idx])
				else:
					unused_links.append(link_key)
			for link in unused_links:
				del out[d]["links"][link]
	return out
|
perezg/infoxchange
|
refs/heads/master
|
BASE/lib/python2.7/site-packages/django/contrib/gis/geometry/backend/__init__.py
|
128
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
# Name of the geometry backend: the GEOMETRY_BACKEND setting, or 'geos'.
geom_backend = getattr(settings, 'GEOMETRY_BACKEND', 'geos')
try:
    # First try a bundled backend under django.contrib.gis.geometry.backend.
    module = import_module('.%s' % geom_backend, 'django.contrib.gis.geometry.backend')
except ImportError:
    try:
        # Fall back to treating the setting as a fully-qualified module path.
        module = import_module(geom_backend)
    except ImportError:
        raise ImproperlyConfigured('Could not import user-defined GEOMETRY_BACKEND '
                                   '"%s".' % geom_backend)
try:
    # Re-export the backend's Geometry / GeometryException under stable names.
    Geometry = module.Geometry
    GeometryException = module.GeometryException
except AttributeError:
    raise ImproperlyConfigured('Cannot import Geometry from the "%s" '
                               'geometry backend.' % geom_backend)
|
hojel/calibre
|
refs/heads/master
|
src/calibre/ebooks/conversion/plugins/docx_output.py
|
11
|
#!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
from calibre.customize.conversion import OutputFormatPlugin, OptionRecommendation
PAGE_SIZES = ['a0', 'a1', 'a2', 'a3', 'a4', 'a5', 'a6', 'b0', 'b1',
'b2', 'b3', 'b4', 'b5', 'b6', 'legal', 'letter']
class DOCXOutput(OutputFormatPlugin):
    """Conversion output plugin that writes the book as a DOCX file."""

    name = 'DOCX Output'
    author = 'Kovid Goyal'
    file_type = 'docx'
    # User-visible conversion options; strings are wrapped in _() for
    # translation at class-definition time.
    options = {
        OptionRecommendation(name='docx_page_size', recommended_value='letter',
            level=OptionRecommendation.LOW, choices=PAGE_SIZES,
            help=_('The size of the page. Default is letter. Choices '
            'are %s') % PAGE_SIZES),
        OptionRecommendation(name='docx_custom_page_size', recommended_value=None,
            help=_('Custom size of the document. Use the form widthxheight '
            'EG. `123x321` to specify the width and height (in pts). '
            'This overrides any specified page-size.')),
        OptionRecommendation(name='docx_no_cover', recommended_value=False,
            help=_('Do not insert the book cover as an image at the start of the document.'
                ' If you use this option, the book cover will be discarded.')),
        OptionRecommendation(name='docx_no_toc', recommended_value=False,
            help=_('Do not insert the table of contents as a page at the start of the document.')),
        OptionRecommendation(name='extract_to',
            help=_('Extract the contents of the generated %s file to the '
                'specified directory. The contents of the directory are first '
                'deleted, so be careful.') % 'DOCX'),
    }
    # Recommended margin settings (pts) applied at MED priority.
    recommendations = {
        ('margin_left', 72.0, OptionRecommendation.MED),
        ('margin_right', 72.0, OptionRecommendation.MED),
        ('margin_top', 72.0, OptionRecommendation.MED),
        ('margin_bottom', 72.0, OptionRecommendation.MED),
    }
    def convert_metadata(self, oeb):
        # Serialize the OEB metadata to an OPF2 package and re-parse it into
        # a book-metadata object stored on self.mi for convert() to use.
        from lxml import etree
        from calibre.ebooks.oeb.base import OPF, OPF2_NS
        from calibre.ebooks.metadata.opf2 import OPF as ReadOPF
        from io import BytesIO
        package = etree.Element(OPF('package'), attrib={'version': '2.0'}, nsmap={None: OPF2_NS})
        oeb.metadata.to_opf2(package)
        self.mi = ReadOPF(BytesIO(etree.tostring(package, encoding='utf-8')), populate_spine=False, try_to_guess_cover=False).to_book_metadata()
    def convert(self, oeb, output_path, input_plugin, opts, log):
        # Drive the HTML -> DOCX conversion and write the result, optionally
        # dumping the produced archive for debugging via --extract-to.
        from calibre.ebooks.docx.writer.container import DOCX
        from calibre.ebooks.docx.writer.from_html import Convert
        docx = DOCX(opts, log)
        self.convert_metadata(oeb)
        Convert(oeb, docx, self.mi, not opts.docx_no_cover, not opts.docx_no_toc)()
        docx.write(output_path, self.mi)
        if opts.extract_to:
            from calibre.ebooks.docx.dump import do_dump
            do_dump(output_path, opts.extract_to)
|
miptliot/edx-platform
|
refs/heads/ginkgo_openedu_docker
|
common/lib/dogstats/setup.py
|
210
|
# Minimal packaging script for the dogstats_wrapper helper package.
from setuptools import setup
setup(
    name="dogstats_wrapper",
    version="0.1",
    packages=["dogstats_wrapper"],
    install_requires=[
        "dogapi",
    ],
)
|
AmandaCMS/amanda-cms
|
refs/heads/master
|
amanda/faq/search_indexes.py
|
1
|
from haystack import indexes
from haystack.exceptions import NotHandled
from amanda.faq.models import Topic
class TopicIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack search index for FAQ Topic objects."""

    # Primary document field, rendered from a template; edge n-grams enable
    # prefix matching.
    text = indexes.EdgeNgramField(document=True, use_template=True)
    created_at = indexes.DateTimeField()
    def get_model(self):
        """Return the model class this index covers."""
        return Topic
    def index_queryset(self, using=None):
        """Return the queryset of objects to index (all topics)."""
        return self.get_model().objects.all()
    def should_update(self, instance, **kwargs):
        """Skip hidden topics, removing any stale entry from the index."""
        if instance.hidden:
            try:
                self.remove_object(instance)
            except NotHandled:
                # The instance's model is not registered with this index.
                pass
            return False
        else:
            return True
    def prepare_created_at(self, instance):
        """Populate the created_at field from the model instance."""
        return instance.created_at
|
crepererum/invenio
|
refs/heads/master
|
invenio/utils/autodiscovery/helpers.py
|
17
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2009, 2010, 2011, 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Autodiscovery helper functions."""
import inspect
from invenio.utils.text import wrap_text_in_a_box
def get_callable_signature_as_string(the_callable):
    """
    Returns a string representing a callable as if it would have been
    declared on the prompt.

    >>> def foo(arg1, arg2, arg3='val1', arg4='val2', *args, **argd):
    ...     pass
    >>> get_callable_signature_as_string(foo)
    def foo(arg1, arg2, arg3='val1', arg4='val2', *args, **argd)

    :param the_callable: the callable to be analyzed.
    :type the_callable: function/callable.
    :return: the signature.
    """
    # inspect.getargspec was removed in Python 3.11; prefer getfullargspec
    # (its first four fields are identical) and fall back for Python 2.
    try:
        spec = inspect.getfullargspec(the_callable)
    except AttributeError:  # Python 2
        spec = inspect.getargspec(the_callable)
    args, varargs, varkw, defaults = spec[0], spec[1], spec[2], spec[3]
    # Use a private sentinel for "no default" so that an explicit default of
    # None is still rendered (the old code dropped "=None" defaults).
    _missing = object()
    tmp_args = list(args)
    remaining_defaults = list(defaults) if defaults else []
    args_dict = {}
    # Defaults align with the *last* positional arguments.
    while remaining_defaults:
        args_dict[tmp_args.pop()] = remaining_defaults.pop()
    while tmp_args:
        args_dict[tmp_args.pop()] = _missing
    args_list = []
    for arg in args:
        if args_dict[arg] is not _missing:
            args_list.append("%s=%s" % (arg, repr(args_dict[arg])))
        else:
            args_list.append(arg)
    if varargs:
        args_list.append("*%s" % varargs)
    if varkw:
        args_list.append("**%s" % varkw)
    args_string = ', '.join(args_list)
    return "def %s(%s)" % (the_callable.__name__, args_string)
def get_callable_documentation(the_callable):
    """
    Returns a string with the callable signature and its docstring.

    :param the_callable: the callable to be analyzed.
    :type the_callable: function/callable.
    :return: the signature.
    """
    title = get_callable_signature_as_string(the_callable)
    docstring = getattr(the_callable, '__doc__') or 'No documentation'
    body = docstring.replace('\n', '\n\n')
    return wrap_text_in_a_box(title=title, body=body, style='ascii_double')
|
alexanderad/pony-standup-bot
|
refs/heads/master
|
tests/test_jobs.py
|
1
|
from __future__ import absolute_import
import collections
import unittest
from flexmock import flexmock
from pony.jobs import WorldTick
class WorldTickTest(unittest.TestCase):
    """Unit tests for the WorldTick periodic job."""

    def setUp(self):
        # A fresh job with an empty task queue and a mocked bot per test.
        queue = collections.deque()
        self.fake_bot = flexmock()
        self.job = WorldTick(self.fake_bot, queue, interval=5)
    def test_init(self):
        # Constructor stores interval, queue and bot as-is.
        self.assertEqual(self.job.interval, 5)
        self.assertIsInstance(self.job.queue, collections.deque)
        self.assertIsNotNone(self.job.bot)
    def test_run(self):
        # A queued task is executed exactly once with the bot and slack
        # client, and is removed from the queue afterwards.
        fake_task = flexmock()
        fake_slack = flexmock()
        (flexmock(fake_task)
         .should_receive('execute')
         .with_args(bot=self.fake_bot, slack=fake_slack)
         .once())
        self.job.queue.append(fake_task)
        self.assertListEqual(self.job.run(fake_slack), list())
        self.assertEqual(len(self.job.queue), 0)
|
gwpy/gwpy.github.io
|
refs/heads/master
|
docs/0.9.0/examples/signal/gw150914-1.py
|
9
|
# Fetch 32 seconds of public LIGO-Hanford ('H1') strain data spanning the
# GW150914 event (GPS 1126259446-1126259478) from the open data service.
from gwpy.timeseries import TimeSeries
hdata = TimeSeries.fetch_open_data('H1', 1126259446, 1126259478)
|
dirkhusemann/rezzme
|
refs/heads/master
|
RezzMe/gridinfo.py
|
1
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
# Copyright (c) Contributors, http://opensimulator.org/
# See CONTRIBUTORS.TXT for a full list of copyright holders.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the OpenSim Project nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import urllib
import urllib2
import xml.etree.ElementTree
# Hard-coded grid info for well-known grids that do not expose a
# /get_grid_info endpoint; keyed by the rezzme:// base URI.
fakeGridInfo = {
    'rezzme://lindenlab.com/': {
        'login': 'https://login.agni.lindenlab.com/cgi-bin/login.cgi',
        'gridname': 'LindenLab SecondLife(tm) main grid',
        'gridnick': 'secondlife',
        'platform': 'secondlife'},
    'rezzme://secondlife.com/': {
        'login': 'https://login.agni.lindenlab.com/cgi-bin/login.cgi',
        'gridname': 'LindenLab SecondLife(tm) main grid',
        'gridnick': 'secondlife',
        'platform': 'secondlife'},
    'rezzme://aditi.lindenlab.com/': {
        'login': 'https://login.aditi.lindenlab.com/cgi-bin/login.cgi',
        'gridname': 'LindenLab SecondLife(tm) BETA grid',
        'gridnick': 'secondlife-beta',
        'platform': 'secondlife'},
    }
def GetGridInfo(uri):
    '''Invoke /get_grid_info on target grid and obtain additional parameters

    Returns a dict with at least 'login', 'gridname', 'gridnick',
    'platform', 'regioninfoavailable', 'gridkey' and 'error' keys; 'error'
    is None on success or a human-readable message on failure.
    (Python 2 module: uses urllib2 and old-style except syntax.)
    '''
    logging.debug('gridinfo.GetGridInfo: retrieving grid info from uri %s', uri)
    logging.debug('gridinfo.GetGridInfo: base URI %s', uri.BaseUri)
    # short circuit for "fake" uris
    if uri.BaseUri in fakeGridInfo:
        logging.debug('gridinfo.GetGridInfo: returning fake grid info for uri %s', uri)
        # NOTE(review): this aliases (not copies) the shared fakeGridInfo
        # entry, so the added 'faked'/'error' keys persist module-wide.
        gridInfo = fakeGridInfo[uri.BaseUri]
        gridInfo['faked'] = True
        gridInfo['error'] = None
        return fakeGridInfo[uri.BaseUri]
    # construct GridInfo URL
    infoUri = '%s/get_grid_info' % (uri.BaseHttpUri)
    # try to retrieve GridInfo
    gridInfo = {}
    # default platform: opensim
    gridInfo['platform'] = 'opensim'
    gridInfo['error'] = None
    try:
        # Pass host/region hints as headers; some proxies use them.
        req = urllib2.Request(url = infoUri)
        req.add_header('X-OpenSim-Host', uri.Host)
        if uri.Region: req.add_header('X-OpenSim-Region', uri.DecodedRegion)
        logging.debug('gridinfo.GetGridInfo: req %s headers[%s]', req.get_full_url(),
                      ' '.join(['%s: %s' % (x, req.headers[x]) for x in req.headers]))
        # Parse the XML response; each child element becomes a dict entry.
        gridInfoXml = xml.etree.ElementTree.parse(urllib2.urlopen(req))
        for e in gridInfoXml.findall('/*'):
            if e.text: gridInfo[e.tag] = e.text
            logging.debug('gridinfo.GetGridInfo: %s = %s', e.tag, e.text)
    except urllib2.URLError, ue:
        logging.error('gridinfo.GetGridInfo: oops, failed to retrieve grid info: %s', ue)
        # we can get any number of URLErrors here, unfortunately they
        # are not uniform, hence i need to test for attribs
        if getattr(ue, 'reason', None):
            gridInfo['error'] = str(ue.reason)
        elif getattr(ue, 'code', None):
            # the server/proxy can send back a 404 if the region does not exist
            if ue.code == 404:
                gridInfo['error'] = 'region not found (%s)' % ue.msg
            else:
                gridInfo['error'] = 'server returned a "%s"' % str(ue.code)
        else:
            gridInfo['error'] = 'connection failed (network problem)'
    except Exception, ex:
        gridInfo['error'] = 'connection failed (general problem: %s)' % str(ex)
    # Fill in required keys the grid did not supply.
    gridKeys = gridInfo.keys()
    if not 'login' in gridKeys:
        gridInfo['login'] = '%s/' % uri.BaseHttpUri
        logging.info('gridinfo.GetGridInfo: curing missing "login" key with %s', gridInfo['login'])
    if not 'gridname' in gridKeys:
        gridInfo['gridname'] = '-'
        logging.info('gridinfo.GetGridInfo: curing missing "gridname" key with %s', gridInfo['gridname'])
    if not 'gridnick' in gridKeys:
        gridInfo['gridnick'] = 'grid'
        logging.info('gridinfo.GetGridInfo: curing missing "gridnick" key with %s', gridInfo['gridnick'])
    if not 'regioninfoavailable' in gridKeys:
        gridInfo['regioninfoavailable'] = False
    else:
        gridInfo['regioninfoavailable'] = (gridInfo['regioninfoavailable'].lower() == 'true')
    # construct the grid key: login server plus region only
    gridKey = gridInfo['login'].rstrip('/')
    if uri.Region:
        region = urllib.quote(uri.Region)
        gridKey = '%s/%s' % (gridKey, region)
    gridInfo['gridkey'] = urllib.quote(gridKey, '')
    return gridInfo
|
ubgarbage/gae-blog
|
refs/heads/master
|
django/contrib/admin/widgets.py
|
156
|
"""
Form Widget classes specific to the Django admin site.
"""
import django.utils.copycompat as copy
from django import forms
from django.forms.widgets import RadioFieldRenderer
from django.forms.util import flatatt
from django.utils.html import escape
from django.utils.text import truncate_words
from django.utils.translation import ugettext as _
from django.utils.safestring import mark_safe
from django.utils.encoding import force_unicode
from django.conf import settings
from django.core.urlresolvers import reverse, NoReverseMatch
class FilteredSelectMultiple(forms.SelectMultiple):
    """
    A SelectMultiple with a JavaScript filter interface.
    Note that the resulting JavaScript assumes that the jsi18n
    catalog has been loaded in the page
    """
    class Media:
        js = (settings.ADMIN_MEDIA_PREFIX + "js/core.js",
              settings.ADMIN_MEDIA_PREFIX + "js/SelectBox.js",
              settings.ADMIN_MEDIA_PREFIX + "js/SelectFilter2.js")
    def __init__(self, verbose_name, is_stacked, attrs=None, choices=()):
        # verbose_name is shown by the JS filter UI; is_stacked switches
        # between side-by-side and stacked layouts.
        self.verbose_name = verbose_name
        self.is_stacked = is_stacked
        super(FilteredSelectMultiple, self).__init__(attrs, choices)
    def render(self, name, value, attrs=None, choices=()):
        if attrs is None: attrs = {}
        attrs['class'] = 'selectfilter'
        # Concatenation without a space yields 'selectfilterstacked' —
        # presumably what the admin JS expects; confirm before changing.
        if self.is_stacked: attrs['class'] += 'stacked'
        output = [super(FilteredSelectMultiple, self).render(name, value, attrs, choices)]
        # Emit the inline script that attaches SelectFilter on page load.
        output.append(u'<script type="text/javascript">addEvent(window, "load", function(e) {')
        # TODO: "id_" is hard-coded here. This should instead use the correct
        # API to determine the ID dynamically.
        output.append(u'SelectFilter.init("id_%s", "%s", %s, "%s"); });</script>\n' % \
            (name, self.verbose_name.replace('"', '\\"'), int(self.is_stacked), settings.ADMIN_MEDIA_PREFIX))
        return mark_safe(u''.join(output))
class AdminDateWidget(forms.DateInput):
    """A DateInput wired up with the admin calendar JavaScript shortcuts."""

    class Media:
        js = (settings.ADMIN_MEDIA_PREFIX + "js/calendar.js",
              settings.ADMIN_MEDIA_PREFIX + "js/admin/DateTimeShortcuts.js")
    def __init__(self, attrs=None, format=None):
        # Bug fix: the old signature used a mutable default (attrs={}) and
        # silently discarded whatever attrs the caller passed.  Merge caller
        # attrs over the admin defaults instead; calls that pass no attrs
        # behave exactly as before.
        final_attrs = {'class': 'vDateField', 'size': '10'}
        if attrs is not None:
            final_attrs.update(attrs)
        super(AdminDateWidget, self).__init__(attrs=final_attrs, format=format)
class AdminTimeWidget(forms.TimeInput):
    """A TimeInput wired up with the admin clock JavaScript shortcuts."""

    class Media:
        js = (settings.ADMIN_MEDIA_PREFIX + "js/calendar.js",
              settings.ADMIN_MEDIA_PREFIX + "js/admin/DateTimeShortcuts.js")
    def __init__(self, attrs=None, format=None):
        # Bug fix: the old signature used a mutable default (attrs={}) and
        # silently discarded whatever attrs the caller passed.  Merge caller
        # attrs over the admin defaults instead; calls that pass no attrs
        # behave exactly as before.
        final_attrs = {'class': 'vTimeField', 'size': '8'}
        if attrs is not None:
            final_attrs.update(attrs)
        super(AdminTimeWidget, self).__init__(attrs=final_attrs, format=format)
class AdminSplitDateTime(forms.SplitDateTimeWidget):
    """
    A SplitDateTime Widget that has some admin-specific styling.
    """
    def __init__(self, attrs=None):
        # Use the admin date/time widgets as the two sub-widgets.
        widgets = [AdminDateWidget, AdminTimeWidget]
        # Note that we're calling MultiWidget, not SplitDateTimeWidget, because
        # we want to define widgets.
        forms.MultiWidget.__init__(self, widgets, attrs)
    def format_output(self, rendered_widgets):
        # Wrap both sub-widgets in a single labeled <p> element.
        return mark_safe(u'<p class="datetime">%s %s<br />%s %s</p>' % \
            (_('Date:'), rendered_widgets[0], _('Time:'), rendered_widgets[1]))
class AdminRadioFieldRenderer(RadioFieldRenderer):
    """Renderer that wraps each admin radio input in an <li> of a <ul>."""

    def render(self):
        """Outputs a <ul> for this set of radio fields."""
        attrs = flatatt(self.attrs)
        items = [u'<li>%s</li>' % force_unicode(widget) for widget in self]
        return mark_safe(u'<ul%s>\n%s\n</ul>' % (attrs, u'\n'.join(items)))
class AdminRadioSelect(forms.RadioSelect):
    # Use the admin-specific <ul>-based renderer for radio groups.
    renderer = AdminRadioFieldRenderer
class AdminFileWidget(forms.ClearableFileInput):
    # Wrap the stock clearable-file-input templates in admin styling hooks.
    template_with_initial = (u'<p class="file-upload">%s</p>'
                            % forms.ClearableFileInput.template_with_initial)
    template_with_clear = (u'<span class="clearable-file-input">%s</span>'
                          % forms.ClearableFileInput.template_with_clear)
def url_params_from_lookup_dict(lookups):
    """
    Converts the type of lookups specified in a ForeignKey limit_choices_to
    attribute to a dictionary of query parameters
    """
    params = {}
    if lookups and hasattr(lookups, 'items'):
        for key, value in lookups.items():
            if isinstance(value, list):
                value = u','.join([str(item) for item in value])
            elif isinstance(value, bool):
                # See django.db.fields.BooleanField.get_prep_lookup
                value = '1' if value else '0'
            else:
                value = unicode(value)
            params[key] = value
    return params
class ForeignKeyRawIdWidget(forms.TextInput):
    """
    A Widget for displaying ForeignKeys in the "raw_id" interface rather than
    in a <select> box.
    """
    def __init__(self, rel, attrs=None, using=None):
        # rel: the relation descriptor; using: optional database alias.
        self.rel = rel
        self.db = using
        super(ForeignKeyRawIdWidget, self).__init__(attrs)
    def render(self, name, value, attrs=None):
        """Render the text input plus a magnifying-glass lookup link."""
        if attrs is None:
            attrs = {}
        # Relative URL to the related model's changelist (admin URL layout).
        related_url = '../../../%s/%s/' % (self.rel.to._meta.app_label, self.rel.to._meta.object_name.lower())
        params = self.url_parameters()
        if params:
            url = u'?' + u'&'.join([u'%s=%s' % (k, v) for k, v in params.items()])
        else:
            url = u''
        if "class" not in attrs:
            attrs['class'] = 'vForeignKeyRawIdAdminField' # The JavaScript looks for this hook.
        output = [super(ForeignKeyRawIdWidget, self).render(name, value, attrs)]
        # TODO: "id_" is hard-coded here. This should instead use the correct
        # API to determine the ID dynamically.
        output.append(u'<a href="%s%s" class="related-lookup" id="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> ' % \
            (related_url, url, name))
        output.append(u'<img src="%simg/admin/selector-search.gif" width="16" height="16" alt="%s" /></a>' % (settings.ADMIN_MEDIA_PREFIX, _('Lookup')))
        if value:
            output.append(self.label_for_value(value))
        return mark_safe(u''.join(output))
    def base_url_parameters(self):
        # Query parameters derived from the relation's limit_choices_to.
        return url_params_from_lookup_dict(self.rel.limit_choices_to)
    def url_parameters(self):
        from django.contrib.admin.views.main import TO_FIELD_VAR
        params = self.base_url_parameters()
        params.update({TO_FIELD_VAR: self.rel.get_related_field().name})
        return params
    def label_for_value(self, value):
        # Look up the related object to show a human-readable label next to
        # the raw id; silently fall back to '' when it cannot be found.
        key = self.rel.get_related_field().name
        try:
            obj = self.rel.to._default_manager.using(self.db).get(**{key: value})
            return ' <strong>%s</strong>' % escape(truncate_words(obj, 14))
        except (ValueError, self.rel.to.DoesNotExist):
            return ''
class ManyToManyRawIdWidget(ForeignKeyRawIdWidget):
    """
    A Widget for displaying ManyToMany ids in the "raw_id" interface rather than
    in a <select multiple> box.
    """
    def render(self, name, value, attrs=None):
        if attrs is None:
            attrs = {}
        attrs['class'] = 'vManyToManyRawIdAdminField'
        # Render the list of related ids as a single comma-separated string.
        if value:
            value = ','.join([force_unicode(v) for v in value])
        else:
            value = ''
        return super(ManyToManyRawIdWidget, self).render(name, value, attrs)
    def url_parameters(self):
        # Unlike the FK widget, no TO_FIELD_VAR is added here.
        return self.base_url_parameters()
    def label_for_value(self, value):
        # No inline label for many-to-many raw ids.
        return ''
    def value_from_datadict(self, data, files, name):
        # Split the comma-separated ids back into a list; note this returns
        # None (implicitly) when the field is absent or empty.
        value = data.get(name)
        if value:
            return value.split(',')
    def _has_changed(self, initial, data):
        # Compare the two id lists element-wise as unicode strings.
        if initial is None:
            initial = []
        if data is None:
            data = []
        if len(initial) != len(data):
            return True
        for pk1, pk2 in zip(initial, data):
            if force_unicode(pk1) != force_unicode(pk2):
                return True
        return False
class RelatedFieldWidgetWrapper(forms.Widget):
    """
    This class is a wrapper to a given widget to add the add icon for the
    admin interface.
    """
    def __init__(self, widget, rel, admin_site, can_add_related=None):
        # Mirror the wrapped widget's public attributes so form machinery
        # that inspects the widget directly keeps working.
        self.is_hidden = widget.is_hidden
        self.needs_multipart_form = widget.needs_multipart_form
        self.attrs = widget.attrs
        self.choices = widget.choices
        self.widget = widget
        self.rel = rel
        # Backwards compatible check for whether a user can add related
        # objects.
        if can_add_related is None:
            can_add_related = rel.to in admin_site._registry
        self.can_add_related = can_add_related
        # so we can check if the related object is registered with this AdminSite
        self.admin_site = admin_site

    def __deepcopy__(self, memo):
        """Deep-copy only the wrapped widget; share rel/admin_site references."""
        obj = copy.copy(self)
        obj.widget = copy.deepcopy(self.widget, memo)
        obj.attrs = self.widget.attrs
        memo[id(self)] = obj
        return obj

    def _media(self):
        # Delegate media to the wrapped widget.
        return self.widget.media
    media = property(_media)

    def render(self, name, value, *args, **kwargs):
        """Render the wrapped widget followed by the "add another" link."""
        rel_to = self.rel.to
        info = (rel_to._meta.app_label, rel_to._meta.object_name.lower())
        try:
            related_url = reverse('admin:%s_%s_add' % info, current_app=self.admin_site.name)
        except NoReverseMatch:
            # Fall back to a hand-built URL when the admin add view is not
            # registered under the namespaced URL name.
            info = (self.admin_site.root_path, rel_to._meta.app_label, rel_to._meta.object_name.lower())
            related_url = '%s%s/%s/add/' % info
        self.widget.choices = self.choices
        output = [self.widget.render(name, value, *args, **kwargs)]
        if self.can_add_related:
            # TODO: "id_" is hard-coded here. This should instead use the correct
            # API to determine the ID dynamically.
            output.append(u'<a href="%s" class="add-another" id="add_id_%s" onclick="return showAddAnotherPopup(this);"> ' % \
                (related_url, name))
            output.append(u'<img src="%simg/admin/icon_addlink.gif" width="10" height="10" alt="%s"/></a>' % (settings.ADMIN_MEDIA_PREFIX, _('Add Another')))
        return mark_safe(u''.join(output))

    def build_attrs(self, extra_attrs=None, **kwargs):
        "Helper function for building an attribute dictionary."
        # Bug fix: forward extra_attrs to the wrapped widget. The original
        # hard-coded extra_attrs=None, silently discarding the caller's
        # extra attributes.
        self.attrs = self.widget.build_attrs(extra_attrs=extra_attrs, **kwargs)
        return self.attrs

    def value_from_datadict(self, data, files, name):
        return self.widget.value_from_datadict(data, files, name)

    def _has_changed(self, initial, data):
        return self.widget._has_changed(initial, data)

    def id_for_label(self, id_):
        return self.widget.id_for_label(id_)
class AdminTextareaWidget(forms.Textarea):
    """Textarea tagged with the admin's 'vLargeTextField' CSS class."""
    def __init__(self, attrs=None):
        merged = {'class': 'vLargeTextField'}
        if attrs is not None:
            merged.update(attrs)
        super(AdminTextareaWidget, self).__init__(attrs=merged)
class AdminTextInputWidget(forms.TextInput):
    """Text input tagged with the admin's 'vTextField' CSS class."""
    def __init__(self, attrs=None):
        merged = {'class': 'vTextField'}
        if attrs is not None:
            merged.update(attrs)
        super(AdminTextInputWidget, self).__init__(attrs=merged)
class AdminURLFieldWidget(forms.TextInput):
    """URL input tagged with the admin's 'vURLField' CSS class."""
    def __init__(self, attrs=None):
        merged = {'class': 'vURLField'}
        if attrs is not None:
            merged.update(attrs)
        super(AdminURLFieldWidget, self).__init__(attrs=merged)
class AdminIntegerFieldWidget(forms.TextInput):
    """Integer input tagged with the admin's 'vIntegerField' CSS class."""
    def __init__(self, attrs=None):
        merged = {'class': 'vIntegerField'}
        if attrs is not None:
            merged.update(attrs)
        super(AdminIntegerFieldWidget, self).__init__(attrs=merged)
class AdminCommaSeparatedIntegerFieldWidget(forms.TextInput):
    """Input tagged with the admin's 'vCommaSeparatedIntegerField' CSS class."""
    def __init__(self, attrs=None):
        merged = {'class': 'vCommaSeparatedIntegerField'}
        if attrs is not None:
            merged.update(attrs)
        super(AdminCommaSeparatedIntegerFieldWidget, self).__init__(attrs=merged)
|
Ensembles/ert
|
refs/heads/master
|
python/tests/core/util/test_work_area.py
|
2
|
#!/usr/bin/env python
# Copyright (C) 2014 Statoil ASA, Norway.
#
# The file 'test_work_area.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
import os.path
import os
try:
from unittest2 import skipIf
except ImportError:
from unittest import skipIf
from ert.test import ExtendedTestCase , TestAreaContext, TempAreaContext
class WorkAreaTest(ExtendedTestCase):
    """Tests for the TestAreaContext / TempAreaContext working-area helpers."""

    def test_full_path(self):
        with TestAreaContext("TestArea") as work_area:
            with open("test_file", "w") as handle:
                handle.write("Something")
            self.assertTrue(os.path.isfile("test_file"))
            # Both unknown relative paths and absolute paths are rejected.
            with self.assertRaises(IOError):
                work_area.getFullPath("does/not/exists")
            with self.assertRaises(IOError):
                work_area.getFullPath("/already/absolute")
            resolved = work_area.getFullPath("test_file")
            self.assertTrue(os.path.isfile(resolved))
            self.assertTrue(os.path.isabs(resolved))

    def test_temp_area(self):
        with TestAreaContext("TestArea") as work_area:
            start_dir = os.getcwd()
            with open("file.txt", "w") as handle:
                handle.write("File")
            with TempAreaContext("TempArea") as temp_area:
                # Entering a TempAreaContext must not change the cwd.
                self.assertEqual(start_dir, os.getcwd())
                self.assertEqual(start_dir, temp_area.get_cwd())
                temp_area.copy_file("file.txt")
                self.assertTrue(os.path.isfile(os.path.join(temp_area.getPath(), "file.txt")))
            os.mkdir("tmp")
            os.chdir("tmp")
            self.assertEqual(os.getcwd(), os.path.join(start_dir, "tmp"))

    def test_IOError(self):
        with TestAreaContext("TestArea") as work_area:
            # Every copy/install helper raises IOError for a missing source.
            with self.assertRaises(IOError):
                work_area.copy_file("Does/not/exist")
            with self.assertRaises(IOError):
                work_area.install_file("Does/not/exist")
            with self.assertRaises(IOError):
                work_area.copy_directory("Does/not/exist")
            with self.assertRaises(IOError):
                work_area.copy_parent_directory("Does/not/exist")
            os.makedirs("path1/path2")
            with open("path1/file.txt", "w") as handle:
                handle.write("File ...")
            # copy_directory() must refuse a plain-file argument.
            with self.assertRaises(IOError):
                work_area.copy_directory("path1/file.txt")

    def test_sync(self):
        with TestAreaContext("test_sync") as work_area:
            with open("file.txt", "w") as handle:
                handle.write("content")
            work_area.sync()
            self.assertTrue(os.path.isfile("file.txt"))
|
Dev4X/oppia
|
refs/heads/master
|
extensions/interactions/LogicProof/LogicProof.py
|
9
|
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'Jacob Davis'
from extensions.interactions import base
class LogicProof(base.BaseInteraction):
    """Interaction for entering logic proofs."""
    # Display name shown when choosing an interaction.
    name = 'Logic Proof'
    description = (
        'Allows learners to write proofs for simple logical statements.')
    # Rendered as a supplemental (below-content) interaction, not inline.
    display_mode = base.DISPLAY_MODE_SUPPLEMENTAL
    # Frontend dependencies: the proof engine and the CodeMirror editor.
    _dependency_ids = ['logic_proof', 'codemirror']
    # Submitted answers are checked proofs.
    answer_type = 'CheckedProof'
    # Single customization argument: the question, given as assumptions and
    # target results in the LogicQuestion format (each proposition is a dict
    # describing an expression node). The default asks the learner to prove
    # 'p' from the assumption 'p'.
    _customization_arg_specs = [{
        'name': 'question',
        'description': 'Question to ask',
        'schema': {
            'type': 'custom',
            'obj_type': 'LogicQuestion',
        },
        'default_value': {
            'assumptions': [{
                'top_kind_name': 'variable',
                'top_operator_name': 'p',
                'arguments': [],
                'dummies': []
            }],
            'results': [{
                'top_kind_name': 'variable',
                'top_operator_name': 'p',
                'arguments': [],
                'dummies': []
            }],
            'default_proof_string': ''
        },
    }]
|
hyu754/hyu754.github.io
|
refs/heads/master
|
tutorials/examples/materials_and_texture/icons_rc.py
|
8
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created: Wed Jan 9 11:04:13 2013
# by: The Resource Compiler for PyQt (Qt v4.8.4)
#
# WARNING! All changes made in this file will be lost!
try:
from PySide import QtCore
except ImportError:
from PyQt4 import QtCore
qt_resource_data = b"\
\x00\x00\x96\x4a\
\x00\
\x00\x01\x00\x01\x00\x5e\x63\x00\x00\x01\x00\x20\x00\x34\x96\x00\
\x00\x16\x00\x00\x00\x28\x00\x00\x00\x5e\x00\x00\x00\xc6\x00\x00\
\x00\x01\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\x00\
\x01\x55\xaa\x00\x03\xff\x00\x00\x02\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\xff\xff\x02\x00\xff\xff\x04\x00\xff\xff\x05\x00\xff\xff\
\x0c\x20\xf1\xff\x30\xe8\xfb\xfe\xff\xe1\xf8\xfa\xff\xdb\xf7\xf8\
\xff\xd9\xf8\xfb\xff\xdc\xf9\xfe\xff\xd3\xef\xf6\xff\xd3\xef\xf6\
\xff\xd5\xef\xf6\xff\xd5\xef\xf6\xff\xd5\xee\xf8\xff\xd5\xee\xf8\
\xff\xd4\xed\xf7\xff\xd6\xec\xf8\xff\xdb\xee\xfd\xff\xda\xec\xfd\
\xff\xd8\xe9\xfe\xff\xd9\xe7\xfe\xff\xd6\xe3\xfd\xff\xd6\xe2\xfe\
\xff\xd7\xe4\xfe\xff\xd8\xe6\xfd\xff\xdf\xf0\xff\xff\xdd\xf0\xff\
\xff\xdc\xef\xff\xff\xda\xef\xff\xff\xda\xef\xff\xff\xda\xef\xff\
\xff\xdb\xf0\xff\xff\xdb\xf0\xff\xff\xd5\xea\xff\xff\xd4\xea\xfc\
\xff\xd4\xea\xfc\xff\xd6\xea\xfb\xff\xd7\xec\xfb\xff\xd8\xee\xfa\
\xff\xd7\xed\xf8\xff\xd6\xec\xf7\xff\xd5\xee\xf8\xff\xd3\xf0\xf9\
\xff\xd3\xf0\xf9\xff\xd3\xf0\xf9\xff\xd3\xf0\xf7\xff\xd3\xf0\xf7\
\xff\xd4\xf1\xf6\xff\xd6\xf0\xf6\xff\xe2\xf8\xfd\xff\xe3\xf7\xfc\
\xff\xbb\xf2\xfc\xd0\x00\xff\xff\x0d\x00\xe8\xe8\x0b\x00\xaa\xd5\
\x06\x00\xaa\xff\x03\x00\xaa\xaa\x03\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\x00\x01\xff\xff\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xff\
\x02\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\x00\x00\x01\xff\x55\x00\x03\xff\x55\x00\x03\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\x00\x00\x03\x00\x27\x27\x0d\xd9\xe0\xe2\
\xef\xdc\xe4\xe4\xff\xd0\xdc\xde\xff\xc3\xd2\xd4\xff\xb9\xcc\xcf\
\xff\xb6\xcd\xcf\xff\xb3\xd0\xd4\xff\xaf\xd2\xd5\xff\xad\xd1\xd7\
\xff\xab\xd2\xda\xff\xa7\xd3\xda\xff\xa6\xd2\xd9\xff\xa8\xd2\xd9\
\xff\xa8\xd1\xda\xff\xa8\xd1\xda\xff\xa8\xd0\xdc\xff\xaa\xcf\xdd\
\xff\xad\xd1\xe1\xff\xa5\xc8\xdc\xff\x9a\xbc\xd4\xff\x99\xb7\xd2\
\xff\x9b\xb9\xd6\xff\x9e\xbb\xda\xff\x9e\xbb\xda\xff\x9d\xba\xd9\
\xff\x9a\xb6\xd8\xff\x95\xb3\xd6\xff\x90\xb0\xd4\xff\x8e\xae\xd2\
\xff\x8f\xaf\xd3\xff\x92\xb2\xd6\xff\x96\xb6\xda\xff\x97\xb9\xdd\
\xff\x9c\xbf\xe1\xff\x99\xbc\xdd\xff\x99\xbc\xdd\xff\x9f\xc1\xdf\
\xff\xa5\xc7\xe4\xff\xa8\xcb\xe5\xff\xaa\xcc\xe4\xff\xac\xcb\xe0\
\xff\xad\xce\xde\xff\xad\xcf\xdc\xff\xad\xd1\xd9\xff\xae\xd2\xda\
\xff\xae\xd2\xd8\xff\xae\xd2\xd8\xff\xb0\xd2\xd8\xff\xb3\xd0\xd7\
\xff\xb6\xd0\xd6\xff\xba\xd0\xd6\xff\xc1\xd4\xd9\xff\xcd\xdc\xdf\
\xff\xda\xe6\xe8\xff\xbb\xcb\xd5\xd0\x00\x71\x8e\x09\x00\xff\xff\
\x02\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\x00\
\x01\xff\xff\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\xff\x02\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\x00\x00\x01\xff\x80\x00\
\x02\xff\xff\x00\x01\xff\xff\xff\x00\x00\x00\x00\x02\x9d\xa7\xa7\
\xb1\xd5\xd7\xd7\xff\xc2\xc6\xc7\xff\xc9\xd1\xd1\xff\xba\xc3\xc6\
\xff\xa8\xb7\xb9\xff\xa1\xb6\xb8\xff\xa4\xbf\xc3\xff\xa8\xcb\xcf\
\xff\xa8\xcf\xd7\xff\xa3\xd1\xd9\xff\x9e\xcf\xd9\xff\x9a\xcf\xd9\
\xff\x99\xd0\xd9\xff\x9a\xcf\xd9\xff\x9a\xcf\xd9\xff\x9a\xce\xda\
\xff\x99\xcd\xda\xff\x99\xcb\xdd\xff\x95\xc6\xdc\xff\x87\xb6\xd2\
\xff\x77\xa4\xc5\xff\x72\x9c\xc1\xff\x77\xa0\xc7\xff\x7d\xa5\xcf\
\xff\x7d\xa5\xcf\xff\x7c\xa2\xcc\xff\x83\xaa\xd6\xff\x7c\xa4\xd4\
\xff\x76\x9e\xce\xff\x72\x9a\xcb\xff\x73\x9b\xcc\xff\x77\x9f\xd0\
\xff\x80\xa8\xd8\xff\x84\xae\xdd\xff\x80\xaa\xd9\xff\x7e\xa8\xd5\
\xff\x7f\xa9\xd6\xff\x87\xb1\xdc\xff\x91\xbb\xe6\xff\x98\xc2\xec\
\xff\x9b\xc5\xe8\xff\x9d\xc5\xe2\xff\xa0\xc9\xdf\xff\xa1\xca\xd9\
\xff\xa1\xcb\xd7\xff\xa2\xcc\xd8\xff\xa2\xcd\xd6\xff\xa4\xcd\xd6\
\xff\xa6\xcd\xd5\xff\xa9\xcd\xd5\xff\xa1\xc0\xc9\xff\x9d\xb9\xc0\
\xff\x9c\xb4\xba\xff\xa2\xb2\xb8\xff\xaf\xbb\xbf\xff\xc1\xc8\xcb\
\xff\xd5\xd9\xda\xff\xe4\xe6\xe6\xff\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\x00\x01\xff\xff\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x24\x49\x00\x07\xff\xff\x00\x01\x00\x00\x00\
\x01\xe3\xe5\xe5\xff\xb8\xba\xba\xff\xc1\xc3\xc3\xff\xc1\xc3\xc3\
\xff\xc3\xc9\xc8\xff\xca\xd2\xd2\xff\x9c\xab\xad\xff\x9a\xb0\xb5\
\xff\xa4\xc4\xca\xff\xa3\xcc\xd5\xff\x9a\xca\xd6\xff\x9d\xd0\xe0\
\xff\x9a\xd0\xe1\xff\x96\xcf\xde\xff\x9a\xd4\xe0\xff\x95\xd0\xda\
\xff\x8b\xc8\xd2\xff\x94\xd0\xdc\xff\x99\xd4\xe7\xff\x8c\xc5\xde\
\xff\x92\xc9\xe8\xff\x72\xa6\xcf\xff\x66\x97\xc7\xff\x6f\x9e\xd2\
\xff\x70\x9d\xd6\xff\x71\x9e\xd7\xff\x79\xa6\xdf\xff\x7c\xa7\xe0\
\xff\x73\xa0\xd9\xff\x6e\x9b\xd4\xff\x6a\x97\xd0\xff\x6c\x98\xd3\
\xff\x6d\x9a\xd3\xff\x6e\x9b\xd4\xff\x70\x9d\xd6\xff\x71\x9e\xd7\
\xff\x74\xa1\xda\xff\x7a\xa7\xe0\xff\x7a\xa8\xde\xff\x72\xa0\xd6\
\xff\x6f\x9d\xd3\xff\x76\xa5\xd9\xff\x88\xb9\xe7\xff\x99\xc8\xee\
\xff\x95\xc7\xe3\xff\x95\xc8\xdc\xff\x99\xcc\xdc\xff\x9f\xd2\xe2\
\xff\x9e\xd2\xdf\xff\x9a\xcb\xd9\xff\x9a\xca\xd6\xff\xa0\xcd\xd8\
\xff\x9d\xc6\xcf\xff\x96\xba\xc2\xff\x91\xae\xb5\xff\x88\x9e\xa3\
\xff\xbc\xcb\xcd\xff\xbe\xc6\xc6\xff\xc1\xc6\xc5\xff\xbb\xbb\xbb\
\xff\x3f\x3f\x3f\x50\x00\x00\x00\x05\x00\xff\xff\x02\xff\xff\xff\
\x00\x00\x00\x00\x03\x00\x00\x00\x01\xff\xff\xff\x00\x00\x00\x00\
\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x40\x80\x00\
\x04\xff\xff\x00\x01\x00\xff\xff\x02\xbb\xc2\xc2\xd0\xb5\xb7\xb7\
\xff\xc0\xc2\xc2\xff\xc6\xc8\xc8\xff\xca\xcc\xcc\xff\xcc\xd1\xd2\
\xff\xac\xb9\xbb\xff\x90\xa6\xac\xff\x93\xb4\xbd\xff\x94\xc0\xcd\
\xff\x99\xcc\xdc\xff\x98\xd0\xe3\xff\x9a\xd3\xe8\xff\x91\xcc\xdf\
\xff\x95\xd1\xe1\xff\x99\xd6\xe4\xff\x91\xd0\xde\xff\x8f\xd0\xdf\
\xff\x91\xd2\xe7\xff\x8d\xc9\xe7\xff\x78\xb3\xdb\xff\x67\x9f\xd0\
\xff\x60\x95\xce\xff\x62\x95\xd4\xff\x61\x93\xd5\xff\x6b\x9b\xdd\
\xff\x74\xa4\xe6\xff\x72\xa2\xe2\xff\x6a\x9b\xd9\xff\x66\x97\xd5\
\xff\x64\x95\xd3\xff\x66\x97\xd5\xff\x68\x99\xd7\xff\x67\x98\xd6\
\xff\x68\x99\xd7\xff\x6a\x9b\xd9\xff\x70\xa1\xdf\xff\x6e\x9f\xdd\
\xff\x6b\x9c\xda\xff\x65\x96\xd4\xff\x62\x94\xd0\xff\x66\x98\xd4\
\xff\x6f\xa3\xd8\xff\x79\xad\xdb\xff\x93\xc9\xec\xff\x97\xcf\xe8\
\xff\x95\xce\xe3\xff\x92\xc9\xde\xff\x96\xcb\xdf\xff\x9d\xd3\xe4\
\xff\x9b\xcf\xdf\xff\x94\xc5\xd3\xff\x91\xbe\xc9\xff\x8e\xb5\xbe\
\xff\x89\xab\xb1\xff\x86\x9f\xa3\xff\xbc\xcb\xcd\xff\xbe\xc6\xc6\
\xff\xbd\xbf\xbf\xff\xb5\xb3\xb3\xff\x27\x00\x00\x0d\x00\x00\x00\
\x04\x00\xff\xff\x02\x00\x00\x00\x01\x00\x00\x00\x03\x00\x00\x00\
\x01\xff\xff\xff\x00\x00\x00\x00\x02\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\x00\x00\x01\xff\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\
\x02\x00\x33\x4c\x0a\xb1\xb3\xb3\xff\xbd\xbf\xbf\xff\xcc\xcd\xcb\
\xff\xd3\xd4\xd2\xff\xd1\xd6\xd5\xff\xc8\xd2\xd2\xff\x95\xa9\xae\
\xff\x8c\xab\xb4\xff\x90\xba\xc7\xff\x9c\xcf\xdf\xff\x9b\xd3\xe6\
\xff\x9d\xd8\xec\xff\x96\xd4\xe6\xff\x99\xd7\xe9\xff\x9e\xdc\xec\
\xff\x99\xd9\xeb\xff\x95\xd6\xeb\xff\x8f\xd0\xec\xff\x87\xc3\xe7\
\xff\x65\xa0\xce\xff\x63\x9a\xd3\xff\x5f\x94\xd3\xff\x59\x8d\xd0\
\xff\x56\x89\xcf\xff\x62\x95\xdb\xff\x6c\xa0\xe3\xff\x6a\x9b\xdf\
\xff\x62\x94\xd6\xff\x60\x93\xd2\xff\x62\x95\xd4\xff\x64\x97\xd6\
\xff\x65\x98\xd7\xff\x63\x95\xd7\xff\x62\x94\xd6\xff\x63\x95\xd7\
\xff\x64\x97\xd6\xff\x5d\x90\xcf\xff\x5b\x8e\xcd\xff\x5f\x92\xd1\
\xff\x62\x95\xd3\xff\x60\x96\xd3\xff\x62\x99\xd2\xff\x67\x9e\xd1\
\xff\x79\xb0\xdb\xff\x8f\xc8\xe8\xff\x9e\xd8\xf5\xff\x9f\xd8\xf2\
\xff\x9c\xd4\xed\xff\x9d\xd4\xe9\xff\x99\xcf\xe0\xff\x93\xc6\xd6\
\xff\x8d\xba\xc7\xff\x8b\xb5\xbc\xff\x89\xac\xb0\xff\x8f\xa8\xac\
\xff\xc0\xcf\xd1\xff\xc5\xca\xcb\xff\xbc\xbc\xbc\xff\xb7\xb2\xb3\
\xff\x8e\x00\x1c\x09\x00\x00\x00\x02\xff\xff\xff\x00\x00\x00\x00\
\x02\x00\x00\x00\x02\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\
\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\x02\xff\xff\xff\
\x00\x00\xaa\xaa\x03\x00\x80\xbf\x04\x00\xff\xff\x02\xb5\xb7\xb7\
\xff\xba\xba\xba\xff\xce\xcc\xcb\xff\xd7\xd5\xd4\xff\xd4\xd6\xd6\
\xff\xdb\xe3\xe3\xff\xad\xbe\xc1\xff\x8e\xaa\xb1\xff\x8e\xb6\xc2\
\xff\x97\xc8\xd6\xff\x9c\xd2\xe3\xff\x9a\xd5\xe8\xff\x9f\xdd\xef\
\xff\x9e\xde\xf0\xff\x99\xd8\xec\xff\x98\xd9\xee\xff\x9b\xdc\xf8\
\xff\x8a\xca\xec\xff\x73\xaf\xd9\xff\x66\xa0\xd4\xff\x66\x9d\xda\
\xff\x61\x96\xd9\xff\x56\x8a\xd1\xff\x53\x87\xce\xff\x5b\x8f\xd6\
\xff\x61\x95\xdb\xff\x5f\x92\xd8\xff\x5c\x90\xd3\xff\x5b\x8f\xd1\
\xff\x5c\x91\xd0\xff\x60\x94\xd6\xff\x62\x96\xd8\xff\x60\x94\xd7\
\xff\x5c\x90\xd3\xff\x5b\x8f\xd1\xff\x55\x89\xcb\xff\x4e\x82\xc4\
\xff\x4e\x82\xc4\xff\x59\x8d\xcf\xff\x60\x94\xd6\xff\x5e\x94\xd5\
\xff\x5d\x94\xd1\xff\x62\x99\xd2\xff\x64\x9b\xce\xff\x73\xad\xd7\
\xff\x90\xca\xee\xff\xa1\xd9\xfc\xff\xa3\xda\xf9\xff\x9a\xd2\xeb\
\xff\x95\xca\xde\xff\x95\xc9\xd9\xff\x8c\xbd\xc7\xff\x8d\xb7\xbe\
\xff\x8b\xab\xb0\xff\x9b\xb1\xb6\xff\xc4\xd1\xd3\xff\xc8\xcd\xce\
\xff\xbc\xbc\xbc\xff\xbd\xb8\xb9\xff\xff\x00\x40\x04\xff\xff\xff\
\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\x00\x00\x02\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\x00\x00\x02\x00\xff\xff\x02\x00\xaa\xaa\x03\x00\x80\xbf\
\x04\x00\xff\xff\x02\xc1\xc3\xc3\xff\xb7\xb7\xb7\xff\xcc\xc8\xc7\
\xff\xd6\xd2\xd1\xff\xd4\xd2\xd1\xff\xdc\xe1\xe0\xff\xc3\xd0\xd2\
\xff\x90\xa8\xae\xff\x8b\xaf\xb7\xff\x8c\xb9\xc6\xff\x9a\xce\xde\
\xff\x98\xd3\xe3\xff\x9b\xdb\xed\xff\x98\xda\xed\xff\x95\xd6\xeb\
\xff\x95\xd6\xf2\xff\x90\xd0\xf2\xff\x77\xb8\xdf\xff\x60\x9b\xcc\
\xff\x6d\xa8\xe0\xff\x65\x9d\xde\xff\x59\x8f\xd6\xff\x52\x87\xd0\
\xff\x53\x88\xd1\xff\x53\x89\xd0\xff\x54\x8b\xd0\xff\x58\x8c\xd2\
\xff\x55\x8a\xcd\xff\x55\x8a\xcd\xff\x57\x8d\xce\xff\x5c\x91\xd4\
\xff\x5d\x92\xd5\xff\x5b\x90\xd3\xff\x56\x8b\xce\xff\x56\x8b\xce\
\xff\x52\x87\xca\xff\x49\x7e\xc1\xff\x47\x7c\xbf\xff\x4d\x82\xc5\
\xff\x52\x87\xca\xff\x51\x86\xc9\xff\x55\x8b\xcc\xff\x5c\x92\xd1\
\xff\x68\x9e\xda\xff\x65\x9e\xd2\xff\x6c\xa4\xd5\xff\x7a\xb4\xde\
\xff\x8e\xc5\xea\xff\x95\xcd\xea\xff\x91\xc8\xdd\xff\x8e\xc1\xd1\
\xff\x8e\xbc\xc7\xff\x8f\xb6\xbe\xff\x8a\xa9\xac\xff\xa2\xb7\xb9\
\xff\xbe\xcb\xcd\xff\xc2\xc7\xc8\xff\xba\xb9\xbb\xff\xc8\xc7\xc9\
\xff\xff\x00\xff\x01\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\
\x02\x00\x00\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\
\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x80\x80\
\x04\x00\xff\xff\x02\x00\x7f\x9f\x08\x00\xff\xff\x04\xdb\xdb\xdb\
\xff\xb9\xb7\xb7\xff\xc5\xc1\xc0\xff\xd2\xcd\xcc\xff\xd3\xcf\xce\
\xff\xd5\xd7\xd7\xff\xca\xd6\xd8\xff\x96\xaa\xaf\xff\x87\xa6\xaf\
\xff\x85\xaf\xbb\xff\x96\xca\xd7\xff\x99\xd4\xe3\xff\x93\xd4\xe3\
\xff\x90\xd4\xe7\xff\x97\xd9\xf2\xff\x94\xd5\xf4\xff\x7b\xbc\xe3\
\xff\x65\xa3\xd3\xff\x61\x9c\xd4\xff\x6d\xa8\xe6\xff\x5f\x98\xdd\
\xff\x4f\x86\xcf\xff\x4d\x84\xcf\xff\x56\x8a\xd6\xff\x52\x87\xd0\
\xff\x4e\x84\xcb\xff\x53\x8a\xcf\xff\x4e\x85\xca\xff\x4f\x86\xc9\
\xff\x51\x88\xcb\xff\x56\x8d\xd0\xff\x59\x90\xd3\xff\x57\x8e\xd1\
\xff\x55\x8c\xcf\xff\x55\x8c\xcf\xff\x54\x8b\xce\xff\x4d\x84\xc7\
\xff\x48\x7f\xc2\xff\x47\x7e\xc1\xff\x48\x7f\xc2\xff\x4c\x83\xc8\
\xff\x54\x8b\xd0\xff\x60\x97\xdc\xff\x65\x9a\xdd\xff\x6b\xa1\xe0\
\xff\x66\x9c\xd8\xff\x60\x99\xcc\xff\x6f\xa9\xd3\xff\x89\xbf\xe0\
\xff\x8d\xc3\xda\xff\x85\xb8\xc8\xff\x8e\xbb\xc6\xff\x90\xb4\xba\
\xff\x87\xa4\xa8\xff\xaa\xbe\xbf\xff\xb9\xc5\xc7\xff\xbc\xc0\xc1\
\xff\xb8\xba\xbb\xff\xdf\xde\xe0\xff\xff\x00\xff\x01\xff\xff\xff\
\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\x00\x00\x01\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\x00\x00\x01\x00\x40\x40\x08\x00\xff\xff\x04\x00\x71\x8e\
\x09\x00\xff\xff\x04\x00\x00\x00\x0a\xbe\xbc\xbc\xff\xc0\xbb\xba\
\xff\xc9\xc4\xc1\xff\xd0\xcd\xc9\xff\xd2\xd3\xd1\xff\xc8\xd2\xd2\
\xff\xa1\xb4\xb7\xff\x81\x9e\xa5\xff\x84\xad\xb6\xff\x8d\xbf\xcb\
\xff\x90\xcb\xda\xff\x8f\xd0\xdf\xff\x8e\xd1\xe6\xff\x93\xd4\xf0\
\xff\x88\xc8\xeb\xff\x69\xa8\xd4\xff\x5a\x98\xce\xff\x67\xa2\xe0\
\xff\x66\xa1\xe6\xff\x5b\x94\xdf\xff\x4a\x82\xcf\xff\x49\x81\xce\
\xff\x56\x8c\xd9\xff\x53\x8a\xd3\xff\x4b\x83\xca\xff\x4f\x88\xcd\
\xff\x4a\x83\xc8\xff\x4a\x83\xc8\xff\x4d\x86\xcb\xff\x53\x8c\xd1\
\xff\x58\x91\xd6\xff\x59\x93\xd5\xff\x59\x93\xd5\xff\x5b\x95\xd7\
\xff\x59\x93\xd5\xff\x58\x92\xd4\xff\x56\x90\xd2\xff\x54\x8d\xd2\
\xff\x56\x8d\xd2\xff\x5a\x91\xd6\xff\x61\x97\xde\xff\x66\x9b\xe4\
\xff\x5d\x92\xdb\xff\x6a\xa0\xe7\xff\x6e\xa3\xe6\xff\x61\x99\xd4\
\xff\x60\x9b\xc9\xff\x75\xac\xd1\xff\x83\xbb\xd4\xff\x8a\xbb\xcb\
\xff\x8d\xb8\xc1\xff\x8f\xb2\xb6\xff\x8a\xa3\xa5\xff\xb5\xc4\xc6\
\xff\xba\xc2\xc2\xff\xb7\xbb\xbc\xff\xbd\xbc\xbe\xff\x1c\x00\x39\
\x09\xff\x00\xff\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\xaa\xaa\x03\x00\x2b\x2b\
\x0c\x00\xff\xff\x04\x00\x71\x8e\x09\x00\x92\xb6\x07\xff\xff\xff\
\x00\xc0\xbe\xbd\xff\xbe\xb9\xb8\xff\xbe\xb9\xb6\xff\xcb\xc6\xc3\
\xff\xcf\xd0\xce\xff\xc5\xcd\xcd\xff\xac\xbd\xc0\xff\x7e\x9a\xa1\
\xff\x85\xac\xb5\xff\x7f\xb1\xbd\xff\x81\xbc\xcb\xff\x8c\xcc\xde\
\xff\x8e\xd1\xe8\xff\x87\xc8\xe7\xff\x76\xb7\xde\xff\x5e\x9b\xcd\
\xff\x56\x92\xcd\xff\x65\xa0\xe5\xff\x60\x99\xe4\xff\x5b\x93\xe0\
\xff\x4c\x84\xd3\xff\x49\x81\xd0\xff\x55\x8d\xda\xff\x56\x8d\xd6\
\xff\x4c\x84\xcb\xff\x4d\x86\xcb\xff\x4b\x84\xc9\xff\x4b\x84\xc9\
\xff\x4e\x87\xcc\xff\x55\x8e\xd3\xff\x5b\x94\xd9\xff\x5e\x98\xda\
\xff\x5f\x99\xdb\xff\x62\x9c\xde\xff\x61\x9b\xdd\xff\x66\xa0\xe2\
\xff\x68\xa2\xe4\xff\x67\xa0\xe5\xff\x66\x9f\xe4\xff\x65\x9e\xe3\
\xff\x63\x9b\xe2\xff\x60\x96\xe3\xff\x62\x97\xe7\xff\x61\x97\xe4\
\xff\x5f\x94\xdd\xff\x5a\x93\xd1\xff\x5e\x97\xca\xff\x6c\xa5\xcc\
\xff\x7e\xb5\xd0\xff\x90\xc1\xd1\xff\x8a\xb3\xbc\xff\x8f\xad\xb2\
\xff\x8b\xa0\xa1\xff\xba\xc8\xc7\xff\xb9\xbe\xbf\xff\xb4\xb6\xb7\
\xff\xbe\xbd\xbf\xff\x80\x00\xff\x02\x80\x00\xff\x02\x00\x00\x00\
\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\
\x02\x00\x00\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\xaa\xaa\x03\x00\xff\xff\x02\x00\xb6\x92\x07\x00\xaa\xd5\
\x06\x00\xff\xff\x04\x00\x00\x00\x05\xc5\xbe\xbb\xd0\xb4\xaf\xae\
\xff\xc0\xbb\xb8\xff\xbc\xb7\xb4\xff\xca\xcb\xc9\xff\xc3\xcb\xcb\
\xff\x9d\xae\xb1\xff\x7d\x99\xa0\xff\x96\xbd\xc6\xff\x83\xb5\xc1\
\xff\x7c\xb7\xc6\xff\x7b\xbb\xcd\xff\x86\xc8\xe1\xff\x7d\xbd\xe0\
\xff\x5a\x99\xc5\xff\x61\x9d\xd3\xff\x58\x92\xd3\xff\x61\x9c\xe2\
\xff\x59\x91\xde\xff\x55\x8d\xdc\xff\x51\x89\xd8\xff\x51\x89\xd6\
\xff\x54\x8d\xd8\xff\x57\x8e\xd7\xff\x53\x8b\xd2\xff\x4f\x87\xce\
\xff\x53\x8b\xd2\xff\x51\x8c\xd2\xff\x52\x8d\xd3\xff\x53\x8e\xd3\
\xff\x56\x91\xd6\xff\x5b\x97\xd9\xff\x60\x9c\xde\xff\x64\xa0\xe2\
\xff\x63\x9f\xe1\xff\x66\xa2\xe4\xff\x67\xa3\xe5\xff\x68\xa3\xe8\
\xff\x68\xa3\xe9\xff\x65\xa0\xe6\xff\x66\x9d\xe6\xff\x63\x98\xe8\
\xff\x66\x9b\xec\xff\x5d\x91\xe4\xff\x5d\x93\xe0\xff\x5b\x95\xd7\
\xff\x5b\x94\xcb\xff\x5f\x97\xc0\xff\x8a\xbf\xda\xff\x8b\xb9\xca\
\xff\x87\xae\xb6\xff\x7e\x9b\x9f\xff\xa6\xba\xbb\xff\xb3\xbf\xbf\
\xff\xb1\xb6\xb7\xff\xa7\xa9\xaa\xff\x20\x00\x40\x08\x2a\x00\x55\
\x06\x80\x00\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xff\
\x02\x00\xff\xcc\x05\x00\xaa\xd5\x06\x00\xff\xff\x04\x00\x00\x00\
\x02\x27\x00\x00\x0d\xb6\xb1\xb0\xff\xbc\xb7\xb4\xff\xc0\xbd\xb9\
\xff\xc7\xc8\xc6\xff\xc5\xcf\xcf\xff\x94\xa7\xaa\xff\x88\xa5\xac\
\xff\x97\xc0\xc9\xff\x8c\xbe\xca\xff\x84\xbf\xcf\xff\x7d\xbc\xd0\
\xff\x7b\xbc\xd8\xff\x6a\xa8\xce\xff\x5c\x97\xc8\xff\x5b\x95\xd0\
\xff\x5e\x98\xda\xff\x62\x99\xe2\xff\x56\x8e\xdb\xff\x56\x8e\xdb\
\xff\x56\x8e\xdb\xff\x56\x8e\xdb\xff\x58\x8f\xda\xff\x58\x8f\xd8\
\xff\x55\x8d\xd4\xff\x54\x8c\xd3\xff\x56\x8e\xd5\xff\x58\x90\xd7\
\xff\x58\x93\xd9\xff\x5a\x95\xdb\xff\x5b\x96\xdb\xff\x5e\x99\xde\
\xff\x61\x9d\xdf\xff\x63\x9f\xe1\xff\x65\xa1\xe3\xff\x67\xa3\xe5\
\xff\x69\xa4\xe9\xff\x69\xa4\xea\xff\x69\xa4\xea\xff\x66\xa1\xe7\
\xff\x64\x9b\xe6\xff\x61\x96\xe7\xff\x62\x96\xe9\xff\x5b\x8f\xe2\
\xff\x5b\x93\xe2\xff\x5a\x94\xd6\xff\x5a\x93\xca\xff\x60\x98\xc1\
\xff\x86\xba\xd8\xff\x8b\xb7\xc8\xff\x89\xad\xb5\xff\x89\xa2\xa6\
\xff\xaf\xbe\xc0\xff\xb9\xc1\xc1\xff\xb3\xb7\xb8\xff\xaa\xac\xad\
\xff\x33\x00\x66\x05\x55\x00\xaa\x03\x80\x00\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xaa\xaa\x03\x00\xff\xff\x02\x00\xcc\xff\
\x05\x00\xcc\xff\x05\xff\xff\xff\x00\xaa\x00\x00\x03\xb6\xb1\xb2\
\xff\xbc\xb7\xb6\xff\xc9\xc5\xc4\xff\xc3\xc5\xc5\xff\xc6\xd2\xd4\
\xff\x8a\x9e\xa3\xff\x91\xb0\xb9\xff\x97\xc1\xcd\xff\x92\xc6\xd3\
\xff\x8b\xc5\xd8\xff\x81\xbf\xd7\xff\x6e\xae\xcd\xff\x57\x93\xbd\
\xff\x5e\x98\xcc\xff\x57\x90\xcd\xff\x64\x9b\xe0\xff\x5e\x93\xdc\
\xff\x57\x8e\xd9\xff\x5b\x92\xdd\xff\x5f\x96\xe1\xff\x5e\x95\xe0\
\xff\x5c\x91\xda\xff\x5a\x90\xd7\xff\x5a\x90\xd7\xff\x5d\x92\xdb\
\xff\x5c\x93\xdc\xff\x5e\x95\xde\xff\x62\x9a\xe1\xff\x64\x9c\xe3\
\xff\x64\x9d\xe2\xff\x64\x9d\xe2\xff\x64\x9e\xe0\xff\x65\x9f\xe1\
\xff\x67\xa1\xe3\xff\x66\xa0\xe2\xff\x65\x9e\xe3\xff\x63\x9b\xe2\
\xff\x62\x9a\xe1\xff\x61\x98\xe1\xff\x60\x97\xe2\xff\x61\x93\xe5\
\xff\x5f\x91\xe4\xff\x5a\x8f\xe0\xff\x5b\x92\xdd\xff\x58\x92\xd3\
\xff\x59\x93\xc7\xff\x62\x9a\xc3\xff\x85\xb7\xd3\xff\x8a\xb5\xc6\
\xff\x8b\xac\xb5\xff\x98\xae\xb3\xff\xb9\xc6\xc8\xff\xbf\xc4\xc5\
\xff\xb7\xb9\xba\xff\xb0\xaf\xb1\xff\x80\x00\xff\x02\xff\xff\xff\
\x00\x00\x00\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x80\x80\
\x04\x00\xff\xff\x02\x00\xff\xff\x02\x00\x55\x80\x06\x00\xaa\xaa\
\x03\xff\xff\xff\x00\xb5\xb0\xb1\xff\xbf\xba\xbb\xff\xcd\xcb\xca\
\xff\xc5\xca\xc9\xff\xc0\xcd\xcf\xff\x83\x9b\xa1\xff\x90\xb4\xbc\
\xff\x94\xc0\xcd\xff\x93\xc5\xd7\xff\x8c\xc5\xda\xff\x88\xc3\xdd\
\xff\x67\xa4\xc6\xff\x57\x90\xbd\xff\x61\x99\xd0\xff\x5b\x91\xd0\
\xff\x5f\x96\xdb\xff\x57\x8a\xd3\xff\x5c\x91\xda\xff\x60\x95\xde\
\xff\x63\x98\xe1\xff\x61\x97\xde\xff\x5d\x93\xda\xff\x5b\x92\xd7\
\xff\x5d\x93\xda\xff\x60\x95\xde\xff\x61\x95\xe1\xff\x63\x98\xe1\
\xff\x66\x9b\xe4\xff\x68\x9e\xe5\xff\x67\x9e\xe3\xff\x66\x9d\xe2\
\xff\x63\x9d\xdf\xff\x66\x9e\xdf\xff\x63\x9d\xde\xff\x62\x99\xdc\
\xff\x5b\x94\xd9\xff\x5a\x91\xd6\xff\x58\x90\xd7\xff\x5c\x91\xda\
\xff\x60\x94\xe0\xff\x64\x95\xe5\xff\x5f\x90\xe0\xff\x5b\x8f\xdc\
\xff\x59\x91\xd8\xff\x55\x91\xcd\xff\x57\x92\xc4\xff\x6b\xa2\xc9\
\xff\x85\xb5\xd1\xff\x8e\xb6\xc8\xff\x8e\xab\xb4\xff\xab\xbe\xc3\
\xff\xc2\xce\xd0\xff\xc3\xc7\xc8\xff\xbb\xba\xbc\xff\xb4\xb3\xb5\
\xff\xff\x00\xff\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\x00\x00\x02\x00\xff\xff\x02\x00\xaa\xff\
\x03\x00\x49\x6d\x07\x00\xff\xff\x02\x00\xaa\xaa\x03\xb1\xaf\xae\
\xff\xc1\xbf\xbe\xff\xca\xcc\xcc\xff\xc9\xd1\xd1\xff\xb3\xc4\xc7\
\xff\x83\x9f\xa6\xff\x8b\xb1\xbd\xff\x90\xbf\xcd\xff\x8d\xc2\xd6\
\xff\x8c\xc4\xdd\xff\x89\xc3\xe0\xff\x62\x9c\xc0\xff\x5e\x94\xc3\
\xff\x5d\x93\xca\xff\x5b\x90\xcf\xff\x59\x8d\xd3\xff\x55\x87\xcf\
\xff\x60\x94\xdb\xff\x61\x95\xdb\xff\x61\x95\xdb\xff\x60\x95\xd8\
\xff\x5e\x93\xd6\xff\x5d\x92\xd5\xff\x5f\x93\xd9\xff\x61\x95\xdc\
\xff\x62\x95\xde\xff\x63\x96\xdf\xff\x64\x97\xe0\xff\x64\x98\xdf\
\xff\x63\x97\xdd\xff\x62\x97\xda\xff\x60\x98\xd9\xff\x63\x99\xda\
\xff\x5f\x97\xd8\xff\x5f\x95\xd6\xff\x5a\x91\xd4\xff\x5b\x8f\xd5\
\xff\x5a\x90\xd7\xff\x5f\x92\xdb\xff\x63\x96\xdf\xff\x65\x96\xe4\
\xff\x5f\x91\xdd\xff\x5e\x91\xda\xff\x59\x90\xd3\xff\x54\x8f\xc7\
\xff\x57\x93\xc1\xff\x73\xaa\xcf\xff\x86\xb7\xd1\xff\x95\xb9\xcb\
\xff\x8e\xa9\xb3\xff\xba\xca\xd0\xff\xcb\xd2\xd5\xff\xc8\xca\xcb\
\xff\xbc\xbb\xbd\xff\xb5\xb5\xb5\xff\x00\x80\x80\x04\x00\xaa\xaa\
\x03\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\
\x01\xff\xff\xff\x00\x00\x80\xbf\x04\x00\x80\xbf\x04\x00\xff\xcc\
\x05\x00\x39\x39\x09\xae\xaf\xad\xff\xbe\xbf\xbd\xff\xc3\xc8\xc7\
\xff\xc9\xd3\xd3\xff\xaa\xbd\xc2\xff\x88\xa5\xae\xff\x87\xb1\xbe\
\xff\x8b\xbe\xce\xff\x88\xbf\xd4\xff\x8b\xc4\xde\xff\x80\xb9\xd8\
\xff\x5e\x97\xbe\xff\x5f\x94\xc6\xff\x57\x89\xc3\xff\x5b\x8e\xcd\
\xff\x54\x88\xcb\xff\x58\x8b\xd1\xff\x5f\x92\xd8\xff\x5f\x93\xd6\
\xff\x5d\x93\xd4\xff\x5e\x92\xd4\xff\x5e\x92\xd4\xff\x5e\x92\xd4\
\xff\x5f\x93\xd6\xff\x5f\x91\xd9\xff\x62\x93\xdd\xff\x62\x93\xdd\
\xff\x61\x95\xdc\xff\x60\x92\xda\xff\x5e\x92\xd5\xff\x5c\x91\xd4\
\xff\x5c\x92\xd3\xff\x5d\x93\xd2\xff\x5a\x90\xcf\xff\x5a\x90\xd1\
\xff\x5c\x92\xd3\xff\x5d\x91\xd7\xff\x5e\x92\xd8\xff\x60\x92\xda\
\xff\x61\x92\xdc\xff\x60\x91\xdb\xff\x5e\x90\xd8\xff\x5f\x93\xd9\
\xff\x59\x8f\xce\xff\x54\x8e\xc2\xff\x5a\x95\xc2\xff\x80\xb6\xd9\
\xff\x87\xb6\xd1\xff\x95\xb9\xcb\xff\x8f\xa8\xb2\xff\xc4\xd1\xd9\
\xff\xcd\xd2\xd5\xff\xca\xc9\xcb\xff\xc0\xbd\xbf\xff\xb4\xb4\xb4\
\xff\x00\x39\x39\x09\x00\xaa\xaa\x03\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\x00\x01\x80\xff\x00\x02\x00\x80\x80\
\x04\x00\xff\xff\x02\x00\x80\x80\x04\x00\x22\x22\x0f\xb0\xb0\xb0\
\xff\xbd\xbf\xbf\xff\xc0\xc5\xc6\xff\xc5\xd1\xd3\xff\xa7\xbd\xc3\
\xff\x88\xa9\xb2\xff\x88\xb4\xc1\xff\x84\xb8\xc9\xff\x82\xbb\xd1\
\xff\x83\xbe\xd8\xff\x6d\xa3\xc4\xff\x5f\x95\xbe\xff\x5d\x90\xc2\
\xff\x58\x8a\xc4\xff\x58\x8b\xca\xff\x55\x86\xca\xff\x5b\x8c\xd0\
\xff\x5b\x8f\xd2\xff\x5d\x92\xd1\xff\x60\x96\xd3\xff\x64\x97\xd6\
\xff\x64\x96\xd8\xff\x63\x95\xd7\xff\x63\x94\xda\xff\x63\x93\xdb\
\xff\x64\x95\xdf\xff\x65\x96\xe0\xff\x64\x96\xde\xff\x63\x95\xdd\
\xff\x5f\x93\xd6\xff\x5c\x90\xd3\xff\x5b\x8f\xd1\xff\x5a\x8e\xd0\
\xff\x59\x8d\xcf\xff\x5a\x8e\xd0\xff\x5b\x8f\xd1\xff\x5c\x8f\xd5\
\xff\x5c\x8f\xd5\xff\x5e\x8e\xd6\xff\x5c\x8e\xd6\xff\x5a\x8c\xd4\
\xff\x5b\x8e\xd4\xff\x5d\x92\xd1\xff\x58\x8f\xc8\xff\x55\x8e\xc1\
\xff\x61\x9c\xc4\xff\x88\xbf\xde\xff\x87\xb7\xcf\xff\x93\xb7\xc7\
\xff\x8c\xa5\xaf\xff\xc9\xd4\xdc\xff\xc9\xce\xd1\xff\xcb\xca\xcc\
\xff\xc1\xbf\xbf\xff\xb3\xb3\xb3\xff\x00\x2b\x2b\x0c\x00\xaa\xaa\
\x03\x00\xff\xff\x02\xff\x00\xff\x01\xff\x00\xff\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\x01\x80\xff\x00\
\x02\x40\x80\x00\x04\x00\x80\x80\x04\x00\xff\xff\x02\x00\x66\x66\
\x05\x3f\x3f\x3f\x50\xb2\xb2\xb2\xff\xbf\xc1\xc1\xff\xc3\xc7\xc8\
\xff\xc3\xcf\xd1\xff\xab\xc1\xc7\xff\x88\xa9\xb2\xff\x8a\xb6\xc3\
\xff\x7e\xb2\xc3\xff\x7f\xb8\xce\xff\x7d\xb7\xd4\xff\x59\x91\xb4\
\xff\x60\x95\xc0\xff\x5a\x8c\xc0\xff\x5f\x8f\xc9\xff\x58\x88\xc8\
\xff\x57\x87\xc9\xff\x5a\x89\xcd\xff\x5a\x8c\xce\xff\x60\x93\xd1\
\xff\x67\x9a\xd8\xff\x6c\x9c\xdc\xff\x6b\x9b\xdd\xff\x67\x98\xdc\
\xff\x67\x98\xde\xff\x68\x98\xe0\xff\x6b\x9a\xe4\xff\x6a\x9b\xe5\
\xff\x69\x9b\xe3\xff\x67\x99\xe1\xff\x63\x96\xdc\xff\x5e\x92\xd5\
\xff\x5b\x8f\xd2\xff\x5a\x8e\xd1\xff\x60\x94\xd6\xff\x5e\x92\xd4\
\xff\x5d\x8f\xd1\xff\x5a\x8b\xcf\xff\x5c\x8b\xcf\xff\x5d\x8b\xd2\
\xff\x5f\x8d\xd4\xff\x5f\x90\xd4\xff\x59\x8b\xcd\xff\x5d\x90\xce\
\xff\x56\x8e\xc5\xff\x58\x90\xbf\xff\x66\x9f\xc6\xff\x8e\xc4\xe2\
\xff\x88\xb7\xcc\xff\x93\xb5\xc5\xff\x8a\xa3\xad\xff\xc9\xd5\xdb\
\xff\xc5\xca\xcd\xff\xca\xc9\xcb\xff\xc2\xc0\xc0\xff\xb0\xb2\xb2\
\xff\x00\x27\x27\x0d\x00\xaa\xaa\x03\x00\xff\xff\x02\xff\x00\xff\
\x01\xff\x00\xff\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\x00\xff\x01\xff\x00\xff\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\x00\
\x01\xff\xff\x00\x01\xff\xff\x00\x01\xff\xff\x00\x01\x00\x80\x80\
\x04\x00\xaa\xaa\x03\x00\x00\x00\x03\x00\x00\x00\x0a\xb1\xaf\xaf\
\xff\xbe\xbe\xbe\xff\xbe\xc0\xc1\xff\xc5\xce\xd1\xff\xbb\xce\xd5\
\xff\x85\xa4\xad\xff\x8f\xbb\xc8\xff\x86\xba\xcb\xff\x84\xbc\xd5\
\xff\x80\xb9\xd8\xff\x4e\x85\xac\xff\x5f\x93\xc1\xff\x59\x8b\xbf\
\xff\x60\x8f\xc7\xff\x5e\x8d\xcb\xff\x55\x83\xc3\xff\x5b\x88\xcb\
\xff\x63\x90\xd3\xff\x66\x94\xd4\xff\x6a\x9a\xda\xff\x6f\x9f\xe1\
\xff\x6f\x9e\xe2\xff\x6c\x9a\xe1\xff\x6b\x98\xe1\xff\x6a\x99\xe3\
\xff\x6a\x9b\xe5\xff\x6d\xa1\xe8\xff\x6f\xa3\xea\xff\x6c\xa0\xe7\
\xff\x65\x99\xe0\xff\x5f\x93\xd9\xff\x5e\x92\xd8\xff\x5e\x91\xd7\
\xff\x60\x94\xd7\xff\x60\x92\xd4\xff\x5e\x90\xd2\xff\x5e\x8e\xd0\
\xff\x5d\x8a\xcd\xff\x5c\x87\xca\xff\x5d\x88\xcb\xff\x5e\x8b\xce\
\xff\x5d\x8b\xcb\xff\x5c\x8e\xca\xff\x59\x8f\xc4\xff\x4f\x86\xb3\
\xff\x75\xac\xd1\xff\x87\xb9\xd5\xff\x89\xb7\xc9\xff\x95\xb6\xc5\
\xff\x89\xa0\xa8\xff\xc7\xd5\xdb\xff\xc9\xce\xd1\xff\xc6\xc8\xc9\
\xff\xc3\xc3\xc3\xff\xac\xaf\xad\xff\xe5\xea\xe8\xff\x00\xaa\xaa\
\x03\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\x00\xff\
\x01\xff\x00\xff\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\x00\x01\xff\xff\x00\x01\x80\xff\x00\
\x02\xff\xff\x00\x01\x00\xaa\xaa\x03\x00\xff\xff\x02\x00\x00\x00\
\x02\x00\x00\x00\x0b\xb2\xad\xae\xff\xc0\xbe\xbe\xff\xbf\xc1\xc2\
\xff\xc5\xce\xd1\xff\xbd\xcf\xd6\xff\x8a\xa7\xb0\xff\x91\xbb\xc8\
\xff\x89\xbb\xcd\xff\x86\xbe\xd7\xff\x80\xb9\xd9\xff\x58\x90\xb9\
\xff\x5b\x92\xbf\xff\x5b\x8d\xc1\xff\x5d\x8e\xc6\xff\x60\x8d\xca\
\xff\x59\x85\xc5\xff\x5e\x89\xcc\xff\x65\x92\xd5\xff\x68\x95\xd8\
\xff\x6d\x9d\xdf\xff\x72\xa1\xe5\xff\x70\xa1\xe7\xff\x6b\x9b\xe3\
\xff\x69\x98\xe2\xff\x67\x98\xe2\xff\x64\x97\xe0\xff\x67\x9c\xe5\
\xff\x6c\xa1\xea\xff\x6b\xa0\xe9\xff\x66\x9b\xe4\xff\x60\x95\xde\
\xff\x60\x94\xdb\xff\x60\x94\xdb\xff\x61\x94\xda\xff\x5e\x92\xd5\
\xff\x5d\x8f\xd1\xff\x5c\x8c\xce\xff\x5a\x87\xca\xff\x59\x84\xc7\
\xff\x5a\x86\xc6\xff\x5e\x8a\xca\xff\x5c\x8b\xc9\xff\x5c\x8b\xc8\
\xff\x58\x8c\xc1\xff\x4e\x85\xb2\xff\x76\xac\xcf\xff\x8a\xbb\xd5\
\xff\x8b\xb6\xc9\xff\x92\xb4\xc1\xff\x88\xa0\xa6\xff\xc5\xd4\xd7\
\xff\xc4\xcc\xcc\xff\xc5\xc7\xc7\xff\xc2\xc4\xc4\xff\xae\xb1\xaf\
\xff\xe6\xeb\xe9\xff\x00\xff\xff\x02\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\x00\
\x01\xff\xff\x00\x01\x80\xff\x00\x02\xff\xff\x00\x01\x00\xaa\xaa\
\x03\x00\xff\xff\x02\x00\x00\x00\x02\x00\x00\x00\x0b\xb1\xad\xac\
\xff\xc0\xbe\xbd\xff\xc1\xc3\xc3\xff\xc6\xd0\xd0\xff\xbe\xd1\xd6\
\xff\x90\xad\xb6\xff\x93\xba\xc8\xff\x8d\xbd\xcf\xff\x8b\xc0\xdb\
\xff\x82\xbb\xdb\xff\x68\xa3\xcb\xff\x55\x8d\xbc\xff\x5a\x8e\xc3\
\xff\x5a\x8c\xc6\xff\x5e\x8d\xcb\xff\x59\x87\xc7\xff\x5d\x8a\xcd\
\xff\x67\x94\xd8\xff\x68\x97\xdb\xff\x6b\x9c\xe0\xff\x70\xa1\xe5\
\xff\x6e\xa0\xe8\xff\x68\x9b\xe4\xff\x64\x96\xe2\xff\x61\x95\xe1\
\xff\x5d\x91\xdd\xff\x60\x97\xe2\xff\x67\x9e\xe9\xff\x69\xa0\xeb\
\xff\x65\x9c\xe7\xff\x62\x99\xe4\xff\x61\x96\xdf\xff\x61\x96\xdf\
\xff\x63\x96\xdf\xff\x60\x92\xda\xff\x5c\x8f\xd5\xff\x5a\x8b\xd1\
\xff\x56\x87\xcb\xff\x57\x84\xc8\xff\x59\x84\xc7\xff\x5d\x88\xcb\
\xff\x5b\x89\xc9\xff\x5b\x8b\xc5\xff\x58\x8d\xc0\xff\x51\x86\xb1\
\xff\x74\xa8\xcc\xff\x8b\xbc\xd6\xff\x8b\xb6\xc7\xff\x8e\xb1\xbb\
\xff\x87\x9f\xa5\xff\xc3\xd2\xd5\xff\xc3\xcb\xcb\xff\xc4\xc6\xc6\
\xff\xc0\xc2\xc2\xff\xb0\xb3\xb1\xff\xe8\xed\xeb\xff\x00\x80\x80\
\x04\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\x00\x01\x80\xff\x00\x02\x80\xff\x00\
\x02\xff\xff\x00\x01\x00\xaa\xaa\x03\x00\xff\xff\x02\x00\x00\x00\
\x02\x00\x00\x00\x0c\xb1\xad\xac\xff\xbf\xbd\xbc\xff\xc3\xc5\xc5\
\xff\xc8\xd0\xd0\xff\xc1\xd1\xd7\xff\x97\xb3\xba\xff\x92\xb8\xc4\
\xff\x8e\xbe\xd0\xff\x8d\xc2\xdd\xff\x88\xc0\xe3\xff\x78\xb3\xdb\
\xff\x54\x8c\xbb\xff\x5d\x91\xc6\xff\x5b\x8d\xc7\xff\x5b\x8c\xca\
\xff\x5b\x8b\xcd\xff\x5d\x8c\xd0\xff\x67\x96\xda\xff\x65\x96\xda\
\xff\x67\x9b\xde\xff\x6a\x9d\xe3\xff\x68\x9c\xe3\xff\x63\x97\xe3\
\xff\x5f\x95\xe2\xff\x5f\x95\xe2\xff\x58\x90\xdd\xff\x5d\x95\xe2\
\xff\x61\x9b\xe7\xff\x64\x9c\xe9\xff\x63\x9b\xe8\xff\x61\x99\xe6\
\xff\x60\x99\xe4\xff\x62\x98\xe5\xff\x63\x97\xe3\xff\x62\x95\xde\
\xff\x5f\x93\xda\xff\x5e\x90\xd8\xff\x5a\x8d\xd3\xff\x5a\x88\xcf\
\xff\x5a\x87\xcb\xff\x5c\x89\xcc\xff\x5f\x8e\xcc\xff\x5e\x8e\xc8\
\xff\x58\x8b\xbd\xff\x55\x88\xb3\xff\x73\xa6\xc7\xff\x8d\xbd\xd5\
\xff\x8a\xb5\xc6\xff\x8b\xab\xb6\xff\x83\x9b\xa1\xff\xc3\xd0\xd2\
\xff\xc4\xcc\xcc\xff\xc4\xc6\xc6\xff\xbf\xbf\xbf\xff\xb0\xb3\xb1\
\xff\x9d\xb5\xab\xb1\x00\x66\x66\x05\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\x00\
\x01\x80\xff\x00\x02\x80\xff\x00\x02\xff\xff\x00\x01\x00\xaa\xaa\
\x03\x00\xff\xff\x02\x80\xff\x00\x02\x15\x2b\x00\x0c\xb2\xae\xad\
\xff\xbf\xbd\xbc\xff\xc6\xc6\xc6\xff\xc8\xd0\xd0\xff\xc1\xd1\xd7\
\xff\x9c\xb8\xbf\xff\x8e\xb4\xc0\xff\x8e\xbc\xcd\xff\x8f\xc2\xdc\
\xff\x90\xc6\xe7\xff\x82\xba\xe3\xff\x59\x8f\xbe\xff\x60\x94\xc9\
\xff\x5d\x8f\xcb\xff\x5c\x8c\xcc\xff\x5f\x8f\xd1\xff\x5e\x8d\xd1\
\xff\x66\x97\xdd\xff\x62\x95\xdb\xff\x62\x96\xdc\xff\x65\x99\xe0\
\xff\x67\x9b\xe2\xff\x63\x97\xe3\xff\x60\x97\xe2\xff\x60\x98\xe5\
\xff\x5c\x96\xe2\xff\x5f\x99\xe5\xff\x5e\x99\xe8\xff\x61\x9a\xe9\
\xff\x60\x99\xe8\xff\x5f\x98\xe7\xff\x5e\x98\xe4\xff\x61\x99\xe8\
\xff\x60\x98\xe5\xff\x60\x96\xe3\xff\x62\x96\xe2\xff\x64\x96\xe2\
\xff\x63\x94\xde\xff\x5f\x8f\xd7\xff\x5d\x8b\xd2\xff\x5f\x8c\xcf\
\xff\x62\x91\xcf\xff\x5e\x8f\xc7\xff\x5b\x8c\xbc\xff\x57\x8b\xb4\
\xff\x6e\xa1\xc1\xff\x8c\xbd\xd3\xff\x8a\xb3\xc2\xff\x89\xa8\xb1\
\xff\x84\x9a\x9f\xff\xc2\xcf\xd1\xff\xc8\xcd\xce\xff\xc6\xc6\xc6\
\xff\xbb\xbb\xbb\xff\xb0\xb2\xb2\xff\x00\x24\x24\x0e\x00\xaa\xaa\
\x03\x00\xff\xff\x02\x00\xff\xff\x02\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\x00\x01\xff\xff\x00\x01\x80\xff\x00\
\x02\xff\xff\x00\x01\x00\x80\x80\x04\x00\xaa\xaa\x03\xff\xff\x00\
\x01\x19\x33\x00\x0a\xb5\xb1\xb0\xff\xbe\xbc\xbb\xff\xc8\xc8\xc8\
\xff\xc9\xce\xcf\xff\xc2\xd0\xd6\xff\xa1\xbb\xc2\xff\x8a\xad\xba\
\xff\x8b\xb9\xca\xff\x8f\xc1\xd8\xff\x92\xc8\xe7\xff\x7e\xb2\xdb\
\xff\x5c\x90\xbf\xff\x62\x94\xc9\xff\x60\x8f\xcc\xff\x5c\x8a\xca\
\xff\x62\x92\xd4\xff\x61\x90\xd4\xff\x65\x96\xdc\xff\x60\x93\xd9\
\xff\x5f\x93\xd9\xff\x62\x96\xdc\xff\x63\x99\xe0\xff\x62\x99\xe2\
\xff\x63\x9c\xe7\xff\x65\x9e\xe9\xff\x63\x9d\xe9\xff\x64\x9e\xea\
\xff\x62\x9d\xec\xff\x60\x9b\xea\xff\x5e\x99\xe8\xff\x5f\x98\xe7\
\xff\x5f\x98\xe7\xff\x60\x99\xe8\xff\x60\x98\xe7\xff\x61\x97\xe4\
\xff\x64\x98\xe5\xff\x68\x9a\xe6\xff\x66\x99\xe2\xff\x62\x94\xdc\
\xff\x5e\x8f\xd5\xff\x5f\x8c\xcf\xff\x63\x90\xcd\xff\x5f\x8e\xc4\
\xff\x5a\x8a\xba\xff\x58\x8b\xb3\xff\x68\x9a\xb8\xff\x8b\xba\xcf\
\xff\x88\xb2\xbf\xff\x85\xa4\xad\xff\x87\x9b\xa0\xff\xc2\xce\xd0\
\xff\xc7\xcc\xcd\xff\xc5\xc5\xc5\xff\xbc\xba\xba\xff\xb5\xb5\xb5\
\xff\x00\x40\x40\x08\x00\xff\xff\x02\x00\xff\xff\x02\x00\xff\xff\
\x02\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x80\xff\x00\x02\x80\xff\x00\x02\x00\x80\x80\
\x04\x00\xff\xff\x02\xff\xff\x00\x01\x24\x49\x00\x07\xb8\xb4\xb3\
\xff\xbe\xbc\xbb\xff\xc9\xca\xc8\xff\xc8\xce\xcd\xff\xc3\xcf\xd3\
\xff\xa5\xbd\xc3\xff\x84\xa8\xb2\xff\x89\xb6\xc4\xff\x8d\xbe\xd4\
\xff\x90\xc2\xe0\xff\x6e\x9f\xc7\xff\x5a\x8b\xb9\xff\x5e\x8d\xc3\
\xff\x5f\x8d\xc7\xff\x59\x85\xc4\xff\x62\x8f\xd2\xff\x5f\x8e\xd2\
\xff\x60\x91\xd5\xff\x5b\x8f\xd2\xff\x5b\x90\xd3\xff\x5f\x94\xd7\
\xff\x63\x9a\xdf\xff\x64\x9c\xe3\xff\x65\x9f\xe7\xff\x68\xa1\xec\
\xff\x69\xa3\xee\xff\x69\xa3\xee\xff\x68\xa2\xee\xff\x67\xa1\xed\
\xff\x64\x9e\xea\xff\x63\x9b\xe8\xff\x61\x99\xe6\xff\x60\x98\xe7\
\xff\x61\x96\xe6\xff\x5f\x94\xe4\xff\x61\x95\xe2\xff\x66\x98\xe4\
\xff\x66\x99\xe2\xff\x60\x92\xda\xff\x5c\x8d\xd3\xff\x5d\x8b\xcb\
\xff\x5e\x8b\xc8\xff\x5c\x8b\xbf\xff\x58\x86\xb5\xff\x57\x88\xae\
\xff\x62\x91\xb0\xff\x88\xb5\xca\xff\x87\xae\xbc\xff\x85\xa2\xa9\
\xff\x8d\xa2\xa4\xff\xc2\xce\xce\xff\xc6\xcb\xca\xff\xc5\xc3\xc3\
\xff\xbc\xba\xba\xff\xba\xba\xba\xff\x00\x00\x00\x01\x80\x00\xff\
\x02\xff\x00\xff\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x80\xff\x00\
\x02\x55\xaa\x00\x03\x00\x66\x66\x05\x00\xff\xff\x02\xff\xff\x00\
\x01\x2a\x55\x00\x06\xb8\xb6\xb5\xff\xbd\xbb\xba\xff\xcb\xc9\xc8\
\xff\xc8\xcd\xcc\xff\xc2\xce\xd2\xff\xa7\xbd\xc3\xff\x82\xa5\xaf\
\xff\x88\xb3\xc2\xff\x8f\xbb\xd2\xff\x8a\xba\xd6\xff\x60\x8d\xb2\
\xff\x59\x86\xb2\xff\x5a\x86\xbb\xff\x5d\x88\xc1\xff\x54\x80\xbd\
\xff\x60\x8c\xcc\xff\x5d\x8a\xcd\xff\x5c\x8c\xce\xff\x59\x8b\xcd\
\xff\x59\x8d\xcf\xff\x5e\x94\xd5\xff\x63\x9a\xdd\xff\x64\x9d\xe2\
\xff\x67\x9f\xe6\xff\x69\xa0\xe9\xff\x6e\xa5\xee\xff\x6d\xa7\xef\
\xff\x6d\xa7\xef\xff\x6e\xa5\xf0\xff\x6b\xa2\xed\xff\x67\x9e\xe9\
\xff\x64\x9a\xe7\xff\x62\x98\xe5\xff\x62\x96\xe3\xff\x5f\x93\xe0\
\xff\x60\x92\xde\xff\x63\x94\xde\xff\x62\x94\xdc\xff\x5e\x8e\xd6\
\xff\x5b\x8a\xce\xff\x5c\x88\xc8\xff\x5a\x86\xc1\xff\x59\x85\xba\
\xff\x56\x83\xaf\xff\x56\x85\xab\xff\x5d\x8b\xaa\xff\x86\xb1\xc6\
\xff\x87\xac\xba\xff\x84\xa1\xa8\xff\x92\xa5\xa8\xff\xc4\xce\xce\
\xff\xc4\xc9\xc8\xff\xc3\xc1\xc1\xff\xbd\xbb\xbb\xff\xc2\xc0\xc0\
\xff\x00\x00\x00\x01\x55\x00\xaa\x03\xff\x00\xff\x01\xff\xff\xff\
\x00\x00\x00\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\x00\x00\x03\xff\xff\x00\x01\x2a\x55\x00\x06\x00\xaa\xaa\
\x03\x00\x80\x80\x04\x00\xff\x80\x02\x80\xff\x00\x02\xc4\xc2\xc1\
\xff\xbe\xbc\xbb\xff\xc7\xc5\xc4\xff\xc6\xcb\xca\xff\xc7\xd3\xd5\
\xff\xae\xc2\xc7\xff\x7d\x9c\xa5\xff\x8a\xb1\xbf\xff\x8b\xb4\xca\
\xff\x78\xa5\xc0\xff\x5f\x89\xac\xff\x56\x80\xaa\xff\x5c\x85\xb6\
\xff\x58\x7f\xb6\xff\x56\x7e\xb9\xff\x5a\x83\xc2\xff\x57\x81\xc2\
\xff\x5d\x89\xc9\xff\x5a\x88\xc8\xff\x5a\x8a\xca\xff\x5d\x90\xcf\
\xff\x63\x98\xd7\xff\x68\x9e\xdf\xff\x6d\xa2\xe5\xff\x70\xa3\xe9\
\xff\x70\xa4\xea\xff\x6d\xa3\xea\xff\x6a\xa2\xe9\xff\x69\x9e\xe7\
\xff\x66\x9b\xe4\xff\x63\x98\xe1\xff\x61\x95\xe1\xff\x5f\x93\xdf\
\xff\x5e\x90\xdc\xff\x61\x92\xdc\xff\x61\x92\xdc\xff\x64\x94\xdc\
\xff\x5f\x8f\xd7\xff\x5c\x8a\xd1\xff\x58\x85\xc8\xff\x57\x82\xc1\
\xff\x55\x7d\xb8\xff\x57\x81\xb6\xff\x4f\x79\xa6\xff\x54\x81\xa7\
\xff\x4e\x7c\x9b\xff\x86\xb1\xc6\xff\x86\xac\xb8\xff\x80\x9c\xa3\
\xff\x91\xa4\xa7\xff\xc2\xcc\xcc\xff\xc2\xc4\xc4\xff\xbf\xbd\xbd\
\xff\xaf\xad\xad\xff\xe1\xdf\xdf\xff\x00\x00\x00\x08\x80\x00\xff\
\x02\xff\x00\xff\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\x00\x00\x01\xff\xff\xff\x00\x00\x00\x00\x03\xff\xff\x00\
\x01\x33\x66\x00\x05\x00\xff\xff\x02\x00\x80\x80\x04\x00\xff\x80\
\x02\xff\xff\x00\x01\xce\xcc\xcb\xff\xbe\xba\xb9\xff\xc4\xc2\xc1\
\xff\xc3\xc8\xc7\xff\xc4\xd0\xd2\xff\xaf\xc2\xc7\xff\x7f\x9c\xa5\
\xff\x8b\xae\xbb\xff\x8a\xb2\xc5\xff\x71\x9a\xb3\xff\x5b\x84\xa5\
\xff\x54\x7d\xa4\xff\x5a\x80\xb0\xff\x56\x7c\xb2\xff\x56\x7b\xb7\
\xff\x59\x80\xbe\xff\x57\x80\xbf\xff\x5c\x86\xc7\xff\x5c\x88\xc7\
\xff\x5b\x8a\xc8\xff\x5d\x8e\xcc\xff\x60\x93\xd1\xff\x65\x98\xd7\
\xff\x69\x9b\xdd\xff\x6b\x9d\xdf\xff\x69\x9a\xde\xff\x66\x9b\xde\
\xff\x65\x99\xdf\xff\x64\x97\xdd\xff\x62\x94\xdc\xff\x60\x92\xda\
\xff\x5e\x90\xd8\xff\x5d\x8e\xd8\xff\x5d\x8f\xd7\xff\x60\x90\xd8\
\xff\x63\x90\xd9\xff\x61\x8e\xd7\xff\x5f\x8b\xd2\xff\x59\x86\xca\
\xff\x55\x80\xc3\xff\x54\x7d\xbc\xff\x52\x7a\xb5\xff\x54\x7c\xb1\
\xff\x4c\x76\xa3\xff\x50\x7d\xa3\xff\x4b\x79\x98\xff\x78\xa3\xb8\
\xff\x87\xad\xb9\xff\x7e\x9a\xa1\xff\x8d\xa0\xa3\xff\xc1\xc9\xc9\
\xff\xbe\xc0\xc0\xff\xbc\xba\xba\xff\xb1\xaf\xaf\xff\xea\xe8\xe8\
\xff\x00\x00\x00\x03\xff\x00\xff\x01\xff\x00\xff\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\x00\x00\x02\xff\xff\x00\x01\x33\x66\x00\x05\x00\xff\xff\
\x02\x00\x80\x80\x04\x00\xff\x55\x03\xff\xff\x00\x03\xe0\xdf\xdb\
\xff\xba\xb7\xb3\xff\xc0\xbf\xbb\xff\xc3\xc6\xc4\xff\xc2\xcb\xce\
\xff\xae\xc1\xc6\xff\x80\x9c\xa3\xff\x88\xaa\xb7\xff\x8a\xaf\xc3\
\xff\x64\x8c\xa5\xff\x55\x7e\x9f\xff\x53\x7a\xa1\xff\x59\x7d\xad\
\xff\x56\x7b\xaf\xff\x54\x7a\xb4\xff\x58\x7f\xbd\xff\x59\x82\xc1\
\xff\x5c\x86\xc7\xff\x5c\x88\xc7\xff\x5b\x8a\xc8\xff\x5c\x8b\xc9\
\xff\x5b\x8c\xca\xff\x5d\x8d\xcd\xff\x60\x90\xd0\xff\x62\x92\xd2\
\xff\x5f\x91\xd3\xff\x60\x92\xd4\xff\x5e\x92\xd5\xff\x60\x91\xd5\
\xff\x60\x91\xd7\xff\x5f\x90\xd6\xff\x5d\x8e\xd4\xff\x5d\x8d\xd5\
\xff\x5e\x8f\xd5\xff\x60\x8e\xd5\xff\x60\x8e\xd5\xff\x60\x8c\xd3\
\xff\x5e\x88\xcd\xff\x58\x82\xc7\xff\x54\x7e\xc1\xff\x52\x7b\xba\
\xff\x51\x79\xb4\xff\x50\x78\xad\xff\x4a\x74\xa3\xff\x4c\x7a\xa3\
\xff\x4a\x78\x97\xff\x66\x91\xa6\xff\x89\xae\xbc\xff\x7e\x99\xa3\
\xff\x87\x9a\x9f\xff\xbc\xc4\xc4\xff\xba\xbc\xbc\xff\xba\xb8\xb8\
\xff\xb5\xb3\xb3\xff\x46\x00\x2e\x0b\xff\x00\xff\x01\x80\x00\xff\
\x02\xff\x00\xff\x01\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xff\
\x02\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\x02\xff\xff\x00\
\x01\x40\x80\x00\x04\x00\xff\xff\x02\x00\x80\x80\x04\x00\x99\x33\
\x05\xff\xff\x00\x03\xa0\x99\x7e\x91\xb9\xb6\xb2\xff\xbc\xbb\xb7\
\xff\xc1\xc4\xc2\xff\xc0\xc9\xcc\xff\xac\xbc\xc2\xff\x82\x9c\xa3\
\xff\x87\xa7\xb4\xff\x8b\xaf\xc1\xff\x5a\x82\x9b\xff\x54\x7a\x9c\
\xff\x53\x7a\xa1\xff\x58\x7c\xac\xff\x58\x7c\xb2\xff\x57\x7d\xb7\
\xff\x5a\x81\xbf\xff\x5d\x85\xc6\xff\x5e\x88\xc9\xff\x5e\x8a\xca\
\xff\x5f\x8b\xcb\xff\x5b\x89\xc9\xff\x59\x87\xc7\xff\x56\x86\xc6\
\xff\x58\x88\xc8\xff\x5a\x8a\xca\xff\x5b\x8b\xcd\xff\x5c\x8c\xce\
\xff\x5b\x8d\xcf\xff\x5f\x8e\xd2\xff\x61\x90\xd4\xff\x5e\x8f\xd5\
\xff\x5d\x8e\xd4\xff\x5f\x8d\xd4\xff\x61\x8f\xd6\xff\x60\x8e\xd5\
\xff\x60\x8e\xd5\xff\x5f\x8c\xd0\xff\x5e\x88\xcd\xff\x5a\x83\xc8\
\xff\x56\x80\xc3\xff\x57\x7f\xc0\xff\x55\x7c\xba\xff\x52\x79\xb0\
\xff\x4d\x76\xa7\xff\x4d\x7b\xa4\xff\x4a\x77\x98\xff\x56\x80\x97\
\xff\x89\xad\xbd\xff\x7f\x9a\xa4\xff\x81\x94\x99\xff\xb7\xc0\xc3\
\xff\xb8\xba\xba\xff\xb7\xb5\xb5\xff\xbc\xba\xba\xff\x00\x00\x00\
\x01\x80\x00\xff\x02\x55\x00\xaa\x03\xff\x00\xff\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\xff\x02\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\x00\x00\x01\x80\xff\x00\x02\x55\xaa\x00\x03\x00\xff\xff\
\x02\x00\xaa\xaa\x03\x00\x99\x33\x05\xff\xff\x00\x01\xb6\x92\x00\
\x07\xbf\xbc\xb8\xff\xb8\xb8\xb2\xff\xc3\xc7\xc2\xff\xc1\xc9\xc9\
\xff\xa7\xb8\xbb\xff\x81\x9b\xa2\xff\x86\xa6\xb3\xff\x87\xaa\xbe\
\xff\x53\x7a\x96\xff\x53\x7b\x9e\xff\x53\x7b\xa5\xff\x56\x7c\xac\
\xff\x58\x7e\xb4\xff\x59\x80\xbe\xff\x5a\x82\xc3\xff\x60\x87\xcc\
\xff\x61\x8a\xcf\xff\x61\x8b\xd0\xff\x5f\x8c\xcf\xff\x5c\x89\xcc\
\xff\x59\x86\xc9\xff\x57\x84\xc7\xff\x58\x85\xc8\xff\x59\x86\xc9\
\xff\x58\x88\xca\xff\x59\x89\xcb\xff\x5b\x8b\xcd\xff\x5c\x8d\xd1\
\xff\x5d\x8e\xd4\xff\x5d\x8e\xd4\xff\x5d\x8e\xd4\xff\x5c\x8c\xd4\
\xff\x5f\x8f\xd7\xff\x60\x8e\xd5\xff\x5f\x8d\xd4\xff\x5f\x8b\xd2\
\xff\x5d\x8a\xce\xff\x5c\x86\xcb\xff\x5a\x84\xc7\xff\x5a\x82\xc3\
\xff\x5c\x83\xc1\xff\x55\x7b\xb5\xff\x52\x7a\xae\xff\x51\x7c\xa7\
\xff\x4d\x7a\x9c\xff\x4c\x75\x8e\xff\x81\xa5\xb7\xff\x81\x9d\xa8\
\xff\x7f\x92\x97\xff\xb0\xb9\xbc\xff\xb9\xbe\xbd\xff\xb5\xb5\xb5\
\xff\xc8\xc6\xc5\xff\xff\xff\x00\x01\x00\xaa\xaa\x03\x00\x80\x80\
\x04\x00\xaa\xaa\x03\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x80\xff\x00\
\x02\x55\xaa\x00\x03\x00\xaa\xaa\x03\x00\xaa\xaa\x03\x00\xbf\x40\
\x04\x80\xff\x00\x02\xff\xff\x00\x04\xcf\xcc\xc8\xff\xb5\xb5\xaf\
\xff\xc1\xc5\xc0\xff\xbf\xc7\xc7\xff\xa1\xb0\xb3\xff\x83\x9a\xa2\
\xff\x86\xa6\xb3\xff\x7a\x9d\xb1\xff\x50\x77\x93\xff\x55\x7c\xa2\
\xff\x53\x7d\xa8\xff\x54\x7c\xad\xff\x59\x7f\xb9\xff\x5a\x83\xc2\
\xff\x5b\x85\xc8\xff\x61\x89\xd1\xff\x64\x8e\xd5\xff\x61\x8d\xd4\
\xff\x61\x8e\xd2\xff\x5f\x8c\xd0\xff\x5d\x8a\xce\xff\x5c\x89\xcd\
\xff\x5c\x89\xcd\xff\x5c\x89\xcd\xff\x5d\x8c\xd0\xff\x5e\x8d\xd1\
\xff\x5f\x90\xd4\xff\x61\x92\xd8\xff\x61\x92\xd8\xff\x61\x92\xd8\
\xff\x60\x90\xd8\xff\x5f\x8f\xd7\xff\x5e\x8e\xd6\xff\x5e\x8e\xd6\
\xff\x5e\x8c\xd3\xff\x5c\x8a\xd1\xff\x5c\x88\xcf\xff\x5a\x86\xcd\
\xff\x5a\x84\xc9\xff\x5c\x84\xc5\xff\x5e\x84\xc4\xff\x58\x7d\xb9\
\xff\x55\x7d\xb2\xff\x53\x7d\xaa\xff\x4e\x7a\x9f\xff\x48\x70\x8c\
\xff\x73\x97\xa9\xff\x83\x9f\xaa\xff\x7f\x92\x99\xff\xa4\xb0\xb2\
\xff\xba\xbf\xbe\xff\xb4\xb4\xb4\xff\xd7\xd8\xd6\xff\x00\xff\x80\
\x02\x00\x99\x33\x05\x00\xff\xff\x02\x00\xff\xff\x02\x00\x00\x00\
\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\
\x01\xff\xff\xff\x00\xff\xff\x00\x01\x80\xff\x00\x02\x00\x66\x66\
\x05\x00\xff\xff\x02\x00\xff\x55\x03\x40\x80\x00\x04\xff\xff\x00\
\x04\xe3\xe0\xdc\xff\xb2\xb1\xad\xff\xbc\xbf\xbd\xff\xbb\xc3\xc3\
\xff\x9b\xaa\xad\xff\x84\x9b\xa3\xff\x86\xa3\xb1\xff\x67\x8c\xa2\
\xff\x51\x79\x96\xff\x58\x81\xa8\xff\x55\x7f\xae\xff\x52\x7c\xb1\
\xff\x59\x82\xc0\xff\x5d\x87\xca\xff\x5d\x87\xce\xff\x60\x8a\xd5\
\xff\x60\x8d\xd7\xff\x60\x8d\xd7\xff\x60\x8d\xd6\xff\x60\x8d\xd6\
\xff\x60\x8e\xd5\xff\x60\x8e\xd5\xff\x60\x8e\xd5\xff\x5f\x8d\xd4\
\xff\x64\x92\xd9\xff\x64\x92\xd9\xff\x65\x96\xdc\xff\x66\x97\xdd\
\xff\x64\x96\xde\xff\x63\x95\xdd\xff\x61\x93\xdb\xff\x60\x91\xdb\
\xff\x5d\x8e\xd8\xff\x5c\x8e\xd6\xff\x5c\x8c\xd4\xff\x5b\x88\xd1\
\xff\x59\x87\xce\xff\x57\x85\xcc\xff\x58\x85\xc9\xff\x5a\x83\xc8\
\xff\x5c\x84\xc5\xff\x57\x7e\xbc\xff\x55\x7e\xb5\xff\x54\x7e\xad\
\xff\x50\x7b\xa2\xff\x4b\x73\x8f\xff\x64\x87\x9b\xff\x86\xa1\xaf\
\xff\x86\x99\xa0\xff\x9a\xa6\xa8\xff\xb7\xbd\xbc\xff\xb0\xb2\xb2\
\xff\xe8\xe9\xe7\xff\x00\xff\xff\x02\x00\x55\x55\x06\x00\xff\xff\
\x02\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\
\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\x00\
\x01\x80\xff\x00\x02\x00\x49\x49\x07\x00\xff\xff\x02\x00\xff\xff\
\x02\x33\x66\x00\x05\xff\xcc\x00\x05\xb5\xb0\x9d\xb1\xb2\xb1\xad\
\xff\xb8\xbb\xb9\xff\xb9\xc1\xc1\xff\x96\xa4\xaa\xff\x85\x9b\xa6\
\xff\x83\xa2\xb1\xff\x5a\x7e\x96\xff\x52\x7b\x9b\xff\x59\x85\xae\
\xff\x56\x81\xb2\xff\x52\x7d\xb6\xff\x5b\x86\xc5\xff\x60\x8a\xcf\
\xff\x5d\x88\xd1\xff\x60\x8d\xd7\xff\x5f\x8d\xda\xff\x5f\x8e\xd8\
\xff\x5e\x8d\xd7\xff\x5f\x8f\xd7\xff\x62\x92\xda\xff\x65\x93\xda\
\xff\x62\x93\xd9\xff\x62\x90\xd7\xff\x64\x95\xdb\xff\x65\x96\xdc\
\xff\x66\x97\xdd\xff\x65\x97\xdf\xff\x64\x96\xde\xff\x62\x94\xdc\
\xff\x60\x92\xda\xff\x5e\x8f\xd9\xff\x60\x91\xdb\xff\x5d\x8f\xd7\
\xff\x5a\x8d\xd3\xff\x59\x89\xd1\xff\x56\x86\xce\xff\x57\x84\xcd\
\xff\x55\x83\xca\xff\x59\x83\xca\xff\x5b\x85\xc8\xff\x56\x7f\xbe\
\xff\x56\x7e\xb8\xff\x55\x7d\xb1\xff\x52\x7c\xa6\xff\x4f\x75\x95\
\xff\x5c\x7e\x95\xff\x89\xa4\xb2\xff\x8a\x9d\xa5\xff\x92\x9f\xa1\
\xff\xb2\xba\xb9\xff\xad\xb2\xb0\xff\x00\x37\x12\x0e\x00\xaa\xaa\
\x03\x00\x55\x55\x06\x00\xff\xff\x02\x80\x00\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\x00\
\x01\x80\xff\x00\x02\x80\xff\x00\x02\x80\xff\x00\x02\x00\x00\x00\
\x01\x00\x00\x00\x01\x00\x00\x00\x01\x80\xff\x00\x02\xff\xff\x00\
\x04\xdb\xdb\x00\x07\xa3\xa7\xa1\xff\xb7\xbe\xbb\xff\xb8\xc4\xc4\
\xff\x83\x95\x9c\xff\x86\x9e\xaa\xff\x75\x95\xa8\xff\x52\x77\x93\
\xff\x54\x7e\xa1\xff\x53\x80\xab\xff\x52\x82\xb2\xff\x55\x82\xbb\
\xff\x59\x85\xc4\xff\x5f\x8c\xd0\xff\x63\x90\xd9\xff\x65\x92\xdc\
\xff\x62\x90\xdd\xff\x5d\x8e\xd8\xff\x5c\x8d\xd7\xff\x5c\x8e\xd6\
\xff\x5d\x8f\xd7\xff\x60\x91\xd7\xff\x5e\x91\xd7\xff\x60\x91\xd7\
\xff\x60\x93\xd9\xff\x62\x94\xdc\xff\x65\x97\xdf\xff\x66\x97\xe1\
\xff\x65\x96\xe0\xff\x62\x94\xdc\xff\x60\x92\xda\xff\x5f\x91\xd9\
\xff\x5c\x8f\xd5\xff\x59\x8d\xd0\xff\x59\x8d\xd0\xff\x5b\x8e\xd4\
\xff\x5b\x8d\xd5\xff\x59\x88\xd2\xff\x58\x87\xd1\xff\x5c\x89\xd3\
\xff\x5c\x86\xcd\xff\x5e\x88\xcb\xff\x58\x81\xbf\xff\x4f\x78\xaf\
\xff\x55\x7e\xab\xff\x50\x74\x98\xff\x54\x76\x8e\xff\x82\x9f\xae\
\xff\x86\x9d\xa5\xff\x8b\x9d\x9e\xff\xb3\xc0\xbe\xff\xab\xb2\xaf\
\xff\x00\x6d\x24\x07\x00\xff\xff\x02\x00\x00\x00\x04\x80\x00\xff\
\x02\xff\x00\xff\x01\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xff\
\x02\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\x00\x01\x80\xff\x00\x02\x80\xff\x00\
\x02\x80\xff\x00\x02\x80\x00\xff\x02\x80\x00\xff\x02\x00\x00\x00\
\x01\x80\xff\x00\x02\xff\xff\x00\x04\xaa\xff\x00\x06\xa4\xab\xa4\
\xff\xb5\xbe\xbb\xff\xb5\xc2\xc4\xff\x7e\x91\x99\xff\x89\xa2\xb2\
\xff\x63\x83\x9a\xff\x50\x76\x96\xff\x52\x7d\xa4\xff\x52\x80\xaf\
\xff\x53\x83\xb7\xff\x56\x84\xbe\xff\x59\x87\xc7\xff\x5f\x8d\xd4\
\xff\x64\x94\xdc\xff\x66\x95\xdf\xff\x61\x92\xdc\xff\x5e\x91\xda\
\xff\x5c\x90\xd7\xff\x5c\x8e\xd6\xff\x5d\x8f\xd7\xff\x5d\x8f\xd7\
\xff\x5c\x8f\xd5\xff\x5c\x8e\xd6\xff\x5c\x8e\xd6\xff\x5f\x91\xd9\
\xff\x61\x93\xdb\xff\x62\x93\xdd\xff\x61\x92\xdc\xff\x61\x92\xdc\
\xff\x5f\x91\xd9\xff\x5e\x91\xd7\xff\x5d\x91\xd4\xff\x5a\x8e\xd1\
\xff\x5a\x8e\xd0\xff\x5d\x91\xd4\xff\x5e\x91\xd7\xff\x5c\x8d\xd7\
\xff\x5b\x8c\xd6\xff\x5f\x8e\xd8\xff\x5d\x8a\xd3\xff\x60\x8a\xcf\
\xff\x58\x83\xc2\xff\x50\x78\xb2\xff\x56\x7e\xaf\xff\x50\x75\x9b\
\xff\x53\x73\x90\xff\x7d\x99\xaa\xff\x84\x9e\xa5\xff\x87\x9b\x9c\
\xff\xb4\xc2\xc0\xff\xa9\xb2\xaf\xff\x00\xb6\x6d\x07\x00\xff\xff\
\x02\x00\x00\x00\x02\xff\x00\xff\x02\xff\x00\xff\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\xff\x02\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\x00\
\x01\xff\x80\x00\x02\x80\xff\x00\x02\x80\xff\x00\x02\x80\x00\xff\
\x02\x80\x00\xff\x02\x00\x00\x00\x01\x80\xff\x00\x02\xff\xff\x00\
\x04\xbf\xff\x00\x04\xa7\xad\xa8\xff\xb2\xbd\xba\xff\xad\xbc\xbf\
\xff\x7c\x90\x9b\xff\x88\xa2\xb3\xff\x53\x74\x8e\xff\x53\x79\x9c\
\xff\x53\x7e\xa9\xff\x52\x81\xb4\xff\x54\x85\xbd\xff\x56\x85\xc2\
\xff\x5b\x8b\xcd\xff\x60\x91\xd7\xff\x65\x97\xdf\xff\x68\x99\xe3\
\xff\x64\x95\xdf\xff\x61\x95\xdc\xff\x5e\x92\xd9\xff\x5d\x91\xd8\
\xff\x5c\x90\xd7\xff\x5b\x8f\xd6\xff\x59\x8d\xd4\xff\x59\x8a\xd4\
\xff\x5b\x8c\xd6\xff\x5c\x8d\xd7\xff\x5e\x8f\xd9\xff\x5e\x8f\xdb\
\xff\x5e\x8f\xdb\xff\x5d\x8e\xda\xff\x5e\x8f\xd9\xff\x5f\x91\xd9\
\xff\x5d\x91\xd4\xff\x5b\x8f\xd2\xff\x5c\x90\xd2\xff\x60\x94\xd7\
\xff\x63\x96\xdc\xff\x63\x95\xdd\xff\x63\x94\xde\xff\x64\x95\xdf\
\xff\x60\x8f\xd9\xff\x5f\x8d\xd4\xff\x5a\x86\xc6\xff\x53\x7d\xb8\
\xff\x58\x7f\xb3\xff\x50\x74\x9c\xff\x4f\x71\x8f\xff\x72\x90\xa3\
\xff\x85\x9e\xa8\xff\x85\x99\x9a\xff\xb7\xc5\xc3\xff\xaa\xb3\xb0\
\xff\x00\xd5\x80\x06\x00\x80\x80\x04\x00\x00\x00\x01\xff\x00\xff\
\x02\xff\x00\xff\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\x00\x01\xff\x80\x00\x02\x80\xff\x00\
\x02\x80\xff\x00\x02\x80\x00\xff\x02\x80\x00\xff\x02\x00\x00\x00\
\x01\x00\x00\x00\x01\x80\xff\x00\x02\x2a\xd5\x00\x06\xa9\xb0\xab\
\xff\xae\xbb\xb9\xff\xa3\xb3\xb9\xff\x81\x97\xa3\xff\x79\x94\xa9\
\xff\x51\x73\x91\xff\x58\x7f\xa6\xff\x54\x7f\xb0\xff\x54\x83\xb9\
\xff\x54\x86\xc2\xff\x58\x88\xc8\xff\x5b\x8c\xd0\xff\x61\x94\xda\
\xff\x68\x9a\xe2\xff\x6a\x9c\xe4\xff\x66\x9a\xe1\xff\x64\x98\xde\
\xff\x62\x96\xdc\xff\x60\x94\xdb\xff\x60\x93\xdc\xff\x5d\x90\xd9\
\xff\x5b\x8e\xd7\xff\x5a\x8b\xd7\xff\x5c\x8d\xd7\xff\x5d\x8e\xd8\
\xff\x5e\x8f\xd9\xff\x5d\x8e\xda\xff\x5e\x8f\xdb\xff\x5d\x8e\xda\
\xff\x5f\x90\xda\xff\x61\x93\xdb\xff\x5e\x92\xd5\xff\x5e\x92\xd4\
\xff\x60\x94\xd6\xff\x64\x98\xdb\xff\x69\x9d\xe0\xff\x6a\x9c\xe4\
\xff\x69\x9b\xe3\xff\x69\x9a\xe6\xff\x63\x94\xde\xff\x5f\x8f\xd7\
\xff\x59\x89\xcb\xff\x55\x81\xbe\xff\x59\x81\xb6\xff\x51\x77\xa1\
\xff\x4b\x6f\x8d\xff\x67\x86\x9b\xff\x84\x9f\xa9\xff\x81\x96\x98\
\xff\xb5\xc6\xc3\xff\xa7\xb2\xaf\xff\x00\xb6\x6d\x07\x00\x49\x49\
\x07\xff\xff\xff\x00\xff\x00\xff\x02\xff\x00\xff\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\x00\
\x01\xff\x80\x00\x02\x80\xff\x00\x02\x80\xff\x00\x02\x80\x00\xff\
\x02\x80\x00\xff\x02\x80\x00\xff\x02\x00\x00\x00\x01\x80\xff\x00\
\x02\x00\x55\x33\x0f\xae\xb7\xb4\xff\xaf\xbd\xbc\xff\x98\xaa\xb1\
\xff\x87\x9e\xae\xff\x62\x7d\x97\xff\x59\x7c\x9e\xff\x5a\x81\xad\
\xff\x56\x82\xb7\xff\x55\x85\xbf\xff\x54\x87\xc6\xff\x57\x89\xcb\
\xff\x5d\x8e\xd4\xff\x63\x95\xdd\xff\x68\x9c\xe3\xff\x6b\x9f\xe6\
\xff\x6b\x9f\xe5\xff\x69\x9d\xe3\xff\x65\x9c\xe1\xff\x65\x99\xe0\
\xff\x64\x97\xe0\xff\x62\x94\xe0\xff\x5f\x91\xdd\xff\x5e\x8e\xdc\
\xff\x60\x91\xdd\xff\x60\x93\xdc\xff\x5f\x92\xdb\xff\x5d\x8f\xdb\
\xff\x5d\x8e\xdc\xff\x5e\x90\xdc\xff\x61\x94\xdd\xff\x62\x96\xdd\
\xff\x63\x98\xdb\xff\x64\x9a\xdb\xff\x66\x9c\xdd\xff\x6a\x9f\xe2\
\xff\x6f\xa4\xe7\xff\x71\xa5\xec\xff\x6f\xa3\xea\xff\x6c\x9e\xea\
\xff\x66\x98\xe4\xff\x5f\x90\xda\xff\x5b\x8c\xd2\xff\x58\x84\xc4\
\xff\x56\x82\xb8\xff\x4f\x79\xa4\xff\x4a\x70\x90\xff\x61\x81\x98\
\xff\x82\x9e\xa9\xff\x7c\x92\x97\xff\xaf\xc1\xc0\xff\xa8\xb3\xb0\
\xff\x00\x6a\x40\x0c\x00\x40\x40\x08\xff\xff\xff\x00\xff\x00\xaa\
\x03\xff\x00\xff\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\x00\x01\xff\x80\x00\x02\x80\xff\x00\
\x02\x80\xff\x00\x02\x00\x00\x00\x01\x80\x00\xff\x02\x80\x00\xff\
\x02\x00\xaa\xff\x03\x00\xaa\xaa\x03\x5f\x8b\x79\x71\xb1\xb9\xb8\
\xff\xb2\xbf\xc1\xff\x91\xa3\xae\xff\x85\x9f\xb0\xff\x52\x70\x8d\
\xff\x5c\x80\xa6\xff\x58\x80\xb1\xff\x56\x83\xbc\xff\x55\x85\xc5\
\xff\x55\x86\xca\xff\x58\x89\xcf\xff\x5b\x8d\xd5\xff\x64\x98\xdf\
\xff\x6b\x9f\xe6\xff\x6f\xa3\xe9\xff\x6e\xa5\xea\xff\x6d\xa4\xe9\
\xff\x6b\xa2\xe7\xff\x6c\xa0\xe7\xff\x6a\x9c\xe8\xff\x68\x99\xe7\
\xff\x65\x96\xe4\xff\x64\x94\xe2\xff\x61\x92\xe0\xff\x61\x93\xdf\
\xff\x62\x95\xde\xff\x61\x92\xe0\xff\x61\x92\xe0\xff\x62\x94\xe0\
\xff\x65\x98\xe1\xff\x67\x9b\xe2\xff\x6e\xa3\xe6\xff\x6e\xa4\xe5\
\xff\x71\xa7\xe8\xff\x74\xaa\xeb\xff\x78\xad\xf0\xff\x7a\xae\xf4\
\xff\x76\xaa\xf1\xff\x6e\xa0\xec\xff\x68\x9a\xe6\xff\x5f\x91\xdd\
\xff\x5d\x8f\xd7\xff\x58\x88\xca\xff\x55\x82\xbb\xff\x51\x7b\xa8\
\xff\x4d\x73\x95\xff\x5e\x80\x98\xff\x82\x9f\xad\xff\x79\x91\x97\
\xff\xa6\xb8\xb7\xff\xa9\xb4\xb1\xff\x9d\xbf\xb0\xb1\x00\x99\x33\
\x05\xff\xff\xff\x00\xff\x00\xaa\x03\xff\x00\xff\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\x00\
\x01\xff\x80\x00\x02\x80\xff\x00\x02\x80\xff\x00\x02\x00\x00\x00\
\x01\x55\x00\xaa\x03\x00\xaa\xff\x03\x00\xaa\xff\x03\x00\xaa\xaa\
\x03\x9d\xba\xb5\xb1\xac\xb6\xb6\xff\xb2\xc1\xc4\xff\x8b\x9e\xab\
\xff\x7b\x96\xab\xff\x54\x73\x94\xff\x59\x7c\xa8\xff\x56\x7d\xb4\
\xff\x56\x82\xc1\xff\x55\x84\xc8\xff\x54\x86\xce\xff\x56\x87\xd1\
\xff\x5b\x8e\xd7\xff\x66\x9a\xe1\xff\x6d\xa3\xea\xff\x73\xaa\xef\
\xff\x74\xae\xf0\xff\x73\xad\xef\xff\x72\xa9\xec\xff\x72\xa6\xed\
\xff\x70\xa2\xee\xff\x6d\x9e\xee\xff\x6a\x9b\xeb\xff\x67\x98\xe8\
\xff\x63\x94\xe2\xff\x65\x97\xe3\xff\x64\x99\xe2\xff\x64\x98\xe5\
\xff\x65\x99\xe6\xff\x67\x9b\xe7\xff\x6a\x9f\xe8\xff\x6c\xa2\xe9\
\xff\x73\xaa\xef\xff\x76\xad\xf0\xff\x78\xb0\xf1\xff\x7a\xb2\xf3\
\xff\x7c\xb3\xf6\xff\x7d\xb4\xf9\xff\x75\xab\xf2\xff\x6b\x9f\xeb\
\xff\x68\x9c\xe9\xff\x5f\x93\xe0\xff\x5f\x92\xdb\xff\x59\x8b\xcd\
\xff\x57\x85\xbf\xff\x53\x7f\xae\xff\x50\x78\x9b\xff\x60\x83\x9d\
\xff\x7f\x9e\xad\xff\x74\x8e\x94\xff\x9b\xaf\xb0\xff\xa7\xb4\xb2\
\xff\xe4\xeb\xe8\xff\x00\xff\x55\x03\xff\x00\x00\x02\xff\x00\xaa\
\x03\xff\x00\xff\x01\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xff\
\x02\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\x00\x01\xff\x80\x00\x02\x80\xff\x00\
\x02\x80\xff\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\x00\xaa\xff\
\x03\x00\xaa\xff\x03\x00\xff\xcc\x05\xbb\xcf\xcb\xd0\xa5\xaf\xaf\
\xff\xaf\xc0\xc3\xff\x87\x9c\xab\xff\x70\x8c\xa4\xff\x5b\x7b\x9e\
\xff\x53\x77\xa5\xff\x57\x7f\xb9\xff\x57\x82\xc5\xff\x53\x83\xcb\
\xff\x53\x84\xce\xff\x55\x87\xd3\xff\x5c\x8f\xd8\xff\x66\x9b\xe4\
\xff\x71\xa8\xed\xff\x75\xaf\xf1\xff\x79\xb3\xf4\xff\x77\xb1\xf2\
\xff\x75\xaf\xf1\xff\x74\xaa\xf1\xff\x73\xa5\xf1\xff\x70\xa1\xf1\
\xff\x6c\x9c\xee\xff\x69\x9a\xea\xff\x64\x98\xe5\xff\x65\x99\xe5\
\xff\x67\x9c\xe5\xff\x69\x9d\xe9\xff\x6b\x9f\xec\xff\x6d\xa1\xed\
\xff\x71\xa5\xf1\xff\x73\xa8\xf1\xff\x76\xad\xf2\xff\x7a\xb1\xf4\
\xff\x7c\xb4\xf5\xff\x7e\xb6\xf7\xff\x80\xb7\xfa\xff\x7e\xb5\xfa\
\xff\x74\xaa\xf1\xff\x67\x9e\xe9\xff\x67\x9d\xea\xff\x60\x94\xe1\
\xff\x62\x95\xde\xff\x5d\x8e\xd2\xff\x57\x87\xc1\xff\x53\x81\xb1\
\xff\x52\x7c\xa1\xff\x61\x86\xa0\xff\x7f\x9d\xae\xff\x73\x8d\x94\
\xff\x96\xaa\xab\xff\xa7\xb5\xb3\xff\xdd\xe6\xe3\xff\x00\xff\x55\
\x03\x99\x33\x00\x05\xff\x00\x00\x02\xff\x00\xff\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\xff\x02\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\x00\
\x01\xff\x80\x00\x02\x80\xff\x00\x02\xff\xff\x00\x01\xff\xff\xff\
\x00\x00\x00\x00\x02\x00\x66\x66\x05\x00\xff\xff\x02\x00\xff\xff\
\x04\xe7\xed\xec\xff\xa6\xb2\xb2\xff\xb4\xc5\xc8\xff\x83\x98\xa7\
\xff\x67\x83\x9b\xff\x59\x79\x9d\xff\x56\x7c\xac\xff\x54\x7e\xb9\
\xff\x54\x81\xc4\xff\x54\x84\xcc\xff\x53\x84\xd0\xff\x56\x88\xd4\
\xff\x60\x93\xdc\xff\x6c\xa1\xea\xff\x77\xae\xf3\xff\x7d\xb4\xf7\
\xff\x7e\xb6\xf7\xff\x7a\xb4\xf5\xff\x77\xb1\xf2\xff\x75\xab\xf2\
\xff\x70\xa4\xf0\xff\x6d\xa0\xf0\xff\x6b\x9d\xef\xff\x6a\x9d\xed\
\xff\x6a\x9e\xea\xff\x6b\xa0\xe9\xff\x69\xa0\xe9\xff\x67\x9e\xe9\
\xff\x68\x9e\xeb\xff\x6d\xa4\xef\xff\x6e\xa5\xf0\xff\x6e\xa5\xee\
\xff\x75\xad\xf4\xff\x79\xb3\xf5\xff\x7d\xb7\xf8\xff\x7f\xb9\xfb\
\xff\x80\xba\xfc\xff\x7c\xb5\xfa\xff\x73\xab\xf2\xff\x6b\xa2\xeb\
\xff\x65\x9c\xe7\xff\x68\x9e\xeb\xff\x5f\x94\xdd\xff\x57\x8b\xce\
\xff\x62\x92\xcc\xff\x5b\x89\xb9\xff\x51\x7c\xa3\xff\x71\x98\xb4\
\xff\x80\xa1\xb1\xff\x80\x9c\xa3\xff\x87\x9c\x9e\xff\xa6\xb4\xb3\
\xff\xd8\xe0\xdf\xff\x00\x8e\x55\x09\xff\xff\x00\x01\x00\x00\x00\
\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xff\
\x02\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\x00\x01\xff\xff\x00\x01\x80\xff\x00\
\x02\x80\xff\x00\x02\x00\x00\x00\x01\xff\xff\xff\x00\x00\xff\xff\
\x02\x00\x80\x80\x04\x00\xd5\x80\x06\xdd\xe4\xe1\xff\xa7\xb3\xb3\
\xff\xa7\xb7\xbd\xff\x7a\x91\xa0\xff\x66\x82\x9a\xff\x53\x75\x99\
\xff\x56\x7c\xac\xff\x53\x7d\xb8\xff\x52\x7f\xc2\xff\x52\x82\xca\
\xff\x53\x84\xce\xff\x58\x89\xd3\xff\x62\x95\xde\xff\x6f\xa3\xea\
\xff\x78\xaf\xf4\xff\x7d\xb4\xf7\xff\x7d\xb5\xf6\xff\x7a\xb5\xf3\
\xff\x76\xb0\xf1\xff\x73\xaa\xef\xff\x6f\xa3\xef\xff\x6d\xa0\xf0\
\xff\x6c\x9f\xef\xff\x6c\xa0\xed\xff\x6c\xa3\xee\xff\x6e\xa6\xed\
\xff\x6d\xa5\xec\xff\x6a\xa1\xec\xff\x69\xa0\xeb\xff\x6c\xa3\xee\
\xff\x6e\xa5\xf0\xff\x6f\xa6\xef\xff\x73\xae\xf4\xff\x79\xb3\xf5\
\xff\x7d\xb7\xf9\xff\x7f\xb9\xfb\xff\x80\xba\xfc\xff\x7d\xb6\xfb\
\xff\x75\xad\xf4\xff\x6d\xa4\xed\xff\x63\x9a\xe5\xff\x65\x9c\xe7\
\xff\x60\x95\xde\xff\x5b\x8f\xd2\xff\x62\x94\xce\xff\x57\x86\xb9\
\xff\x51\x7c\xa3\xff\x71\x98\xb4\xff\x7f\xa0\xb3\xff\x7c\x99\xa2\
\xff\x80\x97\x99\xff\xa6\xb4\xb3\xff\xcc\xd4\xd3\xff\x00\xb6\x6d\
\x07\x00\xff\x55\x03\x00\x00\x00\x01\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\xff\x02\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x80\xff\x00\x02\x40\x80\x00\x04\x00\x00\x00\
\x01\xff\x00\xff\x01\x00\xff\xff\x02\x00\xaa\xaa\x03\x00\x9f\x60\
\x08\xd5\xdc\xd9\xff\xaf\xbb\xbb\xff\x99\xac\xb1\xff\x7c\x91\xa0\
\xff\x69\x84\x9e\xff\x50\x72\x96\xff\x57\x7d\xad\xff\x55\x7d\xb7\
\xff\x55\x81\xc1\xff\x55\x84\xc8\xff\x58\x88\xd0\xff\x5e\x8e\xd6\
\xff\x66\x98\xe0\xff\x72\xa6\xed\xff\x7c\xb0\xf6\xff\x7d\xb4\xf7\
\xff\x7e\xb6\xf7\xff\x7a\xb5\xf3\xff\x78\xb2\xf3\xff\x73\xac\xf1\
\xff\x6f\xa6\xef\xff\x6d\xa3\xf0\xff\x6c\xa1\xf1\xff\x6d\xa4\xef\
\xff\x71\xa8\xf1\xff\x74\xac\xf3\xff\x72\xad\xf3\xff\x6d\xa7\xef\
\xff\x69\xa2\xed\xff\x69\xa2\xed\xff\x6b\xa4\xef\xff\x6d\xa7\xef\
\xff\x6f\xab\xf1\xff\x73\xae\xf3\xff\x77\xb2\xf7\xff\x79\xb4\xf9\
\xff\x7a\xb5\xfa\xff\x78\xb3\xf9\xff\x71\xac\xf2\xff\x6b\xa2\xeb\
\xff\x64\x9b\xe6\xff\x65\x99\xe5\xff\x63\x97\xde\xff\x5e\x92\xd4\
\xff\x63\x95\xcf\xff\x58\x87\xba\xff\x56\x83\xa9\xff\x7a\xa2\xbf\
\xff\x82\xa6\xb8\xff\x78\x97\xa0\xff\x7c\x95\x99\xff\xa6\xb6\xb5\
\xff\xbc\xc7\xc5\xff\x00\xff\xff\x04\x00\xaa\xaa\x03\x00\x00\x00\
\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\x02\x80\xff\x00\
\x02\x00\x00\x00\x01\x80\x00\xff\x02\xff\x00\xff\x01\x00\xaa\xaa\
\x03\x00\xaa\xaa\x03\x00\xd5\x80\x06\xcb\xd2\xcf\xff\xb6\xc3\xc1\
\xff\x8e\xa1\xa4\xff\x7e\x93\xa2\xff\x6f\x88\xa2\xff\x50\x70\x94\
\xff\x58\x7c\xaa\xff\x53\x7c\xb3\xff\x54\x7f\xbe\xff\x56\x83\xc6\
\xff\x5a\x89\xcd\xff\x60\x8e\xd5\xff\x68\x99\xdf\xff\x71\xa4\xea\
\xff\x77\xab\xf1\xff\x77\xae\xf1\xff\x77\xb1\xf3\xff\x76\xb0\xf1\
\xff\x76\xb0\xf1\xff\x73\xad\xef\xff\x70\xa8\xef\xff\x6e\xa5\xf0\
\xff\x6e\xa5\xf0\xff\x6f\xa6\xef\xff\x74\xaf\xf5\xff\x77\xb2\xf7\
\xff\x78\xb3\xf8\xff\x71\xac\xf2\xff\x6b\xa5\xed\xff\x6a\xa3\xee\
\xff\x6b\xa4\xef\xff\x6b\xa6\xee\xff\x6b\xa6\xee\xff\x6e\xaa\xf0\
\xff\x70\xad\xf1\xff\x72\xad\xf2\xff\x73\xae\xf4\xff\x71\xac\xf2\
\xff\x6b\xa5\xed\xff\x65\x9c\xe7\xff\x64\x9b\xe6\xff\x61\x96\xdf\
\xff\x60\x94\xda\xff\x5e\x90\xd2\xff\x61\x91\xcb\xff\x56\x86\xb6\
\xff\x5b\x88\xae\xff\x80\xa8\xc5\xff\x86\xaa\xbc\xff\x79\x97\xa2\
\xff\x78\x91\x95\xff\xa0\xb2\xb1\xff\xae\xbb\xb9\xff\x00\xff\xff\
\x05\x00\x55\x55\x06\x00\x00\x00\x01\x00\x00\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\x00\x00\x04\x00\xff\xff\x02\x00\xff\xff\x02\x00\x80\xbf\
\x04\x00\x66\x99\x05\x00\xaa\xaa\x03\x00\xaa\xaa\x03\x00\xff\x99\
\x05\xbd\xc4\xc1\xff\xb4\xc1\xbf\xff\x84\x95\x98\xff\x7e\x93\xa2\
\xff\x6c\x86\x9e\xff\x4e\x6e\x92\xff\x54\x78\xa6\xff\x53\x79\xaf\
\xff\x52\x7c\xb7\xff\x55\x81\xc0\xff\x59\x86\xc9\xff\x5f\x8c\xd0\
\xff\x64\x92\xd9\xff\x69\x9c\xe2\xff\x6e\xa2\xe8\xff\x6c\xa3\xe6\
\xff\x67\xa1\xe3\xff\x6b\xa5\xe7\xff\x6c\xa8\xea\xff\x6d\xa9\xeb\
\xff\x6b\xa6\xeb\xff\x6a\xa5\xeb\xff\x6d\xa7\xef\xff\x6f\xaa\xf0\
\xff\x78\xb3\xf8\xff\x7c\xb9\xf9\xff\x7c\xb9\xfb\xff\x76\xb3\xf7\
\xff\x6f\xaa\xf2\xff\x6d\xa6\xf1\xff\x6d\xa6\xf1\xff\x6c\xa6\xf1\
\xff\x6a\xa5\xed\xff\x6c\xa8\xee\xff\x6d\xa9\xef\xff\x6c\xa8\xee\
\xff\x6c\xa8\xee\xff\x6b\xa6\xee\xff\x66\x9f\xea\xff\x62\x99\xe4\
\xff\x62\x97\xe0\xff\x5d\x91\xd8\xff\x5e\x92\xd5\xff\x5f\x8f\xcf\
\xff\x5c\x8c\xc6\xff\x53\x83\xb3\xff\x54\x81\xa7\xff\x72\x9c\xb9\
\xff\x84\xaa\xbc\xff\x78\x98\xa3\xff\x73\x8e\x92\xff\x98\xaa\xab\
\xff\xa6\xb2\xb2\xff\x00\x80\x6a\x0c\x00\x55\x55\x06\x33\x00\x66\
\x05\xff\x00\xff\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\x00\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\x04\x00\xff\xff\
\x02\x00\xaa\xaa\x03\x00\x80\xbf\x04\x00\x55\x80\x06\x00\xaa\xaa\
\x03\x00\xff\x55\x03\x00\xff\x2a\x06\xb1\xb8\xb3\xff\xad\xba\xb8\
\xff\x7b\x8c\x8f\xff\x7d\x93\x9f\xff\x64\x7e\x96\xff\x4d\x6a\x8f\
\xff\x53\x75\xa3\xff\x54\x78\xae\xff\x54\x7c\xb6\xff\x55\x81\xbe\
\xff\x59\x85\xc4\xff\x5b\x88\xcb\xff\x60\x8f\xd3\xff\x66\x97\xdd\
\xff\x68\x9b\xe1\xff\x64\x9b\xde\xff\x60\x9a\xdc\xff\x63\x9d\xdf\
\xff\x66\xa2\xe4\xff\x67\xa3\xe5\xff\x68\xa4\xe6\xff\x6b\xa6\xeb\
\xff\x71\xac\xf1\xff\x74\xaf\xf4\xff\x79\xb5\xf7\xff\x7d\xbc\xf9\
\xff\x7e\xbc\xfc\xff\x7a\xb7\xfb\xff\x74\xb0\xf6\xff\x71\xab\xf3\
\xff\x6e\xa7\xf2\xff\x6c\xa5\xf0\xff\x6b\xa6\xee\xff\x6c\xa8\xee\
\xff\x6b\xa7\xed\xff\x69\xa5\xeb\xff\x69\xa5\xeb\xff\x68\xa3\xeb\
\xff\x64\x9d\xe8\xff\x61\x98\xe3\xff\x60\x95\xde\xff\x5c\x8e\xd6\
\xff\x61\x92\xd6\xff\x5f\x8f\xcf\xff\x5d\x8b\xc5\xff\x54\x82\xb2\
\xff\x4c\x79\x9f\xff\x60\x88\xa5\xff\x80\xa5\xb9\xff\x7e\x9e\xa9\
\xff\x77\x92\x96\xff\x8c\xa0\xa1\xff\xa7\xb3\xb3\xff\xe3\xe9\xe8\
\xff\x00\xff\xff\x02\x33\x00\x66\x05\xff\x00\xff\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xaa\xaa\x03\xff\xff\xff\x00\xff\x00\xff\
\x01\x55\x00\xaa\x03\x00\xff\xff\x02\x00\xff\xff\x02\x00\x66\x99\
\x05\x00\xaa\xff\x03\x00\xff\xff\x02\x00\x80\x2a\x06\x3f\x83\x4b\
\x50\xad\xb4\xaf\xff\xa5\xb2\xb0\xff\x76\x87\x8a\xff\x79\x8f\x9b\
\xff\x58\x70\x88\xff\x49\x65\x88\xff\x4c\x6d\x9a\xff\x4f\x74\xa8\
\xff\x53\x79\xb3\xff\x53\x7d\xb8\xff\x55\x82\xbf\xff\x57\x85\xc5\
\xff\x5b\x8a\xce\xff\x61\x92\xd8\xff\x62\x95\xdb\xff\x62\x97\xda\
\xff\x62\x99\xdc\xff\x62\x9c\xdd\xff\x63\x9d\xde\xff\x60\x9d\xdd\
\xff\x5f\x9b\xdd\xff\x63\x9e\xe3\xff\x6b\xa6\xeb\xff\x71\xad\xef\
\xff\x76\xb3\xf3\xff\x79\xb8\xf4\xff\x7c\xbb\xf8\xff\x7a\xb6\xf8\
\xff\x76\xb1\xf7\xff\x71\xac\xf4\xff\x6d\xa7\xef\xff\x68\xa1\xec\
\xff\x68\xa2\xea\xff\x68\xa3\xe9\xff\x67\xa2\xe7\xff\x65\xa0\xe5\
\xff\x65\xa0\xe6\xff\x65\x9f\xe7\xff\x63\x9a\xe5\xff\x60\x94\xe0\
\xff\x5f\x92\xdb\xff\x58\x8b\xd1\xff\x5b\x8d\xcf\xff\x56\x87\xc5\
\xff\x56\x85\xbd\xff\x51\x7f\xaf\xff\x49\x74\x9b\xff\x51\x79\x96\
\xff\x75\x9a\xae\xff\x81\xa1\xac\xff\x7a\x97\x9c\xff\x81\x96\x97\
\xff\xa7\xb5\xb4\xff\xcb\xd3\xd3\xff\x00\xaa\xff\x03\x80\x00\xff\
\x02\xff\x00\xff\x01\xff\xff\xff\x00\x00\x00\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xff\
\x02\x00\x00\x00\x01\xff\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\
\x02\x00\x80\x80\x04\x00\x80\xbf\x04\x00\xaa\xff\x03\x00\xff\xff\
\x02\x00\x4c\x19\x0a\xe5\xea\xe8\xff\xa9\xb2\xaf\xff\x9c\xac\xab\
\xff\x6f\x83\x88\xff\x73\x88\x97\xff\x49\x60\x7a\xff\x3e\x59\x7e\
\xff\x43\x62\x8f\xff\x48\x6a\x9f\xff\x4d\x73\xad\xff\x4d\x77\xb2\
\xff\x4f\x7a\xb9\xff\x52\x7d\xc0\xff\x56\x83\xc7\xff\x5b\x89\xd0\
\xff\x5d\x8e\xd4\xff\x5c\x8f\xd5\xff\x66\x9b\xde\xff\x63\x9b\xdc\
\xff\x60\x99\xd7\xff\x58\x92\xd3\xff\x55\x8f\xd1\xff\x58\x91\xd6\
\xff\x61\x9a\xdf\xff\x69\xa3\xe4\xff\x71\xae\xec\xff\x75\xb3\xef\
\xff\x77\xb6\xf2\xff\x77\xb4\xf4\xff\x74\xaf\xf4\xff\x70\xab\xf0\
\xff\x6a\xa5\xeb\xff\x66\x9e\xe5\xff\x65\x9d\xe4\xff\x64\x9e\xe0\
\xff\x63\x9d\xdf\xff\x61\x9b\xdd\xff\x63\x9a\xdf\xff\x64\x99\xe2\
\xff\x63\x95\xe1\xff\x5f\x90\xdc\xff\x5e\x8d\xd7\xff\x53\x84\xca\
\xff\x50\x80\xc2\xff\x49\x78\xb6\xff\x4a\x77\xb0\xff\x4d\x78\xa9\
\xff\x48\x70\x9a\xff\x4c\x72\x92\xff\x66\x8b\xa1\xff\x7c\x9d\xac\
\xff\x78\x95\x9c\xff\x72\x8b\x8d\xff\xa0\xb2\xb3\xff\xb7\xbf\xbf\
\xff\x00\xcc\xff\x05\x00\x66\x99\x05\xff\x00\xff\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\x55\x55\x06\x00\xff\xcc\x05\x00\xff\xcc\x05\x00\x66\x99\
\x05\x00\x80\xbf\x04\x00\x00\x00\x01\x00\x00\x00\x01\xcd\xd2\xd1\
\xff\xb0\xba\xba\xff\x7a\x8d\x90\xff\x79\x90\x98\xff\x65\x7d\x8f\
\xff\x40\x58\x76\xff\x40\x5a\x82\xff\x4c\x6a\x99\xff\x49\x6b\xa1\
\xff\x4a\x6f\xab\xff\x4a\x73\xb1\xff\x4e\x78\xb9\xff\x52\x7c\xc1\
\xff\x58\x84\xcb\xff\x5d\x8b\xd2\xff\x61\x92\xd8\xff\x64\x95\xdb\
\xff\x67\x9b\xde\xff\x65\x99\xdb\xff\x5a\x90\xd1\xff\x55\x89\xcb\
\xff\x56\x8a\xcd\xff\x57\x8b\xd1\xff\x5e\x92\xd8\xff\x69\xa1\xe2\
\xff\x69\xa5\xe1\xff\x6a\xa9\xe3\xff\x6f\xae\xe8\xff\x75\xb0\xee\
\xff\x75\xaf\xf0\xff\x6d\xa7\xe8\xff\x62\x9c\xdd\xff\x5b\x93\xd4\
\xff\x57\x8d\xcc\xff\x55\x8b\xca\xff\x53\x89\xc8\xff\x54\x89\xc8\
\xff\x59\x8a\xce\xff\x5b\x8b\xd3\xff\x5d\x8a\xd4\xff\x5d\x8a\xd4\
\xff\x5a\x84\xcf\xff\x5c\x88\xcf\xff\x5a\x87\xcb\xff\x51\x7f\xbf\
\xff\x4c\x78\xb3\xff\x4e\x76\xaa\xff\x4d\x72\x9e\xff\x4c\x70\x94\
\xff\x61\x84\x9e\xff\x7a\x9e\xb0\xff\x77\x9a\xa4\xff\x76\x94\x99\
\xff\x90\xa5\xa7\xff\xa8\xb4\xb4\xff\x00\xff\xff\x04\x00\xaa\xaa\
\x03\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\x80\x02\x00\xbf\x40\x04\x00\xff\xff\
\x04\x00\xff\xd5\x06\x00\x66\x99\x05\x80\x00\xff\x02\xff\x00\x00\
\x01\x00\x00\x00\x02\xbd\xc1\xc2\xff\xa5\xb1\xb3\xff\x73\x89\x8e\
\xff\x73\x8d\x99\xff\x69\x81\x97\xff\x46\x5d\x7d\xff\x44\x5d\x87\
\xff\x50\x6e\x9f\xff\x50\x71\xa9\xff\x56\x7b\xb7\xff\x55\x7d\xbe\
\xff\x57\x82\xc5\xff\x5c\x86\xcd\xff\x5f\x8a\xd3\xff\x63\x90\xd9\
\xff\x69\x96\xdf\xff\x6b\x9b\xe3\xff\x6c\x9d\xe3\xff\x6b\x9f\xe2\
\xff\x65\x99\xdb\xff\x62\x94\xd6\xff\x64\x95\xd9\xff\x63\x96\xdc\
\xff\x68\x9b\xe1\xff\x71\xa7\xe8\xff\x72\xab\xe9\xff\x72\xae\xe9\
\xff\x73\xb2\xec\xff\x77\xb2\xf0\xff\x75\xaf\xf0\xff\x6e\xa8\xe9\
\xff\x68\xa1\xdf\xff\x64\x9a\xd9\xff\x5d\x94\xd1\xff\x5d\x93\xd0\
\xff\x5b\x91\xce\xff\x5f\x8f\xcf\xff\x60\x8f\xd3\xff\x61\x8c\xd5\
\xff\x5d\x87\xd2\xff\x5c\x84\xcf\xff\x60\x88\xd3\xff\x61\x8c\xd5\
\xff\x60\x8d\xd1\xff\x5b\x87\xc7\xff\x55\x81\xbc\xff\x56\x7c\xb2\
\xff\x53\x77\xa5\xff\x51\x72\x99\xff\x69\x8b\xa9\xff\x7a\x9f\xb5\
\xff\x75\x9a\xa8\xff\x74\x95\x9e\xff\x85\x9e\xa2\xff\xaa\xb6\xb6\
\xff\x3f\x78\x6d\x50\x00\x80\x80\x04\x00\x00\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xff\
\x02\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\xff\xff\x02\x00\xff\xff\x04\x00\xaa\x8e\x09\x00\xaa\xaa\
\x03\xff\xff\xff\x00\xff\x00\x00\x01\x00\x00\x00\x08\xaa\xaf\xb0\
\xff\x9c\xa9\xab\xff\x71\x89\x8f\xff\x72\x8e\x99\xff\x71\x89\x9f\
\xff\x4c\x65\x85\xff\x48\x61\x8b\xff\x53\x71\xa2\xff\x54\x77\xaf\
\xff\x56\x7d\xbb\xff\x55\x7f\xc2\xff\x59\x83\xc8\xff\x5e\x87\xd0\
\xff\x61\x8b\xd6\xff\x66\x93\xdd\xff\x6a\x9a\xe2\xff\x6e\x9e\xe6\
\xff\x74\xa5\xeb\xff\x76\xaa\xed\xff\x73\xa7\xe9\xff\x70\xa4\xe7\
\xff\x71\xa5\xe8\xff\x70\xa3\xe9\xff\x71\xa4\xea\xff\x76\xac\xed\
\xff\x79\xb2\xf0\xff\x78\xb3\xf1\xff\x7a\xb5\xf3\xff\x7c\xb3\xf6\
\xff\x79\xb0\xf3\xff\x74\xab\xee\xff\x6f\xa6\xe9\xff\x6f\xa4\xe7\
\xff\x6b\xa1\xe2\xff\x69\x9d\xdf\xff\x69\x9d\xdf\xff\x6b\x9a\xde\
\xff\x68\x95\xde\xff\x64\x8e\xd9\xff\x5c\x86\xd3\xff\x57\x81\xce\
\xff\x5a\x84\xd1\xff\x5c\x87\xd0\xff\x5d\x89\xd0\xff\x5d\x88\xcb\
\xff\x5a\x86\xc3\xff\x5a\x7f\xb7\xff\x56\x7a\xaa\xff\x51\x74\x9c\
\xff\x70\x94\xb2\xff\x7a\xa1\xb7\xff\x76\x9b\xa9\xff\x74\x98\xa0\
\xff\x7b\x96\x9a\xff\xad\xbb\xba\xff\xdc\xe1\xe0\xff\x00\x80\x80\
\x04\x00\x00\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\x00\
\x01\xff\xff\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\xff\x02\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xff\x02\x00\xff\xff\
\x04\x00\x99\x7f\x0a\x00\xaa\xaa\x03\xff\xff\xff\x00\xff\x00\x00\
\x01\x00\x00\x00\x0f\xa6\xab\xac\xff\x96\xa5\xa7\xff\x75\x8f\x95\
\xff\x75\x93\x9e\xff\x79\x93\xab\xff\x52\x6b\x8b\xff\x49\x62\x8c\
\xff\x52\x70\xa1\xff\x56\x78\xb3\xff\x51\x77\xb7\xff\x51\x7b\xbe\
\xff\x53\x7f\xc6\xff\x59\x83\xce\xff\x5f\x8c\xd6\xff\x65\x94\xde\
\xff\x6a\x9b\xe5\xff\x70\xa0\xe8\xff\x7b\xae\xf4\xff\x80\xb4\xf7\
\xff\x7d\xb1\xf3\xff\x79\xad\xf0\xff\x79\xad\xf0\xff\x77\xaa\xf0\
\xff\x74\xa7\xed\xff\x74\xa9\xec\xff\x79\xb0\xf3\xff\x7c\xb4\xf5\
\xff\x7d\xb7\xf9\xff\x7e\xb5\xfa\xff\x7b\xb1\xf8\xff\x76\xac\xf3\
\xff\x74\xaa\xf1\xff\x76\xaa\xf0\xff\x76\xaa\xf0\xff\x74\xa9\xec\
\xff\x72\xa5\xeb\xff\x70\xa0\xe8\xff\x6c\x9a\xe7\xff\x66\x91\xe0\
\xff\x5d\x88\xd9\xff\x58\x80\xd2\xff\x52\x7b\xca\xff\x52\x7c\xc7\
\xff\x55\x81\xc8\xff\x57\x82\xc5\xff\x57\x83\xc0\xff\x59\x7e\xb6\
\xff\x55\x79\xa9\xff\x51\x74\x9c\xff\x75\x98\xb9\xff\x7a\xa1\xb7\
\xff\x76\x9d\xab\xff\x77\x9b\xa3\xff\x73\x8e\x92\xff\xb2\xbf\xc1\
\xff\xcc\xd2\xd1\xff\x00\xaa\xaa\x03\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\x00\x00\x01\xff\xff\x00\x01\xff\xff\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\xaa\xaa\x03\x00\xff\xff\x04\x00\xbf\x9f\x08\x00\xaa\xaa\
\x03\x00\x00\x00\x01\xff\x00\x00\x01\xe3\xe5\xe5\xff\xa9\xb1\xb1\
\xff\x8c\x9d\xa0\xff\x78\x95\x9a\xff\x75\x95\xa0\xff\x7c\x98\xb0\
\xff\x51\x6d\x8c\xff\x48\x64\x8d\xff\x4f\x6e\xa1\xff\x53\x77\xb3\
\xff\x53\x7b\xbc\xff\x54\x7e\xc3\xff\x57\x84\xcd\xff\x5e\x8b\xd5\
\xff\x65\x93\xe0\xff\x69\x9a\xe6\xff\x6f\xa2\xeb\xff\x74\xa6\xee\
\xff\x7b\xaf\xf5\xff\x80\xb5\xf8\xff\x7d\xb3\xf4\xff\x76\xab\xee\
\xff\x77\xac\xef\xff\x75\xa9\xef\xff\x71\xa5\xec\xff\x72\xa6\xec\
\xff\x75\xac\xf1\xff\x7a\xb0\xf7\xff\x7a\xb2\xf9\xff\x7d\xb2\xfb\
\xff\x78\xac\xf8\xff\x73\xa7\xf3\xff\x70\xa4\xf0\xff\x70\xa4\xf0\
\xff\x6f\xa4\xed\xff\x6d\xa3\xea\xff\x6c\x9f\xe8\xff\x69\x9b\xe7\
\xff\x67\x96\xe7\xff\x64\x90\xe3\xff\x5d\x89\xdd\xff\x5a\x84\xd7\
\xff\x51\x7c\xcd\xff\x4e\x7a\xc7\xff\x52\x7d\xc6\xff\x54\x7e\xc3\
\xff\x53\x7e\xbd\xff\x54\x79\xb3\xff\x52\x75\xa7\xff\x50\x73\x9b\
\xff\x72\x95\xb6\xff\x78\xa1\xb7\xff\x78\x9f\xad\xff\x79\x9d\xa5\
\xff\x70\x8b\x8f\xff\xb7\xc4\xc6\xff\xc3\xc8\xc9\xff\x00\xaa\xaa\
\x03\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x80\x80\x04\x00\xff\xff\
\x04\x00\xff\xcc\x05\x00\xd5\xaa\x06\x00\xff\xff\x02\x00\x00\x00\
\x01\xd1\xd3\xd3\xff\xad\xb6\xb9\xff\x7d\x90\x93\xff\x7a\x97\x9c\
\xff\x73\x95\xa2\xff\x7d\x99\xb1\xff\x4e\x6a\x8c\xff\x48\x65\x91\
\xff\x51\x72\xa4\xff\x54\x79\xb5\xff\x5c\x86\xc9\xff\x5e\x8a\xd1\
\xff\x60\x8f\xd9\xff\x66\x94\xe1\xff\x6a\x9b\xe7\xff\x6f\xa1\xed\
\xff\x74\xa6\xf2\xff\x77\xaa\xf3\xff\x76\xaa\xf0\xff\x7d\xb2\xf5\
\xff\x7a\xb0\xf1\xff\x72\xa7\xea\xff\x73\xa8\xeb\xff\x73\xa7\xee\
\xff\x6f\xa3\xea\xff\x6e\xa2\xe9\xff\x74\xa8\xef\xff\x76\xab\xf4\
\xff\x77\xae\xf9\xff\x78\xac\xf9\xff\x71\xa5\xf2\xff\x6d\xa0\xf0\
\xff\x68\x9c\xe9\xff\x67\x9b\xe8\xff\x63\x97\xe4\xff\x62\x96\xe2\
\xff\x60\x94\xe0\xff\x60\x93\xe3\xff\x63\x93\xe5\xff\x60\x8f\xe5\
\xff\x5f\x8c\xe3\xff\x5e\x89\xe0\xff\x57\x81\xd4\xff\x54\x7f\xce\
\xff\x52\x7f\xc8\xff\x52\x7f\xc3\xff\x50\x7b\xba\xff\x4d\x73\xad\
\xff\x4b\x6e\xa0\xff\x4b\x6e\x96\xff\x69\x8c\xad\xff\x79\xa2\xb8\
\xff\x7c\xa3\xb1\xff\x7c\x9f\xa9\xff\x70\x8a\x90\xff\xb5\xc4\xc6\
\xff\xbe\xc3\xc4\xff\x00\x80\x80\x04\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\xaa\xaa\x03\x00\xb6\x92\x07\x00\xff\xcc\x05\x00\xb6\x92\
\x07\x00\xff\xff\x02\x00\x00\x00\x01\xb9\xbe\xbd\xff\xac\xb8\xba\
\xff\x74\x89\x8b\xff\x7e\x9b\xa2\xff\x74\x97\xa4\xff\x7c\x9b\xb2\
\xff\x4d\x6c\x8d\xff\x49\x6b\x96\xff\x54\x79\xab\xff\x57\x7f\xba\
\xff\x60\x8b\xce\xff\x61\x8e\xd7\xff\x64\x95\xe1\xff\x6b\x9c\xe8\
\xff\x70\xa0\xee\xff\x72\xa3\xf1\xff\x73\xa7\xf3\xff\x75\xaa\xf3\
\xff\x76\xad\xf2\xff\x7d\xb4\xf7\xff\x79\xb1\xf2\xff\x71\xa8\xeb\
\xff\x72\xa9\xec\xff\x72\xa8\xef\xff\x6d\xa3\xea\xff\x6c\x9f\xe8\
\xff\x72\xa5\xee\xff\x72\xa6\xf2\xff\x74\xa8\xf4\xff\x71\xa6\xf6\
\xff\x6c\xa1\xf1\xff\x69\x9e\xef\xff\x65\x9a\xea\xff\x64\x99\xe9\
\xff\x61\x97\xe4\xff\x60\x97\xe2\xff\x60\x97\xe2\xff\x62\x97\xe7\
\xff\x65\x97\xe9\xff\x65\x96\xec\xff\x61\x92\xe8\xff\x62\x8f\xe6\
\xff\x5a\x86\xd9\xff\x57\x85\xd3\xff\x57\x84\xce\xff\x56\x83\xc7\
\xff\x50\x7b\xba\xff\x4a\x71\xa8\xff\x46\x6c\x9c\xff\x49\x6c\x94\
\xff\x60\x83\xa4\xff\x7c\xa5\xbb\xff\x80\xa7\xb5\xff\x7d\xa0\xaa\
\xff\x70\x8d\x92\xff\xae\xbf\xc2\xff\xb8\xc0\xc0\xff\x00\x33\x33\
\x0a\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\
\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xff\x02\x00\x49\x49\
\x07\x00\xff\xcc\x05\x00\x8e\x71\x09\x00\xff\xff\x04\x00\xaa\xaa\
\x03\xa9\xae\xad\xff\xaa\xb6\xb8\xff\x70\x87\x89\xff\x80\xa0\xa6\
\xff\x77\x9c\xaa\xff\x7b\x9d\xb5\xff\x4c\x6d\x8e\xff\x4c\x6f\x9a\
\xff\x57\x7e\xb2\xff\x59\x82\xc0\xff\x60\x8d\xd1\xff\x61\x90\xda\
\xff\x67\x98\xe4\xff\x6e\x9e\xec\xff\x72\xa3\xf1\xff\x73\xa7\xf4\
\xff\x75\xa9\xf5\xff\x76\xab\xf4\xff\x7c\xb3\xf8\xff\x82\xb9\xfc\
\xff\x7e\xb6\xf7\xff\x76\xad\xf0\xff\x76\xad\xf0\xff\x74\xab\xf0\
\xff\x6c\xa2\xe9\xff\x6b\x9e\xe7\xff\x70\xa3\xec\xff\x6f\xa3\xef\
\xff\x71\xa5\xf1\xff\x6e\xa3\xf3\xff\x6c\xa1\xf1\xff\x6a\x9f\xef\
\xff\x69\x9e\xee\xff\x68\x9d\xed\xff\x69\x9f\xec\xff\x69\xa0\xeb\
\xff\x6a\xa1\xec\xff\x6b\xa0\xf0\xff\x6c\xa1\xf2\xff\x6a\x9d\xf3\
\xff\x65\x98\xee\xff\x65\x94\xea\xff\x5c\x8a\xde\xff\x5c\x8a\xd8\
\xff\x5b\x8a\xd4\xff\x5c\x88\xcf\xff\x54\x80\xbf\xff\x4c\x75\xac\
\xff\x49\x6f\x9f\xff\x4c\x6f\x97\xff\x59\x7f\x9f\xff\x7d\xa6\xbc\
\xff\x82\xab\xba\xff\x7c\xa0\xaa\xff\x6e\x8c\x91\xff\xa8\xbb\xbe\
\xff\xb4\xbd\xc0\xff\x5f\x70\x79\x71\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\xff\xff\x02\x00\xff\xff\x02\x00\x7f\x66\x0a\x00\xd5\xaa\
\x06\x00\xff\xff\x04\xe4\xe6\xe6\xff\xb2\xb8\xb7\xff\xa0\xad\xaf\
\xff\x7a\x93\x95\xff\x83\xa5\xab\xff\x79\xa0\xae\xff\x7d\xa1\xb9\
\xff\x50\x73\x94\xff\x52\x78\xa2\xff\x5b\x82\xb6\xff\x5f\x8a\xc9\
\xff\x65\x91\xd8\xff\x66\x95\xdf\xff\x6b\x9b\xe9\xff\x6f\xa0\xf0\
\xff\x72\xa3\xf1\xff\x73\xa7\xf4\xff\x75\xaa\xf3\xff\x76\xae\xf5\
\xff\x80\xba\xfc\xff\x7e\xb8\xf9\xff\x7c\xb7\xf5\xff\x7d\xb5\xf6\
\xff\x7b\xb2\xf5\xff\x78\xaf\xf4\xff\x73\xa9\xf0\xff\x71\xa4\xed\
\xff\x73\xa6\xef\xff\x72\xa7\xf0\xff\x72\xa6\xf2\xff\x71\xa5\xf1\
\xff\x71\xa5\xf2\xff\x6f\xa5\xf2\xff\x6e\xa5\xf0\xff\x6e\xa5\xf0\
\xff\x72\xa9\xf2\xff\x71\xab\xf3\xff\x73\xad\xf5\xff\x74\xad\xf8\
\xff\x73\xab\xf8\xff\x6e\xa5\xf6\xff\x69\x9d\xf0\xff\x68\x97\xeb\
\xff\x64\x93\xe5\xff\x65\x92\xe3\xff\x62\x91\xdb\xff\x60\x8c\xd3\
\xff\x58\x84\xc4\xff\x52\x7b\xb2\xff\x4a\x72\xa2\xff\x4a\x6e\x96\
\xff\x5b\x81\xa1\xff\x7a\xa3\xb9\xff\x7f\xaa\xb9\xff\x82\xa9\xb2\
\xff\x71\x91\x97\xff\x99\xad\xb2\xff\xbf\xcb\xcd\xff\xc9\xcd\xce\
\xff\xff\x00\xff\x01\x00\x00\x00\x01\xff\xff\xff\x00\x00\x00\x00\
\x04\x00\x00\x00\x03\xff\xff\xff\x00\x00\x00\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\
\x02\x00\xb6\x92\x07\x00\xff\xcc\x05\x00\xff\xcc\x05\xd4\xd9\xd8\
\xff\xb2\xba\xba\xff\x9d\xac\xae\xff\x7c\x97\x9b\xff\x85\xaa\xb2\
\xff\x7c\xa6\xb3\xff\x7f\xa5\xbd\xff\x54\x79\x9b\xff\x56\x7b\xa7\
\xff\x5d\x85\xba\xff\x61\x8b\xcc\xff\x64\x92\xd9\xff\x67\x95\xe2\
\xff\x6b\x9b\xe9\xff\x6f\xa0\xf0\xff\x72\xa3\xf3\xff\x73\xa7\xf4\
\xff\x75\xac\xf5\xff\x77\xaf\xf6\xff\x7f\xb9\xfb\xff\x7d\xb7\xf8\
\xff\x7a\xb5\xf3\xff\x7b\xb3\xf4\xff\x7a\xb2\xf3\xff\x79\xb0\xf3\
\xff\x77\xae\xf3\xff\x77\xab\xf2\xff\x79\xad\xf4\xff\x76\xac\xf3\
\xff\x75\xaa\xf3\xff\x74\xa9\xf2\xff\x73\xa8\xf1\xff\x71\xa8\xf3\
\xff\x71\xa8\xf1\xff\x6f\xa7\xee\xff\x73\xab\xf2\xff\x73\xae\xf3\
\xff\x75\xb0\xf6\xff\x75\xaf\xf7\xff\x71\xaa\xf5\xff\x6c\xa4\xf3\
\xff\x69\x9e\xef\xff\x68\x9a\xec\xff\x67\x97\xe9\xff\x66\x95\xe6\
\xff\x64\x92\xdf\xff\x60\x8d\xd6\xff\x5c\x88\xc8\xff\x57\x80\xb7\
\xff\x4f\x77\xa7\xff\x4e\x72\x9a\xff\x5c\x82\xa2\xff\x7b\xa6\xbb\
\xff\x81\xac\xbb\xff\x82\xab\xb4\xff\x73\x95\x9b\xff\x8c\xa2\xa7\
\xff\xb9\xc4\xc8\xff\xbe\xc1\xc5\xff\x2a\x00\x55\x06\x00\x00\x00\
\x02\x00\x00\x00\x01\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\
\x00\x00\x00\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\x00\x00\x03\x00\xff\x80\x02\x00\xb6\x92\x07\x00\xff\xd5\
\x06\x00\x6a\x80\x0c\xc0\xc5\xc6\xff\xb5\xbf\xbf\xff\x96\xaa\xab\
\xff\x81\x9e\xa2\xff\x89\xb0\xb8\xff\x80\xac\xb9\xff\x81\xaa\xc1\
\xff\x5c\x82\xa4\xff\x58\x7f\xab\xff\x5d\x87\xbc\xff\x63\x8f\xcf\
\xff\x63\x93\xdb\xff\x66\x96\xe4\xff\x6a\x9b\xeb\xff\x6f\x9f\xf1\
\xff\x72\xa3\xf3\xff\x74\xa8\xf5\xff\x77\xae\xf7\xff\x7a\xb3\xf8\
\xff\x7c\xb9\xf9\xff\x7c\xb7\xf5\xff\x7a\xb6\xf2\xff\x7a\xb3\xf1\
\xff\x7b\xb3\xf4\xff\x7c\xb3\xf6\xff\x7d\xb4\xf9\xff\x7f\xb3\xf9\
\xff\x7f\xb3\xf9\xff\x7c\xb3\xf8\xff\x7a\xb1\xf6\xff\x78\xae\xf5\
\xff\x76\xac\xf3\xff\x73\xaa\xf3\xff\x74\xac\xf3\xff\x73\xac\xf1\
\xff\x74\xae\xf0\xff\x76\xb0\xf2\xff\x78\xb2\xf4\xff\x77\xb0\xf5\
\xff\x72\xaa\xf1\xff\x6d\xa4\xef\xff\x6b\xa1\xee\xff\x6c\x9e\xf0\
\xff\x6b\x9b\xed\xff\x67\x96\xe7\xff\x65\x93\xe0\xff\x62\x8f\xd8\
\xff\x61\x8d\xcd\xff\x5b\x84\xbb\xff\x53\x7b\xab\xff\x50\x74\x9c\
\xff\x5d\x83\xa3\xff\x7e\xa8\xbf\xff\x84\xb1\xbf\xff\x83\xae\xb7\
\xff\x79\x9d\xa3\xff\x7d\x95\x9b\xff\xb7\xc3\xc7\xff\xb6\xbb\xbe\
\xff\x7e\x8c\x92\x91\x00\x00\x00\x04\xff\xff\xff\x00\x00\x00\x00\
\x03\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\x03\x00\xff\x80\
\x02\x00\xd5\x80\x06\x00\xd5\xff\x06\x3f\x78\x83\x50\xb4\xb9\xba\
\xff\xb9\xc5\xc5\xff\x8c\xa1\xa3\xff\x88\xa7\xaa\xff\x8c\xb6\xbd\
\xff\x83\xb0\xbe\xff\x81\xaa\xc3\xff\x61\x89\xac\xff\x5b\x82\xaf\
\xff\x5f\x88\xbf\xff\x66\x92\xd2\xff\x65\x95\xdd\xff\x67\x97\xe5\
\xff\x6b\x9c\xec\xff\x6f\x9f\xf1\xff\x70\xa3\xf3\xff\x74\xa8\xf5\
\xff\x78\xaf\xf8\xff\x7a\xb5\xfa\xff\x7d\xba\xfa\xff\x7e\xba\xf6\
\xff\x7d\xb9\xf5\xff\x7e\xb7\xf4\xff\x7f\xb8\xf6\xff\x80\xb8\xf9\
\xff\x84\xb9\xfc\xff\x85\xb9\xff\xff\x82\xb6\xfc\xff\x7f\xb6\xf9\
\xff\x7c\xb3\xf6\xff\x79\xaf\xf6\xff\x77\xad\xf4\xff\x74\xac\xf3\
\xff\x74\xad\xf2\xff\x74\xae\xf0\xff\x73\xad\xee\xff\x74\xae\xef\
\xff\x75\xaf\xf0\xff\x75\xaf\xf1\xff\x73\xac\xf1\xff\x70\xa7\xf0\
\xff\x71\xa5\xf2\xff\x72\xa3\xf3\xff\x6d\x9d\xef\xff\x68\x97\xe8\
\xff\x63\x93\xe1\xff\x61\x91\xd9\xff\x60\x8d\xd0\xff\x5e\x86\xc0\
\xff\x55\x7d\xad\xff\x52\x76\x9e\xff\x60\x87\xa7\xff\x80\xaa\xc1\
\xff\x87\xb3\xc4\xff\x87\xb1\xbd\xff\x82\xa6\xac\xff\x76\x8e\x94\
\xff\xb9\xc7\xcd\xff\xb8\xbc\xc1\xff\xdf\xe1\xe2\xff\x2a\x00\x55\
\x06\xff\xff\xff\x00\x00\x00\x00\x01\xff\xff\xff\x00\x00\x00\x00\
\x02\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\x00\x00\x00\x02\x00\xff\x80\x02\x00\xff\x99\x05\x00\xd5\xff\
\x06\xe8\xed\xee\xff\xb4\xb9\xba\xff\xc0\xcc\xcc\xff\x83\x98\x9a\
\xff\x8d\xae\xb1\xff\x90\xbc\xc3\xff\x85\xb4\xc2\xff\x7f\xab\xc3\
\xff\x65\x8f\xb2\xff\x5a\x83\xb0\xff\x5d\x89\xbf\xff\x65\x93\xd3\
\xff\x66\x96\xde\xff\x68\x98\xe6\xff\x6b\x9b\xed\xff\x6f\xa0\xf0\
\xff\x70\xa4\xf1\xff\x75\xa9\xf5\xff\x78\xb0\xf7\xff\x7c\xb8\xfa\
\xff\x7e\xbb\xf9\xff\x81\xbd\xf8\xff\x83\xbf\xfa\xff\x84\xbe\xf9\
\xff\x87\xbe\xfb\xff\x85\xbb\xfc\xff\x85\xbb\xfc\xff\x86\xbb\xfe\
\xff\x82\xb7\xfa\xff\x7f\xb6\xf9\xff\x7c\xb3\xf6\xff\x7a\xb0\xf7\
\xff\x77\xad\xf4\xff\x75\xac\xf1\xff\x75\xac\xf1\xff\x75\xac\xef\
\xff\x74\xac\xed\xff\x72\xaa\xeb\xff\x73\xab\xec\xff\x75\xad\xee\
\xff\x78\xaf\xf2\xff\x79\xaf\xf6\xff\x75\xa9\xf5\xff\x74\xa5\xf5\
\xff\x6f\x9f\xf1\xff\x6a\x9b\xeb\xff\x65\x95\xe3\xff\x62\x92\xda\
\xff\x61\x8e\xd1\xff\x60\x88\xc3\xff\x58\x80\xb1\xff\x55\x79\xa1\
\xff\x69\x90\xb0\xff\x83\xad\xc4\xff\x8b\xb7\xc8\xff\x88\xb5\xc0\
\xff\x86\xab\xb3\xff\x71\x8b\x92\xff\xb8\xc8\xce\xff\xbb\xc1\xc6\
\xff\xd4\xd6\xd7\xff\x33\x00\x66\x05\xff\xff\xff\x00\x00\x00\x00\
\x01\xff\xff\xff\x00\x00\x00\x00\x03\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\x01\x80\xff\x00\
\x02\x00\xff\x99\x05\x00\xcc\xff\x05\xdd\xe2\xe3\xff\xba\xbf\xc0\
\xff\xc6\xd2\xd2\xff\x7e\x93\x95\xff\x90\xb1\xb4\xff\x94\xc0\xc7\
\xff\x89\xb8\xc6\xff\x81\xad\xc5\xff\x6c\x96\xb9\xff\x5c\x84\xb4\
\xff\x5d\x89\xbf\xff\x64\x91\xd4\xff\x65\x97\xdf\xff\x68\x97\xe8\
\xff\x6c\x9c\xee\xff\x6f\xa0\xf0\xff\x72\xa3\xf1\xff\x74\xa8\xf4\
\xff\x78\xb0\xf7\xff\x7c\xb6\xf8\xff\x80\xbb\xf9\xff\x82\xbe\xf9\
\xff\x84\xc1\xf9\xff\x88\xc0\xfb\xff\x87\xbf\xfa\xff\x86\xbb\xfa\
\xff\x85\xb9\xfb\xff\x84\xb8\xfa\xff\x80\xb5\xf8\xff\x7d\xb4\xf7\
\xff\x7c\xb3\xf8\xff\x7a\xb1\xf6\xff\x78\xae\xf5\xff\x76\xad\xf2\
\xff\x75\xac\xf1\xff\x74\xab\xee\xff\x74\xac\xed\xff\x72\xab\xe9\
\xff\x73\xac\xea\xff\x78\xb0\xf1\xff\x7d\xb4\xf7\xff\x7e\xb4\xfb\
\xff\x78\xac\xf8\xff\x75\xa6\xf6\xff\x71\xa1\xf3\xff\x6d\x9e\xee\
\xff\x68\x98\xe6\xff\x65\x95\xdd\xff\x64\x91\xd4\xff\x60\x8a\xc5\
\xff\x5b\x83\xb4\xff\x5a\x7e\xa6\xff\x73\x9a\xba\xff\x83\xaf\xc6\
\xff\x8d\xbb\xcc\xff\x8d\xba\xc5\xff\x87\xac\xb4\xff\x73\x8d\x94\
\xff\xb2\xc2\xc8\xff\xbe\xc4\xc9\xff\xcc\xcf\xd3\xff\x55\x00\xaa\
\x03\xff\xff\xff\x00\x00\x00\x00\x02\xff\xff\xff\x00\x00\x00\x00\
\x05\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x55\xaa\x00\x03\x00\xff\x99\x05\x00\xcc\xff\
\x05\xd0\xd5\xd8\xff\xbf\xc7\xc7\xff\xcb\xd7\xd7\xff\x7f\x94\x96\
\xff\x92\xb3\xb6\xff\x99\xc3\xc8\xff\x8e\xbe\xca\xff\x87\xb3\xcb\
\xff\x75\xa2\xc4\xff\x5e\x88\xb7\xff\x5c\x88\xbe\xff\x61\x8e\xd1\
\xff\x65\x97\xdf\xff\x69\x98\xe9\xff\x6d\x9d\xef\xff\x70\xa1\xf1\
\xff\x73\xa4\xf2\xff\x74\xa8\xf4\xff\x78\xae\xf5\xff\x7c\xb3\xf6\
\xff\x7e\xb9\xf7\xff\x80\xbc\xf7\xff\x82\xbf\xf7\xff\x85\xbe\xf6\
\xff\x84\xbc\xf7\xff\x83\xb9\xf6\xff\x82\xb7\xf6\xff\x81\xb5\xf7\
\xff\x7e\xb3\xf6\xff\x7c\xb3\xf6\xff\x7a\xb3\xf8\xff\x7b\xb2\xf7\
\xff\x7a\xb1\xf6\xff\x78\xaf\xf4\xff\x76\xad\xf2\xff\x77\xac\xef\
\xff\x78\xae\xef\xff\x79\xaf\xee\xff\x7b\xb1\xf0\xff\x80\xb6\xf7\
\xff\x83\xb8\xfb\xff\x81\xb5\xfc\xff\x7a\xad\xf6\xff\x75\xa6\xf4\
\xff\x71\xa2\xf2\xff\x6e\x9f\xef\xff\x6b\x9b\xe9\xff\x68\x98\xe0\
\xff\x65\x92\xd5\xff\x61\x8c\xc5\xff\x5e\x86\xb7\xff\x5c\x83\xaa\
\xff\x7b\xa2\xc2\xff\x80\xaf\xc5\xff\x8f\xc0\xd0\xff\x91\xbf\xca\
\xff\x87\xae\xb6\xff\x78\x94\x9b\xff\xae\xbe\xc4\xff\xbf\xc5\xca\
\xff\xc9\xcc\xd0\xff\xff\x00\xff\x01\x00\x00\x00\x01\x00\x00\x00\
\x04\xff\xff\xff\x00\x00\x00\x00\x06\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\x00\x00\x01\xff\xff\xff\x00\x00\x80\x2a\
\x06\x00\xff\xbf\x04\x00\x92\xb6\x07\xc4\xc9\xcc\xff\xc2\xc9\xcc\
\xff\xcc\xd9\xdb\xff\x82\x97\x99\xff\x95\xb4\xb7\xff\x9c\xc4\xc9\
\xff\x94\xc1\xce\xff\x8b\xb7\xcf\xff\x7d\xaa\xcc\xff\x60\x8d\xb9\
\xff\x5a\x89\xbd\xff\x60\x8e\xce\xff\x67\x97\xdf\xff\x69\x99\xe7\
\xff\x6d\x9e\xee\xff\x70\xa1\xf1\xff\x72\xa3\xf3\xff\x74\xa8\xf4\
\xff\x77\xac\xf5\xff\x7a\xb1\xf6\xff\x7e\xb6\xf7\xff\x7e\xba\xf6\
\xff\x80\xbd\xf5\xff\x80\xbb\xf3\xff\x81\xba\xf2\xff\x80\xb6\xf2\
\xff\x7f\xb5\xf2\xff\x80\xb5\xf4\xff\x7d\xb3\xf4\xff\x7c\xb3\xf6\
\xff\x7b\xb5\xf7\xff\x7e\xb5\xf8\xff\x7c\xb3\xf6\xff\x7a\xb1\xf4\
\xff\x77\xae\xf1\xff\x77\xad\xee\xff\x7a\xb0\xf1\xff\x7d\xb3\xf2\
\xff\x82\xb8\xf7\xff\x85\xbb\xfc\xff\x85\xba\xfd\xff\x80\xb4\xfa\
\xff\x79\xad\xf4\xff\x74\xa6\xf2\xff\x70\xa1\xef\xff\x6f\xa0\xee\
\xff\x6b\x9d\xe9\xff\x68\x99\xdf\xff\x61\x91\xd1\xff\x5f\x8c\xc5\
\xff\x5d\x87\xb6\xff\x5b\x84\xab\xff\x7a\xa6\xc5\xff\x7f\xae\xc4\
\xff\x91\xc2\xd2\xff\x97\xc4\xcf\xff\x8c\xb1\xb9\xff\x82\x9c\xa3\
\xff\xab\xbb\xc1\xff\xc3\xc9\xd0\xff\xc9\xcc\xd0\xff\xff\x00\xff\
\x01\x00\x00\x00\x01\x00\x00\x00\x05\xff\xff\xff\x00\x00\x00\x00\
\x07\xff\xff\xff\x00\x00\x00\x00\x01\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\xff\x02\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\x00\x00\x01\x39\x00\x00\x09\xff\x00\xff\
\x02\x55\x00\xaa\x03\x00\x27\x3b\x0d\x00\x40\x60\x08\x00\xff\xff\
\x02\x00\xaa\xaa\x03\x00\x40\x40\x08\x00\xff\xff\x04\x00\x55\x6a\
\x0c\xcd\xd4\xd7\xff\xc6\xcf\xd2\xff\xc1\xcd\xd1\xff\x9a\xaf\xb1\
\xff\x81\x9e\xa3\xff\xa2\xc6\xce\xff\x9b\xc5\xd2\xff\x80\xaa\xc1\
\xff\x8a\xb7\xd8\xff\x66\x94\xbd\xff\x63\x93\xc3\xff\x5a\x89\xc6\
\xff\x64\x95\xd9\xff\x66\x97\xe3\xff\x6a\x9b\xeb\xff\x6e\x9f\xef\
\xff\x70\xa1\xf1\xff\x71\xa4\xf4\xff\x73\xaa\xf5\xff\x77\xaf\xf6\
\xff\x7a\xb4\xf6\xff\x7c\xb7\xf5\xff\x7f\xbb\xf6\xff\x82\xbd\xf5\
\xff\x84\xbd\xf5\xff\x83\xbc\xf4\xff\x81\xb9\xf4\xff\x83\xba\xf7\
\xff\x84\xba\xf9\xff\x81\xb9\xfa\xff\x7e\xb8\xf9\xff\x7c\xb6\xf7\
\xff\x7b\xb5\xf6\xff\x79\xb3\xf4\xff\x78\xb2\xf3\xff\x79\xb1\xf2\
\xff\x7a\xb2\xf3\xff\x7c\xb5\xf3\xff\x80\xb9\xf7\xff\x83\xbb\xfc\
\xff\x82\xb9\xfc\xff\x7e\xb5\xf8\xff\x77\xae\xf3\xff\x72\xa7\xf0\
\xff\x6f\xa3\xef\xff\x6e\xa1\xea\xff\x68\x9c\xe3\xff\x69\x9a\xde\
\xff\x63\x94\xd2\xff\x5d\x8f\xc4\xff\x5c\x8d\xb9\xff\x54\x84\xa8\
\xff\x84\xb3\xcf\xff\x72\xa3\xb9\xff\x9a\xc8\xd9\xff\xa1\xc9\xd5\
\xff\x84\xa5\xae\xff\x7a\x91\x99\xff\xbf\xcf\xd5\xff\xbc\xc2\xc9\
\xff\xc7\xca\xce\xff\x80\x00\xff\x02\x80\x00\xff\x02\x00\x66\x99\
\x05\x00\xaa\xff\x03\x00\x66\x66\x05\x00\xff\xff\x02\x00\x00\x00\
\x04\x00\x00\x00\x01\xff\xff\x00\x01\x17\x2e\x00\x0b\x55\x1c\x00\
\x09\xff\xff\x00\x01\x80\xff\x00\x02\x00\x99\x33\x05\x00\x66\x66\
\x05\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\x00\xaa\xaa\
\x03\x00\x66\x66\x05\x00\x00\x00\x04\x00\x00\x00\x05\xff\x00\x00\
\x01\xff\x00\x00\x01\x4c\x00\x33\x0a\x20\x00\x40\x08\x00\xff\xff\
\x04\x00\xff\xff\x05\x00\x7f\xcc\x0a\x00\xff\xff\x05\x00\x8e\xaa\
\x09\x00\xff\xff\x05\x3f\x8e\xb0\x50\xcf\xd8\xdc\xff\xc4\xd0\xd4\
\xff\xbd\xcd\xd3\xff\xa1\xb7\xbc\xff\x91\xad\xb4\xff\x91\xb5\xbd\
\xff\x9c\xc3\xd1\xff\x73\x9e\xb3\xff\x8f\xbe\xda\xff\x6a\x9b\xc1\
\xff\x60\x92\xc0\xff\x5c\x8f\xc7\xff\x65\x98\xd7\xff\x65\x96\xe0\
\xff\x6a\x9b\xe9\xff\x6d\x9e\xee\xff\x6e\xa0\xf2\xff\x71\xa4\xf4\
\xff\x73\xa9\xf6\xff\x76\xad\xf6\xff\x7a\xb3\xf8\xff\x7c\xb6\xf7\
\xff\x7e\xba\xf6\xff\x82\xbc\xf7\xff\x82\xbd\xf5\xff\x84\xbd\xf5\
\xff\x84\xbd\xf5\xff\x85\xbc\xf9\xff\x84\xbd\xfb\xff\x81\xbb\xfc\
\xff\x7f\xb9\xfa\xff\x7c\xb6\xf7\xff\x79\xb4\xf2\xff\x76\xb1\xef\
\xff\x73\xae\xec\xff\x73\xab\xec\xff\x75\xad\xee\xff\x78\xb0\xf1\
\xff\x7b\xb3\xf4\xff\x7f\xb7\xf8\xff\x80\xb7\xfa\xff\x7d\xb4\xf7\
\xff\x77\xae\xf3\xff\x73\xa8\xf1\xff\x6f\xa4\xed\xff\x6d\xa0\xe9\
\xff\x69\x9d\xe3\xff\x6a\x9c\xde\xff\x62\x96\xd2\xff\x5d\x8f\xc3\
\xff\x5b\x8e\xb9\xff\x56\x88\xac\xff\x82\xb4\xd0\xff\x73\xa4\xba\
\xff\x94\xc0\xd1\xff\x9f\xc5\xd1\xff\x80\x9f\xa8\xff\x8c\xa3\xab\
\xff\xb8\xc8\xcf\xff\xc4\xcc\xd3\xff\xca\xce\xd3\xff\x00\x60\xdf\
\x08\x00\xcc\xff\x05\x00\xb6\xdb\x07\x00\xd5\xff\x06\x00\xaa\xd5\
\x06\x00\xff\xff\x04\x00\xff\xff\x02\x00\x00\x00\x02\x00\x00\x00\
\x01\xff\xff\x00\x01\xff\xff\x00\x01\x33\x66\x00\x05\x80\xff\x00\
\x02\x00\xff\x55\x03\x00\xaa\xaa\x03\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xd5\xaa\x06\x00\xff\xff\x02\x00\x00\x00\
\x04\x00\x00\x00\x01\x17\x00\x2e\x0b\x11\x00\x22\x0f\xe5\xe4\xe8\
\xff\xd0\xd3\xd7\xff\xc0\xc7\xca\xff\xb4\xbd\xc0\xff\xb0\xbb\xbf\
\xff\xac\xb8\xba\xff\xab\xb7\xb9\xff\xaf\xba\xbe\xff\xab\xb5\xbc\
\xff\x9a\xa8\xae\xff\x92\xa5\xaa\xff\x8e\xa4\xa9\xff\x83\x9d\xa4\
\xff\x7f\x9e\xa7\xff\x6f\x93\x9d\xff\x84\xb0\xbd\xff\x6e\x9d\xb2\
\xff\x8e\xc1\xdb\xff\x7e\xb2\xd6\xff\x5c\x91\xbc\xff\x5f\x95\xca\
\xff\x61\x97\xd4\xff\x64\x98\xdf\xff\x69\x9b\xe7\xff\x6d\x9e\xee\
\xff\x6e\xa1\xf1\xff\x70\xa3\xf3\xff\x71\xa7\xf4\xff\x75\xac\xf7\
\xff\x78\xb0\xf7\xff\x79\xb3\xf4\xff\x7d\xb9\xf5\xff\x80\xba\xf5\
\xff\x81\xbc\xf4\xff\x83\xbc\xf4\xff\x84\xbd\xf5\xff\x83\xbc\xf9\
\xff\x84\xbd\xfb\xff\x80\xba\xfb\xff\x7e\xb8\xf9\xff\x7c\xb6\xf7\
\xff\x78\xb2\xf3\xff\x75\xaf\xf0\xff\x72\xac\xed\xff\x72\xaa\xeb\
\xff\x76\xae\xef\xff\x78\xb0\xf1\xff\x7b\xb2\xf5\xff\x7e\xb5\xfa\
\xff\x7f\xb6\xfb\xff\x7c\xb3\xf8\xff\x77\xad\xf4\xff\x73\xa7\xf3\
\xff\x6f\xa3\xef\xff\x6d\x9f\xeb\xff\x6a\x9e\xe5\xff\x69\x9b\xdd\
\xff\x61\x95\xd1\xff\x5e\x90\xc4\xff\x5e\x91\xbc\xff\x61\x95\xb9\
\xff\x89\xbe\xd9\xff\x78\xab\xbf\xff\x89\xb8\xc6\xff\x93\xba\xc8\
\xff\x6e\x8e\x99\xff\x88\xa1\xab\xff\x93\xa6\xad\xff\xab\xb6\xbe\
\xff\xbd\xc5\xcc\xff\xdc\xe2\xe7\xff\xe1\xe8\xeb\xff\xdc\xe5\xe8\
\xff\xdf\xeb\xeb\xff\xe1\xeb\xeb\xff\x9d\xc4\xc4\xb1\x00\x55\x6a\
\x0c\x00\xff\xff\x02\x00\x00\x00\x06\xff\x00\x00\x02\xff\x00\x00\
\x01\x1c\x39\x00\x09\x40\x80\x00\x04\x00\xff\x55\x03\x00\x00\x00\
\x0a\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\xd5\xaa\
\x06\x00\xff\xcc\x05\x00\x66\x66\x05\x12\x00\x24\x0e\xc4\xc3\xc5\
\xff\xb1\xb3\xb4\xff\xb0\xb3\xb7\xff\xa6\xad\xb0\xff\x8d\x98\x9c\
\xff\x70\x7f\x82\xff\x69\x79\x7f\xff\x6a\x7a\x80\xff\x6c\x7c\x82\
\xff\x6c\x7c\x82\xff\x6b\x7d\x84\xff\x69\x7c\x84\xff\x63\x7b\x81\
\xff\x5f\x7b\x82\xff\x64\x81\x8a\xff\x5f\x82\x8c\xff\x5f\x87\x93\
\xff\x74\xa1\xaf\xff\x82\xb4\xc8\xff\x8e\xc4\xdd\xff\x96\xcc\xed\
\xff\x60\x98\xc1\xff\x5e\x95\xc8\xff\x61\x97\xd4\xff\x64\x98\xde\
\xff\x6b\x9e\xe7\xff\x6e\x9f\xed\xff\x6e\xa1\xf1\xff\x70\xa3\xf3\
\xff\x72\xa8\xf5\xff\x74\xab\xf6\xff\x76\xae\xf5\xff\x78\xb2\xf4\
\xff\x79\xb4\xf2\xff\x7e\xb7\xf4\xff\x7e\xb8\xf3\xff\x80\xb9\xf1\
\xff\x81\xba\xf2\xff\x80\xb9\xf6\xff\x7f\xb8\xf6\xff\x7c\xb6\xf8\
\xff\x7b\xb5\xf7\xff\x79\xb3\xf4\xff\x78\xb2\xf3\xff\x77\xb1\xf2\
\xff\x77\xb1\xf2\xff\x79\xb1\xf2\xff\x7e\xb6\xf7\xff\x7e\xb5\xf8\
\xff\x80\xb7\xfc\xff\x81\xb7\xfe\xff\x80\xb5\xfe\xff\x7c\xb1\xfa\
\xff\x77\xac\xf5\xff\x73\xa7\xf4\xff\x70\xa4\xf1\xff\x6d\x9e\xec\
\xff\x69\x9c\xe5\xff\x66\x97\xdb\xff\x61\x95\xd1\xff\x5f\x94\xc7\
\xff\x62\x97\xc2\xff\x73\xa9\xcc\xff\x87\xbc\xd7\xff\x7b\xae\xc2\
\xff\x73\xa4\xb2\xff\x75\x9f\xac\xff\x56\x78\x85\xff\x62\x7f\x88\
\xff\x5e\x75\x7d\xff\x6f\x80\x89\xff\x5b\x68\x70\xff\x6d\x79\x7f\
\xff\x72\x7e\x82\xff\x74\x83\x86\xff\x7f\x8e\x90\xff\x89\x98\x9a\
\xff\x9b\xa7\xa7\xff\xae\xb6\xb6\xff\xb4\xb8\xb9\xff\xc6\xc8\xc9\
\xff\x7e\x7e\x7e\x91\xff\xff\xff\x00\xff\xff\xff\x00\x00\x55\x55\
\x06\x00\x46\x17\x0b\x00\x00\x00\x04\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xd5\xaa\x06\x00\xff\xcc\x05\xbb\xc2\xc5\
\xd0\xb0\xb2\xb3\xff\xb5\xb7\xb8\xff\xaf\xb3\xb4\xff\x92\x99\x9c\
\xff\x71\x7c\x80\xff\x64\x77\x7c\xff\x68\x7e\x83\xff\x6c\x84\x8a\
\xff\x68\x80\x86\xff\x67\x81\x87\xff\x63\x7d\x84\xff\x62\x7b\x85\
\xff\x62\x7d\x87\xff\x5c\x79\x82\xff\x57\x78\x81\xff\x5f\x82\x8c\
\xff\x62\x8b\x94\xff\x75\xa3\xae\xff\x80\xb1\xbf\xff\x95\xca\xde\
\xff\x94\xcc\xe5\xff\x9b\xd4\xf3\xff\x73\xae\xd5\xff\x5b\x95\xc6\
\xff\x62\x98\xd4\xff\x65\x9a\xdd\xff\x6b\x9e\xe7\xff\x6e\x9f\xed\
\xff\x6f\xa3\xf0\xff\x71\xa4\xf4\xff\x72\xa8\xf5\xff\x74\xab\xf6\
\xff\x76\xae\xf5\xff\x78\xb2\xf4\xff\x7b\xb6\xf4\xff\x7e\xb7\xf4\
\xff\x7e\xb8\xf3\xff\x7d\xb7\xf2\xff\x7c\xb6\xf1\xff\x7d\xb6\xf3\
\xff\x7b\xb4\xf2\xff\x78\xb2\xf4\xff\x77\xb1\xf3\xff\x76\xb0\xf1\
\xff\x77\xb1\xf2\xff\x78\xb2\xf3\xff\x79\xb3\xf4\xff\x7c\xb3\xf6\
\xff\x80\xb7\xfa\xff\x80\xb7\xfc\xff\x80\xb7\xfc\xff\x7f\xb4\xfd\
\xff\x7e\xb2\xfe\xff\x7a\xae\xfa\xff\x76\xaa\xf7\xff\x72\xa5\xf5\
\xff\x72\xa5\xf5\xff\x6d\x9e\xec\xff\x6b\x9e\xe7\xff\x63\x94\xd8\
\xff\x60\x94\xd0\xff\x62\x97\xc9\xff\x69\x9f\xc8\xff\x88\xbe\xdf\
\xff\x95\xcd\xe6\xff\x91\xc6\xda\xff\x7d\xb1\xbe\xff\x7c\xa8\xb5\
\xff\x6d\x93\x9f\xff\x61\x81\x8c\xff\x63\x7e\x88\xff\x63\x77\x82\
\xff\x6f\x82\x8a\xff\x6e\x82\x87\xff\x6c\x80\x85\xff\x68\x7c\x81\
\xff\x68\x7d\x7f\xff\x66\x7b\x7c\xff\x6c\x7e\x7f\xff\x7c\x88\x8a\
\xff\xaa\xb2\xb2\xff\xab\xaf\xb0\xff\xac\xae\xaf\xff\xba\xb9\xbb\
\xff\xe7\xe7\xe7\xff\x00\xff\xff\x02\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\xd5\xaa\
\x06\x00\x8e\x71\x09\xcb\xcc\xd0\xff\xb5\xb8\xbc\xff\xb9\xbd\xbe\
\xff\x95\x9c\x9f\xff\x75\x80\x84\xff\x70\x80\x86\xff\x73\x8c\x90\
\xff\x75\x92\x97\xff\x68\x85\x8c\xff\x6e\x8e\x94\xff\x6f\x8f\x95\
\xff\x6b\x8c\x95\xff\x6d\x8b\x96\xff\x6f\x8f\x9a\xff\x6e\x91\x9b\
\xff\x6f\x93\x9d\xff\x72\x9a\xa6\xff\x7c\xa9\xb4\xff\x8d\xbf\xcb\
\xff\x93\xc8\xd5\xff\x93\xcb\xde\xff\x99\xd2\xeb\xff\x8f\xcb\xe8\
\xff\x8f\xcb\xef\xff\x62\x9d\xcb\xff\x61\x98\xd1\xff\x64\x9a\xdb\
\xff\x6a\x9e\xe5\xff\x6e\xa0\xec\xff\x6e\xa2\xef\xff\x70\xa3\xf3\
\xff\x70\xa6\xf3\xff\x73\xaa\xf5\xff\x76\xae\xf5\xff\x79\xb3\xf5\
\xff\x7c\xb7\xf5\xff\x80\xb9\xf6\xff\x80\xba\xf5\xff\x7e\xb8\xf3\
\xff\x7d\xb7\xf2\xff\x7c\xb5\xf2\xff\x7a\xb3\xf1\xff\x78\xb2\xf3\
\xff\x77\xb1\xf2\xff\x77\xb1\xf2\xff\x77\xb2\xf0\xff\x77\xb1\xf2\
\xff\x77\xb1\xf2\xff\x79\xb1\xf2\xff\x7a\xb1\xf4\xff\x7a\xb1\xf6\
\xff\x79\xaf\xf6\xff\x78\xad\xf6\xff\x79\xad\xf9\xff\x77\xab\xf8\
\xff\x74\xa8\xf5\xff\x71\xa4\xf4\xff\x70\xa3\xf3\xff\x6b\x9c\xea\
\xff\x6c\x9f\xe8\xff\x63\x95\xd7\xff\x61\x96\xcf\xff\x64\x99\xcb\
\xff\x6a\xa3\xca\xff\x95\xce\xee\xff\x96\xcf\xe8\xff\x95\xcd\xe0\
\xff\x87\xbc\xc9\xff\x82\xb1\xbf\xff\x80\xaa\xb7\xff\x6a\x8c\x99\
\xff\x70\x8e\x99\xff\x63\x7d\x89\xff\x67\x80\x8a\xff\x62\x7c\x82\
\xff\x65\x82\x87\xff\x68\x85\x8a\xff\x67\x84\x88\xff\x6b\x87\x88\
\xff\x6b\x82\x84\xff\x76\x87\x8a\xff\x77\x83\x85\xff\x9d\xa4\xa7\
\xff\xbe\xc2\xc3\xff\xb4\xb6\xb7\xff\xc8\xc7\xc9\xff\x7e\x8c\x8c\
\x91\x00\x24\x24\x0e\x00\x00\x00\x04\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\xff\x04\x00\x5b\x49\x0e\xb2\xb5\xb9\
\xff\xb5\xb8\xbc\xff\xb7\xbe\xc1\xff\x85\x91\x93\xff\x7c\x8b\x8e\
\xff\x82\x98\x9d\xff\x74\x92\x97\xff\x7d\x9f\xa5\xff\x7e\xa2\xa8\
\xff\x76\x9c\xa1\xff\x78\x9d\xa5\xff\x79\xa0\xa8\xff\x7e\xa2\xac\
\xff\x7f\xa5\xb1\xff\x85\xad\xb9\xff\x8a\xb4\xc0\xff\x89\xb5\xc2\
\xff\x8c\xbc\xc8\xff\x93\xc7\xd3\xff\x94\xcb\xd8\xff\x8e\xc9\xd9\
\xff\x95\xd1\xe7\xff\x8f\xcc\xe6\xff\x98\xd5\xf7\xff\x64\x9f\xcc\
\xff\x60\x98\xcf\xff\x63\x99\xd8\xff\x69\x9d\xe3\xff\x6d\xa0\xe9\
\xff\x6d\xa1\xed\xff\x6e\xa2\xef\xff\x6f\xa5\xf2\xff\x71\xa8\xf3\
\xff\x74\xac\xf3\xff\x77\xb1\xf3\xff\x7c\xb7\xf5\xff\x81\xba\xf7\
\xff\x81\xba\xf7\xff\x7f\xb9\xf4\xff\x7d\xb7\xf2\xff\x7c\xb5\xf2\
\xff\x7a\xb3\xf1\xff\x7a\xb5\xf3\xff\x7b\xb6\xf4\xff\x7c\xb7\xf5\
\xff\x7c\xb8\xf4\xff\x79\xb4\xf2\xff\x75\xb0\xee\xff\x75\xad\xee\
\xff\x76\xad\xf0\xff\x76\xad\xf2\xff\x75\xab\xf2\xff\x75\xaa\xf3\
\xff\x75\xa9\xf5\xff\x74\xa8\xf5\xff\x71\xa5\xf2\xff\x70\xa3\xf3\
\xff\x6d\xa0\xf0\xff\x6a\x9c\xe8\xff\x6e\xa2\xe9\xff\x64\x97\xd6\
\xff\x62\x98\xcf\xff\x65\x9b\xca\xff\x69\xa3\xc7\xff\x99\xd5\xf3\
\xff\x9c\xd8\xee\xff\x98\xd3\xe3\xff\x98\xcf\xdc\xff\x95\xc6\xd4\
\xff\x94\xc1\xce\xff\x89\xaf\xbb\xff\x86\xa8\xb5\xff\x7c\x9c\xa9\
\xff\x7b\x9b\xa6\xff\x70\x91\x9a\xff\x77\x99\x9f\xff\x75\x97\x9d\
\xff\x72\x94\x9a\xff\x77\x97\x9c\xff\x70\x8d\x91\xff\x7a\x8e\x93\
\xff\x79\x88\x8b\xff\x88\x91\x94\xff\xb0\xb5\xb8\xff\xab\xaf\xb0\
\xff\xb2\xb4\xb5\xff\xe8\xe7\xe9\xff\x80\x00\xff\x02\x00\x00\x00\
\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xff\
\x04\x00\x6a\x55\x0c\xab\xaf\xb0\xff\xba\xbf\xc2\xff\xc3\xca\xcd\
\xff\x87\x93\x97\xff\x85\x98\x9d\xff\x96\xb0\xb6\xff\x7e\xa0\xa6\
\xff\x83\xab\xb0\xff\x7e\xa8\xaf\xff\x87\xb1\xb8\xff\x85\xb1\xb8\
\xff\x89\xb4\xbd\xff\x8a\xb4\xc0\xff\x88\xb2\xbf\xff\x8c\xb8\xc5\
\xff\x92\xbf\xcd\xff\x8c\xbb\xc9\xff\x90\xc1\xcf\xff\x92\xc7\xd4\
\xff\x8d\xc6\xd5\xff\x95\xcf\xe2\xff\x90\xcc\xe2\xff\x97\xd4\xee\
\xff\x8e\xcb\xeb\xff\x61\x9b\xc5\xff\x5e\x97\xcb\xff\x62\x99\xd6\
\xff\x68\x9d\xe0\xff\x6c\xa0\xe7\xff\x6c\xa0\xec\xff\x6d\xa1\xed\
\xff\x6d\xa4\xef\xff\x6f\xa6\xf1\xff\x70\xa8\xef\xff\x75\xaf\xf1\
\xff\x7b\xb5\xf6\xff\x81\xba\xf8\xff\x81\xba\xf7\xff\x7f\xb8\xf5\
\xff\x7d\xb7\xf2\xff\x7b\xb4\xf1\xff\x79\xb2\xef\xff\x7b\xb7\xf3\
\xff\x7f\xbb\xf7\xff\x82\xbe\xfa\xff\x81\xbd\xf8\xff\x7c\xb8\xf4\
\xff\x76\xb2\xee\xff\x74\xad\xeb\xff\x78\xb0\xf1\xff\x77\xae\xf1\
\xff\x74\xab\xf0\xff\x75\xab\xf2\xff\x75\xaa\xf3\xff\x73\xa7\xf3\
\xff\x71\xa5\xf2\xff\x6f\xa3\xf0\xff\x6b\x9f\xeb\xff\x69\x9c\xe5\
\xff\x70\xa5\xe8\xff\x66\x99\xd7\xff\x65\x9b\xd0\xff\x65\x9c\xc9\
\xff\x66\xa0\xc4\xff\x9a\xd6\xf3\xff\x96\xd2\xe8\xff\x8d\xc8\xdb\
\xff\x98\xd3\xe2\xff\x98\xcd\xda\xff\x95\xc7\xd3\xff\x9a\xc6\xd3\
\xff\x8e\xb5\xc3\xff\x8d\xb2\xc0\xff\x89\xaf\xbb\xff\x7a\xa0\xac\
\xff\x7d\xa6\xaf\xff\x77\xa0\xa9\xff\x76\x9d\xa5\xff\x7f\xa5\xaa\
\xff\x7b\x9e\xa2\xff\x85\x9d\xa3\xff\x8c\x9f\xa4\xff\x79\x85\x89\
\xff\xa9\xb0\xb3\xff\xb9\xbc\xc0\xff\xb2\xb4\xb5\xff\xdd\xdc\xde\
\xff\x33\x00\x66\x05\x00\x00\x00\x06\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xbf\x40\x04\x20\x3c\x3c\x30\xac\xb1\xb0\
\xff\xc4\xc9\xca\xff\xb0\xb9\xbc\xff\x85\x94\x97\xff\x8e\xa1\xa8\
\xff\x8c\xa8\xaf\xff\x86\xab\xb3\xff\x82\xae\xb5\xff\x85\xb3\xbb\
\xff\x83\xb2\xba\xff\x87\xb6\xbe\xff\x8f\xbe\xc6\xff\x92\xbf\xcc\
\xff\x91\xbe\xcc\xff\x8d\xbb\xcc\xff\x8c\xbd\xcd\xff\x8c\xbd\xcd\
\xff\x8a\xbd\xcd\xff\x89\xbf\xd0\xff\x8b\xc3\xd6\xff\x90\xc9\xde\
\xff\x95\xd1\xe7\xff\x8e\xcb\xe5\xff\x73\xb0\xd0\xff\x5a\x95\xbd\
\xff\x5c\x96\xc7\xff\x5e\x97\xcf\xff\x64\x9a\xd9\xff\x6b\x9f\xe5\
\xff\x6c\xa1\xea\xff\x6c\xa0\xec\xff\x6c\xa3\xee\xff\x6e\xa5\xf0\
\xff\x6f\xa6\xef\xff\x75\xae\xf3\xff\x7d\xb7\xf8\xff\x83\xbc\xfa\
\xff\x80\xb9\xf6\xff\x7d\xb6\xf3\xff\x7d\xb6\xf3\xff\x81\xba\xf7\
\xff\x79\xb2\xef\xff\x81\xbd\xf8\xff\x85\xc2\xfa\xff\x83\xc0\xf8\
\xff\x85\xc2\xfa\xff\x81\xbd\xf8\xff\x78\xb4\xf0\xff\x78\xb1\xee\
\xff\x7a\xb3\xf1\xff\x78\xb0\xf1\xff\x78\xaf\xf2\xff\x77\xae\xf3\
\xff\x76\xac\xf3\xff\x75\xaa\xf3\xff\x71\xa5\xf1\xff\x6e\xa2\xee\
\xff\x6b\xa0\xe9\xff\x63\x97\xdd\xff\x63\x99\xd8\xff\x6b\xa0\xd9\
\xff\x61\x98\xcb\xff\x55\x8c\xb7\xff\x63\x9d\xc1\xff\x80\xbc\xd9\
\xff\x8e\xcd\xe2\xff\x8d\xcb\xdd\xff\x8c\xc9\xd7\xff\x8f\xc8\xd7\
\xff\x94\xc9\xd6\xff\x99\xc8\xd6\xff\x9a\xc5\xd4\xff\x99\xc2\xd1\
\xff\x93\xbc\xcb\xff\x8d\xba\xc8\xff\x85\xb2\xbf\xff\x7e\xab\xb8\
\xff\x7d\xaa\xb5\xff\x7f\xaa\xb3\xff\x7f\xa4\xac\xff\x7e\x9b\xa2\
\xff\x81\x97\x9d\xff\x83\x91\x97\xff\x8c\x95\x99\xff\xc8\xcd\xd0\
\xff\xb1\xb2\xb6\xff\xcf\xce\xd0\xff\x80\x00\xff\x02\xff\x00\x00\
\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\x55\
\x03\x00\x4c\x19\x0a\xbc\xc1\xc0\xff\xbb\xc0\xc1\xff\xb7\xc0\xc3\
\xff\x93\xa2\xa5\xff\x91\xa7\xad\xff\x8f\xae\xb7\xff\x8f\xb6\xbf\
\xff\x88\xb7\xbf\xff\x8a\xbb\xc5\xff\x89\xbd\xc4\xff\x8c\xbf\xc9\
\xff\x8f\xc2\xcc\xff\x8b\xbc\xca\xff\x84\xb4\xc6\xff\x80\xaf\xc4\
\xff\x7f\xb1\xc5\xff\x81\xb4\xc8\xff\x83\xb8\xcd\xff\x8c\xc3\xd8\
\xff\x92\xcb\xe1\xff\x8b\xc4\xdd\xff\x7a\xb5\xcf\xff\x67\xa3\xc1\
\xff\x5c\x96\xb9\xff\x54\x8e\xb8\xff\x63\x9d\xce\xff\x63\x9b\xd2\
\xff\x64\x9b\xd8\xff\x66\x9b\xde\xff\x65\x9b\xe2\xff\x66\x9a\xe6\
\xff\x67\x9e\xe9\xff\x6c\xa3\xee\xff\x71\xa8\xf1\xff\x73\xac\xf1\
\xff\x77\xb1\xf3\xff\x7c\xb4\xf5\xff\x7e\xb7\xf5\xff\x7e\xb7\xf5\
\xff\x7f\xb8\xf5\xff\x80\xb9\xf6\xff\x7d\xb6\xf3\xff\x84\xc0\xfb\
\xff\x85\xc2\xfa\xff\x83\xc0\xf8\xff\x85\xc2\xfa\xff\x83\xbf\xfa\
\xff\x7d\xb9\xf4\xff\x7d\xb6\xf3\xff\x80\xb9\xf6\xff\x80\xb9\xf7\
\xff\x81\xb9\xfa\xff\x80\xb7\xfa\xff\x7d\xb4\xf9\xff\x78\xae\xf5\
\xff\x71\xa6\xef\xff\x6d\xa3\xea\xff\x67\x9e\xe3\xff\x64\x9a\xdb\
\xff\x66\x9e\xd9\xff\x6a\xa0\xd7\xff\x63\x9b\xcc\xff\x5b\x92\xbd\
\xff\x5e\x98\xbc\xff\x66\xa3\xc3\xff\x81\xc1\xda\xff\x85\xc6\xdb\
\xff\x88\xc7\xdb\xff\x86\xc1\xd4\xff\x81\xb9\xcc\xff\x84\xb6\xca\
\xff\x8c\xbb\xd0\xff\x96\xc4\xd6\xff\x90\xbe\xd0\xff\x8f\xbf\xd1\
\xff\x8c\xbf\xcf\xff\x88\xbc\xc9\xff\x84\xb6\xc2\xff\x82\xb0\xbb\
\xff\x82\xab\xb4\xff\x88\xa9\xb2\xff\x87\xa1\xa8\xff\x88\x9a\xa1\
\xff\x8b\x96\x9a\xff\xc6\xca\xcf\xff\xb4\xb5\xb9\xff\xe0\xdf\xe1\
\xff\x80\x00\xff\x02\x40\x00\x00\x08\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\x55\x03\x00\xff\x55\x03\xd7\xdc\xdb\
\xff\xb9\xbe\xbf\xff\xc2\xce\xd0\xff\xa6\xb6\xbc\xff\x9a\xb1\xb9\
\xff\x9a\xbb\xc4\xff\x9b\xc5\xd1\xff\x96\xc7\xd1\xff\x94\xc8\xd4\
\xff\x8f\xc6\xcf\xff\x8e\xc2\xce\xff\x8b\xbf\xcc\xff\x88\xba\xcc\
\xff\x85\xb7\xcd\xff\x87\xb9\xd0\xff\x8a\xbd\xd7\xff\x8b\xc1\xda\
\xff\x8b\xc0\xdb\xff\x7f\xb5\xd3\xff\x6f\xa6\xc5\xff\x61\x97\xb8\
\xff\x56\x8e\xb1\xff\x54\x8d\xb4\xff\x5f\x97\xc0\xff\x68\xa1\xce\
\xff\x69\xa2\xd5\xff\x66\x9e\xd5\xff\x64\x9b\xd8\xff\x65\x9a\xdd\
\xff\x63\x99\xe0\xff\x64\x98\xe4\xff\x68\x9f\xea\xff\x6d\xa4\xef\
\xff\x74\xab\xf4\xff\x74\xac\xf3\xff\x76\xb0\xf2\xff\x7b\xb3\xf4\
\xff\x80\xb9\xf7\xff\x83\xbc\xfa\xff\x84\xbd\xfb\xff\x85\xbe\xfb\
\xff\x82\xbb\xf8\xff\x85\xc1\xfc\xff\x83\xbf\xfa\xff\x82\xbe\xf9\
\xff\x86\xc2\xfd\xff\x85\xc1\xfc\xff\x80\xbc\xf8\xff\x82\xbb\xf8\
\xff\x83\xbc\xf9\xff\x82\xbb\xf9\xff\x81\xb9\xfa\xff\x7f\xb6\xf9\
\xff\x7b\xb2\xf7\xff\x75\xab\xf2\xff\x6f\xa5\xec\xff\x6b\xa2\xe7\
\xff\x63\x9b\xdc\xff\x66\x9e\xd9\xff\x65\x9e\xd5\xff\x66\x9d\xd0\
\xff\x65\x9d\xce\xff\x63\x9c\xc9\xff\x61\x9c\xc4\xff\x5a\x98\xbc\
\xff\x53\x92\xb4\xff\x67\xa7\xc5\xff\x80\xbf\xdb\xff\x8a\xc7\xe1\
\xff\x88\xc1\xda\xff\x83\xb9\xd2\xff\x87\xb8\xd2\xff\x8b\xbb\xd3\
\xff\x8d\xbe\xd4\xff\x8a\xbc\xd2\xff\x8a\xbf\xd3\xff\x8c\xc2\xd3\
\xff\x8b\xbf\xcf\xff\x89\xba\xc8\xff\x8b\xb7\xc4\xff\x94\xb8\xc2\
\xff\x93\xb0\xb9\xff\x99\xac\xb3\xff\x98\xa4\xa8\xff\xc5\xcb\xd0\
\xff\xbe\xbf\xc3\xff\x1c\x00\x39\x09\x80\x00\x00\x04\x66\x00\x00\
\x05\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x99\x33\
\x05\x00\xbf\x40\x04\x20\x3c\x3c\x30\xb3\xb8\xb9\xff\xbb\xc7\xc9\
\xff\xa7\xb7\xbd\xff\x93\xad\xb4\xff\x98\xbb\xc5\xff\x97\xc3\xd0\
\xff\x8d\xc1\xce\xff\x8e\xc5\xd2\xff\x8c\xc6\xd2\xff\x8c\xc3\xd0\
\xff\x8a\xc0\xd1\xff\x8c\xc1\xd6\xff\x90\xc3\xdd\xff\x8e\xc2\xe0\
\xff\x88\xbe\xdd\xff\x84\xb9\xda\xff\x73\xa9\xcc\xff\x63\x9a\xbf\
\xff\x5a\x91\xb8\xff\x5d\x95\xbe\xff\x67\x9e\xc9\xff\x6a\xa0\xcf\
\xff\x69\xa1\xd2\xff\x6a\xa1\xd4\xff\x67\xa0\xd4\xff\x64\x9d\xd4\
\xff\x63\x9b\xd6\xff\x65\x9a\xdd\xff\x66\x9c\xe3\xff\x67\x9e\xe7\
\xff\x6c\xa3\xee\xff\x71\xa8\xf3\xff\x74\xab\xf4\xff\x76\xae\xf5\
\xff\x79\xb3\xf5\xff\x7e\xb5\xf8\xff\x82\xba\xfb\xff\x84\xbd\xfb\
\xff\x83\xbc\xfa\xff\x81\xba\xf8\xff\x7f\xb8\xf5\xff\x81\xbd\xf9\
\xff\x7f\xbb\xf7\xff\x7e\xba\xf6\xff\x83\xbf\xfb\xff\x82\xbe\xfa\
\xff\x7d\xb8\xf6\xff\x80\xb9\xf7\xff\x7b\xb4\xf2\xff\x7a\xb2\xf3\
\xff\x76\xae\xef\xff\x73\xaa\xef\xff\x70\xa7\xec\xff\x6d\xa3\xea\
\xff\x69\xa0\xe5\xff\x66\x9d\xe0\xff\x62\x9b\xd8\xff\x62\x9b\xd2\
\xff\x5f\x9a\xcc\xff\x60\x97\xca\xff\x65\x9d\xce\xff\x6c\xa4\xd5\
\xff\x6e\xa6\xd5\xff\x68\xa3\xd0\xff\x68\xa6\xcf\xff\x5f\x9e\xc4\
\xff\x5a\x9a\xbd\xff\x6a\xa7\xc9\xff\x82\xbd\xdd\xff\x91\xc7\xe8\
\xff\x8f\xc2\xe2\xff\x86\xb8\xd6\xff\x8e\xc3\xde\xff\x8c\xc2\xdb\
\xff\x86\xbf\xd5\xff\x85\xbe\xd3\xff\x8a\xc2\xd5\xff\x91\xc5\xd5\
\xff\x93\xc2\xd0\xff\x94\xbc\xc8\xff\x9a\xb9\xc2\xff\x9f\xb7\xbd\
\xff\xa6\xb4\xba\xff\xbd\xc4\xc7\xff\xc9\xcb\xcc\xff\x00\x00\x00\
\x01\xaa\x00\x00\x03\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\x33\x66\x00\x05\x00\xff\x80\x02\x00\x80\x80\
\x04\xbb\xc0\xc1\xff\xb2\xbe\xc0\xff\x9e\xae\xb4\xff\x8e\xa7\xb1\
\xff\x97\xb9\xc6\xff\x93\xc0\xce\xff\x89\xbd\xcd\xff\x8d\xc6\xd5\
\xff\x89\xc4\xd4\xff\x8c\xc7\xd7\xff\x8f\xc8\xdd\xff\x8c\xc3\xde\
\xff\x84\xb9\xda\xff\x78\xad\xd2\xff\x68\x9e\xc7\xff\x60\x95\xc0\
\xff\x61\x98\xc5\xff\x63\x9c\xc9\xff\x6e\xa6\xd5\xff\x7b\xb3\xe4\
\xff\x7e\xb5\xe8\xff\x75\xab\xe2\xff\x69\xa0\xd9\xff\x65\x9d\xd4\
\xff\x60\x98\xcf\xff\x5e\x97\xce\xff\x61\x9a\xd2\xff\x64\x9d\xdb\
\xff\x69\xa0\xe5\xff\x6a\xa1\xea\xff\x6d\xa4\xef\xff\x70\xa7\xf2\
\xff\x70\xa7\xf0\xff\x77\xaf\xf6\xff\x7c\xb6\xf8\xff\x7c\xb6\xf8\
\xff\x7b\xb5\xf6\xff\x7a\xb3\xf1\xff\x76\xaf\xed\xff\x71\xaa\xe8\
\xff\x74\xac\xed\xff\x76\xb0\xf1\xff\x76\xb0\xf1\xff\x77\xb1\xf2\
\xff\x7c\xb6\xf7\xff\x78\xb2\xf3\xff\x73\xad\xee\xff\x77\xaf\xf0\
\xff\x75\xad\xee\xff\x74\xab\xee\xff\x72\xa9\xee\xff\x70\xa7\xec\
\xff\x6e\xa4\xeb\xff\x6a\x9f\xe8\xff\x66\x9c\xe3\xff\x65\x9b\xdc\
\xff\x62\x99\xd6\xff\x5c\x95\xcc\xff\x58\x93\xc5\xff\x5d\x93\xc8\
\xff\x63\x99\xce\xff\x68\x9e\xd3\xff\x6f\xa5\xda\xff\x72\xab\xdf\
\xff\x72\xad\xdf\xff\x76\xb1\xe3\xff\x75\xb0\xe1\xff\x6e\xa9\xd6\
\xff\x67\xa1\xcb\xff\x6e\xa5\xcc\xff\x7f\xb4\xd9\xff\x8e\xc2\xe6\
\xff\x8c\xc2\xe3\xff\x92\xca\xe7\xff\x90\xcb\xe5\xff\x8b\xc4\xdd\
\xff\x8d\xc6\xdb\xff\x91\xc9\xda\xff\x93\xc6\xd6\xff\x92\xbc\xc9\
\xff\x9b\xbb\xc6\xff\x98\xaf\xb7\xff\xa5\xb5\xbb\xff\xac\xb5\xb8\
\xff\xd5\xd9\xda\xff\x00\x00\x00\x03\xbf\x40\x00\x04\xff\x80\x00\
\x02\xff\xff\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\x40\x80\x00\
\x04\x80\xff\x00\x02\x00\xaa\xaa\x03\xd4\xd8\xd9\xff\xb6\xbf\xc2\
\xff\xa1\xb1\xb7\xff\x8d\xa6\xb0\xff\x94\xb7\xc4\xff\x92\xbf\xcd\
\xff\x8f\xc3\xd3\xff\x90\xcb\xdb\xff\x8c\xc7\xda\xff\x8d\xc8\xdc\
\xff\x86\xc1\xdb\xff\x79\xb2\xd2\xff\x68\x9f\xc6\xff\x5d\x94\xbf\
\xff\x5d\x95\xc4\xff\x61\x98\xcb\xff\x70\xa6\xdb\xff\x71\xa9\xe0\
\xff\x74\xad\xe4\xff\x78\xb1\xe9\xff\x75\xad\xe8\xff\x6d\xa3\xe2\
\xff\x68\x9e\xdd\xff\x67\x9e\xdb\xff\x5d\x96\xce\xff\x5c\x95\xcc\
\xff\x5f\x98\xcf\xff\x63\x9c\xda\xff\x68\x9f\xe2\xff\x68\x9f\xe8\
\xff\x6b\xa2\xed\xff\x6d\xa4\xef\xff\x75\xac\xf5\xff\x7c\xb4\xfb\
\xff\x80\xb9\xfe\xff\x7b\xb5\xf7\xff\x75\xaf\xf0\xff\x72\xaa\xeb\
\xff\x6b\xa3\xe4\xff\x63\x9b\xdc\xff\x69\xa0\xe3\xff\x6b\xa4\xe9\
\xff\x6e\xa7\xec\xff\x70\xa9\xee\xff\x73\xad\xef\xff\x6d\xa7\xe9\
\xff\x68\xa2\xe4\xff\x6e\xa5\xea\xff\x75\xac\xf1\xff\x75\xac\xf1\
\xff\x76\xac\xf3\xff\x77\xad\xf4\xff\x75\xaa\xf3\xff\x71\xa6\xef\
\xff\x6b\xa1\xe8\xff\x68\x9d\xe0\xff\x63\x9a\xd7\xff\x5c\x95\xcc\
\xff\x5a\x94\xc8\xff\x5f\x97\xce\xff\x62\x97\xd0\xff\x60\x95\xce\
\xff\x64\x98\xd4\xff\x6b\xa1\xde\xff\x70\xa7\xe4\xff\x6f\xa9\xe4\
\xff\x71\xac\xe4\xff\x76\xaf\xe6\xff\x7a\xb1\xe4\xff\x78\xae\xdd\
\xff\x71\xa6\xd1\xff\x6b\xa1\xca\xff\x73\xaa\xcf\xff\x81\xbc\xdc\
\xff\x8e\xca\xe8\xff\x90\xcb\xe5\xff\x8e\xc7\xe0\xff\x90\xc8\xdb\
\xff\x92\xc5\xd5\xff\x96\xc0\xcd\xff\x98\xbb\xc5\xff\x8d\xa4\xac\
\xff\x9e\xae\xb4\xff\xab\xb4\xb7\xff\xe7\xec\xeb\xff\xff\xff\x00\
\x01\xff\x80\x00\x02\x60\x20\x00\x08\xff\xff\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\x00\x01\x40\x80\x00\x04\x00\x00\x00\
\x01\xbb\xc8\xcb\xd0\xbb\xc4\xc7\xff\xa4\xb4\xba\xff\x85\x9f\xa6\
\xff\x84\xa8\xb2\xff\x87\xb4\xc2\xff\x8c\xc0\xd1\xff\x87\xc1\xd4\
\xff\x8e\xc9\xdd\xff\x81\xbd\xd5\xff\x6f\xab\xc9\xff\x61\x9a\xc1\
\xff\x5a\x93\xc0\xff\x5e\x97\xca\xff\x6a\xa2\xd9\xff\x70\xa8\xe3\
\xff\x6a\xa2\xdd\xff\x6b\xa4\xe1\xff\x6d\xa6\xe4\xff\x6f\xa8\xe6\
\xff\x70\xa8\xe9\xff\x6c\xa5\xea\xff\x65\x9e\xe3\xff\x60\x98\xd9\
\xff\x64\x9c\xd7\xff\x60\x99\xd0\xff\x60\x99\xd0\xff\x62\x9b\xd9\
\xff\x68\x9f\xe2\xff\x6a\xa1\xea\xff\x6e\xa5\xf0\xff\x72\xa9\xf4\
\xff\x7e\xb5\xfe\xff\x7d\xb8\xfe\xff\x7c\xb7\xfc\xff\x77\xb1\xf3\
\xff\x73\xad\xee\xff\x72\xaa\xeb\xff\x6c\xa4\xe5\xff\x62\x99\xde\
\xff\x65\x9c\xe1\xff\x69\xa1\xe8\xff\x6c\xa4\xeb\xff\x6d\xa5\xec\
\xff\x6e\xa7\xec\xff\x65\xa0\xe5\xff\x62\x9d\xe2\xff\x6a\xa2\xe9\
\xff\x72\xaa\xf1\xff\x72\xaa\xf1\xff\x72\xa9\xf2\xff\x75\xaa\xf3\
\xff\x76\xaa\xf6\xff\x74\xab\xf6\xff\x71\xa9\xf0\xff\x71\xa8\xeb\
\xff\x68\xa1\xde\xff\x64\xa0\xd6\xff\x64\x9d\xd4\xff\x67\x9d\xd9\
\xff\x65\x9b\xd8\xff\x63\x96\xd5\xff\x62\x94\xd6\xff\x65\x98\xde\
\xff\x64\x98\xde\xff\x66\x9d\xe0\xff\x6b\xa3\xe4\xff\x6f\xa5\xe4\
\xff\x6e\xa4\xe0\xff\x6e\xa4\xdb\xff\x6e\xa3\xd6\xff\x6f\xa5\xd4\
\xff\x61\x98\xc3\xff\x69\xa2\xc9\xff\x77\xb4\xd6\xff\x88\xc4\xe2\
\xff\x8f\xc8\xe2\xff\x8c\xc3\xd8\xff\x8d\xbd\xcf\xff\x93\xbd\xca\
\xff\x8d\xb0\xba\xff\x89\xa1\xa7\xff\xa0\xb1\xb4\xff\xc1\xca\xcd\
\xff\x00\xb6\x92\x07\x55\xff\x00\x03\xbf\xff\x00\x04\x80\x2a\x00\
\x06\xff\xff\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\x00\
\x01\x33\x66\x00\x05\x00\x00\x00\x01\x00\xff\xff\x02\xcb\xd2\xd5\
\xff\xb2\xc1\xc4\xff\x89\xa1\xa7\xff\x80\xa1\xaa\xff\x87\xb1\xbd\
\xff\x8f\xc0\xce\xff\x89\xbd\xce\xff\x89\xc2\xd8\xff\x77\xaf\xcc\
\xff\x61\x99\xbc\xff\x5c\x93\xbe\xff\x64\x9b\xce\xff\x6a\xa1\xda\
\xff\x66\x9d\xda\xff\x5e\x97\xd5\xff\x63\x9b\xdc\xff\x67\xa2\xe0\
\xff\x69\xa4\xe2\xff\x65\x9f\xe0\xff\x64\x9e\xe0\xff\x67\xa2\xe8\
\xff\x69\xa4\xea\xff\x6d\xa5\xe6\xff\x6a\xa3\xe0\xff\x66\x9f\xd7\
\xff\x63\x9c\xd4\xff\x63\x9c\xda\xff\x67\xa1\xe3\xff\x6e\xa6\xed\
\xff\x75\xac\xf7\xff\x7b\xb2\xfd\xff\x7c\xb5\xff\xff\x7c\xb6\xfe\
\xff\x76\xb1\xf7\xff\x70\xa9\xee\xff\x70\xaa\xec\xff\x74\xab\xee\
\xff\x6f\xa6\xe9\xff\x66\x9d\xe2\xff\x67\x9e\xe3\xff\x6b\xa3\xea\
\xff\x6d\xa6\xeb\xff\x6e\xa7\xec\xff\x6b\xa6\xeb\xff\x64\x9f\xe4\
\xff\x62\x9d\xe3\xff\x6b\xa6\xec\xff\x6e\xa8\xf0\xff\x6b\xa5\xed\
\xff\x68\xa2\xea\xff\x6a\xa1\xea\xff\x6d\xa4\xed\xff\x71\xa8\xf1\
\xff\x76\xae\xf5\xff\x79\xb3\xf5\xff\x6f\xaa\xe8\xff\x6c\xa8\xe3\
\xff\x6b\xa5\xe0\xff\x6d\xa3\xe2\xff\x6b\xa0\xe3\xff\x6c\x9f\xe5\
\xff\x6a\x9c\xe4\xff\x68\x9a\xe6\xff\x66\x9a\xe6\xff\x64\x99\xe2\
\xff\x61\x97\xde\xff\x64\x98\xde\xff\x66\x9a\xdc\xff\x66\x9c\xd9\
\xff\x63\x99\xd5\xff\x62\x98\xcf\xff\x66\x9d\xd0\xff\x5b\x93\xc2\
\xff\x64\x9f\xc7\xff\x7f\xba\xda\xff\x90\xc7\xe2\xff\x8c\xbf\xd3\
\xff\x86\xb3\xc1\xff\x8c\xb2\xbe\xff\x81\xa0\xa9\xff\x89\x9f\xa5\
\xff\xa8\xb7\xba\xff\xda\xe1\xe4\xff\x00\xff\xff\x04\x80\xaa\x00\
\x06\x6d\x92\x00\x07\xff\xff\x00\x04\xff\xff\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\x80\x02\x80\xff\x00\x02\x80\xff\x00\
\x02\x00\x00\x00\x01\x00\x55\x66\x0f\xbd\xc9\xcb\xff\x96\xa9\xae\
\xff\x7c\x99\x9e\xff\x89\xad\xb5\xff\x85\xaf\xbb\xff\x89\xb6\xc4\
\xff\x88\xb9\xcf\xff\x6d\x9e\xbe\xff\x63\x96\xbe\xff\x6b\x9e\xd0\
\xff\x69\x9b\xd5\xff\x65\x99\xdb\xff\x67\x9b\xe1\xff\x61\x98\xdb\
\xff\x67\x9f\xe0\xff\x6a\xa3\xe1\xff\x6e\xa7\xe4\xff\x73\xac\xe9\
\xff\x75\xb0\xee\xff\x73\xb0\xf0\xff\x72\xae\xf0\xff\x74\xac\xed\
\xff\x6f\xa8\xe6\xff\x6f\xa6\xe3\xff\x6f\xa6\xe3\xff\x6d\xa5\xe6\
\xff\x6f\xa9\xeb\xff\x75\xad\xf4\xff\x7a\xb4\xfc\xff\x7d\xb6\xff\
\xff\x78\xb2\xfd\xff\x76\xb0\xfb\xff\x71\xac\xf4\xff\x70\xaa\xf2\
\xff\x6f\xaa\xf0\xff\x72\xaa\xf1\xff\x72\xab\xf0\xff\x75\xac\xf1\
\xff\x6d\xa4\xe7\xff\x6e\xa8\xe9\xff\x6f\xac\xec\xff\x70\xad\xed\
\xff\x6a\xa7\xe9\xff\x65\xa2\xe4\xff\x66\xa3\xe7\xff\x68\xa6\xec\
\xff\x6b\xa9\xef\xff\x6b\xa7\xed\xff\x68\xa4\xea\xff\x67\xa2\xe8\
\xff\x65\xa0\xe6\xff\x65\xa0\xe6\xff\x66\xa1\xe6\xff\x65\xa0\xe5\
\xff\x66\xa2\xe4\xff\x66\xa3\xe5\xff\x6a\xa6\xe8\xff\x6f\xa7\xee\
\xff\x73\xa8\xf1\xff\x73\xa6\xef\xff\x70\xa2\xee\xff\x6b\x9f\xec\
\xff\x69\x9d\xea\xff\x68\x9c\xe9\xff\x66\x9a\xe6\xff\x65\x9a\xe3\
\xff\x66\x9a\xe0\xff\x66\x9b\xde\xff\x64\x9a\xdb\xff\x62\x98\xd9\
\xff\x5e\x95\xd2\xff\x63\x9c\xd3\xff\x55\x8d\xbc\xff\x74\xaa\xcd\
\xff\x87\xbb\xd2\xff\x82\xb1\xbf\xff\x91\xbb\xc2\xff\x84\xa4\xa9\
\xff\x7e\x97\x9b\xff\x94\xa4\xaa\xff\xbb\xc6\xca\xff\x00\x99\xff\
\x05\x00\x00\x00\x03\x40\x80\x00\x04\xff\xff\x00\x04\x6a\x55\x00\
\x0c\xff\xff\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\x55\
\x03\x80\xff\x00\x02\x80\xff\x00\x02\xff\xff\x00\x01\x00\xb6\x92\
\x07\xda\xe3\xe6\xff\xa2\xb1\xb4\xff\x8e\xa4\xa9\xff\x82\xa0\xa5\
\xff\x89\xae\xb6\xff\x86\xb0\xbd\xff\x89\xb6\xcb\xff\x73\xa2\xc2\
\xff\x65\x97\xc1\xff\x67\x99\xcd\xff\x6a\x9d\xdb\xff\x6c\xa0\xe6\
\xff\x71\xa7\xee\xff\x74\xaa\xf1\xff\x77\xae\xf1\xff\x77\xb0\xee\
\xff\x79\xb2\xef\xff\x7f\xb8\xf5\xff\x82\xbd\xfb\xff\x81\xbe\xfe\
\xff\x7d\xb9\xfb\xff\x7b\xb5\xf7\xff\x75\xac\xef\xff\x73\xab\xec\
\xff\x71\xa9\xea\xff\x6e\xa8\xea\xff\x6d\xa8\xed\xff\x70\xab\xf1\
\xff\x74\xae\xf6\xff\x75\xaf\xfa\xff\x6d\xa7\xf2\xff\x6e\xa8\xf3\
\xff\x6e\xa8\xf3\xff\x70\xa9\xf4\xff\x72\xac\xf4\xff\x74\xac\xf3\
\xff\x76\xad\xf2\xff\x77\xae\xf3\xff\x7b\xb3\xf4\xff\x79\xb4\xf2\
\xff\x79\xb5\xf1\xff\x75\xb2\xf2\xff\x6f\xac\xec\xff\x69\xa5\xe7\
\xff\x68\xa5\xe9\xff\x69\xa7\xed\xff\x69\xa7\xed\xff\x67\xa5\xeb\
\xff\x67\xa3\xe9\xff\x64\xa0\xe6\xff\x62\x9f\xe3\xff\x62\x9d\xe2\
\xff\x62\x9d\xe2\xff\x61\x9e\xe2\xff\x64\xa2\xe8\xff\x63\xa4\xe9\
\xff\x68\xa6\xec\xff\x6d\xa8\xf0\xff\x70\xaa\xf2\xff\x72\xac\xf4\
\xff\x73\xac\xf7\xff\x75\xac\xf7\xff\x6f\xa8\xf3\xff\x6c\xa6\xee\
\xff\x68\xa2\xea\xff\x67\x9f\xe6\xff\x64\x9d\xe2\xff\x5f\x9a\xdf\
\xff\x5e\x9a\xdc\xff\x5e\x98\xda\xff\x5d\x95\xd6\xff\x61\x99\xd4\
\xff\x5b\x90\xc2\xff\x72\xa6\xca\xff\x81\xb2\xc8\xff\x7f\xa9\xb5\
\xff\x84\xa9\xad\xff\x7c\x98\x99\xff\x8d\xa0\xa3\xff\xa5\xb1\xb5\
\xff\xdb\xe2\xe5\xff\x00\x55\xff\x03\x80\x00\xff\x02\xff\x00\x00\
\x02\xff\x80\x00\x02\xb6\x92\x00\x07\xff\xff\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\x80\xff\x00\x02\xff\xff\x00\x01\x80\xff\x00\
\x02\x80\xff\x00\x02\x00\xff\xff\x04\x00\x92\x92\x0e\xbd\xc9\xcd\
\xff\x9c\xb0\xb5\xff\x7e\x9b\xa0\xff\x80\xa4\xac\xff\x86\xad\xbb\
\xff\x86\xb3\xc8\xff\x7d\xaf\xcd\xff\x6d\xa1\xc9\xff\x65\x9a\xcd\
\xff\x6a\xa0\xdd\xff\x71\xa8\xeb\xff\x75\xad\xf4\xff\x7c\xb4\xfb\
\xff\x75\xae\xf3\xff\x74\xae\xef\xff\x75\xb0\xee\xff\x7b\xb6\xf4\
\xff\x80\xba\xfb\xff\x7f\xba\xff\xff\x79\xb3\xfb\xff\x73\xad\xf5\
\xff\x73\xab\xf2\xff\x71\xaa\xef\xff\x6f\xa9\xeb\xff\x6c\xa5\xea\
\xff\x68\xa3\xe9\xff\x68\xa2\xea\xff\x68\xa2\xed\xff\x68\xa2\xed\
\xff\x68\xa2\xee\xff\x69\xa3\xee\xff\x6b\xa5\xf0\xff\x6e\xa7\xf2\
\xff\x74\xab\xf6\xff\x7a\xaf\xf8\xff\x7d\xb4\xf9\xff\x81\xb5\xfb\
\xff\x85\xbb\xfc\xff\x83\xb9\xf8\xff\x80\xb9\xf6\xff\x7f\xb7\xf8\
\xff\x79\xb1\xf2\xff\x71\xab\xed\xff\x69\xa4\xe9\xff\x68\xa4\xea\
\xff\x68\xa3\xeb\xff\x66\xa1\xe9\xff\x65\x9f\xe7\xff\x62\x9c\xe4\
\xff\x61\x9c\xe2\xff\x61\x9c\xe2\xff\x62\x9d\xe3\xff\x61\x9d\xe3\
\xff\x62\xa2\xe9\xff\x62\xa4\xeb\xff\x65\xa8\xed\xff\x68\xa9\xee\
\xff\x69\xaa\xee\xff\x6c\xad\xf1\xff\x71\xb3\xf4\xff\x77\xb7\xf8\
\xff\x75\xb7\xf8\xff\x72\xb5\xf4\xff\x6e\xb1\xf0\xff\x6c\xac\xec\
\xff\x68\xa8\xe8\xff\x63\xa6\xe5\xff\x5f\xa2\xe1\xff\x61\x9f\xdf\
\xff\x5f\x9a\xd8\xff\x5f\x96\xcf\xff\x63\x97\xc6\xff\x71\xa2\xc2\
\xff\x7a\xa7\xbc\xff\x7b\xa2\xab\xff\x79\x9a\x9d\xff\x7d\x94\x96\
\xff\x9a\xa9\xab\xff\xb9\xc2\xc5\xff\x00\x8e\xe3\x09\x00\x2a\xd5\
\x06\xff\x00\xff\x01\x00\x00\x00\x02\x33\x66\x00\x05\xff\x80\x00\
\x02\xff\xff\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\x80\xff\x00\
\x02\x80\xff\x00\x02\x80\xff\x00\x02\x24\x49\x00\x07\x00\xff\xff\
\x02\x00\x9f\xbf\x08\xe0\xeb\xef\xff\xae\xbe\xc4\xff\x8e\xa7\xab\
\xff\x7c\x9c\xa2\xff\x89\xaf\xbb\xff\x84\xb2\xc4\xff\x84\xb6\xd2\
\xff\x76\xab\xd0\xff\x65\x9d\xce\xff\x69\xa1\xdc\xff\x6c\xa6\xe7\
\xff\x6b\xa6\xec\xff\x72\xad\xf3\xff\x71\xac\xf2\xff\x73\xad\xef\
\xff\x75\xaf\xf1\xff\x79\xb3\xf5\xff\x7c\xb5\xfa\xff\x79\xb2\xfd\
\xff\x73\xad\xf9\xff\x6e\xa8\xf4\xff\x6c\xa5\xf0\xff\x6b\xa5\xed\
\xff\x6a\xa5\xeb\xff\x67\xa1\xe9\xff\x64\x9f\xe7\xff\x63\x9d\xe8\
\xff\x62\x9c\xe8\xff\x62\x9c\xe8\xff\x67\xa1\xed\xff\x67\xa1\xec\
\xff\x68\xa1\xec\xff\x6c\xa3\xee\xff\x72\xa6\xf2\xff\x78\xad\xf6\
\xff\x7f\xb3\xf9\xff\x84\xb7\xfd\xff\x79\xad\xef\xff\x79\xae\xed\
\xff\x7b\xb0\xef\xff\x7d\xb1\xf3\xff\x7a\xaf\xf2\xff\x70\xa7\xec\
\xff\x68\xa0\xe7\xff\x63\x9d\xe5\xff\x66\xa0\xe8\xff\x64\x9d\xe8\
\xff\x62\x9b\xe6\xff\x60\x99\xe4\xff\x60\x9a\xe2\xff\x61\x9b\xe3\
\xff\x64\x9e\xe6\xff\x65\x9f\xea\xff\x63\xa0\xea\xff\x63\xa2\xec\
\xff\x63\xa4\xee\xff\x65\xa5\xec\xff\x64\xa5\xe9\xff\x66\xa8\xe9\
\xff\x6b\xae\xed\xff\x70\xb3\xf0\xff\x74\xb8\xf3\xff\x72\xb8\xf3\
\xff\x72\xb8\xf3\xff\x74\xb8\xf3\xff\x72\xb6\xf1\xff\x6b\xb1\xed\
\xff\x68\xab\xe8\xff\x67\xa6\xe3\xff\x64\x9e\xd9\xff\x5f\x95\xca\
\xff\x6e\xa0\xca\xff\x73\xa1\xc0\xff\x79\xa1\xb3\xff\x7d\xa1\xa7\
\xff\x7b\x97\x98\xff\x8a\x9e\x9f\xff\xa4\xb0\xb2\xff\xd0\xd7\xda\
\xff\x00\x99\xff\x05\x2a\x00\xaa\x06\x55\x00\xaa\x03\x00\x00\x00\
\x02\x33\x66\x00\x05\x80\xff\x00\x02\xff\xff\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\x55\x03\xff\xff\x00\x01\x80\xff\x00\
\x02\x40\x80\x00\x04\xff\xff\xff\x00\x00\xff\xff\x04\x00\xdf\xff\
\x08\xcc\xd8\xdc\xff\xa0\xb3\xb8\xff\x7f\x9b\xa2\xff\x85\xa9\xb3\
\xff\x80\xac\xbd\xff\x84\xb8\xcf\xff\x78\xae\xd1\xff\x65\x9e\xcb\
\xff\x66\x9f\xd6\xff\x69\xa4\xe2\xff\x6a\xa6\xe8\xff\x6f\xac\xf0\
\xff\x73\xb0\xf4\xff\x76\xb1\xf6\xff\x78\xb3\xf8\xff\x7a\xb2\xf9\
\xff\x75\xaf\xf7\xff\x70\xa9\xf8\xff\x6a\xa5\xf5\xff\x68\xa3\xf3\
\xff\x69\xa2\xf1\xff\x69\xa3\xef\xff\x68\xa2\xee\xff\x67\xa1\xed\
\xff\x64\xa0\xec\xff\x62\x9e\xea\xff\x61\x9c\xeb\xff\x63\x9c\xeb\
\xff\x64\x9e\xea\xff\x65\x9e\xe9\xff\x68\x9f\xea\xff\x69\xa0\xeb\
\xff\x6d\xa2\xeb\xff\x70\xa4\xeb\xff\x72\xa5\xeb\xff\x74\xa5\xeb\
\xff\x69\x98\xdc\xff\x68\x98\xda\xff\x69\x99\xdb\xff\x6c\x9e\xe0\
\xff\x6e\x9f\xe3\xff\x6b\x9e\xe4\xff\x68\x9c\xe3\xff\x66\x9b\xe4\
\xff\x68\x9f\xea\xff\x66\x9d\xe8\xff\x63\x9a\xe5\xff\x5f\x98\xe3\
\xff\x5f\x98\xe3\xff\x61\x9a\xe5\xff\x63\x9c\xe7\xff\x65\x9d\xea\
\xff\x63\x9b\xea\xff\x66\x9d\xee\xff\x68\xa0\xf1\xff\x68\xa0\xed\
\xff\x66\xa0\xe8\xff\x66\xa3\xe7\xff\x6a\xa8\xe8\xff\x6b\xac\xe9\
\xff\x6e\xb0\xeb\xff\x71\xb3\xee\xff\x77\xba\xf3\xff\x7b\xbe\xf7\
\xff\x7a\xbd\xf6\xff\x75\xb7\xf2\xff\x6c\xae\xe9\xff\x6b\xa7\xe2\
\xff\x65\x9d\xd4\xff\x65\x97\xc5\xff\x7b\xa8\xcd\xff\x7d\xa6\xbf\
\xff\x7c\xa1\xaf\xff\x81\x9f\xa4\xff\x80\x95\x96\xff\x98\xa5\xa7\
\xff\xb8\xc0\xc0\xff\x7e\x92\xad\x91\x2a\x00\xaa\x06\x55\x00\xaa\
\x03\x40\x00\x80\x04\xff\xff\xff\x00\x00\xaa\xaa\x03\x00\xbf\x40\
\x04\xff\xff\x00\x01\xff\xff\xff\x00\xff\x00\xff\x01\x00\xff\x55\
\x03\x00\xff\x80\x02\xff\x80\x00\x02\xff\xff\x00\x01\x00\x00\x00\
\x05\x00\x80\x80\x04\x00\x92\xb6\x07\xe8\xf1\xf4\xff\xaf\xbe\xc1\
\xff\x8e\xa4\xaa\xff\x7d\x9e\xa7\xff\x82\xac\xb9\xff\x85\xb4\xc9\
\xff\x7d\xb1\xcf\xff\x6e\xa2\xca\xff\x6a\xa2\xd3\xff\x71\xaa\xe2\
\xff\x77\xb3\xef\xff\x7c\xb6\xf7\xff\x7a\xb4\xf6\xff\x7c\xb6\xf8\
\xff\x7d\xb6\xfb\xff\x7a\xb2\xf9\xff\x71\xaa\xf5\xff\x68\xa3\xf2\
\xff\x63\xa0\xf0\xff\x64\x9e\xf0\xff\x64\x9e\xf0\xff\x63\x9e\xee\
\xff\x63\x9e\xee\xff\x63\x9e\xee\xff\x62\x9d\xed\xff\x61\x9c\xeb\
\xff\x63\x9c\xeb\xff\x63\x9c\xeb\xff\x64\x9c\xe9\xff\x68\x9f\xea\
\xff\x6c\xa1\xea\xff\x6d\xa2\xeb\xff\x6d\xa1\xe8\xff\x68\x9a\xe2\
\xff\x63\x94\xda\xff\x61\x90\xd4\xff\x59\x88\xcc\xff\x57\x84\xc7\
\xff\x56\x83\xc6\xff\x57\x86\xca\xff\x5c\x8a\xd1\xff\x60\x91\xd7\
\xff\x65\x97\xdf\xff\x6a\x9d\xe6\xff\x6a\x9e\xea\xff\x67\x9e\xe9\
\xff\x66\x9d\xe8\xff\x63\x9b\xe8\xff\x62\x9a\xe7\xff\x61\x9a\xe5\
\xff\x61\x99\xe6\xff\x63\x98\xe8\xff\x64\x98\xeb\xff\x66\x99\xef\
\xff\x68\x9b\xf1\xff\x68\x9d\xee\xff\x68\x9e\xeb\xff\x69\xa0\xe9\
\xff\x69\xa4\xea\xff\x6b\xa7\xe9\xff\x6d\xaa\xea\xff\x72\xaf\xed\
\xff\x77\xb4\xf2\xff\x7d\xbb\xf7\xff\x7d\xbb\xf7\xff\x78\xb6\xf2\
\xff\x70\xac\xe7\xff\x70\xa8\xdf\xff\x6b\x9e\xd0\xff\x6e\x9c\xc5\
\xff\x82\xae\xcb\xff\x87\xac\xc0\xff\x83\xa3\xae\xff\x85\x9e\xa2\
\xff\x8a\x9c\x9b\xff\xa8\xb0\xb0\xff\xd7\xdb\xdc\xff\x80\x00\xff\
\x02\x5d\x00\x74\x0b\xff\x00\xff\x02\x80\x00\xff\x02\x00\xff\xff\
\x02\x00\xaa\xaa\x03\x00\x99\x33\x05\x00\xff\xff\x02\x00\x00\x00\
\x01\xff\x00\xff\x01\x00\xff\x80\x02\x00\xff\x80\x02\xff\x00\x00\
\x01\xff\x00\x00\x01\x66\x00\x00\x05\x00\x00\x00\x01\x00\x40\x60\
\x08\x00\xb6\xff\x07\xd0\xdc\xde\xff\xa8\xbb\xc0\xff\x88\xa2\xa9\
\xff\x8a\xad\xb7\xff\x88\xb0\xc2\xff\x84\xb2\xca\xff\x7c\xaa\xcc\
\xff\x71\xa3\xcd\xff\x77\xab\xda\xff\x7e\xb4\xe9\xff\x7d\xb4\xed\
\xff\x7e\xb5\xf2\xff\x7e\xb4\xf3\xff\x7d\xb3\xf4\xff\x7a\xae\xf4\
\xff\x71\xa8\xf1\xff\x6a\xa4\xf0\xff\x65\xa0\xef\xff\x62\x9f\xef\
\xff\x5d\x99\xeb\xff\x5d\x99\xeb\xff\x5d\x99\xeb\xff\x5e\x9a\xec\
\xff\x61\x9b\xed\xff\x62\x9d\xed\xff\x65\x9d\xee\xff\x67\x9f\xee\
\xff\x69\x9f\xec\xff\x6a\xa1\xec\xff\x6c\xa1\xea\xff\x6c\x9f\xe8\
\xff\x67\x99\xe1\xff\x5e\x90\xd8\xff\x58\x89\xcf\xff\x55\x84\xc8\
\xff\x51\x7e\xc2\xff\x4c\x7c\xbe\xff\x4b\x7b\xbd\xff\x4f\x7c\xc0\
\xff\x53\x81\xc8\xff\x58\x89\xcf\xff\x5f\x91\xd9\xff\x66\x99\xe2\
\xff\x67\x9c\xe5\xff\x67\x9e\xe9\xff\x6a\xa1\xec\xff\x6b\xa3\xf0\
\xff\x6a\xa2\xef\xff\x67\xa1\xed\xff\x64\x9c\xe9\xff\x64\x99\xe9\
\xff\x64\x98\xeb\xff\x66\x96\xee\xff\x66\x96\xee\xff\x66\x96\xee\
\xff\x66\x97\xed\xff\x68\x9a\xec\xff\x6a\x9d\xed\xff\x6b\x9e\xee\
\xff\x6f\xa3\xef\xff\x70\xa7\xf0\xff\x73\xaa\xef\xff\x78\xad\xf0\
\xff\x78\xae\xef\xff\x74\xaa\xe9\xff\x72\xa8\xe4\xff\x74\xa6\xda\
\xff\x75\xa5\xcf\xff\x7c\xa7\xc8\xff\x89\xb2\xc9\xff\x90\xb1\xc0\
\xff\x8c\xa8\xaf\xff\x90\xa5\xa7\xff\xa8\xb4\xb4\xff\xcb\xd0\xd1\
\xff\x00\x00\x00\x0a\xff\x00\xff\x02\xbf\x00\x80\x04\xff\x00\xaa\
\x03\x80\x00\xff\x02\x00\xff\xff\x02\x00\xff\xcc\x05\x00\xaa\xaa\
\x03\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\x80\
\x02\x00\xff\x55\x03\xff\x00\x00\x01\x80\x00\x00\x04\xff\x00\x00\
\x01\x66\x00\x00\x05\x55\x00\xaa\x03\x00\xcc\xff\x05\x00\xc6\xff\
\x09\xc5\xd1\xd5\xff\xa0\xb4\xb9\xff\x92\xac\xb3\xff\x8d\xad\xba\
\xff\x90\xb4\xc6\xff\x89\xae\xc8\xff\x78\xa1\xc2\xff\x76\xa3\xc9\
\xff\x79\xa8\xd4\xff\x70\xa1\xd3\xff\x73\xa5\xda\xff\x6f\xa1\xdb\
\xff\x6d\x9f\xdb\xff\x6a\x9d\xdc\xff\x67\x9c\xdf\xff\x62\x9a\xe1\
\xff\x5d\x98\xe0\xff\x5b\x95\xe1\xff\x5c\x97\xe7\xff\x5d\x98\xe8\
\xff\x5e\x98\xea\xff\x61\x9b\xed\xff\x66\x9e\xef\xff\x6a\xa2\xf1\
\xff\x6f\xa5\xf2\xff\x72\xa6\xf3\xff\x6f\xa3\xef\xff\x6e\xa1\xea\
\xff\x6a\x9d\xe3\xff\x65\x96\xdc\xff\x5c\x8d\xd1\xff\x58\x87\xcb\
\xff\x53\x82\xc6\xff\x52\x7f\xc2\xff\x4c\x79\xbc\xff\x4b\x78\xbb\
\xff\x4d\x7a\xbd\xff\x51\x7e\xc2\xff\x55\x82\xc6\xff\x57\x86\xca\
\xff\x5c\x8d\xd1\xff\x60\x93\xd9\xff\x65\x99\xe0\xff\x68\x9e\xe5\
\xff\x6e\xa5\xee\xff\x74\xab\xf4\xff\x73\xac\xf7\xff\x70\xa9\xf4\
\xff\x6a\xa3\xee\xff\x68\x9e\xeb\xff\x64\x99\xe9\xff\x62\x96\xe9\
\xff\x5f\x92\xe8\xff\x5e\x91\xe7\xff\x5f\x91\xe9\xff\x60\x93\xe9\
\xff\x64\x95\xeb\xff\x64\x95\xeb\xff\x6b\x9b\xed\xff\x6a\x9b\xe9\
\xff\x69\x9c\xe5\xff\x6c\x9d\xe1\xff\x6f\x9f\xdf\xff\x70\xa2\xde\
\xff\x73\xa2\xd8\xff\x78\xa5\xd1\xff\x84\xae\xd1\xff\x89\xb1\xca\
\xff\x91\xb3\xc3\xff\x96\xb2\xbd\xff\x94\xaa\xb0\xff\xa0\xaf\xb1\
\xff\xc8\xd0\xd0\xff\x00\x2b\x2b\x0c\xff\x00\x00\x02\x40\x00\x2b\
\x0c\xff\x00\xff\x02\x46\x00\x2e\x0b\x80\x00\xff\x02\x00\xff\xff\
\x02\x00\x9f\x7f\x08\x00\xaa\xaa\x03\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\xff\x02\x00\xff\xff\x02\xff\xff\xff\
\x00\xaa\x00\x00\x03\xff\x00\x00\x02\xff\x00\x00\x02\x80\x00\x00\
\x04\x00\xaa\xaa\x03\x00\xff\xff\x05\x9d\xc9\xd8\xb1\xd8\xe4\xe8\
\xff\xaf\xbf\xc5\xff\xa1\xb4\xbc\xff\x91\xa9\xb5\xff\x8c\xa7\xbb\
\xff\x8e\xad\xc6\xff\x8c\xb0\xce\xff\x84\xaa\xcc\xff\x79\xa2\xc9\
\xff\x73\x9d\xca\xff\x71\x9c\xcd\xff\x6c\x9b\xce\xff\x69\x98\xce\
\xff\x62\x97\xd0\xff\x5f\x96\xd3\xff\x5c\x96\xd7\xff\x5e\x97\xdc\
\xff\x60\x97\xe0\xff\x61\x99\xe6\xff\x63\x9b\xe8\xff\x65\x9b\xe8\
\xff\x6a\x9e\xea\xff\x6d\xa2\xeb\xff\x70\xa4\xeb\xff\x6f\xa1\xe9\
\xff\x6c\x9d\xe3\xff\x6b\x9a\xde\xff\x64\x94\xd6\xff\x5d\x8a\xcd\
\xff\x57\x85\xc5\xff\x57\x83\xc3\xff\x56\x82\xc1\xff\x51\x7d\xbd\
\xff\x54\x80\xbf\xff\x53\x7f\xbe\xff\x52\x7e\xbd\xff\x52\x7e\xbe\
\xff\x53\x7f\xbf\xff\x54\x82\xc2\xff\x58\x86\xc6\xff\x58\x88\xca\
\xff\x62\x94\xd6\xff\x69\x9d\xdf\xff\x72\xa7\xea\xff\x78\xac\xf2\
\xff\x78\xaf\xf4\xff\x76\xad\xf2\xff\x6f\xa8\xed\xff\x6b\xa4\xe9\
\xff\x63\x9e\xe4\xff\x5f\x9a\xe2\xff\x5b\x95\xe0\xff\x58\x91\xe0\
\xff\x58\x90\xe2\xff\x5a\x90\xe5\xff\x5b\x90\xe7\xff\x60\x90\xe8\
\xff\x68\x97\xeb\xff\x66\x97\xe5\xff\x65\x97\xdf\xff\x69\x99\xd9\
\xff\x70\x9e\xd8\xff\x71\xa2\xd4\xff\x7a\xa7\xd2\xff\x80\xaa\xcd\
\xff\x8a\xaf\xc9\xff\x92\xb2\xc5\xff\x94\xb0\xbb\xff\x95\xac\xb4\
\xff\xa6\xb9\xbe\xff\xca\xd6\xd8\xff\x5f\x8b\x94\x71\xff\xff\xff\
\x00\xff\x00\x00\x01\xff\x00\x00\x01\xff\x00\x00\x01\xff\x00\x00\
\x01\x00\xaa\xaa\x03\x00\xff\xff\x04\x00\xff\xff\x04\x00\xff\x80\
\x02\x00\xff\x80\x02\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xff\
\x02\x00\xff\xff\x02\xff\xff\xff\x00\xff\x00\x00\x01\xff\x00\x00\
\x01\xff\x00\x00\x01\x80\x00\x00\x04\x00\x00\x00\x02\x00\xff\xff\
\x04\x00\xd5\xff\x06\x00\x62\x76\x0d\xdd\xe6\xe9\xff\xcb\xd6\xda\
\xff\xb2\xbf\xc7\xff\x9f\xb0\xbd\xff\x94\xab\xbb\xff\x90\xab\xbf\
\xff\x8c\xaa\xc3\xff\x87\xa9\xc7\xff\x84\xa7\xc9\xff\x7f\xa4\xca\
\xff\x79\xa1\xcb\xff\x74\x9e\xcb\xff\x6f\x9d\xcc\xff\x6a\x9d\xcf\
\xff\x68\x9c\xd1\xff\x66\x9a\xd6\xff\x65\x98\xd7\xff\x66\x9a\xdd\
\xff\x6b\x9c\xe0\xff\x6f\x9e\xe2\xff\x73\xa3\xe5\xff\x77\xa5\xe5\
\xff\x74\xa2\xe2\xff\x72\x9e\xde\xff\x72\x9d\xdc\xff\x6d\x99\xd6\
\xff\x67\x91\xcc\xff\x5c\x86\xc1\xff\x56\x7e\xb8\xff\x53\x7b\xb5\
\xff\x55\x7d\xb7\xff\x59\x81\xbc\xff\x5c\x86\xc1\xff\x5c\x87\xc0\
\xff\x5b\x88\xc1\xff\x5c\x86\xc1\xff\x5a\x84\xbf\xff\x5a\x85\xbe\
\xff\x58\x85\xbe\xff\x59\x85\xc0\xff\x59\x87\xc1\xff\x61\x91\xcb\
\xff\x6c\x9e\xd8\xff\x76\xa8\xe4\xff\x78\xab\xe9\xff\x7a\xad\xeb\
\xff\x76\xac\xe9\xff\x72\xaa\xe5\xff\x70\xa9\xe1\xff\x6a\xa4\xdf\
\xff\x61\x9d\xd9\xff\x5d\x97\xd9\xff\x5d\x95\xdc\xff\x5f\x93\xe0\
\xff\x62\x93\xe3\xff\x64\x93\xe4\xff\x64\x8f\xde\xff\x68\x93\xdc\
\xff\x6e\x9a\xda\xff\x76\x9e\xd8\xff\x7b\xa5\xd4\xff\x7d\xa8\xcf\
\xff\x84\xab\xcb\xff\x89\xad\xc5\xff\x90\xae\xbf\xff\x96\xb0\xbc\
\xff\xa1\xb6\xbe\xff\xb4\xc7\xcc\xff\xd6\xe2\xe6\xff\x00\xa2\xe8\
\x0b\x00\xff\xff\x04\x00\x00\x00\x06\xff\x00\x00\x01\xff\x00\x00\
\x01\xff\x00\x00\x01\x00\x00\x00\x02\x00\xaa\xaa\x03\x00\xff\xff\
\x04\x00\xff\xff\x04\x00\xff\x80\x02\x00\xff\x80\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\xff\x04\x00\xff\xff\x02\xff\xff\xff\
\x00\xaa\x00\x00\x03\x00\x00\x00\x01\x00\x00\x00\x01\x55\x00\x00\
\x06\x00\x00\x00\x06\x00\xaa\xaa\x03\x00\xff\xff\x02\x00\xff\xff\
\x02\x00\xff\xff\x04\x00\x6a\x80\x0c\xe1\xe8\xeb\xff\xcc\xd6\xdd\
\xff\xb7\xc6\xcf\xff\xa7\xb8\xc5\xff\x99\xb0\xc0\xff\x92\xaa\xbe\
\xff\x97\xb1\xc9\xff\x92\xad\xc8\xff\x8b\xa8\xc7\xff\x84\xa7\xc8\
\xff\x80\xa6\xc9\xff\x7a\xa6\xcb\xff\x77\xa3\xcc\xff\x73\x9f\xce\
\xff\x76\xa1\xd4\xff\x78\xa1\xd8\xff\x79\xa1\xdb\xff\x7c\xa2\xdc\
\xff\x7c\xa3\xda\xff\x7a\xa0\xd6\xff\x72\x99\xcd\xff\x69\x90\xc4\
\xff\x63\x88\xba\xff\x5f\x84\xb6\xff\x5d\x80\xb2\xff\x5c\x7f\xb1\
\xff\x57\x7b\xab\xff\x57\x7b\xab\xff\x5f\x83\xb3\xff\x68\x8d\xbf\
\xff\x61\x89\xba\xff\x63\x8b\xbc\xff\x61\x8a\xbb\xff\x5d\x85\xb6\
\xff\x56\x7e\xaf\xff\x50\x78\xa8\xff\x4d\x75\xa5\xff\x4d\x75\xa6\
\xff\x61\x8a\xbb\xff\x6a\x94\xc3\xff\x70\x9c\xcb\xff\x77\xa2\xd3\
\xff\x7e\xa9\xda\xff\x86\xb1\xe4\xff\x88\xb6\xe6\xff\x88\xb9\xe7\
\xff\x7b\xad\xd7\xff\x75\xa9\xd1\xff\x6e\xa2\xcb\xff\x6a\x9b\xcb\
\xff\x69\x98\xce\xff\x6b\x97\xd4\xff\x6d\x98\xd7\xff\x71\x99\xda\
\xff\x76\x9a\xda\xff\x7a\x9c\xd7\xff\x7e\xa1\xd3\xff\x83\xa3\xce\
\xff\x85\xa5\xc8\xff\x85\xa4\xbd\xff\x8a\xa8\xb9\xff\x90\xaa\xb8\
\xff\xa6\xbd\xc5\xff\xb8\xcb\xd0\xff\xce\xdd\xe0\xff\xe2\xed\xf1\
\xff\x00\xd1\xff\x0b\x00\xcc\xff\x05\x00\xff\xff\x02\x20\x00\x40\
\x08\xff\x00\xff\x02\x00\x00\x00\x01\x00\x00\x00\x01\xff\xff\x00\
\x01\xff\xff\x00\x01\x00\xff\xff\x02\x00\xff\xff\x02\x00\xff\xff\
\x02\x00\xff\xff\x02\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xcc\
\x05\x00\xff\xff\x04\xff\xff\xff\x00\x55\x00\x00\x06\x00\x00\x00\
\x03\x00\x00\x00\x01\xff\x00\x00\x02\x80\x00\x00\x04\x00\x00\x00\
\x03\x00\x80\x80\x04\x00\x00\x00\x03\x00\x00\x00\x01\x00\x00\x00\
\x01\x00\xff\xff\x02\x00\x9f\xff\x08\xe6\xef\xf3\xff\xd3\xdf\xe5\
\xff\xbe\xce\xd5\xff\xb4\xc4\xd0\xff\xad\xbd\xcd\xff\xa7\xb8\xcb\
\xff\x9f\xb4\xc9\xff\x9b\xb3\xc9\xff\x96\xb5\xcc\xff\x92\xb4\xcc\
\xff\x8d\xaf\xcc\xff\x88\xab\xcc\xff\x84\xa6\xca\xff\x84\xa4\xcd\
\xff\x83\xa3\xce\xff\x83\xa0\xcc\xff\x82\xa0\xc9\xff\x7f\x9c\xc3\
\xff\x74\x91\xb6\xff\x6a\x87\xac\xff\x68\x85\xaa\xff\x62\x7f\xa4\
\xff\x60\x7d\xa2\xff\x64\x7e\xa6\xff\x5e\x7b\xa2\xff\x58\x75\x9c\
\xff\x59\x78\x9f\xff\x5f\x7f\xa8\xff\x64\x87\xaf\xff\x65\x88\xb0\
\xff\x65\x88\xb0\xff\x5e\x81\xa9\xff\x55\x79\x9f\xff\x52\x74\x98\
\xff\x53\x75\x99\xff\x56\x76\x9a\xff\x4c\x6c\x90\xff\x53\x76\x98\
\xff\x5f\x82\xa4\xff\x69\x8c\xae\xff\x71\x93\xb7\xff\x79\x9b\xbf\
\xff\x7a\x9f\xc1\xff\x7a\xa0\xc0\xff\x7a\xa1\xbd\xff\x77\x9f\xb8\
\xff\x74\x9b\xb7\xff\x72\x98\xb8\xff\x73\x97\xbd\xff\x75\x97\xc2\
\xff\x77\x98\xc5\xff\x7a\x98\xc7\xff\x7c\x96\xc4\xff\x82\x9b\xc5\
\xff\x88\xa0\xc4\xff\x90\xa6\xc2\xff\x97\xac\xc2\xff\xa4\xb7\xc6\
\xff\xb1\xc4\xcc\xff\xbc\xcc\xd2\xff\xd3\xe0\xe2\xff\xe5\xf1\xf1\
\xff\x00\xff\xff\x0a\x00\xdf\xff\x08\x00\x8e\xe3\x09\x00\x39\x55\
\x09\x00\x55\x80\x06\x80\x00\xff\x02\x80\x00\xff\x02\x00\x00\x00\
\x01\xff\xff\xff\x00\xff\xff\x00\x01\xff\xff\x00\x01\x00\xff\xff\
\x02\x00\xff\xff\x02\x00\xff\xff\x02\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\xd5\x06\x00\xff\xff\x05\x00\xff\xff\
\x02\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\xff\x80\x00\
\x02\xff\xff\x00\x01\xff\xff\x00\x01\x55\xaa\x00\x03\x80\x00\x00\
\x04\x80\x00\x00\x04\xaa\x00\x00\x03\x00\x00\x00\x01\x00\xaa\xaa\
\x03\x00\xff\xd5\x06\x00\xcc\xcc\x0a\x5f\xaf\xca\x71\xe8\xf3\xf7\
\xff\xd8\xe3\xeb\xff\xd5\xdf\xe9\xff\xcf\xdb\xe7\xff\xcc\xd9\xe7\
\xff\xcc\xdd\xea\xff\xc8\xde\xea\xff\xc1\xdb\xe9\xff\xbf\xd7\xe9\
\xff\xc3\xdb\xef\xff\xc5\xd9\xf2\xff\xc4\xd8\xf1\xff\xc6\xd7\xf1\
\xff\xc8\xda\xf1\xff\xc8\xdb\xf0\xff\xc1\xd4\xe9\xff\xb9\xcc\xe1\
\xff\x96\xa9\xbe\xff\x8a\x9f\xb5\xff\x84\x98\xb1\xff\x83\x98\xb3\
\xff\x80\x95\xb0\xff\x77\x8e\xa8\xff\x71\x89\xa5\xff\x73\x8d\xab\
\xff\x75\x91\xaf\xff\x73\x8f\xad\xff\x6e\x8a\xa8\xff\x64\x80\x9e\
\xff\x5d\x78\x93\xff\x5c\x76\x8e\xff\x61\x79\x8f\xff\x66\x7d\x93\
\xff\x83\x9a\xb0\xff\x91\xa9\xbd\xff\xa3\xbb\xcf\xff\xb4\xc9\xde\
\xff\xbc\xd2\xe4\xff\xbe\xd4\xe6\xff\xba\xd0\xe2\xff\xb3\xca\xda\
\xff\xb0\xc7\xd7\xff\xb1\xc6\xd5\xff\xb0\xc5\xd4\xff\xb1\xc5\xd7\
\xff\xb1\xc6\xdb\xff\xb2\xc7\xdd\xff\xb3\xc7\xe0\xff\xb4\xc5\xe0\
\xff\xb2\xc1\xdb\xff\xb6\xc4\xdb\xff\xbb\xc9\xdc\xff\xc3\xcd\xde\
\xff\xcd\xd6\xe0\xff\xda\xe2\xe9\xff\xbb\xd2\xdb\xd0\x00\x8e\xaa\
\x09\x00\xb6\x92\x07\x00\xff\xbf\x04\x00\xff\xff\x04\x00\x80\xbf\
\x04\x00\x40\xff\x04\x00\x33\xff\x05\x00\x33\xff\x05\x00\x40\xff\
\x04\x00\xaa\xff\x03\x00\xaa\xaa\x03\x00\xff\xff\x02\xff\xff\x00\
\x01\xff\xff\x00\x01\xff\xff\x00\x01\xff\xff\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\x00\xff\xff\x02\x00\xff\xff\x02\x00\xff\xdf\
\x08\x00\xff\xff\x05\x00\xff\xff\x02\xff\xff\xff\x00\x00\x00\x00\
\x01\x00\x00\x00\x03\xbf\x40\x00\x04\xff\x80\x00\x02\xff\xff\x00\
\x01\xff\xff\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\x00\x00\
\x02\x00\x00\x00\x05\x00\x80\x2a\x06\x00\xd5\x80\x06\x00\xff\xaa\
\x06\x00\xff\xd5\x06\x00\xb6\xdb\x07\x00\xff\xff\x05\x00\xaa\xff\
\x06\x00\xaa\xff\x09\x00\xbf\xff\x08\x00\xd1\xff\x0b\x00\xeb\xff\
\x0d\x20\xf1\xff\x30\x3f\xdd\xff\x50\x7e\xd6\xff\x91\x20\xab\xff\
\x30\x7e\xbb\xff\x91\x5f\xb8\xff\x71\x00\xaa\xff\x0f\x00\xbf\xff\
\x0c\x00\xcc\xff\x0f\x3f\xa5\xff\x50\x00\xc8\xff\x0e\xe6\xf2\xfc\
\xff\xd3\xe1\xed\xff\xc8\xd8\xe5\xff\xbc\xcc\xdc\xff\xaf\xbf\xcf\
\xff\xa4\xb6\xc7\xff\x9f\xb3\xc5\xff\x91\xa7\xb9\xff\x93\xa9\xbb\
\xff\x95\xab\xbd\xff\x9a\xae\xc0\xff\xa1\xb3\xc4\xff\xae\xbf\xcc\
\xff\xbb\xcb\xd7\xff\xc7\xd5\xe1\xff\xda\xe6\xf0\xff\xe2\xee\xf8\
\xff\xbb\xe5\xff\xd0\x00\xc8\xff\x0e\x00\xc8\xff\x0e\x00\xcc\xff\
\x0f\x00\xcc\xff\x0f\x20\xb9\xff\x30\x00\xa4\xff\x0e\x00\x89\xff\
\x0d\x00\x95\xff\x0c\x00\xa4\xff\x0e\x00\xa4\xff\x0e\x00\xb6\xff\
\x0e\x00\xaa\xff\x0f\x3f\xb0\xff\x50\x3f\xb0\xff\x50\x00\x99\xff\
\x0f\x00\x95\xff\x0c\x00\x99\xff\x0a\x00\x60\xff\x08\x00\x33\xff\
\x05\x80\x00\xff\x02\x00\x00\x00\x01\xff\x80\x00\x02\x99\xcc\x00\
\x05\x33\x66\x00\x05\x40\x00\x80\x04\xff\x00\xff\x01\x00\x80\xff\
\x02\x00\x40\xff\x04\x00\x60\xdf\x08\x00\xaa\xd5\x06\x00\xff\xcc\
\x05\x00\xff\xcc\x05\x00\xff\x55\x03\xff\xff\x00\x01\xff\xff\x00\
\x01\xff\xff\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xff\
\x02\x00\xff\xff\x02\x00\xff\xdf\x08\x00\xff\xd5\x06\x00\xff\xff\
\x04\x00\xff\xff\x02\x00\x00\x00\x02\x00\x00\x00\x05\x80\x00\x00\
\x04\xff\x00\x00\x02\x55\xaa\x00\x03\xff\xff\x00\x01\xff\xff\xff\
\x00\x00\xaa\xaa\x03\x00\x00\x00\x01\x00\xff\xff\x02\x00\xff\x99\
\x05\x00\xff\x92\x07\x00\xdf\x40\x08\x00\xc6\x39\x09\x00\x60\x20\
\x08\x00\x80\x80\x04\x00\x80\xbf\x04\x00\x80\xbf\x04\x00\x80\xbf\
\x04\x00\xaa\xff\x03\x00\xcc\xff\x05\x00\xff\xd5\x06\x00\xdb\xb6\
\x07\x00\xff\xd5\x06\x00\xcc\xff\x05\x00\xff\xff\x04\x00\xd5\xaa\
\x06\x00\xff\xcc\x05\x00\xff\x55\x03\x00\xff\x99\x05\x00\x66\x66\
\x05\x00\xff\xcc\x05\x00\xff\xdf\x08\x00\xd5\xd5\x0c\x00\xcc\xee\
\x0f\x00\xdb\xff\x0e\x00\xeb\xff\x0d\x20\xe3\xff\x30\x5f\xed\xff\
\x71\x7e\xeb\xff\x91\x9d\xeb\xff\xb1\x5f\xd2\xff\x71\x3f\xbb\xff\
\x50\x00\xaa\xff\x0f\x00\xbf\xff\x0c\x00\xdf\xff\x08\x00\xff\xff\
\x05\x00\xff\xff\x05\x00\xff\xff\x05\x00\xcc\xff\x05\x00\x66\x7f\
\x0a\x00\x6a\x55\x0c\x00\x8e\x71\x09\x00\xaa\xd5\x06\x00\x40\xff\
\x04\x00\x00\xff\x04\x49\x00\xdb\x07\x00\x00\xff\x05\x00\x6d\xff\
\x07\x00\x99\xff\x05\x00\xd5\xff\x06\x00\xff\xff\x08\x00\xd5\xd5\
\x0c\x00\xe8\xe8\x0b\x00\xe3\xe3\x09\x00\xe3\xe3\x09\x00\xd5\xff\
\x06\x00\x80\xff\x06\x00\x2a\xd5\x06\x2a\x00\x55\x06\x33\x00\x66\
\x05\x00\x00\x00\x03\xff\x80\x00\x02\xff\xff\x00\x01\xff\x00\x00\
\x01\x40\x00\xff\x04\x00\x24\xb6\x07\x00\x55\xc6\x09\x00\x92\xb6\
\x07\x00\xff\xcc\x05\x00\xff\xd5\x06\x00\xff\xcc\x05\x00\xaa\xaa\
\x03\x00\xaa\xaa\x03\xff\xff\x00\x01\xff\xff\x00\x01\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\xff\xdf\
\x08\x00\xff\xd5\x06\x00\xff\xff\x04\x00\xff\xff\x02\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\x00\x00\x01\xff\x00\x00\x02\x40\x80\x00\
\x04\x55\xaa\x00\x03\x00\xaa\xaa\x03\x00\x55\x55\x06\x00\x80\x80\
\x04\x00\xff\xff\x04\x00\xff\x99\x05\x00\xdf\x40\x08\x00\xc6\x39\
\x09\x00\xff\x00\x06\x00\xbf\x40\x04\x55\xaa\x00\x03\xff\x00\x00\
\x01\xff\x00\x00\x01\xff\x00\x00\x01\xff\x00\x00\x01\x80\xff\x00\
\x02\x80\xff\x00\x02\xbf\xff\x00\x04\x6d\x92\x00\x07\xd5\xaa\x00\
\x06\xd5\xaa\x00\x06\xaa\xaa\x00\x09\xbf\xbf\x00\x08\xcc\xe6\x00\
\x0a\xe3\xff\x00\x09\xaa\xaa\x00\x09\x66\x99\x00\x0a\x49\xdb\x00\
\x07\x00\xff\x55\x06\x00\xff\xaa\x06\x00\xff\xe6\x0a\x00\xff\xff\
\x0a\x00\xdb\xdb\x0e\x5f\xd2\xe5\x71\x00\xdd\xff\x0f\x00\xdb\xff\
\x0e\x00\xdb\xff\x0e\x00\xdf\xff\x08\x00\xff\xff\x08\x00\xaa\xd5\
\x06\x00\xb6\x92\x07\x40\x80\x00\x04\x2a\x55\x00\x06\x55\xaa\x00\
\x03\x55\xaa\x00\x03\xd5\xaa\x00\x06\xd5\xaa\x00\x06\xbf\xff\x00\
\x04\x80\xff\x00\x02\xff\x00\x66\x05\xff\x00\xd5\x06\xdf\x00\xdf\
\x08\x92\x00\xdb\x07\xcc\x00\xff\x05\x00\x66\x99\x05\x00\x80\x80\
\x04\x00\xdf\x7f\x08\x00\xdf\x40\x08\x00\xff\x66\x0a\x00\xdf\x7f\
\x08\x00\xff\x92\x07\x00\xcc\xff\x05\x00\xaa\xd5\x06\x55\x00\xff\
\x03\x40\x00\xff\x04\x55\x00\xaa\x03\x00\x00\x00\x01\xbf\x40\x00\
\x04\x99\x33\x00\x05\xaa\x00\x00\x03\x40\x00\x80\x04\x00\x00\xff\
\x06\x00\x80\xff\x06\x00\xcc\xff\x05\x00\xff\xcc\x05\x00\xff\xd5\
\x06\x00\xff\xd5\x06\x00\xaa\xaa\x03\x00\xaa\xaa\x03\xff\xff\x00\
\x01\xff\xff\x00\x01\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\
\x00\xff\xff\xff\x00\xff\xff\xfc\x00\x00\x00\x00\x00\x03\xff\xff\
\xfc\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x7f\xff\xfc\xff\xff\x00\
\x00\x00\x00\x00\x00\x00\x1f\xff\xfc\xff\xfe\x00\x00\x00\x00\x00\
\x00\x00\x1f\xff\xfc\xff\xfe\x00\x00\x00\x00\x00\x00\x00\x1f\xff\
\xfc\xff\xff\x00\x00\x00\x00\x00\x00\x00\x1f\xff\xfc\xff\xff\x00\
\x00\x00\x00\x00\x00\x00\x1f\xff\xfc\xff\xff\x00\x00\x00\x00\x00\
\x00\x00\x1f\xff\xfc\xff\xff\x00\x00\x00\x00\x00\x00\x00\x1f\xff\
\xfc\xff\xff\x80\x00\x00\x00\x00\x00\x00\x3f\xff\xfc\xff\xff\x80\
\x00\x00\x00\x00\x00\x00\x3f\xff\xfc\xff\xff\x80\x00\x00\x00\x00\
\x00\x00\x7f\xff\xfc\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x7f\xff\
\xfc\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x7f\xff\xfc\xff\xff\xc0\
\x00\x00\x00\x00\x00\x00\x7f\xff\xfc\xff\xff\xc0\x00\x00\x00\x00\
\x00\x00\x7f\xff\xfc\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x7f\xff\
\xfc\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x7f\xff\xfc\xff\xff\xc0\
\x00\x00\x00\x00\x00\x00\x7f\xff\xfc\xff\xff\xc0\x00\x00\x00\x00\
\x00\x00\x3f\xff\xfc\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x3f\xff\
\xfc\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x3f\xff\xfc\xff\xff\xc0\
\x00\x00\x00\x00\x00\x00\x3f\xff\xfc\xff\xff\xc0\x00\x00\x00\x00\
\x00\x00\x7f\xff\xfc\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x7f\xff\
\xfc\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x7f\xff\xfc\xff\xff\xc0\
\x00\x00\x00\x00\x00\x00\x7f\xff\xfc\xff\xff\xc0\x00\x00\x00\x00\
\x00\x00\x7f\xff\xfc\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x7f\xff\
\xfc\xff\xff\xc0\x00\x00\x00\x00\x00\x00\xff\xff\xfc\xff\xff\xc0\
\x00\x00\x00\x00\x00\x00\xff\xff\xfc\xff\xff\xe0\x00\x00\x00\x00\
\x00\x00\xff\xff\xfc\xff\xff\xe0\x00\x00\x00\x00\x00\x00\xff\xff\
\xfc\xff\xff\xe0\x00\x00\x00\x00\x00\x00\xff\xff\xfc\xff\xff\xe0\
\x00\x00\x00\x00\x00\x01\xff\xff\xfc\xff\xff\xf0\x00\x00\x00\x00\
\x00\x01\xff\xff\xfc\xff\xff\xf0\x00\x00\x00\x00\x00\x01\xff\xff\
\xfc\xff\xff\xf0\x00\x00\x00\x00\x00\x01\xff\xff\xfc\xff\xff\xf0\
\x00\x00\x00\x00\x00\x01\xff\xff\xfc\xff\xff\xf0\x00\x00\x00\x00\
\x00\x01\xff\xff\xfc\xff\xff\xf0\x00\x00\x00\x00\x00\x00\xff\xff\
\xfc\xff\xff\xe0\x00\x00\x00\x00\x00\x00\xff\xff\xfc\xff\xff\xe0\
\x00\x00\x00\x00\x00\x00\xff\xff\xfc\xff\xff\xe0\x00\x00\x00\x00\
\x00\x00\xff\xff\xfc\xff\xff\xe0\x00\x00\x00\x00\x00\x00\xff\xff\
\xfc\xff\xff\xe0\x00\x00\x00\x00\x00\x00\xff\xff\xfc\xff\xff\xe0\
\x00\x00\x00\x00\x00\x00\xff\xff\xfc\xff\xff\xe0\x00\x00\x00\x00\
\x00\x00\xff\xff\xfc\xff\xff\xe0\x00\x00\x00\x00\x00\x00\x7f\xff\
\xfc\xff\xff\xe0\x00\x00\x00\x00\x00\x00\x7f\xff\xfc\xff\xff\xc0\
\x00\x00\x00\x00\x00\x00\x7f\xff\xfc\xff\xff\xc0\x00\x00\x00\x00\
\x00\x00\x7f\xff\xfc\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x7f\xff\
\xfc\xff\xff\xc0\x00\x00\x00\x00\x00\x00\x3f\xff\xfc\xff\xff\xc0\
\x00\x00\x00\x00\x00\x00\x3f\xff\xfc\xff\xff\x80\x00\x00\x00\x00\
\x00\x00\x3f\xff\xfc\xff\xff\x80\x00\x00\x00\x00\x00\x00\x3f\xff\
\xfc\xff\xff\x80\x00\x00\x00\x00\x00\x00\x3f\xff\xfc\xff\xff\x80\
\x00\x00\x00\x00\x00\x00\x3f\xff\xfc\xff\xff\x00\x00\x00\x00\x00\
\x00\x00\x1f\xff\xfc\xff\xff\x00\x00\x00\x00\x00\x00\x00\x1f\xff\
\xfc\xff\xff\x00\x00\x00\x00\x00\x00\x00\x0f\xff\xfc\xff\xff\x00\
\x00\x00\x00\x00\x00\x00\x0f\xff\xfc\xff\xfe\x00\x00\x00\x00\x00\
\x00\x00\x0f\xff\xfc\xff\xfe\x00\x00\x00\x00\x00\x00\x00\x0f\xff\
\xfc\xff\xfe\x00\x00\x00\x00\x00\x00\x00\x0f\xff\xfc\xff\xfe\x00\
\x00\x00\x00\x00\x00\x00\x0f\xff\xfc\xff\xfe\x00\x00\x00\x00\x00\
\x00\x00\x0f\xff\xfc\xff\xfe\x00\x00\x00\x00\x00\x00\x00\x0f\xff\
\xfc\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xfc\xf0\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x03\xfc\xc0\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\xfc\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x7c\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7c\xc0\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x7c\xc0\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x7c\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x7c\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\xe0\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\xfc\xe0\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\xfc\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\xfc\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xfc\xf0\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x01\xfc\xf8\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x03\xfc\xf8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\
\xfc\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xfc\xfc\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x07\xfc\xfe\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x07\xfc\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\
\xfc\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xfc\xff\x80\x00\
\x00\x00\x00\x00\x00\x00\x00\x3f\xfc\xff\x80\x00\x00\x00\x00\x00\
\x00\x00\x00\x7f\xfc\xff\xe0\x00\x00\x00\x00\x00\x00\x00\x00\xff\
\xfc\xff\xf8\x00\x00\x00\x00\x00\x00\x00\x01\xff\xfc\xff\xfe\x00\
\x00\x00\x00\x00\x00\x00\x07\xff\xfc\xff\xff\xc0\x00\x00\x00\x00\
\x00\x00\x3f\xff\xfc\xff\xff\xff\xeb\xf0\x00\x03\xff\xff\xff\xff\
\xfc\xff\xff\xff\xff\xff\xe7\xff\xff\xff\xff\xff\xfc\xff\xff\xff\
\xff\xff\xff\xff\xff\xff\xff\xff\xfc\
"
qt_resource_name = b"\
\x00\x0e\
\x02\x74\x7e\xff\
\x00\x63\
\x00\x6d\x00\x69\x00\x73\x00\x73\x00\x5f\x00\x69\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x69\x00\x63\x00\x6f\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
def qInitResources():
    """Register the embedded resource data with the Qt resource system."""
    # 0x01 is the resource format version expected by qRegisterResourceData.
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    """Unregister the embedded resource data from the Qt resource system."""
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
|
balloob/home-assistant
|
refs/heads/dev
|
homeassistant/components/econet/water_heater.py
|
14
|
"""Support for Rheem EcoNet water heaters."""
import datetime
import logging
from pyeconet.api import PyEcoNet
import voluptuous as vol
from homeassistant.components.water_heater import (
PLATFORM_SCHEMA,
STATE_ECO,
STATE_ELECTRIC,
STATE_GAS,
STATE_HEAT_PUMP,
STATE_HIGH_DEMAND,
STATE_OFF,
STATE_PERFORMANCE,
SUPPORT_OPERATION_MODE,
SUPPORT_TARGET_TEMPERATURE,
WaterHeaterEntity,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_TEMPERATURE,
CONF_PASSWORD,
CONF_USERNAME,
TEMP_FAHRENHEIT,
)
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN, SERVICE_ADD_VACATION, SERVICE_DELETE_VACATION
_LOGGER = logging.getLogger(__name__)
ATTR_VACATION_START = "next_vacation_start_date"
ATTR_VACATION_END = "next_vacation_end_date"
ATTR_ON_VACATION = "on_vacation"
ATTR_TODAYS_ENERGY_USAGE = "todays_energy_usage"
ATTR_IN_USE = "in_use"
ATTR_START_DATE = "start_date"
ATTR_END_DATE = "end_date"
ATTR_LOWER_TEMP = "lower_temp"
ATTR_UPPER_TEMP = "upper_temp"
ATTR_IS_ENABLED = "is_enabled"
SUPPORT_FLAGS_HEATER = SUPPORT_TARGET_TEMPERATURE | SUPPORT_OPERATION_MODE
ADD_VACATION_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Optional(ATTR_START_DATE): cv.positive_int,
vol.Required(ATTR_END_DATE): cv.positive_int,
}
)
DELETE_VACATION_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids})
ECONET_DATA = "econet"
ECONET_STATE_TO_HA = {
"Energy Saver": STATE_ECO,
"gas": STATE_GAS,
"High Demand": STATE_HIGH_DEMAND,
"Off": STATE_OFF,
"Performance": STATE_PERFORMANCE,
"Heat Pump Only": STATE_HEAT_PUMP,
"Electric-Only": STATE_ELECTRIC,
"Electric": STATE_ELECTRIC,
"Heat Pump": STATE_HEAT_PUMP,
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the EcoNet water heaters and their vacation services.

    Logs into the EcoNet cloud API with the configured credentials, creates
    one entity per discovered water heater, and registers the add/delete
    vacation services that operate on those entities.
    """
    hass.data[ECONET_DATA] = {}
    hass.data[ECONET_DATA]["water_heaters"] = []
    # Both keys are vol.Required in PLATFORM_SCHEMA, so they are present here.
    username = config.get(CONF_USERNAME)
    password = config.get(CONF_PASSWORD)
    econet = PyEcoNet(username, password)
    water_heaters = econet.get_water_heaters()
    hass_water_heaters = [
        EcoNetWaterHeater(water_heater) for water_heater in water_heaters
    ]
    add_entities(hass_water_heaters)
    hass.data[ECONET_DATA]["water_heaters"].extend(hass_water_heaters)

    def service_handle(service):
        """Handle the vacation service calls."""
        # Use the shared ATTR_ENTITY_ID constant instead of a hard-coded
        # "entity_id" string so the key stays consistent with the schemas.
        entity_ids = service.data.get(ATTR_ENTITY_ID)
        all_heaters = hass.data[ECONET_DATA]["water_heaters"]
        # No entity_id in the service call means "apply to all water heaters".
        _heaters = [
            x for x in all_heaters if not entity_ids or x.entity_id in entity_ids
        ]
        for _water_heater in _heaters:
            if service.service == SERVICE_ADD_VACATION:
                start = service.data.get(ATTR_START_DATE)
                end = service.data.get(ATTR_END_DATE)
                _water_heater.add_vacation(start, end)
            if service.service == SERVICE_DELETE_VACATION:
                for vacation in _water_heater.water_heater.vacations:
                    vacation.delete()
            # Push the new vacation state to Home Assistant right away.
            _water_heater.schedule_update_ha_state(True)

    hass.services.register(
        DOMAIN, SERVICE_ADD_VACATION, service_handle, schema=ADD_VACATION_SCHEMA
    )
    hass.services.register(
        DOMAIN, SERVICE_DELETE_VACATION, service_handle, schema=DELETE_VACATION_SCHEMA
    )
class EcoNetWaterHeater(WaterHeaterEntity):
    """Representation of an EcoNet water heater."""

    def __init__(self, water_heater):
        """Initialize the water heater.

        `water_heater` is the pyeconet device object; all state is read from
        and written through it. Mode-name mappings between EcoNet and Home
        Assistant are built here, restricted to the modes the device reports.
        """
        self.water_heater = water_heater
        self.supported_modes = self.water_heater.supported_modes
        self.econet_state_to_ha = {}
        self.ha_state_to_econet = {}
        # Keep only the translations for modes this device actually supports.
        for mode in ECONET_STATE_TO_HA:
            if mode in self.supported_modes:
                self.econet_state_to_ha[mode] = ECONET_STATE_TO_HA.get(mode)
        # Reverse mapping, used by set_operation_mode.
        for key, value in self.econet_state_to_ha.items():
            self.ha_state_to_econet[value] = key
        # Log device modes with no known translation so users can report them.
        for mode in self.supported_modes:
            if mode not in ECONET_STATE_TO_HA:
                error = f"Invalid operation mode mapping. {mode} doesn't map. Please report this."
                _LOGGER.error(error)

    @property
    def name(self):
        """Return the device name."""
        return self.water_heater.name

    @property
    def available(self):
        """Return whether the device is online."""
        return self.water_heater.is_connected

    @property
    def temperature_unit(self):
        """Return the unit of measurement."""
        return TEMP_FAHRENHEIT

    @property
    def device_state_attributes(self):
        """Return the optional device state attributes."""
        data = {}
        vacations = self.water_heater.get_vacations()
        if vacations:
            # Only the next (first) scheduled vacation is exposed.
            data[ATTR_VACATION_START] = vacations[0].start_date
            data[ATTR_VACATION_END] = vacations[0].end_date
        data[ATTR_ON_VACATION] = self.water_heater.is_on_vacation
        todays_usage = self.water_heater.total_usage_for_today
        if todays_usage:
            data[ATTR_TODAYS_ENERGY_USAGE] = todays_usage
        data[ATTR_IN_USE] = self.water_heater.in_use
        # Temperatures may be unreported; only include them when present.
        if self.water_heater.lower_temp is not None:
            data[ATTR_LOWER_TEMP] = round(self.water_heater.lower_temp, 2)
        if self.water_heater.upper_temp is not None:
            data[ATTR_UPPER_TEMP] = round(self.water_heater.upper_temp, 2)
        if self.water_heater.is_enabled is not None:
            data[ATTR_IS_ENABLED] = self.water_heater.is_enabled
        return data

    @property
    def current_operation(self):
        """
        Return current operation as one of the following.

        ["eco", "heat_pump", "high_demand", "electric_only"]
        """
        # Returns None when the device reports an unmapped mode.
        current_op = self.econet_state_to_ha.get(self.water_heater.mode)
        return current_op

    @property
    def operation_list(self):
        """List of available operation modes."""
        op_list = []
        for mode in self.supported_modes:
            ha_mode = self.econet_state_to_ha.get(mode)
            if ha_mode is not None:
                op_list.append(ha_mode)
        return op_list

    @property
    def supported_features(self):
        """Return the list of supported features."""
        return SUPPORT_FLAGS_HEATER

    def set_temperature(self, **kwargs):
        """Set new target temperature."""
        target_temp = kwargs.get(ATTR_TEMPERATURE)
        if target_temp is not None:
            self.water_heater.set_target_set_point(target_temp)
        else:
            _LOGGER.error("A target temperature must be provided")

    def set_operation_mode(self, operation_mode):
        """Set operation mode."""
        op_mode_to_set = self.ha_state_to_econet.get(operation_mode)
        if op_mode_to_set is not None:
            self.water_heater.set_mode(op_mode_to_set)
        else:
            _LOGGER.error("An operation mode must be provided")

    def add_vacation(self, start, end):
        """Add a vacation to this water heater.

        `start` and `end` are Unix timestamps; a falsy start means
        "start the vacation now".
        """
        if not start:
            start = datetime.datetime.now()
        else:
            start = datetime.datetime.fromtimestamp(start)
        end = datetime.datetime.fromtimestamp(end)
        self.water_heater.set_vacation_mode(start, end)

    def update(self):
        """Fetch the latest device state from the EcoNet cloud."""
        self.water_heater.update_state()

    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        return self.water_heater.set_point

    @property
    def min_temp(self):
        """Return the minimum temperature."""
        return self.water_heater.min_set_point

    @property
    def max_temp(self):
        """Return the maximum temperature."""
        return self.water_heater.max_set_point
|
Denisolt/IEEE-NYIT-MA
|
refs/heads/master
|
local/lib/python2.7/site-packages/pip/_vendor/cachecontrol/compat.py
|
479
|
# Python 2/3 compatibility shims (vendored cachecontrol compat module).
try:
    # Python 3 location.
    from urllib.parse import urljoin
except ImportError:
    # Python 2 fallback.
    from urlparse import urljoin
try:
    # Prefer the C implementation on Python 2.
    import cPickle as pickle
except ImportError:
    import pickle
from pip._vendor.requests.packages.urllib3.response import HTTPResponse
from pip._vendor.requests.packages.urllib3.util import is_fp_closed
# Replicate some six behaviour
try:
    # `unicode` only exists on Python 2; the NameError on Python 3
    # selects the `str` branch below.
    text_type = (unicode,)
except NameError:
    text_type = (str,)
|
uskudnik/amazon-glacier-cmd-interface
|
refs/heads/master
|
setup.py
|
8
|
# -*- coding: utf-8 -*-
"""Installer for this package."""
from setuptools import setup
from setuptools import find_packages
import os
# shamlessly stolen from Hexagon IT guys
def read(*rnames):
    """Return the text contents of a file located next to this script.

    `rnames` are path components joined onto the directory containing
    setup.py. The file handle is closed deterministically via a context
    manager (the original left it to the garbage collector).
    """
    path = os.path.join(os.path.dirname(__file__), *rnames)
    with open(path) as handle:
        return handle.read()
# Package version; bump here for releases.
version = '0.2dev'
setup(name='glacier',
      version=version,
      description="Command line interface for Amazon Glacier",
      # Long description is assembled from the repository docs.
      long_description=read('README.md')+read("LICENSE"),
      classifiers=[
          "Programming Language :: Python",
      ],
      keywords='amazon glacier cmd cli command line interface command-line',
      author='Urban Škudnik, Jaka Hudoklin',
      author_email='urban.skudnik@gmail.com',
      url='https://github.com/uskudnik/amazon-glacier-cmd-interface',
      license='MIT',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      # Development tarball of boto pinned for Glacier support.
      dependency_links =
      ["https://github.com/boto/boto/tarball/develop#egg=boto-2.5.9999"],
      install_requires=[
          # list project dependencies
          'boto',
          'python-dateutil',
          'pytz',
          'argparse',
          'prettytable'
      ],
      # Console entry point: installs the `glacier-cmd` executable.
      entry_points="""
[console_scripts]
glacier-cmd = glacier.glacier:main
""",
      )
|
chatelak/RMG-Py
|
refs/heads/master
|
rmgpy/molecule/parser.py
|
5
|
# global imports
import cython
import logging
import itertools
# local imports
# Assume that OB is not installed by default
INSTALLED_BACKENDS = {
'OB': False,
}
try:
import openbabel
INSTALLED_BACKENDS['OB'] = True
except :
pass
from rdkit import Chem
from rmgpy.molecule import element as elements
from .molecule import Atom, Bond, Molecule
from .adjlist import PeriodicSystem, bond_orders, ConsistencyChecker
import rmgpy.molecule.inchi as inchiutil
import rmgpy.molecule.util as util
import rmgpy.molecule.pathfinder as pathfinder
import rmgpy.molecule.generator as generator
# constants
# Parsing backends, tried in list order by __parse(); Open Babel is
# preferred (inserted first) when it is installed.
BACKENDS = [
    'rdkit',
]
if INSTALLED_BACKENDS['OB']:
    BACKENDS.insert(0,'openbabel')
# Troublesome InChIs mapped to a SMILES string that is parsed instead.
# Keys are the InChI body (everything after the "InChI=1S" prefix).
INCHI_LOOKUPS = {
    'H': '[H]',#RDkit was improperly handling the Hydrogen radical from InChI
    'He': '[He]',
}
SMILES_LOOKUPS = {
'[He]':# RDKit improperly handles helium and returns it in a triplet state
"""
He
multiplicity 1
1 He u0 p1
"""
}
def __fromSMILES(mol, smilesstr, backend):
    """Populate the Molecule `mol` from the SMILES string `smilesstr`.

    The conversion is delegated to the requested `backend` ('rdkit' or
    'openbabel'); the same Molecule instance is returned.
    """
    chosen = backend.lower()
    if chosen == 'rdkit':
        rdkitmol = Chem.MolFromSmiles(smilesstr)
        if rdkitmol is None:
            raise ValueError("Could not interpret the SMILES string {0!r}".format(smilesstr))
        fromRDKitMol(mol, rdkitmol)
        return mol
    if chosen == 'openbabel':
        parse_openbabel(mol, smilesstr, 'smi')
        return mol
    raise NotImplementedError('Unrecognized backend for SMILES parsing: {0}'.format(backend))
def __fromInChI(mol, inchistr, backend):
    """Populate the Molecule `mol` from the InChI string `inchistr`.

    The conversion is delegated to the requested `backend` ('rdkit' or
    'openbabel'); the (possibly rebound) Molecule is returned.
    """
    chosen = backend.lower()
    if chosen == 'rdkit':
        # Keep explicit hydrogens: the InChI layers encode them.
        rdkitmol = Chem.inchi.MolFromInchi(inchistr, removeHs=False)
        mol = fromRDKitMol(mol, rdkitmol)
        return mol
    if chosen == 'openbabel':
        return parse_openbabel(mol, inchistr, 'inchi')
    raise NotImplementedError('Unrecognized backend for InChI parsing: {0}'.format(backend))
def __parse(mol, identifier, type_identifier, backend):
    """
    Parses the identifier based on the type of identifier (inchi/smi)
    and the backend used.

    First, look up the identifier in a dictionary to see if it can be
    processed via a hard-coded shortcut. If not in the dictionary, parse
    it through the specified backend, or try all backends in order.

    Raises a generic Exception when no backend yields a correctly parsed
    molecule.
    """
    # Shortcut table first: known-troublesome identifiers bypass the backends.
    if __lookup(mol, identifier, type_identifier) is not None:
        if isCorrectlyParsed(mol, identifier):
            return mol
    # 'try-all' walks the BACKENDS list in priority order; otherwise only
    # the explicitly requested backend is attempted.
    for _backend in (BACKENDS if backend=='try-all' else [backend]):
        if type_identifier == 'smi':
            __fromSMILES(mol, identifier, _backend)
        elif type_identifier == 'inchi':
            __fromInChI(mol, identifier, _backend)
        else:
            raise NotImplementedError("Unknown identifier type {0}".format(type_identifier))
        if isCorrectlyParsed(mol, identifier):
            return mol
        else:
            # Keep trying the remaining backends before giving up.
            logging.debug('Backend %s is not able to parse identifier %s', _backend, identifier)
    logging.error("Unable to correctly parse %s with backend %s", identifier, backend)
    raise Exception("Couldn't parse {0}".format(identifier))
def parse_openbabel(mol, identifier, type_identifier):
    """Convert `identifier` (of format `type_identifier`) into `mol` using Open Babel."""
    converter = openbabel.OBConversion()
    # A standalone SetInFormat(identifier) does not exist, so both
    # directions are configured at once; only the input format matters here.
    converter.SetInAndOutFormats(type_identifier, "smi")
    ob_molecule = openbabel.OBMol()
    converter.ReadString(ob_molecule, identifier)
    ob_molecule.AddHydrogens()
    ob_molecule.AssignSpinMultiplicity(True)
    fromOBMol(mol, ob_molecule)
    return mol
def isCorrectlyParsed(mol, identifier):
    """Check if the molecule object has been correctly parsed.

    The molecule must contain at least one atom; for InChI identifiers the
    element counts of the identifier and the parsed molecule must also agree.
    """
    # bool() instead of the original manual append(True)/append(False) branch.
    conditions = [bool(mol.atoms)]
    if 'InChI' in identifier:
        inchi_elementcount = util.retrieveElementCount(identifier)
        mol_elementcount = util.retrieveElementCount(mol)
        conditions.append(inchi_elementcount == mol_elementcount)
    return all(conditions)
def __lookup(mol, identifier, type_identifier):
    """
    Look up `identifier` in the hard-coded tables of troublesome species
    and parse it the way we think is best.

    Troublesome InChIs are mapped to a SMILES string that is parsed instead;
    troublesome SMILES are mapped to an adjacency list. Returns None when
    the identifier has no shortcut entry.
    """
    kind = type_identifier.lower()
    if kind == 'inchi':
        try:
            # Key on the InChI body, i.e. everything after the version prefix.
            return mol.fromSMILES(INCHI_LOOKUPS[identifier.split('/', 1)[1]])
        except KeyError:
            return None
    elif kind == 'smi':
        try:
            return mol.fromAdjacencyList(SMILES_LOOKUPS[identifier])
        except KeyError:
            return None
def check(mol, aug_inchi) :
    """
    Check if the molecular structure is correct.

    Checks whether the multiplicity contained in the augmented inchi,
    corresponds to the number of unpaired electrons + 1 found in the molecule.
    Checks whether the valence of each atom is compatible with the bond order,
    number of unpaired electrons, lone pairs and charge.

    Raises (via ConsistencyChecker) when a check fails.
    """
    # Static type declarations for Cython compilation; no-ops in pure Python.
    cython.declare(inchi=str,
                   at=Atom
                   )
    ConsistencyChecker.check_multiplicity(mol.getRadicalCount(), mol.multiplicity)
    for at in mol.atoms:
        ConsistencyChecker.check_partial_charge(at)
def fix_oxygen_unsaturated_bond(mol, u_indices):
    """
    Searches for a radical or a charged oxygen atom connected to
    a closed-shell carbon via an unsatured bond.

    Decrements the unsatured bond,
    transfers the unpaired electron from O to C or
    converts the charge from O to an unpaired electron on C,
    increases the lone pair count of O to 2.

    Only do this once per molecule (every successful branch returns).
    """
    for at in mol.atoms:
        # Case 1: radical oxygen with one lone pair -- look for a triple
        # bond, demote it and move the radical onto the neighbour.
        if at.isOxygen() and at.radicalElectrons == 1 and at.lonePairs == 1:
            bonds = mol.getBonds(at)
            oxygen = at
            for atom2, bond in bonds.iteritems():
                if bond.isTriple():
                    bond.decrementOrder()
                    oxygen.radicalElectrons -= 1
                    atom2.radicalElectrons += 1
                    oxygen.lonePairs += 1
                    return
        # Case 2: positively charged oxygen with one lone pair.
        elif at.isOxygen() and at.charge == 1 and at.lonePairs == 1:
            bonds = mol.getBonds(at)
            oxygen = at
            start = oxygen
            # search for 3-atom-2-bond [X=X-X] paths
            paths = pathfinder.find_allyl_end_with_charge(start)
            for path in paths:
                end = path[-1]
                # Neutralize one unit of charge at each end of the path.
                start.charge += 1 if start.charge < 0 else -1
                end.charge += 1 if end.charge < 0 else -1
                start.lonePairs += 1
                # filter bonds from path and convert bond orders:
                bonds = path[1::2]#odd elements
                for bond in bonds[::2]:# even bonds
                    assert isinstance(bond, Bond)
                    bond.decrementOrder()
                for bond in bonds[1::2]:# odd bonds
                    assert isinstance(bond, Bond)
                    bond.incrementOrder()
                return
            else:
                # for-else: reached only when no allyl path was found --
                # fall back to a directly bonded unsaturated, neutral
                # neighbour instead.
                for atom2, bond in bonds.iteritems():
                    if not bond.isSingle() and atom2.charge == 0:
                        oxygen.charge -= 1
                        if (mol.atoms.index(atom2) + 1) in u_indices:
                            bond.decrementOrder()
                            atom2.radicalElectrons += 1
                            u_indices.remove(mol.atoms.index(atom2) + 1)
                        oxygen.lonePairs += 1
                        return
def fromInChI(mol, inchistr, backend='try-all'):
    """
    Convert an InChI string `inchistr` to a molecular structure, recording the
    raw string on the molecule first. Uses a user-specified backend for
    conversion, currently supporting rdkit (default) and openbabel.
    """
    mol.InChI = inchistr
    identifier = inchistr
    if inchiutil.INCHI_PREFIX not in inchistr:
        # Prepend the standard prefix so the backends accept the string.
        identifier = inchiutil.INCHI_PREFIX + '/' + inchistr
    return __parse(mol, identifier, 'inchi', backend)
def fromAugmentedInChI(mol, aug_inchi):
    """
    Create a Molecule object from an augmented InChI.

    The plain InChI part is parsed by the backends first; the multiplicity
    and unpaired-electron layers are then used to repair known backend
    parsing mistakes before the atom types are re-perceived.

    Returns a Molecule object.
    """
    if not isinstance(aug_inchi, inchiutil.AugmentedInChI):
        aug_inchi = inchiutil.AugmentedInChI(aug_inchi)
    mol = fromInChI(mol, aug_inchi.inchi)
    # Multiplicity = number of unpaired electrons + 1 (singlet when no u-layer).
    u_layer = aug_inchi.u_indices
    mol.multiplicity = (len(u_layer) + 1) if u_layer else 1
    fix(mol, aug_inchi)
    mol.updateAtomTypes()
    return mol
def fromSMILES(mol, smilesstr, backend='try-all'):
    """
    Convert a SMILES string `smilesstr` to a molecular structure. Uses
    a user-specified backend for conversion, currently supporting
    rdkit (default) and openbabel.
    """
    # Thin wrapper over the shared pipeline: shortcut table first, then
    # the chosen backend(s).
    return __parse(mol, smilesstr, 'smi', backend)
def fromSMARTS(mol, smartsstr):
    """
    Convert a SMARTS string `smartsstr` to a molecular structure. Uses
    `RDKit <http://rdkit.org/>`_ to perform the conversion.
    This Kekulizes everything, removing all aromatic atom types.
    """
    # NOTE(review): no None check here -- an uninterpretable SMARTS string
    # surfaces later inside fromRDKitMol rather than as a clear ValueError.
    rdkitmol = Chem.MolFromSmarts(smartsstr)
    fromRDKitMol(mol, rdkitmol)
    return mol
def fromRDKitMol(mol, rdkitmol):
    """
    Convert a RDKit Mol object `rdkitmol` to a molecular structure. Uses
    `RDKit <http://rdkit.org/>`_ to perform the conversion.
    This Kekulizes everything, removing all aromatic atom types.
    """
    # Static type declarations for Cython compilation; no-ops in pure Python.
    cython.declare(i=cython.int,
                   radicalElectrons=cython.int,
                   charge=cython.int,
                   lonePairs=cython.int,
                   number=cython.int,
                   order=cython.str,
                   atom=Atom,
                   atom1=Atom,
                   atom2=Atom,
                   bond=Bond)
    mol.vertices = []
    # Add hydrogen atoms to complete molecule if needed
    rdkitmol = Chem.AddHs(rdkitmol)
    Chem.rdmolops.Kekulize(rdkitmol, clearAromaticFlags=True)
    # iterate through atoms in rdkitmol
    for i in xrange(rdkitmol.GetNumAtoms()):
        rdkitatom = rdkitmol.GetAtomWithIdx(i)
        # Use atomic number as key for element
        number = rdkitatom.GetAtomicNum()
        element = elements.getElement(number)
        # Process charge
        charge = rdkitatom.GetFormalCharge()
        radicalElectrons = rdkitatom.GetNumRadicalElectrons()
        atom = Atom(element, radicalElectrons, charge, '', 0)
        mol.vertices.append(atom)
        # Add bonds by iterating over all earlier atoms.
        # NOTE: the original also fetched GetAtomWithIdx(j + 1) into an
        # unused local here; that dead lookup has been removed.
        for j in xrange(0, i):
            rdkitbond = rdkitmol.GetBondBetweenAtoms(i, j)
            if rdkitbond is not None:
                order = ''
                # Process bond type
                rdbondtype = rdkitbond.GetBondType()
                if rdbondtype.name == 'SINGLE': order = 'S'
                elif rdbondtype.name == 'DOUBLE': order = 'D'
                elif rdbondtype.name == 'TRIPLE': order = 'T'
                elif rdbondtype.name == 'AROMATIC': order = 'B'
                bond = Bond(mol.vertices[i], mol.vertices[j], order)
                mol.addBond(bond)
    # Set atom types and connectivity values
    mol.update()
    mol.updateLonePairs()
    # Assume this is always true
    # There are cases where 2 radicalElectrons is a singlet, but
    # the triplet is often more stable,
    mol.multiplicity = mol.getRadicalCount() + 1
    return mol
def fromOBMol(mol, obmol):
    """
    Convert a OpenBabel Mol object `obmol` to a molecular structure. Uses
    `OpenBabel <http://openbabel.org/>`_ to perform the conversion.
    """
    # Below are the declared variables for cythonizing the module
    # cython.declare(i=cython.int)
    # cython.declare(radicalElectrons=cython.int, charge=cython.int, lonePairs=cython.int)
    # cython.declare(atom=Atom, atom1=Atom, atom2=Atom, bond=Bond)
    mol.vertices = []
    # Add hydrogen atoms to complete molecule if needed
    obmol.AddHydrogens()
    # TODO Chem.rdmolops.Kekulize(obmol, clearAromaticFlags=True)
    # iterate through atoms in obmol
    for obatom in openbabel.OBMolAtomIter(obmol):
        idx = obatom.GetIdx()#openbabel idx starts at 1!  NOTE(review): idx is unused
        # Use atomic number as key for element
        number = obatom.GetAtomicNum()
        element = elements.getElement(number)
        # Process charge
        charge = obatom.GetFormalCharge()
        # Spin multiplicity of 0 means no unpaired electrons on this atom.
        obatom_multiplicity = obatom.GetSpinMultiplicity()
        radicalElectrons = obatom_multiplicity - 1 if obatom_multiplicity != 0 else 0
        atom = Atom(element, radicalElectrons, charge, '', 0)
        mol.vertices.append(atom)
    # iterate through bonds in obmol
    for obbond in openbabel.OBMolBondIter(obmol):
        order = 0
        # Process bond type
        oborder = obbond.GetBondOrder()
        if oborder == 1: order = 'S'
        elif oborder == 2: order = 'D'
        elif oborder == 3: order = 'T'
        elif obbond.IsAromatic() : order = 'B'
        bond = Bond(mol.vertices[obbond.GetBeginAtomIdx() - 1], mol.vertices[obbond.GetEndAtomIdx() - 1], order)#python array indices start at 0
        mol.addBond(bond)
    # Set atom types and connectivity values
    mol.updateConnectivityValues()
    mol.updateAtomTypes()
    mol.updateMultiplicity()
    mol.updateLonePairs()
    # Assume this is always true
    # There are cases where 2 radicalElectrons is a singlet, but
    # the triplet is often more stable,
    mol.multiplicity = mol.getRadicalCount() + 1
    return mol
def fixCharge(mol, u_indices):
    """
    Tries to fix a number of structural features in the molecule related to charge,
    based on the information from the parameter list of atom indices with
    unpaired electrons. A no-op when the list is empty, when the radical count
    already matches the multiplicity, or when no atom is charged.
    """
    if not u_indices:
        return
    # any() short-circuits instead of summing |charge| over every atom first.
    is_charged = any(at.charge != 0 for at in mol.atoms)
    is_correct = mol.getNumberOfRadicalElectrons() == (mol.multiplicity - 1)
    if mol.multiplicity < 3 or is_correct or not is_charged:
        return
    # converting charges to unpaired electrons for atoms in the u-layer
    convert_charge_to_unpaired_electron(mol, u_indices)
    # convert neighboring atoms (or delocalized paths) to unpaired electrons
    convert_delocalized_charge_to_unpaired_electron(mol, u_indices)
    fix_adjacent_charges(mol)
def check_bond_order_oxygen(mol):
    """Return False when any oxygen atom carries a total bond order of 4 or more."""
    from rmgpy.molecule.util import ORDERS
    for atom in mol.atoms:
        if atom.number != 8:
            continue
        total_order = sum([ORDERS[b.order] for _, b in atom.bonds.iteritems()])
        if total_order >= 4:
            return False
    return True
def find_mobile_h_system(mol, all_mobile_h_atoms_couples, test_indices):
    """Identify the (central, original, new_partner) atoms of a mobile-H system.

    Scans the mobile-H couples for one containing a test index: that index is
    the atom originally carrying the hydrogen, its couple partner becomes the
    new partner, and the remaining test index is the central atom. All indices
    are 1-based; the matching couple is mutated in place. Raises when no
    couple contains a test index.
    """
    remaining = test_indices[:]
    for couple in all_mobile_h_atoms_couples:
        for candidate in test_indices:
            if candidate not in couple:
                continue
            remaining.remove(candidate)
            couple.remove(candidate)
            partner_index = couple[0]
            central_index = remaining[0]
            return (mol.atoms[central_index - 1],
                    mol.atoms[candidate - 1],
                    mol.atoms[partner_index - 1])
    raise Exception('We should always have found the mobile-H system. All mobile H couples: {}, test indices: {}'
                    .format(all_mobile_h_atoms_couples, test_indices))
def fix_adjacent_charges(mol):
    """
    Search for pairs of bonded charged atoms; move each member of a pair one
    unit of charge toward neutral and increment the order of the bond that
    connects them.
    """
    for atom in mol.atoms:
        if atom.charge == 0:
            continue
        for neighbor, bond in atom.bonds.iteritems():
            if neighbor.charge == 0:
                continue
            bond.incrementOrder()
            atom.charge += 1 if atom.charge < 0 else -1
            neighbor.charge += 1 if neighbor.charge < 0 else -1
def convert_charge_to_unpaired_electron(mol, u_indices):
    """
    Convert one unit of charge into an unpaired electron on every charged atom
    whose 1-based index appears in `u_indices`; treated indices are removed
    from `u_indices` in place.
    """
    # enumerate() replaces the original per-atom mol.atoms.index() lookup.
    for position, atom in enumerate(mol.atoms, start=1):
        if atom.charge == 0 or position not in u_indices:
            continue
        atom.charge += 1 if atom.charge < 0 else -1
        atom.radicalElectrons += 1
        u_indices.remove(position)
def convert_delocalized_charge_to_unpaired_electron(mol, u_indices):
    """
    For each 1-based atom index in `u_indices`, try to reach a charged atom
    through a delocalization path and convert that charge into an unpaired
    electron; indices that are fixed this way are removed from `u_indices`.
    """
    # Iterate over a snapshot, since u_indices is mutated inside the loop.
    for index in list(u_indices):
        origin = mol.atoms[index - 1]
        # `or` short-circuits: the 3-atom path is only tried when the
        # 4-atom path conversion failed, exactly as before.
        if convert_4_atom_3_bond_path(origin) or convert_3_atom_2_bond_path(origin, mol):
            u_indices.remove(index)
def convert_4_atom_3_bond_path(start):
    """
    Searches for 4-atom-3-bond [X=X-X=X+] paths starting from the parameter atom.

    If a path is found, the starting atom receives an unpaired electron while
    the bonds in the delocalization path are "inverted". A unit of charge on the
    end atom is neutralized and a lone pair is added.

    Returns True when a conversion was applied, False otherwise.
    """
    path = pathfinder.find_butadiene_end_with_charge(start)
    if path is not None:
        start.radicalElectrons += 1
        end = path[-1]
        # Move the end atom's charge one unit toward neutral and
        # compensate with an extra lone pair.
        end.charge += 1 if end.charge < 0 else -1
        end.lonePairs += 1
        # filter bonds from path and convert bond orders:
        bonds = path[1::2]#odd elements of the path are the bonds
        for bond in bonds[::2]:# even-position bonds lose one order
            assert isinstance(bond, Bond)
            bond.decrementOrder()
        for bond in bonds[1::2]:# odd-position bonds gain one order
            assert isinstance(bond, Bond)
            bond.incrementOrder()
        return True
    return False
def convert_3_atom_2_bond_path(start, mol):
    """
    Searches for 3-atom-2-bond [X=X-X+] paths starting from the parameter atom.

    If a correct path is found, the starting atom receives an unpaired electron
    while the bonds in the delocalization path are "inverted". A unit of charge
    on the end atom is neutralized and a lone pair is added.

    If it turns out the path was invalid, the actions are reverted, and another
    path is tried instead.

    To facilitate reverting the changes, we use a reaction recipe and populate
    it with a number of actions that reflect the changes in bond orders and
    unpaired electrons that the molecule should undergo.

    Returns True when a valid conversion was applied, False otherwise.
    """
    from rmgpy.data.kinetics.family import ReactionRecipe

    def is_valid(mol):
        """Check if total bond order of oxygen atoms is smaller than 4."""
        for at in mol.atoms:
            if at.number == 8:
                order = sum([bond_orders[b.order] for _, b in at.bonds.iteritems()])
                not_correct = order >= 4
                if not_correct:
                    return False
        return True

    # NOTE(review): `index` is computed but never used below.
    index = mol.atoms.index(start) + 1
    paths = pathfinder.find_allyl_end_with_charge(start)
    for path in paths:
        # label atoms so that we can use the labels in the actions of the recipe
        for i, at in enumerate(path[::2]):
            at.label = str(i)
        # we have found the atom we are looking for
        recipe = ReactionRecipe()
        recipe.addAction(['GAIN_RADICAL', start.label, 1])
        end = path[-1]
        # Remember the charge so it can be restored on revert.
        end_original_charge = end.charge
        # filter bonds from path and convert bond orders:
        bonds = path[1::2]#odd elements
        for bond in bonds[::2]:# even-position bonds lose one order
            recipe.addAction(['CHANGE_BOND', bond.atom1.label, -1, bond.atom2.label])
        for bond in bonds[1::2]:# odd-position bonds gain one order
            recipe.addAction(['CHANGE_BOND', bond.atom1.label, 1, bond.atom2.label])
        end.charge += 1 if end.charge < 0 else -1
        recipe.applyForward(mol)
        if is_valid(mol):
            # unlabel atoms so that they never cause trouble downstream
            for i, at in enumerate(path[::2]):
                at.label = ''
            return True
        else:
            # Invalid result: undo the recipe and the charge change,
            # then try the next candidate path.
            recipe.applyReverse(mol)
            end.charge = end_original_charge
            # unlabel atoms so that they never cause trouble downstream
            for i, at in enumerate(path[::2]):
                assert isinstance(at, Atom)
                at.label = ''
    return False
def fix(mol, aug_inchi):
    """
    Fixes a number of structural features of the erroneous Molecule
    parsed by the backends, based on multiplicity and unpaired electron
    information stored in the augmented InChI.
    """
    u_indices = list(aug_inchi.u_indices) if aug_inchi.u_indices else []
    p_indices = list(aug_inchi.p_indices) if aug_inchi.p_indices else []

    # Atoms that already carry unpaired electrons do not need fixing:
    # drop one occurrence of the index per radical electron present.
    for i in set(u_indices):
        for _ in range(mol.atoms[i - 1].radicalElectrons):
            u_indices.remove(i)

    # Likewise ignore atoms that already carry lone pairs.
    for i in set(p_indices):
        for _ in range(mol.atoms[i - 1].lonePairs):
            p_indices.remove(i)

    fix_triplet_to_singlet(mol, p_indices)
    fixCharge(mol, u_indices)
    reset_lone_pairs(mol, p_indices)
    fix_oxygen_unsaturated_bond(mol, u_indices)
    fix_unsaturated_bond(mol, u_indices, aug_inchi)
    check(mol, aug_inchi)
def fix_triplet_to_singlet(mol, p_indices):
    """
    Iterates over the atoms and checks whether atoms bearing two unpaired
    electrons are also present in the p_indices list.

    If so, converts the two unpaired electrons into a lone pair and removes
    that atom index from the p_indices list (mutated in place).
    """
    for at in mol.atoms:
        index = mol.atoms.index(at) + 1  # p_indices uses 1-based atom indices
        # NOTE(review): this tests the *molecule-wide* radical electron count,
        # not at.radicalElectrons; it therefore only matches when the whole
        # molecule has exactly two unpaired electrons (a triplet). Confirm a
        # per-atom check was not intended.
        if mol.getNumberOfRadicalElectrons() == 2 and index in p_indices:
            at.lonePairs += 1
            at.radicalElectrons -= 2
            p_indices.remove(index)
def fix_butadiene_path(start, end):
    """
    Search for a 1,3-butadiene path between the start and end atoms.

    On success, each terminal atom gains an unpaired electron and the bond
    orders along the path are "inverted"; returns True. Returns False when
    no such path exists.
    """
    path = pathfinder.find_butadiene(start, end)
    if path is None:
        return False

    start.radicalElectrons += 1
    end.radicalElectrons += 1

    # Bonds occupy the odd positions of the path; alternate down/up in order.
    bonds = path[1::2]
    for bond in bonds[0::2]:
        assert isinstance(bond, Bond)
        bond.decrementOrder()
    for bond in bonds[1::2]:
        assert isinstance(bond, Bond)
        bond.incrementOrder()
    return True
def fix_mobile_h(mol, inchi, u1, u2):
    """
    Identifies a system of atoms bearing unpaired electrons and mobile
    hydrogens at the same time.

    The system will consist of a central atom that does not bear any mobile
    hydrogens, but that is bound to an atom that does bear a mobile hydrogen,
    called the "original atom".

    The algorithm identifies the "new partner" atom that is part of the mobile
    hydrogen system.  The mobile hydrogen is transferred from the original
    atom to the new partner (one bond removed, one added).  Finally, two
    unpaired electrons are created and a bond order is decremented.

    Returns True when a mobile-H system was found and rearranged, else False.
    """
    mobile_hydrogens = inchiutil.parse_H_layer(inchi)
    if mobile_hydrogens:
        # WIP: only consider the first system of mobile hydrogens:
        mobile_hydrogens = mobile_hydrogens[0]
        # find central atom:
        central, original, new_partner = util.swap(mobile_hydrogens, [u1, u2])
        # Convert 1-based InChI indices into Atom objects.
        central, original, new_partner = \
            mol.atoms[central - 1], mol.atoms[original - 1], mol.atoms[new_partner - 1]
        # search hydrogen atom and bond
        hydrogen = None
        for at, bond in original.bonds.iteritems():
            if at.number == 1:  # atomic number 1 == hydrogen
                hydrogen = at
                mol.removeBond(bond)
                break
        # NOTE(review): if `original` has no hydrogen neighbor, `hydrogen`
        # stays None and Bond() below receives it — presumably callers
        # guarantee an H is present; confirm.
        new_h_bond = Bond(new_partner, hydrogen, order='S')
        mol.addBond(new_h_bond)
        # NOTE(review): the docstring says the central-original bond is
        # decremented, but the code decrements the central-new_partner bond;
        # confirm which is intended.
        mol.getBond(central, new_partner).decrementOrder()
        central.radicalElectrons += 1
        original.radicalElectrons += 1
        return True
    return False
def convert_unsaturated_bond_to_triplet(bond):
    """
    Decrement the order of an unsaturated bond and add one unpaired
    electron to each of the two atoms it connects.

    Returns True when the bond was converted, False if it was single.
    """
    if bond.isSingle():
        return False
    bond.atom1.radicalElectrons += 1
    bond.atom2.radicalElectrons += 1
    bond.decrementOrder()
    return True
def reset_lone_pairs(mol, p_indices):
    """
    Reset the lone-pair count of every atom in the molecule.

    Atoms listed in p_indices (1-based) get as many lone pairs as the number
    of times their index occurs in the list; all other atoms fall back to the
    value implied by valence bookkeeping.
    """
    for at in mol.atoms:
        index = mol.atoms.index(at) + 1  # 1-based index
        count = p_indices.count(index)
        if count:
            at.lonePairs = count
        else:
            # Default: pair up whatever valence electrons are not used for
            # bonding, radicals or charge (integer division by 2).
            order = sum(bond_orders[b.order] for _, b in mol.getBonds(at).iteritems())
            at.lonePairs = (PeriodicSystem.valence_electrons[at.symbol] - order - at.radicalElectrons - at.charge) / 2
def fix_unsaturated_bond_to_biradical(mol, inchi, u_indices):
    """
    Convert an unsaturated bond (double, triple) into a bond
    with a lower bond order (single, double), and give an unpaired electron
    to each of the neighboring atoms, with indices referring to the 1-based
    index in the InChI string.

    On success the two consumed indices are removed from u_indices (in
    place) and the molecule is returned; raises Exception when no pair of
    indices could be converted.
    """
    cython.declare(u1=cython.int, u2=cython.int)
    cython.declare(atom1=Atom, atom2=Atom)
    cython.declare(b=Bond)
    combos = itertools.combinations(u_indices, 2)
    isFixed = False
    for u1, u2 in combos:
        atom1 = mol.atoms[u1 - 1] # convert to 0-based index for atoms in molecule
        atom2 = mol.atoms[u2 - 1] # convert to 0-based index for atoms in molecule
        if mol.hasBond(atom1, atom2):
            # Adjacent sites: first try simply unsaturating the shared bond.
            b = mol.getBond(atom1, atom2)
            isFixed = convert_unsaturated_bond_to_triplet(b)
            if isFixed:
                break
            else:
                # Shared bond was single: try transferring a mobile hydrogen.
                isFixed = fix_mobile_h(mol, inchi, u1, u2)
                if isFixed:
                    break
        else:
            # Non-adjacent sites: look for a butadiene-like path between them.
            isFixed = fix_butadiene_path(atom1, atom2)
            if isFixed:
                break
    if isFixed:
        # u1/u2 hold the last combination tried, i.e. the one that succeeded.
        u_indices.remove(u1)
        u_indices.remove(u2)
        return mol
    else:
        raise Exception(
            'Could not convert an unsaturated bond into a biradical for the \
            indices {} provided in the molecule: {}.'
            .format(u_indices, mol.toAdjacencyList())
        )
def isUnsaturated(mol):
    """
    Return True when the molecule contains at least one non-single bond
    (double, triple or benzene), False otherwise.
    """
    cython.declare(atom1=Atom,
                   atom2=Atom,
                   bonds=dict,
                   bond=Bond)
    for atom1 in mol.atoms:
        for atom2, bond in mol.getBonds(atom1).iteritems():
            if not bond.isSingle():
                return True
    return False
def fix_unsaturated_bond(mol, indices, aug_inchi):
    """
    Add unpaired electrons to the molecule by converting unsaturated bonds
    into biradicals, stopping once the radical count matches the expected
    multiplicity - 1, no unsaturated bond remains, or fewer than two
    candidate indices are left.
    """
    def radical_count_matches(m):
        # Expected number of unpaired electrons is multiplicity - 1.
        return m.getNumberOfRadicalElectrons() == (m.multiplicity - 1)

    if not radical_count_matches(mol) and not indices:
        raise Exception('Cannot correct {} based on {} by converting unsaturated bonds into unpaired electrons...'
                        .format(mol.toAdjacencyList(), aug_inchi))

    while (not radical_count_matches(mol) and isUnsaturated(mol)
            and len(indices) > 1):
        mol = fix_unsaturated_bond_to_biradical(mol, aug_inchi.inchi, indices)
|
sgerhart/ansible
|
refs/heads/maintenance_policy_module
|
lib/ansible/plugins/lookup/vars.py
|
55
|
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
lookup: vars
author: Ansible Core
version_added: "2.5"
short_description: Lookup templated value of variables
description:
- Retrieves the value of an Ansible variable.
options:
_terms:
description: The variable names to look up.
required: True
default:
description:
- What to return if a variable is undefined.
- If no default is set, it will result in an error if any of the variables is undefined.
"""
EXAMPLES = """
- name: Show value of 'variablename'
debug: msg="{{ lookup('vars', 'variabl' + myvar)}}"
vars:
variablename: hello
myvar: ename
- name: Show default empty since i dont have 'variablnotename'
debug: msg="{{ lookup('vars', 'variabl' + myvar, default='')}}"
vars:
variablename: hello
myvar: notename
- name: Produce an error since i dont have 'variablnotename'
debug: msg="{{ lookup('vars', 'variabl' + myvar)}}"
ignore_errors: True
vars:
variablename: hello
myvar: notename
- name: find several related variables
debug: msg="{{ lookup('vars', 'ansible_play_hosts', 'ansible_play_batch', 'ansible_play_hosts_all') }}"
- name: alternate way to find some 'prefixed vars' in loop
debug: msg="{{ lookup('vars', 'ansible_play_' + item) }}"
loop:
- hosts
- batch
- hosts_all
"""
RETURN = """
_value:
description:
- value of the variables requested.
"""
from ansible.errors import AnsibleError, AnsibleUndefinedVariable
from ansible.module_utils.six import string_types
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
    """Look up the templated value of one or more variables by name."""

    def run(self, terms, variables=None, **kwargs):
        # Make the caller-supplied variables visible to the templar.
        if variables is not None:
            self._templar.set_available_variables(variables)
        available = getattr(self._templar, '_available_variables', {})

        self.set_options(direct=kwargs)
        fallback = self.get_option('default')

        results = []
        for term in terms:
            if not isinstance(term, string_types):
                raise AnsibleError('Invalid setting identifier, "%s" is not a string, its a %s' % (term, type(term)))
            try:
                try:
                    found = available[term]
                except KeyError:
                    # Fall back to host-specific variables before giving up.
                    try:
                        found = available['hostvars'][available['inventory_hostname']][term]
                    except KeyError:
                        raise AnsibleUndefinedVariable('No variable found with this name: %s' % term)
                results.append(self._templar.template(found, fail_on_undefined=True))
            except AnsibleUndefinedVariable:
                # Undefined is only an error when no default was supplied.
                if fallback is None:
                    raise
                results.append(fallback)
        return results
|
churchlab/millstone
|
refs/heads/master
|
genome_designer/genome_finish/jbrowse_genome_finish.py
|
1
|
from collections import defaultdict
import gzip
import os
import re
import subprocess
from django.conf import settings
from settings import JBROWSE_DATA_URL_ROOT
from settings import S3_BUCKET
from main.model_utils import get_dataset_with_type
from main.models import Dataset
from utils.bam_utils import sort_bam_by_coordinate
from utils.bam_utils import index_bam
from utils.import_util import add_dataset_to_entity
from utils.jbrowse_util import prepare_jbrowse_ref_sequence
from utils.jbrowse_util import write_tracklist_json
def maybe_create_reads_to_contig_bam(contig):
    """Create the reads-to-contig alignment for *contig* unless one exists.

    A no-op when the contig already has a BWA_ALIGN Dataset.
    """
    already_aligned = contig.dataset_set.filter(
            type=Dataset.TYPE.BWA_ALIGN).exists()
    if already_aligned:
        return
    prepare_jbrowse_ref_sequence(contig)
    align_contig_reads_to_contig(contig)
    # TODO(dbg): Maybe fix.
    # add_contig_reads_to_contig_bam_track(contig, Dataset.TYPE.BWA_ALIGN)
def align_contig_reads_to_contig(contig):
    """
    Align the reads that were used to assemble *contig* back onto the contig
    itself, producing a coordinate-sorted, indexed BAM that is stored on the
    contig as its BWA_ALIGN dataset (replacing any existing one).
    """
    # Get fasta of reads used to make contig
    contig_reads_fasta = os.path.join(
            contig.get_model_data_dir(),
            'extracted_reads.fa')

    # Pull out contig read qnames and put in dictionary contig_reads.
    # FASTA headers look like '>qname/1' or '>qname/2' (read-pair number).
    p1 = re.compile('>(\S+)/(\d)')
    contig_reads = defaultdict(list)
    with open(contig_reads_fasta) as fh:
        for line in fh:
            m1 = p1.match(line)
            if m1:
                read_id = m1.group(1)
                read_number = int(m1.group(2))
                contig_reads[read_id].append(read_number)

    # Get source reads fastqs
    sample = contig.experiment_sample_to_alignment.experiment_sample
    source_fq1 = sample.dataset_set.get(
            type=Dataset.TYPE.FASTQ1).get_absolute_location()
    source_fq2_query = sample.dataset_set.filter(
            type=Dataset.TYPE.FASTQ2)
    # A second fastq dataset means the sample is paired-end.
    is_paired_end = source_fq2_query.exists()
    if is_paired_end:
        source_fq2 = source_fq2_query[0].get_absolute_location()

    # Make filenames for contig read fastqs
    output_fq1 = os.path.join(
            contig.get_model_data_dir(),
            'reads.1.fq')
    if is_paired_end:
        output_fq2 = os.path.join(
                contig.get_model_data_dir(),
                'reads.2.fq')

    # Go through source fastqs and write reads in contig_reads to file
    source_fq_list = [source_fq1]
    output_fq_list = [output_fq1]
    if is_paired_end:
        source_fq_list.append(source_fq2)
        output_fq_list.append(output_fq2)

    # Matches the qname on a FASTQ header line, e.g. '@qname ...'.
    p1 = re.compile('@(\S+)')
    for input_fq_path, output_fq_path in zip(source_fq_list, output_fq_list):
        # Support plain and gzip-compressed fastq inputs only.
        if input_fq_path.endswith('.fq'):
            file_like = open(input_fq_path)
        elif input_fq_path.endswith('.gz'):
            file_like = gzip.open(input_fq_path)
        else:
            raise Exception('Compression type not supported')
        with file_like as in_fh, \
                open(output_fq_path, 'w') as out_fh:
            for line in in_fh:
                m1 = p1.match(line)
                if m1:
                    qname = m1.group(1)
                    if qname in contig_reads:
                        # Copy the header plus the remaining three lines of
                        # the 4-line FASTQ record (sequence, '+', quality).
                        out_fh.write(line)
                        out_fh.write(in_fh.next())
                        out_fh.write(in_fh.next())
                        out_fh.write(in_fh.next())

    # Align fastqs to contig fasta
    contig_fasta = contig.dataset_set.get(
            type=Dataset.TYPE.REFERENCE_GENOME_FASTA).get_absolute_location()
    contig_reads_to_contig_bam = os.path.join(
            contig.get_model_data_dir(),
            'reads_to_contig.bam')
    simple_align_paired_with_bwa_mem(
            output_fq_list,
            contig_fasta,
            contig_reads_to_contig_bam)

    # Coordinate sort and index bam for jbrowse
    coordinate_sorted_bam = (os.path.splitext(contig_reads_to_contig_bam)[0] +
            '.coordinate_sorted.bam')
    sort_bam_by_coordinate(contig_reads_to_contig_bam, coordinate_sorted_bam)
    index_bam(coordinate_sorted_bam)

    # Add the bam file to contig as BWA_ALIGN dataset, overwriting it
    # if it already exists
    dataset_query = contig.dataset_set.filter(
            type=Dataset.TYPE.BWA_ALIGN)
    if dataset_query.count():
        dataset_query[0].delete()
    add_dataset_to_entity(
            contig,
            Dataset.TYPE.BWA_ALIGN,
            Dataset.TYPE.BWA_ALIGN,
            filesystem_location=coordinate_sorted_bam)
def simple_align_paired_with_bwa_mem(reads_fq, reference_fasta,
        output_bam_path):
    """
    Align one or two fastq files (list *reads_fq*) against *reference_fasta*
    with `bwa mem`, piping through samtools to write an unsorted BAM at
    *output_bam_path*.
    """
    # Ensure reference fasta is indexed
    # NOTE(review): the return code of `bwa index` is ignored
    # (subprocess.call, not check_call); a failed index only surfaces
    # later in the mem step — confirm this is acceptable.
    subprocess.call(' '.join([
        '%s/bwa/bwa' % settings.TOOLS_DIR,
        'index',
        reference_fasta
        ]),
        shell=True, executable=settings.BASH_PATH)

    # Align the fastqs to the reference
    align_input_args = [
            '%s/bwa/bwa' % settings.TOOLS_DIR,
            'mem',
            reference_fasta]
    align_input_args.extend(reads_fq)
    align_input_cmd = ' '.join(align_input_args)

    # To skip saving the SAM file to disk directly, pipe output directly to
    # make a BAM file.
    align_input_cmd += (' | ' + settings.SAMTOOLS_BINARY +
            ' view -bS -')

    # Run alignment
    # NOTE(review): the command is a string run with shell=True; paths
    # containing spaces or shell metacharacters would break or be
    # interpreted by bash — assumed safe for internally generated paths.
    with open(output_bam_path, 'w') as fh:
        subprocess.check_call(
                align_input_cmd, stdout=fh,
                shell=True, executable=settings.BASH_PATH)
def add_contig_reads_to_contig_bam_track(contig, alignment_type):
    """Update the JBrowse track config file, trackList.json, for this
    Contig with a track for the alignment of its reads to itself.

    Adds an Alignments2 track for the reads plus a companion SNPCoverage
    track, both written via write_tracklist_json().
    """
    # Get the bam file location from the Dataset of the contig
    # keyed by the alignment_type.
    bam_dataset = get_dataset_with_type(contig, alignment_type)

    # Figure out the url that JBrowse would use to show the data, e.g.:
    #     /jbrowse/gd_data/projects/58a62c7d/genomes/8dc829ec/align.bam
    # NOTE: We should construct bam file urls using project.get_client
    # jbrowse_link() rather than checking S3 flag here.
    if contig.parent_reference_genome.project.is_s3_backed():
        # BUGFIX: the previous code used str.strip("/jbrowse"), which strips
        # any of the characters "/jbrowse" from BOTH ends of the string,
        # not the "/jbrowse" prefix. Remove the prefix explicitly instead.
        location = bam_dataset.filesystem_location
        if location.startswith('/jbrowse'):
            location = location[len('/jbrowse'):]
        # os.path.join would discard the bucket URL for an absolute path.
        location = location.lstrip('/')
        urlTemplate = os.path.join(
                'http://%s.s3.amazonaws.com/' % S3_BUCKET, location)
    else:
        urlTemplate = os.path.join(JBROWSE_DATA_URL_ROOT,
                bam_dataset.filesystem_location)

    label = bam_dataset.internal_string(
            contig)
    key = bam_dataset.external_string(contig)

    # Build the JSON object for the reads-alignment track.
    raw_dict_obj = {
        'tracks': [
            {
                'storeClass': 'JBrowse/Store/SeqFeature/BAM',
                'urlTemplate': urlTemplate,
                'label': label,
                'type': 'JBrowse/View/Track/Alignments2',
                'chunkSizeLimit': 10000000,  # double the default chunk size
                'key': key,
                'category': 'Contig BAM Tracks',
                'style': {
                    'className': 'alignment',
                    'arrowheadClass': 'arrowhead',
                    'labelScale': 100
                }
            }
        ]}
    write_tracklist_json(contig, raw_dict_obj, label)

    # Also add a snp coverage track.
    snp_coverage_label = bam_dataset.internal_string(
            contig) + '_COVERAGE'
    snp_coverage_key = key + ' Coverage'
    coverage_raw_dict_obj = {
        'tracks': [
            {
                'storeClass': 'JBrowse/Store/SeqFeature/BAM',
                'urlTemplate': urlTemplate,
                'label': snp_coverage_label,
                'type': 'JBrowse/View/Track/SNPCoverage',
                'category': 'Contig Coverage Tracks',
                'key': snp_coverage_key
            }
        ]}
    write_tracklist_json(contig, coverage_raw_dict_obj,
            snp_coverage_label)
def add_contig_reads_bam_track(contig, alignment_type):
    """Update the JBrowse track config file, trackList.json, for this
    ReferenceGenome with a track for the given sample_alignment and
    alignment_type.

    Adds an Alignments2 track plus a companion SNPCoverage track, both
    registered on the contig's parent reference genome.
    """
    # Get the bam file location from the Dataset of the genome
    # keyed by the alignment_type.
    bam_dataset = get_dataset_with_type(contig, alignment_type)

    # Figure out the url that JBrowse would use to show the data, e.g.:
    #     /jbrowse/gd_data/projects/58a62c7d/genomes/8dc829ec/align.bam
    # NOTE: We should construct bam file urls using project.get_client
    # jbrowse_link() rather than checking S3 flag here.
    reference_genome = contig.parent_reference_genome
    if reference_genome.project.is_s3_backed():
        # BUGFIX: the previous code used str.strip("/jbrowse"), which strips
        # any of the characters "/jbrowse" from BOTH ends of the string,
        # not the "/jbrowse" prefix. Remove the prefix explicitly instead.
        location = bam_dataset.filesystem_location
        if location.startswith('/jbrowse'):
            location = location[len('/jbrowse'):]
        # os.path.join would discard the bucket URL for an absolute path.
        location = location.lstrip('/')
        urlTemplate = os.path.join(
                'http://%s.s3.amazonaws.com/' % S3_BUCKET, location)
    else:
        urlTemplate = os.path.join(JBROWSE_DATA_URL_ROOT,
                bam_dataset.filesystem_location)

    # doing label as ES_AG because SA isn't currently used in the variant view
    label = bam_dataset.internal_string(contig)
    key = bam_dataset.external_string(contig)

    # Build the JSON object for the reads-alignment track.
    raw_dict_obj = {
        'tracks': [
            {
                'storeClass': 'JBrowse/Store/SeqFeature/BAM',
                'urlTemplate': urlTemplate,
                'label': label,
                'type': 'JBrowse/View/Track/Alignments2',
                'chunkSizeLimit': 10000000,  # double the default chunk size
                'key': key,
                'category': 'Contig BAM Tracks',
                'style': {
                    'className': 'alignment',
                    'arrowheadClass': 'arrowhead',
                    'labelScale': 100
                }
            }
        ]}
    write_tracklist_json(reference_genome, raw_dict_obj, label)

    # Also add a snp coverage track.
    snp_coverage_label = bam_dataset.internal_string(
            contig) + '_COVERAGE'
    snp_coverage_key = key + ' Coverage'
    coverage_raw_dict_obj = {
        'tracks': [
            {
                'storeClass': 'JBrowse/Store/SeqFeature/BAM',
                'urlTemplate': urlTemplate,
                'label': snp_coverage_label,
                'type': 'JBrowse/View/Track/SNPCoverage',
                'category': 'Contig Coverage Tracks',
                'key': snp_coverage_key
            }
        ]}
    write_tracklist_json(reference_genome, coverage_raw_dict_obj,
            snp_coverage_label)
|
sivel/ansible-modules-core
|
refs/heads/devel
|
network/nxos/nxos_snmp_location.py
|
25
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: nxos_snmp_location
version_added: "2.2"
short_description: Manages SNMP location information.
description:
- Manages SNMP location configuration.
extends_documentation_fragment: nxos
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
options:
location:
description:
- Location information.
required: true
state:
description:
- Manage the state of the resource.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
# ensure snmp location is configured
- nxos_snmp_location:
location: Test
state: present
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ pwd }}"
# ensure snmp location is not configured
- nxos_snmp_location:
location: Test
state: absent
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ pwd }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"location": "New_Test"}
existing:
description: k/v pairs of existing snmp location
type: dict
sample: {"location": "Test"}
end_state:
description: k/v pairs of location info after module execution
returned: always
type: dict or null
sample: {"location": "New_Test"}
updates:
description: command sent to the device
returned: always
type: list
sample: ["snmp-server location New_Test"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import json
# COMMON CODE FOR MIGRATION
import re
from ansible.module_utils.basic import get_exception
from ansible.module_utils.netcfg import NetworkConfig, ConfigLine
from ansible.module_utils.shell import ShellError
try:
from ansible.module_utils.nxos import get_module
except ImportError:
from ansible.module_utils.nxos import NetworkModule
def to_list(val):
    """Normalize *val* to a list.

    Lists and tuples are copied into a new list, None becomes an empty
    list, and any other single value is wrapped in a one-element list.
    """
    if val is None:
        return list()
    if isinstance(val, (list, tuple)):
        return list(val)
    return [val]
class CustomNetworkConfig(NetworkConfig):
    """NetworkConfig extended with section-aware lookup and insertion
    helpers used by the migrated nxos modules."""

    def expand_section(self, configobj, S=None):
        # Depth-first collection of configobj and all of its descendants.
        if S is None:
            S = list()
        S.append(configobj)
        for child in configobj.children:
            if child in S:
                continue
            self.expand_section(child, S)
        return S

    def get_object(self, path):
        # Find the ConfigLine whose text and full parent chain match *path*;
        # returns None implicitly when nothing matches.
        for item in self.items:
            if item.text == path[-1]:
                parents = [p.text for p in item.parents]
                if parents == path[:-1]:
                    return item

    def to_block(self, section):
        # Render a list of ConfigLine objects back into raw config text.
        return '\n'.join([item.raw for item in section])

    def get_section(self, path):
        # Return the section at *path* as a text block, or [] when absent.
        try:
            section = self.get_section_objects(path)
            return self.to_block(section)
        except ValueError:
            return list()

    def get_section_objects(self, path):
        # As get_section() but returning ConfigLine objects; raises
        # ValueError when the path does not exist.
        if not isinstance(path, list):
            path = [path]
        obj = self.get_object(path)
        if not obj:
            raise ValueError('path does not exist in config')
        return self.expand_section(obj)

    def add(self, lines, parents=None):
        """Adds one or lines of configuration, creating any missing
        parent sections along the way."""
        ancestors = list()
        offset = 0
        obj = None

        ## global config command
        if not parents:
            for line in to_list(lines):
                item = ConfigLine(line)
                item.raw = line
                if item not in self.items:
                    self.items.append(item)
        else:
            for index, p in enumerate(parents):
                try:
                    i = index + 1
                    obj = self.get_section_objects(parents[:i])[0]
                    ancestors.append(obj)
                except ValueError:
                    # add parent to config
                    offset = index * self.indent
                    obj = ConfigLine(p)
                    obj.raw = p.rjust(len(p) + offset)
                    if ancestors:
                        obj.parents = list(ancestors)
                        ancestors[-1].children.append(obj)
                    self.items.append(obj)
                    ancestors.append(obj)

            # add child objects
            for line in to_list(lines):
                # check if child already exists
                for child in ancestors[-1].children:
                    if child.text == line:
                        break
                else:
                    # Child not present: indent relative to its parents
                    # and attach it to the deepest ancestor.
                    offset = len(parents) * self.indent
                    item = ConfigLine(line)
                    item.raw = line.rjust(len(line) + offset)
                    item.parents = ancestors
                    ancestors[-1].children.append(item)
                    self.items.append(item)
def get_network_module(**kwargs):
    """Build the network module, preferring the legacy get_module() factory
    and falling back to NetworkModule when that import was unavailable."""
    try:
        return get_module(**kwargs)
    except NameError:
        # get_module failed to import at module load time; use the newer API.
        return NetworkModule(**kwargs)
def get_config(module, include_defaults=False):
    """Return the device configuration wrapped in a CustomNetworkConfig.

    Uses the user-supplied 'config' module parameter when present,
    otherwise fetches the running configuration from the device.
    """
    contents = module.params['config']
    if not contents:
        try:
            contents = module.get_config()
        except AttributeError:
            # Newer connection objects expose the config via module.config.
            contents = module.config.get_config(
                include_defaults=module.params['include_defaults'])
    return CustomNetworkConfig(indent=2, contents=contents)
def load_config(module, candidate):
    """Push the difference between *candidate* and the running config.

    Returns a result dict with 'changed' and, when commands were needed,
    'updates'.  Honors check mode and the 'save' module parameter.
    """
    running = get_config(module)
    commands = [str(c).strip() for c in candidate.difference(running)]

    result = dict(changed=False)
    if not commands:
        return result

    if not module.check_mode:
        try:
            module.configure(commands)
        except AttributeError:
            # Some module objects expose config() instead of configure().
            module.config(commands)
        if module.params['save']:
            try:
                module.config.save_config()
            except AttributeError:
                module.execute(['copy running-config startup-config'])
    result['changed'] = True
    result['updates'] = commands
    return result
# END OF COMMON CODE
def execute_config_command(commands, module):
    """Send configuration commands to the device, supporting both the
    legacy configure() API and the newer cli API.

    NOTE: on the cli fallback path this mutates *commands* in place by
    prepending 'configure'; main() compensates by popping it afterwards.
    """
    try:
        module.configure(commands)
    except ShellError:
        clie = get_exception()
        module.fail_json(msg='Error sending CLI commands',
                         error=str(clie), commands=commands)
    except AttributeError:
        # Module object has no configure(); fall back to the cli interface.
        try:
            commands.insert(0, 'configure')
            module.cli.add_commands(commands, output='config')
            module.cli.run_commands()
        except ShellError:
            clie = get_exception()
            module.fail_json(msg='Error sending CLI commands',
                             error=str(clie), commands=commands)
def get_cli_body_ssh(command, response, module):
    """Normalize CLI (ssh) output for modules originally written for NX-API.

    Not every command supports "| json" over cli/ssh: an XML string in the
    response means the command was valid but the resource does not exist
    yet, and 'show run' output is always a raw string.  Anything else must
    parse as JSON or the module fails.
    """
    if 'xml' in response[0]:
        # Valid command, resource absent: "| json" came back as XML.
        return []
    if 'show run' in command:
        # Raw running-config output is passed through untouched.
        return response
    try:
        return [json.loads(response[0])]
    except ValueError:
        module.fail_json(msg='Command does not support JSON output',
                         command=command)
def execute_show(cmds, module, command_type=None):
    """Run show commands via the legacy execute() API, falling back to the
    cli API (with command types mapped to output formats) when absent."""
    # Legacy command_type names -> cli output formats.
    command_type_map = {
        'cli_show': 'json',
        'cli_show_ascii': 'text'
    }

    try:
        if command_type:
            response = module.execute(cmds, command_type=command_type)
        else:
            response = module.execute(cmds)
    except ShellError:
        clie = get_exception()
        module.fail_json(msg='Error sending {0}'.format(cmds),
                         error=str(clie))
    except AttributeError:
        # Module object has no execute(); use the cli interface instead.
        try:
            if command_type:
                command_type = command_type_map.get(command_type)
                module.cli.add_commands(cmds, output=command_type)
                response = module.cli.run_commands()
            else:
                module.cli.add_commands(cmds, raw=True)
                response = module.cli.run_commands()
        except ShellError:
            clie = get_exception()
            module.fail_json(msg='Error sending {0}'.format(cmds),
                             error=str(clie))
    return response
def execute_show_command(command, module, command_type='cli_show'):
    """Run a show command over the active transport and return its body.

    Over 'cli' the command is suffixed with '| json' (except 'show run')
    and the raw response is normalized; over 'nxapi' the command type is
    forwarded unchanged.
    """
    transport = module.params['transport']
    if transport == 'cli':
        if 'show run' not in command:
            command += ' | json'
        response = execute_show([command], module)
        body = get_cli_body_ssh(command, response, module)
    elif transport == 'nxapi':
        body = execute_show([command], module, command_type=command_type)
    return body
def apply_key_map(key_map, table):
    """Rename the keys of *table* using *key_map*.

    Keys absent from key_map (or mapping to a falsy name) are dropped.
    Truthy values are stringified; falsy values are kept as-is.
    """
    renamed = {}
    for old_key, value in table.items():
        new_key = key_map.get(old_key)
        if not new_key:
            continue
        renamed[new_key] = str(value) if value else value
    return renamed
def flatten_list(command_lists):
    """Flatten a mixed list of commands and command lists by one level."""
    flat = []
    for entry in command_lists:
        flat.extend(entry if isinstance(entry, list) else [entry])
    return flat
def get_snmp_location(module):
    """Return {'location': value} parsed from 'show run snmp', or {} when
    no snmp-server location is configured (or the output is unusable)."""
    command = 'show run snmp'
    body = execute_show_command(command, module, command_type='cli_show_ascii')
    try:
        match = re.match(r'.*snmp-server\slocation\s(?P<location>\S+).*',
                         body[0], re.DOTALL)
        # match is None when the line is absent -> AttributeError -> {}.
        return {'location': match.groupdict()['location']}
    except (AttributeError, TypeError):
        return {}
def main():
    """Entry point: ensure the SNMP location is present or absent."""
    argument_spec = dict(
        location=dict(required=True, type='str'),
        state=dict(choices=['absent', 'present'],
                   default='present')
    )
    module = get_network_module(argument_spec=argument_spec,
                                supports_check_mode=True)

    location = module.params['location']
    state = module.params['state']

    existing = get_snmp_location(module)
    proposed = dict(location=location)
    end_state = existing
    changed = False

    # Decide which command, if any, reconciles device state with intent.
    commands = []
    if state == 'absent' and existing and existing['location'] == location:
        commands.append('no snmp-server location')
    elif state == 'present' and (not existing or existing['location'] != location):
        commands.append('snmp-server location {0}'.format(location))

    cmds = flatten_list(commands)
    if cmds:
        if module.check_mode:
            module.exit_json(changed=True, commands=cmds)
        else:
            changed = True
            execute_config_command(cmds, module)
            end_state = get_snmp_location(module)
            # execute_config_command may have prepended 'configure'.
            if 'configure' in cmds:
                cmds.pop(0)

    results = {
        'proposed': proposed,
        'existing': existing,
        'end_state': end_state,
        'updates': cmds,
        'changed': changed,
    }
    module.exit_json(**results)
from ansible.module_utils.basic import *
if __name__ == "__main__":
main()
|
CredoReference/edx-platform
|
refs/heads/integration-hawthorn-qa
|
openedx/core/djangoapps/ace_common/settings/aws.py
|
13
|
def plugin_settings(settings):
    """Override ACE defaults with values from ENV_TOKENS / AUTH_TOKENS.

    Each setting keeps its current value when the corresponding token is
    absent.  API credentials come from AUTH_TOKENS; everything else comes
    from ENV_TOKENS.
    """
    env_overridable = (
        'ACE_ENABLED_CHANNELS',
        'ACE_ENABLED_POLICIES',
        'ACE_CHANNEL_SAILTHRU_DEBUG',
        'ACE_CHANNEL_SAILTHRU_TEMPLATE_NAME',
        'ACE_ROUTING_KEY',
        'ACE_CHANNEL_DEFAULT_EMAIL',
        'ACE_CHANNEL_TRANSACTIONAL_EMAIL',
    )
    for name in env_overridable:
        setattr(settings, name,
                settings.ENV_TOKENS.get(name, getattr(settings, name)))

    # Secrets live in AUTH_TOKENS rather than ENV_TOKENS.
    auth_overridable = (
        'ACE_CHANNEL_SAILTHRU_API_KEY',
        'ACE_CHANNEL_SAILTHRU_API_SECRET',
    )
    for name in auth_overridable:
        setattr(settings, name,
                settings.AUTH_TOKENS.get(name, getattr(settings, name)))
|
espadrine/opera
|
refs/heads/master
|
chromium/src/build/android/pylib/host_driven/__init__.py
|
998
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
|
sodexis/odoo
|
refs/heads/8.0
|
addons/lunch/tests/__init__.py
|
260
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_lunch
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
2014c2g4/2015cda0623
|
refs/heads/master
|
static/Brython3.1.0-20150301-090019/Lib/sre_compile.py
|
630
|
#
# Secret Labs' Regular Expression Engine
#
# convert template to internal format
#
# Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#
"""Internal support module for sre"""
import sys
import _sre
import sre_parse
from sre_constants import *
from _sre import MAXREPEAT
# Sanity check: the compiled _sre accelerator must agree with this module's
# opcode numbering, or the emitted code would be misinterpreted.
assert _sre.MAGIC == MAGIC, "SRE module mismatch"
# Largest value storable in one code word, dictated by the C module's word
# size (2 or 4 bytes).
if _sre.CODESIZE == 2:
    MAXCODE = 65535
else:
    MAXCODE = 0xFFFFFFFF
def _identityfunction(x):
return x
# Opcode groups used by _compile() to select an emission strategy.
_LITERAL_CODES = set([LITERAL, NOT_LITERAL])
_REPEATING_CODES = set([REPEAT, MIN_REPEAT, MAX_REPEAT])
_SUCCESS_CODES = set([SUCCESS, FAILURE])
_ASSERT_CODES = set([ASSERT, ASSERT_NOT])
def _compile(code, pattern, flags):
    """Append the opcode stream for a parsed (sub)pattern to *code*.

    *pattern* is a sequence of (op, av) pairs produced by sre_parse;
    *flags* are the SRE_FLAG_* compilation flags.  The common pattern
    ``skip = _len(code); emit(0)`` reserves a slot that is later
    back-patched with the relative length of the section just emitted.
    """
    # internal: compile a (sub)pattern
    emit = code.append
    _len = len
    # Bind module-level opcode sets to locals (faster lookups in the loop).
    LITERAL_CODES = _LITERAL_CODES
    REPEATING_CODES = _REPEATING_CODES
    SUCCESS_CODES = _SUCCESS_CODES
    ASSERT_CODES = _ASSERT_CODES
    for op, av in pattern:
        #print('sre_compile.py:_compile:42', op, av)
        #print('sre_compile.py:_compile:42', code)
        if op in LITERAL_CODES:
            # LITERAL / NOT_LITERAL; case-folded variant when IGNORECASE.
            if flags & SRE_FLAG_IGNORECASE:
                emit(OPCODES[OP_IGNORE[op]])
                emit(_sre.getlower(av, flags))
            else:
                emit(OPCODES[op])
                emit(av)
        elif op is IN:
            # Character set: emit opcode, reserved skip slot, then the set.
            if flags & SRE_FLAG_IGNORECASE:
                emit(OPCODES[OP_IGNORE[op]])
                def fixup(literal, flags=flags):
                    return _sre.getlower(literal, flags)
            else:
                emit(OPCODES[op])
                fixup = _identityfunction
            skip = _len(code); emit(0)
            _compile_charset(av, flags, code, fixup)
            code[skip] = _len(code) - skip
        elif op is ANY:
            if flags & SRE_FLAG_DOTALL:
                emit(OPCODES[ANY_ALL])
            else:
                emit(OPCODES[ANY])
        elif op in REPEATING_CODES:
            if flags & SRE_FLAG_TEMPLATE:
                raise error("internal: unsupported template operator")
                # NOTE(review): the following lines are unreachable (after
                # raise); kept verbatim from upstream sre_compile.
                emit(OPCODES[REPEAT])
                skip = _len(code); emit(0)
                emit(av[0])
                emit(av[1])
                _compile(code, av[2], flags)
                emit(OPCODES[SUCCESS])
                code[skip] = _len(code) - skip
            elif _simple(av) and op is not REPEAT:
                # Single-width repeat target: use the fast REPEAT_ONE forms.
                if op is MAX_REPEAT:
                    emit(OPCODES[REPEAT_ONE])
                else:
                    emit(OPCODES[MIN_REPEAT_ONE])
                skip = _len(code); emit(0)
                emit(av[0])
                emit(av[1])
                _compile(code, av[2], flags)
                emit(OPCODES[SUCCESS])
                code[skip] = _len(code) - skip
            else:
                # General repeat: body followed by MAX_UNTIL / MIN_UNTIL.
                emit(OPCODES[REPEAT])
                skip = _len(code); emit(0)
                emit(av[0])
                emit(av[1])
                _compile(code, av[2], flags)
                code[skip] = _len(code) - skip
                if op is MAX_REPEAT:
                    emit(OPCODES[MAX_UNTIL])
                else:
                    emit(OPCODES[MIN_UNTIL])
        elif op is SUBPATTERN:
            # Group: bracket the body with MARK opcodes when numbered.
            if av[0]:
                emit(OPCODES[MARK])
                emit((av[0]-1)*2)
            # _compile_info(code, av[1], flags)
            _compile(code, av[1], flags)
            if av[0]:
                emit(OPCODES[MARK])
                emit((av[0]-1)*2+1)
        elif op in SUCCESS_CODES:
            emit(OPCODES[op])
        elif op in ASSERT_CODES:
            # Lookahead/lookbehind; lookbehind must be fixed-width.
            emit(OPCODES[op])
            skip = _len(code); emit(0)
            if av[0] >= 0:
                emit(0) # look ahead
            else:
                lo, hi = av[1].getwidth()
                if lo != hi:
                    raise error("look-behind requires fixed-width pattern")
                emit(lo) # look behind
            _compile(code, av[1], flags)
            emit(OPCODES[SUCCESS])
            code[skip] = _len(code) - skip
        elif op is CALL:
            emit(OPCODES[op])
            skip = _len(code); emit(0)
            _compile(code, av, flags)
            emit(OPCODES[SUCCESS])
            code[skip] = _len(code) - skip
        elif op is AT:
            # Anchor; remapped per MULTILINE/LOCALE/UNICODE flags.
            emit(OPCODES[op])
            if flags & SRE_FLAG_MULTILINE:
                av = AT_MULTILINE.get(av, av)
            if flags & SRE_FLAG_LOCALE:
                av = AT_LOCALE.get(av, av)
            elif flags & SRE_FLAG_UNICODE:
                av = AT_UNICODE.get(av, av)
            emit(ATCODES[av])
        elif op is BRANCH:
            # Alternation: each alternative ends in a JUMP to the common tail.
            emit(OPCODES[op])
            tail = []
            tailappend = tail.append
            for av in av[1]:
                skip = _len(code); emit(0)
                # _compile_info(code, av, flags)
                _compile(code, av, flags)
                emit(OPCODES[JUMP])
                tailappend(_len(code)); emit(0)
                code[skip] = _len(code) - skip
            emit(0) # end of branch
            for tail in tail:
                code[tail] = _len(code) - tail
        elif op is CATEGORY:
            emit(OPCODES[op])
            if flags & SRE_FLAG_LOCALE:
                av = CH_LOCALE[av]
            elif flags & SRE_FLAG_UNICODE:
                av = CH_UNICODE[av]
            emit(CHCODES[av])
        elif op is GROUPREF:
            if flags & SRE_FLAG_IGNORECASE:
                emit(OPCODES[OP_IGNORE[op]])
            else:
                emit(OPCODES[op])
            emit(av-1)
        elif op is GROUPREF_EXISTS:
            # Conditional on a group having matched: yes-branch, optional
            # no-branch, with back-patched skip offsets.
            emit(OPCODES[op])
            emit(av[0]-1)
            skipyes = _len(code); emit(0)
            _compile(code, av[1], flags)
            if av[2]:
                emit(OPCODES[JUMP])
                skipno = _len(code); emit(0)
                code[skipyes] = _len(code) - skipyes + 1
                _compile(code, av[2], flags)
                code[skipno] = _len(code) - skipno
            else:
                code[skipyes] = _len(code) - skipyes + 1
        else:
            raise ValueError("unsupported operand type", op)
def _compile_charset(charset, flags, code, fixup=None):
    """Emit the opcode subprogram for a character set.

    The set is first simplified by _optimize_charset(); *fixup* maps
    literal code points (e.g. for case folding) and defaults to the
    identity.  The subprogram is terminated with FAILURE.
    """
    # compile charset subprogram
    emit = code.append
    if fixup is None:
        fixup = _identityfunction
    for op, av in _optimize_charset(charset, fixup):
        emit(OPCODES[op])
        if op is NEGATE:
            pass
        elif op is LITERAL:
            emit(fixup(av))
        elif op is RANGE:
            emit(fixup(av[0]))
            emit(fixup(av[1]))
        elif op is CHARSET:
            # Pre-packed 256-bit bitmap words.
            code.extend(av)
        elif op is BIGCHARSET:
            # Header + chunk table produced by _optimize_unicode().
            code.extend(av)
        elif op is CATEGORY:
            # Category code depends on LOCALE/UNICODE matching mode.
            if flags & SRE_FLAG_LOCALE:
                emit(CHCODES[CH_LOCALE[av]])
            elif flags & SRE_FLAG_UNICODE:
                emit(CHCODES[CH_UNICODE[av]])
            else:
                emit(CHCODES[av])
        else:
            raise error("internal: unsupported set operator")
    emit(OPCODES[FAILURE])
def _optimize_charset(charset, fixup):
    """Simplify a charset item list, or return it unchanged.

    Builds a 256-entry occupancy map of the (fixed-up) characters.  If
    the map collapses to at most two runs, literal/range items are used;
    otherwise a packed CHARSET bitmap is emitted.  Sets containing
    categories cannot be compressed; sets with non-Latin-1 characters
    are delegated to _optimize_unicode().
    """
    # internal: optimize character set
    out = []
    outappend = out.append
    charmap = [0]*256
    try:
        for op, av in charset:
            if op is NEGATE:
                outappend((op, av))
            elif op is LITERAL:
                charmap[fixup(av)] = 1
            elif op is RANGE:
                for i in range(fixup(av[0]), fixup(av[1])+1):
                    charmap[i] = 1
            elif op is CATEGORY:
                # XXX: could append to charmap tail
                return charset # cannot compress
    except IndexError:
        # character set contains unicode characters (code point > 255)
        return _optimize_unicode(charset, fixup)
    # compress character map: collect runs of consecutive set bits
    i = p = n = 0
    runs = []
    runsappend = runs.append
    for c in charmap:
        if c:
            if n == 0:
                p = i
            n = n + 1
        elif n:
            runsappend((p, n))
            n = 0
        i = i + 1
    if n:
        runsappend((p, n))
    if len(runs) <= 2:
        # use literal/range
        for p, n in runs:
            if n == 1:
                outappend((LITERAL, p))
            else:
                outappend((RANGE, (p, p+n-1)))
        # Only worthwhile if shorter than the original item list.
        if len(out) < len(charset):
            return out
    else:
        # use bitmap
        data = _mk_bitmap(charmap)
        outappend((CHARSET, data))
        return out
    return charset
def _mk_bitmap(bits):
    """Pack a sequence of 0/1 flags into a list of code words.

    Flags are packed LSB-first; a word is flushed to the output every
    time the running bit mask exceeds MAXCODE (i.e. the word is full).
    """
    data = []
    dataappend = data.append
    # Both branches of the historical ``_sre.CODESIZE == 2`` test assigned
    # the same tuple (a Python 2 ``(1L, 0L)`` leftover), so the dead
    # conditional has been removed.
    start = (1, 0)
    m, v = start
    for c in bits:
        if c:
            v = v + m
        m = m + m
        if m > MAXCODE:
            dataappend(v)
            m, v = start
    return data
# To represent a big charset, first a bitmap of all characters in the
# set is constructed. Then, this bitmap is sliced into chunks of 256
# characters, duplicate chunks are eliminated, and each chunk is
# given a number. In the compiled expression, the charset is
# represented by a 16-bit word sequence, consisting of one word for
# the number of different chunks, a sequence of 256 bytes (128 words)
# of chunk numbers indexed by their original chunk position, and a
# sequence of chunks (16 words each).
# Compression is normally good: in a typical charset, large ranges of
# Unicode will be either completely excluded (e.g. if only cyrillic
# letters are to be matched), or completely included (e.g. if large
# subranges of Kanji match). These ranges will be represented by
# chunks of all one-bits or all zero-bits.
# Matching can be also done efficiently: the more significant byte of
# the Unicode character is an index into the chunk number, and the
# less significant byte is a bit index in the chunk (just like the
# CHARSET matching).
# In UCS-4 mode, the BIGCHARSET opcode still supports only subsets
# of the basic multilingual plane; an efficient representation
# for all of UTF-16 has not yet been developed. This means,
# in particular, that negated charsets cannot be represented as
# bigcharsets.
def _optimize_unicode(charset, fixup):
    """Compress a charset with BMP code points into a BIGCHARSET item.

    Builds a 65536-entry occupancy map, slices it into 256 chunks of
    256 characters, de-duplicates the chunks, and returns a single
    (BIGCHARSET, data) item: [chunk count] + [256-byte chunk index,
    packed into code words] + [the unique chunk bitmaps].
    Returns *charset* unchanged when it cannot be compressed.
    """
    try:
        import array
    except ImportError:
        return charset
    charmap = [0]*65536
    negate = 0
    try:
        for op, av in charset:
            if op is NEGATE:
                negate = 1
            elif op is LITERAL:
                charmap[fixup(av)] = 1
            elif op is RANGE:
                for i in range(fixup(av[0]), fixup(av[1])+1):
                    charmap[i] = 1
            elif op is CATEGORY:
                # XXX: could expand category
                return charset # cannot compress
    except IndexError:
        # non-BMP characters; XXX now they should work
        return charset
    if negate:
        if sys.maxunicode != 65535:
            # XXX: negation does not work with big charsets
            # XXX2: now they should work, but removing this will make the
            # charmap 17 times bigger
            return charset
        for i in range(65536):
            charmap[i] = not charmap[i]
    # De-duplicate 256-char chunks; mapping[i] is the chunk index for the
    # i-th 256-character slice of the map.
    comps = {}
    mapping = [0]*256
    block = 0
    data = []
    for i in range(256):
        chunk = tuple(charmap[i*256:(i+1)*256])
        new = comps.setdefault(chunk, block)
        mapping[i] = new
        if new == block:
            # First occurrence of this chunk: append its packed bitmap.
            block = block + 1
            data = data + _mk_bitmap(chunk)
    header = [block]
    if _sre.CODESIZE == 2:
        code = 'H'
    else:
        code = 'I'
    # Convert block indices to byte array of 256 bytes
    mapping = array.array('b', mapping).tobytes()
    # Convert byte array to word array
    mapping = array.array(code, mapping)
    assert mapping.itemsize == _sre.CODESIZE
    assert len(mapping) * mapping.itemsize == 256
    header = header + mapping.tolist()
    data[0:0] = header
    return [(BIGCHARSET, data)]
def _simple(av):
    """Return True when the repeat target in *av* is exactly one
    character wide and not a subpattern (eligible for the fast
    REPEAT_ONE / MIN_REPEAT_ONE opcodes)."""
    subpattern = av[2]
    lo, hi = subpattern.getwidth()
    if lo == 0 and hi == MAXREPEAT:
        raise error("nothing to repeat")
    if lo != 1 or hi != 1:
        return False
    return subpattern[0][0] != SUBPATTERN
def _compile_info(code, pattern, flags):
    """Emit an optional INFO block used by _sre to speed up searching.

    The block records min/max match width plus, when available, either
    a literal prefix (with a KMP-style overlap table) or a first-
    character set.  Nothing is emitted for zero-width patterns.
    """
    # internal: compile an info block.  in the current version,
    # this contains min/max pattern width, and an optional literal
    # prefix or a character map
    lo, hi = pattern.getwidth()
    #print('sre_compile.py:_compile_info:370', lo, hi)
    if lo == 0:
        return # not worth it
    # look for a literal prefix
    prefix = []
    prefixappend = prefix.append
    prefix_skip = 0
    charset = [] # not used
    charsetappend = charset.append
    if not (flags & SRE_FLAG_IGNORECASE):
        # look for literal prefix
        for op, av in pattern.data:
            #print('sre_compile.py:_code:381',op,av)
            if op is LITERAL:
                if len(prefix) == prefix_skip:
                    prefix_skip = prefix_skip + 1
                prefixappend(av)
            elif op is SUBPATTERN and len(av[1]) == 1:
                # A one-item group still contributes to the prefix, but
                # stops advancing prefix_skip (the "literal" portion).
                op, av = av[1][0]
                if op is LITERAL:
                    prefixappend(av)
                else:
                    break
            else:
                break
        # if no prefix, look for charset prefix
        if not prefix and pattern.data:
            op, av = pattern.data[0]
            if op is SUBPATTERN and av[1]:
                op, av = av[1][0]
                if op is LITERAL:
                    charsetappend((op, av))
                elif op is BRANCH:
                    # First char of each alternative inside the group.
                    c = []
                    cappend = c.append
                    for p in av[1]:
                        if not p:
                            break
                        op, av = p[0]
                        if op is LITERAL:
                            cappend((op, av))
                        else:
                            break
                    else:
                        charset = c
            elif op is BRANCH:
                # Same scan for a top-level alternation.
                c = []
                cappend = c.append
                for p in av[1]:
                    if not p:
                        break
                    op, av = p[0]
                    if op is LITERAL:
                        cappend((op, av))
                    else:
                        break
                else:
                    charset = c
            elif op is IN:
                charset = av
    #print('sre_compile.py:_code:430', code)
##     if prefix:
##         print "*** PREFIX", prefix, prefix_skip
##     if charset:
##         print "*** CHARSET", charset
    # add an info block
    emit = code.append
    emit(OPCODES[INFO])
    skip = len(code); emit(0)
    # literal flag
    mask = 0
    if prefix:
        mask = SRE_INFO_PREFIX
        if len(prefix) == prefix_skip == len(pattern.data):
            mask = mask + SRE_INFO_LITERAL
    elif charset:
        mask = mask + SRE_INFO_CHARSET
    emit(mask)
    # pattern length (clamped to what fits in one code word)
    if lo < MAXCODE:
        emit(lo)
    else:
        emit(MAXCODE)
        prefix = prefix[:MAXCODE]
    if hi < MAXCODE:
        emit(hi)
    else:
        # 0 here means "unbounded / unknown" to the matcher.
        emit(0)
    # add literal prefix
    #print('sre_compile.py:_code:457', code)
    if prefix:
        emit(len(prefix)) # length
        emit(prefix_skip) # skip
        code.extend(prefix)
        # generate overlap table (KMP failure function over the prefix)
        table = [-1] + ([0]*len(prefix))
        for i in range(len(prefix)):
            table[i+1] = table[i]+1
            while table[i+1] > 0 and prefix[i] != prefix[table[i+1]-1]:
                table[i+1] = table[table[i+1]-1]+1
        code.extend(table[1:]) # don't store first entry
    elif charset:
        _compile_charset(charset, flags, code)
    code[skip] = len(code) - skip
def isstring(obj):
    """Return True for text (str) or binary (bytes) pattern sources."""
    return isinstance(obj, str) or isinstance(obj, bytes)
def _code(p, flags):
    """Lower a parsed pattern *p* into the flat opcode list used by _sre."""
    combined = p.pattern.flags | flags
    code = []
    # Optional INFO header (fast-search metadata), then the pattern body,
    # terminated by SUCCESS.
    _compile_info(code, p, combined)
    _compile(code, p.data, combined)
    code.append(OPCODES[SUCCESS])
    return code
def compile(p, flags=0):
    """Compile a pattern (text/bytes or pre-parsed) to a _sre pattern object."""
    if isstring(p):
        source = p
        p = sre_parse.parse(p, flags)
    else:
        source = None
    code = _code(p, flags)
    # XXX: <fl> get rid of this limitation!
    if p.pattern.groups > 100:
        raise AssertionError(
            "sorry, but this version only supports 100 named groups"
            )
    # Build the group name <-> index maps in both directions.
    groupindex = p.pattern.groupdict
    indexgroup = [None] * p.pattern.groups
    for name, index in groupindex.items():
        indexgroup[index] = name
    return _sre.compile(
        source, flags | p.pattern.flags, code,
        p.pattern.groups - 1,
        groupindex, indexgroup
        )
|
bjackman/trappy
|
refs/heads/master
|
trappy/compare_runs.py
|
3
|
# Copyright 2015-2017 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import trappy.ftrace
def compare_runs(actor_order, map_label, runs, **kwords):
    """A side by side comparison of multiple runs.

    Plots include temperature, utilization, frequencies, PID
    controller and power.

    :param actor_order: An array showing the order in which the actors
        were registered.  The array values are the labels that will be
        used in the input and output power plots.

        For example::

            ["GPU", "A15", "A7"]

    :param map_label: A dict that matches cpumasks (as found in the
        trace) with their proper name.  This "proper name" will be used
        as a label for the load and allfreqs plots.  It's recommended
        that the names of the cpus matches those in actor_order.

        For example::

            {"0000000f": "A7", "000000f0": "A15"}

    :param runs: An array of tuples consisting of a name and the path to
        the directory where the trace.dat is.

        For example::

            [("experiment1", "wa_output/antutu_antutu_1"),
             ("known good", "good/antutu_antutu_1")]

    :param tz_id: thermal zone id as it appears in the id field of the
        thermal_temperature trace event

    :type actor_order: list
    :type map_label: dict
    :type runs: list
    :type tz_id: int
    """
    # Imported lazily so the module can load without plotting dependencies.
    import trappy.plot_utils
    import trappy.wa
    if not isinstance(actor_order, list):
        raise TypeError("actor_order has to be an array")
    if not isinstance(map_label, dict):
        raise TypeError("map_label has to be a dict")
    # Default figure geometry, overridable by the caller.
    if "width" not in kwords:
        kwords["width"] = 20
    if "height" not in kwords:
        kwords["height"] = 5
    run_data = []
    for name, path in runs:
        run_data.append(trappy.FTrace(name=name, path=path, scope="thermal"))
        # Pretty-print each run's sysfs parameters (workload-automation runs).
        trappy.wa.SysfsExtractor(path).pretty_print_in_ipython()
    trappy.plot_utils.plot_temperature(run_data, **kwords)
    # NOTE(review): tz_id is accepted and documented but dropped here, so
    # only plot_temperature ever sees it — presumably the remaining helpers
    # don't take a tz_id kwarg; confirm against trappy.plot_utils.
    if "tz_id" in kwords:
        del kwords["tz_id"]
    try:
        trappy.plot_utils.plot_load(run_data, map_label, **kwords)
    except IndexError:
        raise ValueError("No power allocator traces found. Was IPA active (temp above switch on temperature) and FTrace configured to collect all thermal events?")
    trappy.plot_utils.plot_allfreqs(run_data, map_label, **kwords)
    trappy.plot_utils.plot_controller(run_data, **kwords)
    trappy.plot_utils.plot_input_power(run_data, actor_order, **kwords)
    trappy.plot_utils.plot_output_power(run_data, actor_order, **kwords)
    trappy.plot_utils.plot_freq_hists(run_data, map_label)
    trappy.plot_utils.plot_temperature_hist(run_data)
def summary_plots(actor_order, map_label, **kwords):
    """Produce the summary plots for a single run.

    .. warning::
        This is a wrapper around compare_runs(). Use that instead.
    """
    run_path = kwords.pop("path", ".")
    run_title = kwords.pop("title", "")
    runs = [(run_title, run_path)]
    return compare_runs(actor_order, map_label, runs, **kwords)
|
passiweinberger/passiweinberger.github.io
|
refs/heads/master
|
_site/presentations/IBM_Lab_Lecture/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
|
1284
|
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import copy
import hashlib
import json
import multiprocessing
import os.path
import re
import signal
import subprocess
import sys
import gyp
import gyp.common
from gyp.common import OrderedSet
import gyp.msvs_emulation
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
from cStringIO import StringIO
from gyp.common import GetEnvironFallback
import gyp.ninja_syntax as ninja_syntax
# Path-expansion variables gyp substitutes into target definitions.
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_PREFIX': 'lib',
  # Gyp expects the following variables to be expandable by the build
  # system to the appropriate locations.  Ninja prefers paths to be
  # known at gyp time.  To resolve this, introduce special
  # variables starting with $! and $| (which begin with a $ so gyp knows it
  # should be treated specially, but is otherwise an invalid
  # ninja/shell variable) that are passed to gyp here but expanded
  # before writing out into the target .ninja files; see
  # ExpandSpecial.
  # $! is used for variables that represent a path and that can only appear at
  # the start of a string, while $| is used for variables that can appear
  # anywhere in a string.
  'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
  'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
  'PRODUCT_DIR': '$!PRODUCT_DIR',
  'CONFIGURATION_NAME': '$|CONFIGURATION_NAME',
  # Special variables that may be used by gyp 'rule' targets.
  # We generate definitions for these variables on the fly when processing a
  # rule.
  'RULE_INPUT_ROOT': '${root}',
  'RULE_INPUT_DIRNAME': '${dirname}',
  'RULE_INPUT_PATH': '${source}',
  'RULE_INPUT_EXT': '${ext}',
  'RULE_INPUT_NAME': '${name}',
}
# Placates pylint.
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None
# Emit both host and target toolset rules when cross-compiling.
generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
def StripPrefix(arg, prefix):
  """Return |arg| with a leading |prefix| removed, or |arg| unchanged."""
  if not arg.startswith(prefix):
    return arg
  return arg[len(prefix):]
def QuoteShellArgument(arg, flavor):
  """Quote a string such that it will be interpreted as a single argument
  by the shell."""
  # Whitelist characters that never need quoting and wrap anything else,
  # rather than trying to enumerate every unsafe shell character.
  if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
    # No quoting necessary.
    return arg
  if flavor == 'win':
    return gyp.msvs_emulation.QuoteForRspFile(arg)
  # POSIX shells: single-quote, spelling each embedded ' as '"'"'.
  escaped = arg.replace("'", "'" + '"\'"' + "'")
  return "'%s'" % escaped
def Define(d, flavor):
  """Takes a preprocessor define and returns a -D parameter that's ninja- and
  shell-escaped."""
  if flavor == 'win':
    # cl.exe replaces literal # characters with = in preprocessor
    # definitions for some reason. Octal-encode to work around that.
    d = d.replace('#', '\\%03o' % ord('#'))
  escaped = ninja_syntax.escape('-D' + d)
  return QuoteShellArgument(escaped, flavor)
def AddArch(output, arch):
  """Return |output| with ``.<arch>`` inserted before its extension."""
  root, ext = os.path.splitext(output)
  return '{0}.{1}{2}'.format(root, arch, ext)
class Target(object):
  """Target represents the paths used within a single gyp target.

  Conceptually, building a single target A is a series of steps:

  1) actions/rules/copies  generates source/resources/etc.
  2) compiles              generates .o files
  3) link                  generates a binary (library/executable)
  4) bundle                merges the above in a mac bundle

  (Any of these steps can be optional.)

  From a build ordering perspective, a dependent target B could just
  depend on the last output of this series of steps.

  But some dependent commands sometimes need to reach inside the box.
  For example, when linking B it needs to get the path to the static
  library generated by A.

  This object stores those paths.  To keep things simple, member
  variables only store concrete paths to single files, while methods
  compute derived values like "the last output of the target".
  """

  def __init__(self, type):
    # Gyp type ("static_library", etc.) of this target.
    self.type = type
    # Stamp: input deps needed before dependent actions may run.
    self.preaction_stamp = None
    # Stamp: input deps needed before dependent compiles may run.
    self.precompile_stamp = None
    # Stamp marking completion of actions/rules/copies, if any.
    self.actions_stamp = None
    # Output of the link step, if any.
    self.binary = None
    # File marking completion of a mac bundle build, if any.
    self.bundle = None
    # Windows incremental linking links against the .objs composing a
    # .lib rather than the .lib itself; those .objs (and the compile
    # deps needed by targets depending on them) are recorded here.
    self.component_objs = None
    self.compile_deps = None
    # Windows only: dependents link against the import .lib of a dll,
    # so track it separately from the dll itself.
    self.import_lib = None

  def Linkable(self):
    """Return true if this is a target that can be linked against."""
    return self.type in {'static_library', 'shared_library'}

  def UsesToc(self, flavor):
    """Return true if the target should produce a restat rule based on a TOC
    file."""
    # The naive approach would put the TOC file inside a mac bundle, so
    # bundles (and Windows) are excluded for now.
    if flavor == 'win':
      return False
    if self.bundle:
      return False
    return self.type in {'shared_library', 'loadable_module'}

  def PreActionInput(self, flavor):
    """Return the path, if any, that should be used as a dependency of
    any dependent action step."""
    if self.UsesToc(flavor):
      return self.FinalOutput() + '.TOC'
    return self.FinalOutput() or self.preaction_stamp

  def PreCompileInput(self):
    """Return the path, if any, that should be used as a dependency of
    any dependent compile step."""
    return self.actions_stamp or self.precompile_stamp

  def FinalOutput(self):
    """Return the last output of the target, which depends on all prior
    steps."""
    return self.bundle or self.binary or self.actions_stamp
# A small discourse on paths as used within the Ninja build:
# All files we produce (both at gyp and at build time) appear in the
# build directory (e.g. out/Debug).
#
# Paths within a given .gyp file are always relative to the directory
# containing the .gyp file. Call these "gyp paths". This includes
# sources as well as the starting directory a given gyp rule/action
# expects to be run from. We call the path from the source root to
# the gyp file the "base directory" within the per-.gyp-file
# NinjaWriter code.
#
# All paths as written into the .ninja files are relative to the build
# directory. Call these paths "ninja paths".
#
# We translate between these two notions of paths with two helper
# functions:
#
# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
# into the equivalent ninja path.
#
# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
# an output file; the result can be namespaced such that it is unique
# to the input file name as well as the output target name.
class NinjaWriter(object):
  def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir,
               output_file, toplevel_build, output_file_name, flavor,
               toplevel_dir=None):
    """
    hash_for_rules: hash mixed into generated rule names for uniqueness
    target_outputs: map of qualified target name -> Target for prior targets
    base_dir: path from source root to directory containing this gyp file,
              by gyp semantics, all input paths are relative to this
    build_dir: path from source root to build output
    toplevel_dir: path to the toplevel directory
    """
    self.hash_for_rules = hash_for_rules
    self.target_outputs = target_outputs
    self.base_dir = base_dir
    self.build_dir = build_dir
    self.ninja = ninja_syntax.Writer(output_file)
    self.toplevel_build = toplevel_build
    self.output_file_name = output_file_name
    self.flavor = flavor
    # Absolute build dir; only computable when toplevel_dir is given.
    self.abs_build_dir = None
    if toplevel_dir is not None:
      self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
                                                        build_dir))
    self.obj_ext = '.obj' if flavor == 'win' else '.o'
    if flavor == 'win':
      # See docstring of msvs_emulation.GenerateEnvironmentFiles().
      self.win_env = {}
      for arch in ('x86', 'x64'):
        self.win_env[arch] = 'environment.' + arch
    # Relative path from build output dir to base dir.
    build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
    self.build_to_base = os.path.join(build_to_top, base_dir)
    # Relative path from base dir to build dir.
    base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
    self.base_to_build = os.path.join(base_to_top, build_dir)
def ExpandSpecial(self, path, product_dir=None):
"""Expand specials like $!PRODUCT_DIR in |path|.
If |product_dir| is None, assumes the cwd is already the product
dir. Otherwise, |product_dir| is the relative path to the product
dir.
"""
PRODUCT_DIR = '$!PRODUCT_DIR'
if PRODUCT_DIR in path:
if product_dir:
path = path.replace(PRODUCT_DIR, product_dir)
else:
path = path.replace(PRODUCT_DIR + '/', '')
path = path.replace(PRODUCT_DIR + '\\', '')
path = path.replace(PRODUCT_DIR, '.')
INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR'
if INTERMEDIATE_DIR in path:
int_dir = self.GypPathToUniqueOutput('gen')
# GypPathToUniqueOutput generates a path relative to the product dir,
# so insert product_dir in front if it is provided.
path = path.replace(INTERMEDIATE_DIR,
os.path.join(product_dir or '', int_dir))
CONFIGURATION_NAME = '$|CONFIGURATION_NAME'
path = path.replace(CONFIGURATION_NAME, self.config_name)
return path
def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
if self.flavor == 'win':
path = self.msvs_settings.ConvertVSMacros(
path, config=self.config_name)
path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root)
path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'],
dirname)
path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source)
path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext)
path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name)
return path
def GypPathToNinja(self, path, env=None):
"""Translate a gyp path to a ninja path, optionally expanding environment
variable references in |path| with |env|.
See the above discourse on path conversions."""
if env:
if self.flavor == 'mac':
path = gyp.xcode_emulation.ExpandEnvVars(path, env)
elif self.flavor == 'win':
path = gyp.msvs_emulation.ExpandMacros(path, env)
if path.startswith('$!'):
expanded = self.ExpandSpecial(path)
if self.flavor == 'win':
expanded = os.path.normpath(expanded)
return expanded
if '$|' in path:
path = self.ExpandSpecial(path)
assert '$' not in path, path
return os.path.normpath(os.path.join(self.build_to_base, path))
def GypPathToUniqueOutput(self, path, qualified=True):
"""Translate a gyp path to a ninja path for writing output.
If qualified is True, qualify the resulting filename with the name
of the target. This is necessary when e.g. compiling the same
path twice for two separate output targets.
See the above discourse on path conversions."""
path = self.ExpandSpecial(path)
assert not path.startswith('$'), path
# Translate the path following this scheme:
# Input: foo/bar.gyp, target targ, references baz/out.o
# Output: obj/foo/baz/targ.out.o (if qualified)
# obj/foo/baz/out.o (otherwise)
# (and obj.host instead of obj for cross-compiles)
#
# Why this scheme and not some other one?
# 1) for a given input, you can compute all derived outputs by matching
# its path, even if the input is brought via a gyp file with '..'.
# 2) simple files like libraries and stamps have a simple filename.
obj = 'obj'
if self.toolset != 'target':
obj += '.' + self.toolset
path_dir, path_basename = os.path.split(path)
assert not os.path.isabs(path_dir), (
"'%s' can not be absolute path (see crbug.com/462153)." % path_dir)
if qualified:
path_basename = self.name + '.' + path_basename
return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
path_basename))
  def WriteCollapsedDependencies(self, name, targets, order_only=None):
    """Given a list of targets, return a path for a single file
    representing the result of building all the targets or None.
    Uses a stamp file if necessary."""
    # NOTE(review): Python 2 semantics — filter() returns a list here, so
    # this asserts |targets| contains no falsy entries.  Under Python 3
    # filter() is lazy and this comparison would always be False.
    assert targets == filter(None, targets), targets
    if len(targets) == 0:
      assert not order_only
      return None
    if len(targets) > 1 or order_only:
      # Collapse multiple outputs (or an order-only dep) into one stamp.
      stamp = self.GypPathToUniqueOutput(name + '.stamp')
      targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only)
      self.ninja.newline()
    return targets[0]
def _SubninjaNameForArch(self, arch):
output_file_base = os.path.splitext(self.output_file_name)[0]
return '%s.%s.ninja' % (output_file_base, arch)
  def WriteSpec(self, spec, config_name, generator_flags):
    """The main entry point for NinjaWriter: write the build rules for a spec.

    Returns a Target object, which represents the output paths for this spec.
    Returns None if there are no outputs (e.g. a settings-only 'none' type
    target)."""
    self.config_name = config_name
    self.name = spec['target_name']
    self.toolset = spec['toolset']
    config = spec['configurations'][config_name]
    self.target = Target(spec['type'])
    self.is_standalone_static_library = bool(
        spec.get('standalone_static_library', 0))
    # Track if this target contains any C++ files, to decide if gcc or g++
    # should be used for linking.
    self.uses_cpp = False
    self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
    self.xcode_settings = self.msvs_settings = None
    if self.flavor == 'mac':
      self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
    if self.flavor == 'win':
      self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
                                                           generator_flags)
      # Point the per-arch toolchain variables at this config's arch.
      arch = self.msvs_settings.GetArch(config_name)
      self.ninja.variable('arch', self.win_env[arch])
      self.ninja.variable('cc', '$cl_' + arch)
      self.ninja.variable('cxx', '$cl_' + arch)
      self.ninja.variable('cc_host', '$cl_' + arch)
      self.ninja.variable('cxx_host', '$cl_' + arch)
      self.ninja.variable('asm', '$ml_' + arch)
    if self.flavor == 'mac':
      self.archs = self.xcode_settings.GetActiveArchs(config_name)
      if len(self.archs) > 1:
        # Fat binary: one subninja writer per arch.
        self.arch_subninjas = dict(
            (arch, ninja_syntax.Writer(
                OpenOutput(os.path.join(self.toplevel_build,
                                        self._SubninjaNameForArch(arch)),
                           'w')))
            for arch in self.archs)
    # Compute predepends for all rules.
    # actions_depends is the dependencies this target depends on before running
    # any of its action/rule/copy steps.
    # compile_depends is the dependencies this target depends on before running
    # any of its compile steps.
    actions_depends = []
    compile_depends = []
    # TODO(evan): it is rather confusing which things are lists and which
    # are strings.  Fix these.
    if 'dependencies' in spec:
      for dep in spec['dependencies']:
        if dep in self.target_outputs:
          target = self.target_outputs[dep]
          actions_depends.append(target.PreActionInput(self.flavor))
          compile_depends.append(target.PreCompileInput())
      # (py2 filter(): drop None entries from deps with no usable input.)
      actions_depends = filter(None, actions_depends)
      compile_depends = filter(None, compile_depends)
      actions_depends = self.WriteCollapsedDependencies('actions_depends',
                                                        actions_depends)
      compile_depends = self.WriteCollapsedDependencies('compile_depends',
                                                        compile_depends)
      self.target.preaction_stamp = actions_depends
      self.target.precompile_stamp = compile_depends
    # Write out actions, rules, and copies.  These must happen before we
    # compile any sources, so compute a list of predependencies for sources
    # while we do it.
    extra_sources = []
    mac_bundle_depends = []
    self.target.actions_stamp = self.WriteActionsRulesCopies(
        spec, extra_sources, actions_depends, mac_bundle_depends)
    # If we have actions/rules/copies, we depend directly on those, but
    # otherwise we depend on dependent target's actions/rules/copies etc.
    # We never need to explicitly depend on previous target's link steps,
    # because no compile ever depends on them.
    compile_depends_stamp = (self.target.actions_stamp or compile_depends)
    # Write out the compilation steps, if any.
    link_deps = []
    sources = extra_sources + spec.get('sources', [])
    if sources:
      if self.flavor == 'mac' and len(self.archs) > 1:
        # Write subninja file containing compile and link commands scoped to
        # a single arch if a fat binary is being built.
        for arch in self.archs:
          self.ninja.subninja(self._SubninjaNameForArch(arch))
      pch = None
      if self.flavor == 'win':
        gyp.msvs_emulation.VerifyMissingSources(
            sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
        pch = gyp.msvs_emulation.PrecompiledHeader(
            self.msvs_settings, config_name, self.GypPathToNinja,
            self.GypPathToUniqueOutput, self.obj_ext)
      else:
        pch = gyp.xcode_emulation.MacPrefixHeader(
            self.xcode_settings, self.GypPathToNinja,
            lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
      link_deps = self.WriteSources(
          self.ninja, config_name, config, sources, compile_depends_stamp, pch,
          spec)
      # Some actions/rules output 'sources' that are already object files.
      obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
      if obj_outputs:
        if self.flavor != 'mac' or len(self.archs) == 1:
          link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
        else:
          print "Warning: Actions/rules writing object files don't work with " \
              "multiarch targets, dropping. (target %s)" % spec['target_name']
    elif self.flavor == 'mac' and len(self.archs) > 1:
      link_deps = collections.defaultdict(list)
    compile_deps = self.target.actions_stamp or actions_depends
    if self.flavor == 'win' and self.target.type == 'static_library':
      # See Target.component_objs: incremental linking needs the .objs.
      self.target.component_objs = link_deps
      self.target.compile_deps = compile_deps
    # Write out a link step, if needed.
    output = None
    is_empty_bundle = not link_deps and not mac_bundle_depends
    if link_deps or self.target.actions_stamp or actions_depends:
      output = self.WriteTarget(spec, config_name, config, link_deps,
                                compile_deps)
      if self.is_mac_bundle:
        mac_bundle_depends.append(output)
    # Bundle all of the above together, if needed.
    if self.is_mac_bundle:
      output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)
    if not output:
      return None
    assert self.target.FinalOutput(), output
    return self.target
def _WinIdlRule(self, source, prebuild, outputs):
    """Handle the implicit VS .idl rule for one source file. Fills |outputs|
    with files that are generated."""
    # msvs_settings returns the raw MIDL build data (output dir, output
    # files, variable bindings, flags) for this config.
    outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
        source, self.config_name)
    outdir = self.GypPathToNinja(outdir)
    def fix_path(path, rel=None):
      # Root |path| under |outdir|, expand rule variables (InputName etc.)
      # against |source|, and optionally make the result relative to |rel|.
      path = os.path.join(outdir, path)
      dirname, basename = os.path.split(source)
      root, ext = os.path.splitext(basename)
      path = self.ExpandRuleVariables(
          path, root, dirname, source, ext, basename)
      if rel:
        path = os.path.relpath(path, rel)
      return path
    # Variable values are stored relative to outdir; the output file list
    # keeps full (ninja-relative) paths.
    vars = [(name, fix_path(value, outdir)) for name, value in vars]
    output = [fix_path(p) for p in output]
    vars.append(('outdir', outdir))
    vars.append(('idlflags', flags))
    input = self.GypPathToNinja(source)
    self.ninja.build(output, 'idl', input,
                     variables=vars, order_only=prebuild)
    outputs.extend(output)
def WriteWinIdlFiles(self, spec, prebuild):
    """Writes rules to match MSVS's implicit idl handling.

    Returns the list of generated files, or [] when the target supplies its
    own explicit idl rules/actions."""
    assert self.flavor == 'win'
    if self.msvs_settings.HasExplicitIdlRulesOrActions(spec):
      return []
    generated = []
    idl_sources = [s for s in spec['sources'] if s.endswith('.idl')]
    for idl_source in idl_sources:
      self._WinIdlRule(idl_source, prebuild, generated)
    return generated
def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
                            mac_bundle_depends):
    """Write out the Actions, Rules, and Copies steps. Return a path
    representing the outputs of these steps."""
    outputs = []
    if self.is_mac_bundle:
      # Copy the list: rules below may remove entries they consume.
      mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
    else:
      mac_bundle_resources = []
    extra_mac_bundle_resources = []
    if 'actions' in spec:
      outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
                                   extra_mac_bundle_resources)
    if 'rules' in spec:
      outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
                                 mac_bundle_resources,
                                 extra_mac_bundle_resources)
    if 'copies' in spec:
      outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)
    if 'sources' in spec and self.flavor == 'win':
      outputs += self.WriteWinIdlFiles(spec, prebuild)
    # Collapse all outputs behind a single stamp so later steps need only
    # one order-only dependency.
    stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
    if self.is_mac_bundle:
      xcassets = self.WriteMacBundleResources(
          extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
      partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
      self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)
    return stamp
def GenerateDescription(self, verb, message, fallback):
    """Generate and return a description of a build step.

    |verb| is the short summary, e.g. ACTION or RULE.
    |message| is a hand-written description, or None if not available.
    |fallback| is the gyp-level name of the step, usable as a fallback.
    """
    # Non-target toolsets get the toolset appended, e.g. 'ACTION(host)'.
    label = verb if self.toolset == 'target' else '%s(%s)' % (verb,
                                                              self.toolset)
    if message:
      return '%s %s' % (label, self.ExpandSpecial(message))
    return '%s %s: %s' % (label, self.name, fallback)
def WriteActions(self, actions, extra_sources, prebuild,
                 extra_mac_bundle_resources):
    """Write a ninja rule + build edge for each gyp 'action'.

    Appends to |extra_sources| / |extra_mac_bundle_resources| when an action
    sets the corresponding process_outputs_as_* flag. Returns the list of
    all action outputs."""
    # Actions cd into the base directory.
    env = self.GetToolchainEnv()
    all_outputs = []
    for action in actions:
      # First write out a rule for the action.
      name = '%s_%s' % (action['action_name'], self.hash_for_rules)
      description = self.GenerateDescription('ACTION',
                                             action.get('message', None),
                                             name)
      is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
                   if self.flavor == 'win' else False)
      args = action['action']
      depfile = action.get('depfile', None)
      if depfile:
        depfile = self.ExpandSpecial(depfile, self.base_to_build)
      pool = 'console' if int(action.get('ninja_use_console', 0)) else None
      rule_name, _ = self.WriteNewNinjaRule(name, args, description,
                                            is_cygwin, env, pool,
                                            depfile=depfile)
      inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
      if int(action.get('process_outputs_as_sources', False)):
        extra_sources += action['outputs']
      if int(action.get('process_outputs_as_mac_bundle_resources', False)):
        extra_mac_bundle_resources += action['outputs']
      outputs = [self.GypPathToNinja(o, env) for o in action['outputs']]
      # Then write out an edge using the rule.
      self.ninja.build(outputs, rule_name, inputs,
                       order_only=prebuild)
      all_outputs += outputs
      self.ninja.newline()
    return all_outputs
def WriteRules(self, rules, extra_sources, prebuild,
               mac_bundle_resources, extra_mac_bundle_resources):
    """Write a ninja rule + per-source build edges for each gyp 'rule'.

    Appends to |extra_sources| / |extra_mac_bundle_resources| per the
    process_outputs_as_* flags, and removes from |mac_bundle_resources| any
    source a rule consumes. Returns the list of all rule outputs."""
    env = self.GetToolchainEnv()
    all_outputs = []
    for rule in rules:
      # Skip a rule with no action and no inputs.
      if 'action' not in rule and not rule.get('rule_sources', []):
        continue
      # First write out a rule for the rule action.
      name = '%s_%s' % (rule['rule_name'], self.hash_for_rules)
      args = rule['action']
      description = self.GenerateDescription(
          'RULE',
          rule.get('message', None),
          ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
      is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
                   if self.flavor == 'win' else False)
      pool = 'console' if int(rule.get('ninja_use_console', 0)) else None
      rule_name, args = self.WriteNewNinjaRule(
          name, args, description, is_cygwin, env, pool)
      # TODO: if the command references the outputs directly, we should
      # simplify it to just use $out.
      # Rules can potentially make use of some special variables which
      # must vary per source file.
      # Compute the list of variables we'll need to provide.
      special_locals = ('source', 'root', 'dirname', 'ext', 'name')
      needed_variables = set(['source'])
      for argument in args:
        for var in special_locals:
          if '${%s}' % var in argument:
            needed_variables.add(var)
      def cygwin_munge(path):
        # pylint: disable=cell-var-from-loop
        # Cygwin tools expect forward slashes; late binding of is_cygwin is
        # intentional (the closure is only called within this iteration).
        if is_cygwin:
          return path.replace('\\', '/')
        return path
      inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])]
      # If there are n source files matching the rule, and m additional rule
      # inputs, then adding 'inputs' to each build edge written below will
      # write m * n inputs. Collapsing reduces this to m + n.
      sources = rule.get('rule_sources', [])
      num_inputs = len(inputs)
      if prebuild:
        num_inputs += 1
      if num_inputs > 2 and len(sources) > 2:
        inputs = [self.WriteCollapsedDependencies(
            rule['rule_name'], inputs, order_only=prebuild)]
        prebuild = []
      # For each source file, write an edge that generates all the outputs.
      for source in sources:
        source = os.path.normpath(source)
        dirname, basename = os.path.split(source)
        root, ext = os.path.splitext(basename)
        # Gather the list of inputs and outputs, expanding $vars if possible.
        outputs = [self.ExpandRuleVariables(o, root, dirname,
                                            source, ext, basename)
                   for o in rule['outputs']]
        if int(rule.get('process_outputs_as_sources', False)):
          extra_sources += outputs
        was_mac_bundle_resource = source in mac_bundle_resources
        if was_mac_bundle_resource or \
            int(rule.get('process_outputs_as_mac_bundle_resources', False)):
          extra_mac_bundle_resources += outputs
        # Note: This is n_resources * n_outputs_in_rule. Put to-be-removed
        # items in a set and remove them all in a single pass if this becomes
        # a performance issue.
        if was_mac_bundle_resource:
          mac_bundle_resources.remove(source)
        extra_bindings = []
        for var in needed_variables:
          if var == 'root':
            extra_bindings.append(('root', cygwin_munge(root)))
          elif var == 'dirname':
            # '$dirname' is a parameter to the rule action, which means
            # it shouldn't be converted to a Ninja path. But we don't
            # want $!PRODUCT_DIR in there either.
            dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build)
            extra_bindings.append(('dirname', cygwin_munge(dirname_expanded)))
          elif var == 'source':
            # '$source' is a parameter to the rule action, which means
            # it shouldn't be converted to a Ninja path. But we don't
            # want $!PRODUCT_DIR in there either.
            source_expanded = self.ExpandSpecial(source, self.base_to_build)
            extra_bindings.append(('source', cygwin_munge(source_expanded)))
          elif var == 'ext':
            extra_bindings.append(('ext', ext))
          elif var == 'name':
            extra_bindings.append(('name', cygwin_munge(basename)))
          else:
            assert var == None, repr(var)
        outputs = [self.GypPathToNinja(o, env) for o in outputs]
        if self.flavor == 'win':
          # WriteNewNinjaRule uses unique_name for creating an rsp file on win.
          extra_bindings.append(('unique_name',
                                 hashlib.md5(outputs[0]).hexdigest()))
        self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
                         implicit=inputs,
                         order_only=prebuild,
                         variables=extra_bindings)
        all_outputs.extend(outputs)
    return all_outputs
def WriteCopies(self, copies, prebuild, mac_bundle_depends):
    """Write a 'copy' build edge for every file in every 'copies' entry.

    Returns the list of copy destinations; may append to
    |mac_bundle_depends| for copies landing inside a mac bundle."""
    outputs = []
    env = self.GetToolchainEnv()
    for copy in copies:
      for path in copy['files']:
        # Normalize the path so trailing slashes don't confuse us.
        path = os.path.normpath(path)
        basename = os.path.split(path)[1]
        src = self.GypPathToNinja(path, env)
        dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
                                  env)
        outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
        if self.is_mac_bundle:
          # gyp has mac_bundle_resources to copy things into a bundle's
          # Resources folder, but there's no built-in way to copy files to other
          # places in the bundle. Hence, some targets use copies for this. Check
          # if this file is copied into the current bundle, and if so add it to
          # the bundle depends so that dependent targets get rebuilt if the copy
          # input changes.
          if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
            mac_bundle_depends.append(dst)
    return outputs
def WriteMacBundleResources(self, resources, bundle_depends):
    """Writes ninja edges for 'mac_bundle_resources'.

    Returns the subset of |resources| that are .xcassets catalogs; those are
    not copied here but handed back for WriteMacXCassets to process."""
    xcassets = []
    for output, res in gyp.xcode_emulation.GetMacBundleResources(
        generator_default_variables['PRODUCT_DIR'],
        self.xcode_settings, map(self.GypPathToNinja, resources)):
      output = self.ExpandSpecial(output)
      if os.path.splitext(output)[-1] != '.xcassets':
        isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
        self.ninja.build(output, 'mac_tool', res,
                         variables=[('mactool_cmd', 'copy-bundle-resource'), \
                                    ('binary', isBinary)])
        bundle_depends.append(output)
      else:
        # Asset catalogs are compiled together by actool; defer them.
        xcassets.append(res)
    return xcassets
def WriteMacXCassets(self, xcassets, bundle_depends):
    """Writes ninja edges for 'mac_bundle_resources' .xcassets files.

    This add an invocation of 'actool' via the 'mac_tool.py' helper script.

    It assumes that the assets catalogs define at least one imageset and
    thus an Assets.car file will be generated in the application resources
    directory. If this is not the case, then the build will probably be done
    at each invocation of ninja."""
    if not xcassets:
      return
    # Map Xcode build settings to the corresponding actool argument names.
    extra_arguments = {}
    settings_to_arg = {
        'XCASSETS_APP_ICON': 'app-icon',
        'XCASSETS_LAUNCH_IMAGE': 'launch-image',
    }
    settings = self.xcode_settings.xcode_settings[self.config_name]
    for settings_key, arg_name in settings_to_arg.iteritems():
      value = settings.get(settings_key)
      if value:
        extra_arguments[arg_name] = value
    # If app icons / launch images are involved, actool also emits a partial
    # Info.plist that must later be merged into the bundle's Info.plist.
    partial_info_plist = None
    if extra_arguments:
      partial_info_plist = self.GypPathToUniqueOutput(
          'assetcatalog_generated_info.plist')
      extra_arguments['output-partial-info-plist'] = partial_info_plist
    outputs = []
    outputs.append(
        os.path.join(
            self.xcode_settings.GetBundleResourceFolder(),
            'Assets.car'))
    if partial_info_plist:
      outputs.append(partial_info_plist)
    # Extra arguments are passed to mac_tool.py as a JSON-encoded 'keys' var.
    keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor)
    extra_env = self.xcode_settings.GetPerTargetSettings()
    env = self.GetSortedXcodeEnv(additional_settings=extra_env)
    env = self.ComputeExportEnvString(env)
    bundle_depends.extend(self.ninja.build(
        outputs, 'compile_xcassets', xcassets,
        variables=[('env', env), ('keys', keys)]))
    return partial_info_plist
def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
    """Write build rules for bundle Info.plist files.

    |partial_info_plist|, if set, is the actool-generated plist fragment from
    WriteMacXCassets and is merged into the final Info.plist."""
    info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
        generator_default_variables['PRODUCT_DIR'],
        self.xcode_settings, self.GypPathToNinja)
    if not info_plist:
      return
    out = self.ExpandSpecial(out)
    if defines:
      # Create an intermediate file to store preprocessed results.
      intermediate_plist = self.GypPathToUniqueOutput(
          os.path.basename(info_plist))
      defines = ' '.join([Define(d, self.flavor) for d in defines])
      info_plist = self.ninja.build(
          intermediate_plist, 'preprocess_infoplist', info_plist,
          variables=[('defines',defines)])
    env = self.GetSortedXcodeEnv(additional_settings=extra_env)
    env = self.ComputeExportEnvString(env)
    if partial_info_plist:
      # Merge the asset-catalog plist fragment into the (possibly
      # preprocessed) Info.plist before the final copy.
      intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist')
      info_plist = self.ninja.build(
          intermediate_plist, 'merge_infoplist',
          [partial_info_plist, info_plist])
    keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
    keys = QuoteShellArgument(json.dumps(keys), self.flavor)
    isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
    self.ninja.build(out, 'copy_infoplist', info_plist,
                     variables=[('env', env), ('keys', keys),
                                ('binary', isBinary)])
    bundle_depends.append(out)
def WriteSources(self, ninja_file, config_name, config, sources, predepends,
                 precompiled_header, spec):
    """Write build rules to compile all of |sources|."""
    if self.toolset == 'host':
      # Host-toolset compiles use the *_host variants of every tool.
      for tool in ('ar', 'cc', 'cxx', 'ld', 'ldxx', 'nm', 'readelf'):
        self.ninja.variable(tool, '$%s_host' % tool)
    if self.flavor == 'mac' and len(self.archs) > 1:
      # Fat mac binaries: compile each arch into its own subninja and
      # return a dict of arch -> object files.
      return dict((arch, self.WriteSourcesForArch(
          self.arch_subninjas[arch], config_name, config, sources,
          predepends, precompiled_header, spec, arch=arch))
                  for arch in self.archs)
    return self.WriteSourcesForArch(
        self.ninja, config_name, config, sources, predepends,
        precompiled_header, spec)
def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
                        predepends, precompiled_header, spec, arch=None):
    """Write build rules to compile all of |sources|.

    Emits per-target compiler-flag variables into |ninja_file| followed by
    one build edge per compilable source. Returns the list of object files
    produced."""
    extra_defines = []
    if self.flavor == 'mac':
      # Objective-C(++) flags build on top of the plain C/C++ flag variables.
      cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
      cflags_c = self.xcode_settings.GetCflagsC(config_name)
      cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
      cflags_objc = ['$cflags_c'] + \
                    self.xcode_settings.GetCflagsObjC(config_name)
      cflags_objcc = ['$cflags_cc'] + \
                     self.xcode_settings.GetCflagsObjCC(config_name)
    elif self.flavor == 'win':
      asmflags = self.msvs_settings.GetAsmflags(config_name)
      cflags = self.msvs_settings.GetCflags(config_name)
      cflags_c = self.msvs_settings.GetCflagsC(config_name)
      cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
      extra_defines = self.msvs_settings.GetComputedDefines(config_name)
      # See comment at cc_command for why there's two .pdb files.
      pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
          config_name, self.ExpandSpecial)
      if not pdbpath_c:
        # No explicit pdb name configured: derive one per target/toolset.
        obj = 'obj'
        if self.toolset != 'target':
          obj += '.' + self.toolset
        pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
        pdbpath_c = pdbpath + '.c.pdb'
        pdbpath_cc = pdbpath + '.cc.pdb'
      self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
      self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
      self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
    else:
      cflags = config.get('cflags', [])
      cflags_c = config.get('cflags_c', [])
      cflags_cc = config.get('cflags_cc', [])
      # Respect environment variables related to build, but target-specific
      # flags can still override them.
      if self.toolset == 'target':
        cflags_c = (os.environ.get('CPPFLAGS', '').split() +
                    os.environ.get('CFLAGS', '').split() + cflags_c)
        cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
                     os.environ.get('CXXFLAGS', '').split() + cflags_cc)
      elif self.toolset == 'host':
        cflags_c = (os.environ.get('CPPFLAGS_host', '').split() +
                    os.environ.get('CFLAGS_host', '').split() + cflags_c)
        cflags_cc = (os.environ.get('CPPFLAGS_host', '').split() +
                     os.environ.get('CXXFLAGS_host', '').split() + cflags_cc)
    defines = config.get('defines', []) + extra_defines
    self.WriteVariableList(ninja_file, 'defines',
                           [Define(d, self.flavor) for d in defines])
    if self.flavor == 'win':
      self.WriteVariableList(ninja_file, 'asmflags',
                             map(self.ExpandSpecial, asmflags))
      self.WriteVariableList(ninja_file, 'rcflags',
          [QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
           for f in self.msvs_settings.GetRcflags(config_name,
                                                  self.GypPathToNinja)])
    include_dirs = config.get('include_dirs', [])
    env = self.GetToolchainEnv()
    if self.flavor == 'win':
      include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
                                                          config_name)
    self.WriteVariableList(ninja_file, 'includes',
        [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
         for i in include_dirs])
    if self.flavor == 'win':
      midl_include_dirs = config.get('midl_include_dirs', [])
      midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
          midl_include_dirs, config_name)
      self.WriteVariableList(ninja_file, 'midl_includes',
          [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
           for i in midl_include_dirs])
    pch_commands = precompiled_header.GetPchBuildCommands(arch)
    if self.flavor == 'mac':
      # Most targets use no precompiled headers, so only write these if needed.
      for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'),
                       ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]:
        include = precompiled_header.GetInclude(ext, arch)
        if include: ninja_file.variable(var, include)
    arflags = config.get('arflags', [])
    self.WriteVariableList(ninja_file, 'cflags',
                           map(self.ExpandSpecial, cflags))
    self.WriteVariableList(ninja_file, 'cflags_c',
                           map(self.ExpandSpecial, cflags_c))
    self.WriteVariableList(ninja_file, 'cflags_cc',
                           map(self.ExpandSpecial, cflags_cc))
    if self.flavor == 'mac':
      self.WriteVariableList(ninja_file, 'cflags_objc',
                             map(self.ExpandSpecial, cflags_objc))
      self.WriteVariableList(ninja_file, 'cflags_objcc',
                             map(self.ExpandSpecial, cflags_objcc))
    self.WriteVariableList(ninja_file, 'arflags',
                           map(self.ExpandSpecial, arflags))
    ninja_file.newline()
    outputs = []
    has_rc_source = False
    # Pick a compile rule (and object extension) per source extension.
    for source in sources:
      filename, ext = os.path.splitext(source)
      ext = ext[1:]
      obj_ext = self.obj_ext
      if ext in ('cc', 'cpp', 'cxx'):
        command = 'cxx'
        self.uses_cpp = True
      elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
        command = 'cc'
      elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
        command = 'cc_s'
      elif (self.flavor == 'win' and ext == 'asm' and
            not self.msvs_settings.HasExplicitAsmRules(spec)):
        command = 'asm'
        # Add the _asm suffix as msvs is capable of handling .cc and
        # .asm files of the same name without collision.
        obj_ext = '_asm.obj'
      elif self.flavor == 'mac' and ext == 'm':
        command = 'objc'
      elif self.flavor == 'mac' and ext == 'mm':
        command = 'objcxx'
        self.uses_cpp = True
      elif self.flavor == 'win' and ext == 'rc':
        command = 'rc'
        obj_ext = '.res'
        has_rc_source = True
      else:
        # Ignore unhandled extensions.
        continue
      input = self.GypPathToNinja(source)
      output = self.GypPathToUniqueOutput(filename + obj_ext)
      if arch is not None:
        output = AddArch(output, arch)
      implicit = precompiled_header.GetObjDependencies([input], [output], arch)
      variables = []
      if self.flavor == 'win':
        variables, output, implicit = precompiled_header.GetFlagsModifications(
            input, output, implicit, command, cflags_c, cflags_cc,
            self.ExpandSpecial)
      ninja_file.build(output, command, input,
                       implicit=[gch for _, _, gch in implicit],
                       order_only=predepends, variables=variables)
      outputs.append(output)
    if has_rc_source:
      resource_include_dirs = config.get('resource_include_dirs', include_dirs)
      self.WriteVariableList(ninja_file, 'resource_includes',
          [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
           for i in resource_include_dirs])
    self.WritePchTargets(ninja_file, pch_commands)
    ninja_file.newline()
    return outputs
def WritePchTargets(self, ninja_file, pch_commands):
    """Writes ninja rules to compile prefix headers.

    |pch_commands| is a list of (gch, lang_flag, lang, input) tuples; one
    build edge is written per tuple, carrying the language-specific pch
    cflags variable. No-op when |pch_commands| is empty.
    """
    if not pch_commands:
      return
    # Lookup tables hoisted out of the loop; the rule-name dict was
    # previously named 'map', shadowing the builtin.
    var_names = {
        'c': 'cflags_pch_c',
        'cc': 'cflags_pch_cc',
        'm': 'cflags_pch_objc',
        'mm': 'cflags_pch_objcc',
    }
    cmd_names = {'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx'}
    for gch, lang_flag, lang, input in pch_commands:
      var_name = var_names[lang]
      cmd = cmd_names.get(lang)
      ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
def WriteLink(self, spec, config_name, config, link_deps):
    """Write out a link step. Fills out target.binary. """
    if self.flavor != 'mac' or len(self.archs) == 1:
      return self.WriteLinkForArch(
          self.ninja, spec, config_name, config, link_deps)
    else:
      # Fat mac binaries: link each arch in its own subninja, then combine
      # the per-arch outputs with lipo (or solipo for shared libs, which
      # also maintains the .TOC file).
      output = self.ComputeOutput(spec)
      inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
                                      config_name, config, link_deps[arch],
                                      arch=arch)
                for arch in self.archs]
      extra_bindings = []
      build_output = output
      if not self.is_mac_bundle:
        self.AppendPostbuildVariable(extra_bindings, spec, output, output)
      # TODO(yyanagisawa): more work needed to fix:
      # https://code.google.com/p/gyp/issues/detail?id=411
      if (spec['type'] in ('shared_library', 'loadable_module') and
          not self.is_mac_bundle):
        extra_bindings.append(('lib', output))
        self.ninja.build([output, output + '.TOC'], 'solipo', inputs,
                         variables=extra_bindings)
      else:
        self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings)
      return output
def WriteLinkForArch(self, ninja_file, spec, config_name, config,
                     link_deps, arch=None):
    """Write out a link step. Fills out target.binary. """
    command = {
        'executable': 'link',
        'loadable_module': 'solink_module',
        'shared_library': 'solink',
    }[spec['type']]
    command_suffix = ''
    implicit_deps = set()
    solibs = set()
    order_deps = set()
    if 'dependencies' in spec:
      # Two kinds of dependencies:
      # - Linkable dependencies (like a .a or a .so): add them to the link line.
      # - Non-linkable dependencies (like a rule that generates a file
      #   and writes a stamp file): add them to implicit_deps
      extra_link_deps = set()
      for dep in spec['dependencies']:
        target = self.target_outputs.get(dep)
        if not target:
          continue
        linkable = target.Linkable()
        if linkable:
          new_deps = []
          if (self.flavor == 'win' and
              target.component_objs and
              self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
            # Link against the dependency's object files directly.
            new_deps = target.component_objs
            if target.compile_deps:
              order_deps.add(target.compile_deps)
          elif self.flavor == 'win' and target.import_lib:
            new_deps = [target.import_lib]
          elif target.UsesToc(self.flavor):
            # Depend on the .TOC so relinks only happen when the exported
            # interface changes, not on every rebuild of the library.
            solibs.add(target.binary)
            implicit_deps.add(target.binary + '.TOC')
          else:
            new_deps = [target.binary]
          for new_dep in new_deps:
            if new_dep not in extra_link_deps:
              extra_link_deps.add(new_dep)
              link_deps.append(new_dep)
        final_output = target.FinalOutput()
        if not linkable or final_output != target.binary:
          implicit_deps.add(final_output)
    extra_bindings = []
    if self.uses_cpp and self.flavor != 'win':
      extra_bindings.append(('ld', '$ldxx'))
    output = self.ComputeOutput(spec, arch)
    if arch is None and not self.is_mac_bundle:
      self.AppendPostbuildVariable(extra_bindings, spec, output, output)
    is_executable = spec['type'] == 'executable'
    # The ldflags config key is not used on mac or win. On those platforms
    # linker flags are set via xcode_settings and msvs_settings, respectively.
    env_ldflags = os.environ.get('LDFLAGS', '').split()
    if self.flavor == 'mac':
      ldflags = self.xcode_settings.GetLdflags(config_name,
          self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
          self.GypPathToNinja, arch)
      ldflags = env_ldflags + ldflags
    elif self.flavor == 'win':
      manifest_base_name = self.GypPathToUniqueOutput(
          self.ComputeOutputFileName(spec))
      ldflags, intermediate_manifest, manifest_files = \
          self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
                                        self.ExpandSpecial, manifest_base_name,
                                        output, is_executable,
                                        self.toplevel_build)
      ldflags = env_ldflags + ldflags
      self.WriteVariableList(ninja_file, 'manifests', manifest_files)
      implicit_deps = implicit_deps.union(manifest_files)
      if intermediate_manifest:
        self.WriteVariableList(
            ninja_file, 'intermediatemanifest', [intermediate_manifest])
      command_suffix = _GetWinLinkRuleNameSuffix(
          self.msvs_settings.IsEmbedManifest(config_name))
      def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
      if def_file:
        implicit_deps.add(def_file)
    else:
      # Respect environment variables related to build, but target-specific
      # flags can still override them.
      ldflags = env_ldflags + config.get('ldflags', [])
      if is_executable and len(solibs):
        # Executables that link against shared libraries find them in
        # lib/ (or lib/<toolset>/) next to the binary at runtime.
        rpath = 'lib/'
        if self.toolset != 'target':
          rpath += self.toolset
        ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
        ldflags.append('-Wl,-rpath-link=%s' % rpath)
    self.WriteVariableList(ninja_file, 'ldflags',
                           map(self.ExpandSpecial, ldflags))
    library_dirs = config.get('library_dirs', [])
    if self.flavor == 'win':
      library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name)
                      for l in library_dirs]
      library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l),
                                                       self.flavor)
                      for l in library_dirs]
    else:
      library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l),
                                         self.flavor)
                      for l in library_dirs]
    libraries = gyp.common.uniquer(map(self.ExpandSpecial,
                                       spec.get('libraries', [])))
    if self.flavor == 'mac':
      libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
    elif self.flavor == 'win':
      libraries = self.msvs_settings.AdjustLibraries(libraries)
    self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries)
    linked_binary = output
    if command in ('solink', 'solink_module'):
      extra_bindings.append(('soname', os.path.split(output)[1]))
      extra_bindings.append(('lib',
                            gyp.common.EncodePOSIXShellArgument(output)))
      if self.flavor != 'win':
        link_file_list = output
        if self.is_mac_bundle:
          # 'Dependency Framework.framework/Versions/A/Dependency Framework' ->
          # 'Dependency Framework.framework.rsp'
          link_file_list = self.xcode_settings.GetWrapperName()
        if arch:
          link_file_list += '.' + arch
        link_file_list += '.rsp'
        # If an rspfile contains spaces, ninja surrounds the filename with
        # quotes around it and then passes it to open(), creating a file with
        # quotes in its name (and when looking for the rsp file, the name
        # makes it through bash which strips the quotes) :-/
        link_file_list = link_file_list.replace(' ', '_')
        extra_bindings.append(
          ('link_file_list',
            gyp.common.EncodePOSIXShellArgument(link_file_list)))
      if self.flavor == 'win':
        extra_bindings.append(('binary', output))
        if ('/NOENTRY' not in ldflags and
            not self.msvs_settings.GetNoImportLibrary(config_name)):
          self.target.import_lib = output + '.lib'
          extra_bindings.append(('implibflag',
                                 '/IMPLIB:%s' % self.target.import_lib))
          pdbname = self.msvs_settings.GetPDBName(
              config_name, self.ExpandSpecial, output + '.pdb')
          output = [output, self.target.import_lib]
          if pdbname:
            output.append(pdbname)
      elif not self.is_mac_bundle:
        output = [output, output + '.TOC']
      else:
        command = command + '_notoc'
    elif self.flavor == 'win':
      extra_bindings.append(('binary', output))
      pdbname = self.msvs_settings.GetPDBName(
          config_name, self.ExpandSpecial, output + '.pdb')
      if pdbname:
        output = [output, pdbname]
    if len(solibs):
      extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))
    ninja_file.build(output, command + command_suffix, link_deps,
                     implicit=list(implicit_deps),
                     order_only=list(order_deps),
                     variables=extra_bindings)
    return linked_binary
def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
    """Write the final step that produces this target's output.

    Fills in self.target.binary and returns it. 'none' targets and targets
    with nothing to link get the compile stamp as their 'binary'; static
    libraries get an alink (or alink_thin) edge; everything else defers to
    WriteLink."""
    extra_link_deps = any(self.target_outputs.get(dep).Linkable()
                          for dep in spec.get('dependencies', [])
                          if dep in self.target_outputs)
    if spec['type'] == 'none' or (not link_deps and not extra_link_deps):
      # TODO(evan): don't call this function for 'none' target types, as
      # it doesn't do anything, and we fake out a 'binary' with a stamp file.
      self.target.binary = compile_deps
      self.target.type = 'none'
    elif spec['type'] == 'static_library':
      self.target.binary = self.ComputeOutput(spec)
      if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
          self.is_standalone_static_library):
        self.ninja.build(self.target.binary, 'alink_thin', link_deps,
                         order_only=compile_deps)
      else:
        variables = []
        if self.xcode_settings:
          libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
          if libtool_flags:
            variables.append(('libtool_flags', libtool_flags))
        if self.msvs_settings:
          libflags = self.msvs_settings.GetLibFlags(config_name,
                                                    self.GypPathToNinja)
          variables.append(('libflags', libflags))
        if self.flavor != 'mac' or len(self.archs) == 1:
          self.AppendPostbuildVariable(variables, spec,
                                       self.target.binary, self.target.binary)
          self.ninja.build(self.target.binary, 'alink', link_deps,
                           order_only=compile_deps, variables=variables)
        else:
          # Fat mac .a: archive each arch separately, then combine.
          inputs = []
          for arch in self.archs:
            output = self.ComputeOutput(spec, arch)
            self.arch_subninjas[arch].build(output, 'alink', link_deps[arch],
                                            order_only=compile_deps,
                                            variables=variables)
            inputs.append(output)
          # TODO: It's not clear if libtool_flags should be passed to the alink
          # call that combines single-arch .a files into a fat .a file.
          self.AppendPostbuildVariable(variables, spec,
                                       self.target.binary, self.target.binary)
          self.ninja.build(self.target.binary, 'alink', inputs,
                           # FIXME: test proving order_only=compile_deps isn't
                           # needed.
                           variables=variables)
    else:
      self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
    return self.target.binary
def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
    """Writes the final bundling edge for a mac bundle target and records
    the bundle path on self.target. Returns the bundle output path."""
    assert self.is_mac_bundle
    is_framework_like = spec['type'] in ('shared_library', 'loadable_module')
    output = self.ComputeMacBundleOutput()
    if is_empty:
      # Nothing to bundle: emit a stamp so dependents still have an output.
      output += '.stamp'
    variables = []
    self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
                                 is_command_start=not is_framework_like)
    if is_framework_like and not is_empty:
      variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
      rule = 'package_framework'
    else:
      rule = 'stamp'
    self.ninja.build(output, rule, mac_bundle_depends, variables=variables)
    self.target.bundle = output
    return output
def GetToolchainEnv(self, additional_settings=None):
    """Returns the variables toolchain would set for build steps."""
    # The Xcode env is computed unconditionally (matching historical
    # behavior); on Windows the MSVS macro environment replaces it.
    env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
    if self.flavor != 'win':
      return env
    return self.GetMsvsToolchainEnv(additional_settings=additional_settings)
def GetMsvsToolchainEnv(self, additional_settings=None):
    """Returns the variables Visual Studio would set for build steps."""
    # additional_settings is accepted for interface symmetry with
    # GetToolchainEnv; the MSVS macro environment does not consume it.
    vs_macro_env = self.msvs_settings.GetVSMacroEnv(
        '$!PRODUCT_DIR', config=self.config_name)
    return vs_macro_env
def GetSortedXcodeEnv(self, additional_settings=None):
    """Returns the variables Xcode would set for build steps."""
    assert self.abs_build_dir
    build_dir = self.abs_build_dir
    # SRCROOT is the gyp file's directory, expressed relative to build_dir.
    srcroot = os.path.join(build_dir, self.build_to_base)
    return gyp.xcode_emulation.GetSortedXcodeEnv(
        self.xcode_settings, build_dir, srcroot, self.config_name,
        additional_settings)
def GetSortedXcodePostbuildEnv(self):
    """Returns the variables Xcode would set for postbuild steps."""
    # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
    # TODO(thakis): It would be nice to have some general mechanism instead.
    save_file = self.xcode_settings.GetPerTargetSetting(
        'CHROMIUM_STRIP_SAVE_FILE')
    extra = {'CHROMIUM_STRIP_SAVE_FILE': save_file} if save_file else {}
    return self.GetSortedXcodeEnv(additional_settings=extra)
def AppendPostbuildVariable(self, variables, spec, output, binary,
                            is_command_start=False):
    """Adds a 'postbuilds' variable if there is a postbuild for |output|."""
    command = self.GetPostbuildCommand(spec, output, binary, is_command_start)
    if not command:
      return
    variables.append(('postbuilds', command))
def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
    """Returns a shell command that runs all the postbuilds, and removes
    |output| if any of them fails. If |is_command_start| is False, then the
    returned string will start with ' && '."""
    if not self.xcode_settings or spec['type'] == 'none' or not output:
      return ''
    output = QuoteShellArgument(output, self.flavor)
    postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
    if output_binary is not None:
      # Strip/codesign-style implicit postbuilds operate on the binary
      # inside the bundle, not the bundle directory itself.
      postbuilds = self.xcode_settings.AddImplicitPostbuilds(
          self.config_name,
          os.path.normpath(os.path.join(self.base_to_build, output)),
          QuoteShellArgument(
              os.path.normpath(os.path.join(self.base_to_build, output_binary)),
              self.flavor),
          postbuilds, quiet=True)
    if not postbuilds:
      return ''
    # Postbuilds expect to be run in the gyp file's directory, so insert an
    # implicit postbuild to cd to there.
    postbuilds.insert(0, gyp.common.EncodePOSIXShellList(
        ['cd', self.build_to_base]))
    env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
    # G will be non-null if any postbuild fails. Run all postbuilds in a
    # subshell.
    commands = env + ' (' + \
        ' && '.join([ninja_syntax.escape(command) for command in postbuilds])
    command_string = (commands + '); G=$$?; '
                      # Remove the final output if any postbuild failed.
                      '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
    if is_command_start:
      return '(' + command_string + ' && '
    else:
      # '$ ' is a ninja-escaped space so the leading ' && ' survives.
      return '$ && (' + command_string
def ComputeExportEnvString(self, env):
  """Turn |env|, a sequence of (key, value) pairs, into a chain of
  'export KEY=value;' shell statements, with each value POSIX-quoted
  and ninja-escaped."""
  exports = ['export %s=%s;' %
             (key,
              ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(val)))
             for key, val in env]
  return ' '.join(exports)
def ComputeMacBundleOutput(self):
  """Return the full output path of this target's bundle directory:
  PRODUCT_DIR joined with the bundle's wrapper name."""
  assert self.is_mac_bundle
  product_dir = generator_default_variables['PRODUCT_DIR']
  wrapper_name = self.xcode_settings.GetWrapperName()
  return self.ExpandSpecial(os.path.join(product_dir, wrapper_name))
def ComputeOutputFileName(self, spec, type=None):
  """Compute the filename of the final output for the current target.

  |type| defaults to spec['type']; prefix and extension come from the
  spec's product_prefix/product_extension when present, otherwise from
  the per-flavor platform defaults."""
  if not type:
    type = spec['type']

  default_variables = copy.copy(generator_default_variables)
  CalculateVariables(default_variables, {'flavor': self.flavor})

  # Filename prefix: explicit product prefix, else the platform default
  # for this product type.
  prefix_by_type = {
      'loadable_module': default_variables['SHARED_LIB_PREFIX'],
      'shared_library': default_variables['SHARED_LIB_PREFIX'],
      'static_library': default_variables['STATIC_LIB_PREFIX'],
      'executable': default_variables['EXECUTABLE_PREFIX'],
  }
  prefix = spec.get('product_prefix', prefix_by_type.get(type, ''))

  # Filename extension: explicit product extension, else the platform
  # default for this product type.
  suffix_by_type = {
      'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
      'shared_library': default_variables['SHARED_LIB_SUFFIX'],
      'static_library': default_variables['STATIC_LIB_SUFFIX'],
      'executable': default_variables['EXECUTABLE_SUFFIX'],
  }
  product_extension = spec.get('product_extension')
  if product_extension:
    extension = '.' + product_extension
  else:
    extension = suffix_by_type.get(type, '')

  if 'product_name' in spec:
    # An explicit product name always wins.
    target = spec['product_name']
  else:
    # Derive the name from the target, snipping a redundant 'lib' when the
    # prefix already supplies one.
    target = spec['target_name']
    if prefix == 'lib':
      target = StripPrefix(target, 'lib')

  if type in ('static_library', 'loadable_module', 'shared_library',
              'executable'):
    return '%s%s%s' % (prefix, target, extension)
  elif type == 'none':
    return '%s.stamp' % target
  else:
    raise Exception('Unhandled output type %s' % type)
def ComputeOutput(self, spec, arch=None):
  """Compute the path for the final output of the spec.

  When |arch| is given, the result is the per-architecture partial output
  (placed under an 'arch' directory); otherwise it is the final fat/single
  output."""
  type = spec['type']
  # MSVS settings may override the output name entirely.
  if self.flavor == 'win':
    override = self.msvs_settings.GetOutputName(self.config_name,
                                                self.ExpandSpecial)
    if override:
      return override
  if arch is None and self.flavor == 'mac' and type in (
      'static_library', 'executable', 'shared_library', 'loadable_module'):
    filename = self.xcode_settings.GetExecutablePath()
  else:
    filename = self.ComputeOutputFileName(spec, type)
  # An explicit product_dir wins (only for the non-arch-specific output).
  if arch is None and 'product_dir' in spec:
    path = os.path.join(spec['product_dir'], filename)
    return self.ExpandSpecial(path)
  # Some products go into the output root, libraries go into shared library
  # dir, and everything else goes into the normal place.
  type_in_output_root = ['executable', 'loadable_module']
  if self.flavor == 'mac' and self.toolset == 'target':
    type_in_output_root += ['shared_library', 'static_library']
  elif self.flavor == 'win' and self.toolset == 'target':
    type_in_output_root += ['shared_library']
  if arch is not None:
    # Make sure partial executables don't end up in a bundle or the regular
    # output directory.
    archdir = 'arch'
    if self.toolset != 'target':
      archdir = os.path.join('arch', '%s' % self.toolset)
    return os.path.join(archdir, AddArch(filename, arch))
  elif type in type_in_output_root or self.is_standalone_static_library:
    return filename
  elif type == 'shared_library':
    # Non-default toolsets get their own lib subdirectory.
    libdir = 'lib'
    if self.toolset != 'target':
      libdir = os.path.join('lib', '%s' % self.toolset)
    return os.path.join(libdir, filename)
  else:
    return self.GypPathToUniqueOutput(filename, qualified=False)
def WriteVariableList(self, ninja_file, var, values):
  """Emit |var| into |ninja_file| as the space-joined |values| list
  (None is treated as empty). |values| must not be a bare string."""
  assert not isinstance(values, str)
  ninja_file.variable(var, ' '.join(values or []))
def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool,
                      depfile=None):
  """Write out a new ninja "rule" statement for a given command.

  Returns the name of the new rule, and a copy of |args| with variables
  expanded."""
  # Expand flavor-specific macros in both the command and its description.
  if self.flavor == 'win':
    args = [self.msvs_settings.ConvertVSMacros(
                arg, self.base_to_build, config=self.config_name)
            for arg in args]
    description = self.msvs_settings.ConvertVSMacros(
        description, config=self.config_name)
  elif self.flavor == 'mac':
    # |env| is an empty list on non-mac.
    args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
    description = gyp.xcode_emulation.ExpandEnvVars(description, env)

  # TODO: we shouldn't need to qualify names; we do it because
  # currently the ninja rule namespace is global, but it really
  # should be scoped to the subninja.
  rule_name = self.name
  if self.toolset == 'target':
    rule_name += '.' + self.toolset
  rule_name += '.' + name
  # Ninja rule names must be identifier-like.
  rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)

  # Remove variable references, but not if they refer to the magic rule
  # variables. This is not quite right, as it also protects these for
  # actions, not just for rules where they are valid. Good enough.
  protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ]
  protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'
  description = re.sub(protect + r'\$', '_', description)

  # gyp dictates that commands are run from the base directory.
  # cd into the directory before running, and adjust paths in
  # the arguments to point to the proper locations.
  rspfile = None
  rspfile_content = None
  args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
  if self.flavor == 'win':
    # On Windows the full command goes into a response file to dodge
    # command-line length limits.
    rspfile = rule_name + '.$unique_name.rsp'
    # The cygwin case handles this inside the bash sub-shell.
    run_in = '' if is_cygwin else ' ' + self.build_to_base
    if is_cygwin:
      rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
          args, self.build_to_base)
    else:
      rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
    command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable +
               rspfile + run_in)
  else:
    env = self.ComputeExportEnvString(env)
    command = gyp.common.EncodePOSIXShellList(args)
    command = 'cd %s; ' % self.build_to_base + env + command

  # GYP rules/actions express being no-ops by not touching their outputs.
  # Avoid executing downstream dependencies in this case by specifying
  # restat=1 to ninja.
  self.ninja.rule(rule_name, command, description, depfile=depfile,
                  restat=True, pool=pool,
                  rspfile=rspfile, rspfile_content=rspfile_content)
  self.ninja.newline()

  return rule_name, args
def CalculateVariables(default_variables, params):
  """Calculate additional variables for use in the build (called by gyp).

  Fills in flavor-specific defaults in |default_variables| and copies
  shared generator configuration from the Xcode/MSVS generators into this
  module's globals."""
  global generator_additional_non_configuration_keys
  global generator_additional_path_sections
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'mac':
    default_variables.setdefault('OS', 'mac')
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])
    default_variables.setdefault('LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])

    # Copy additional generator configuration data from Xcode, which is shared
    # by the Mac Ninja generator.
    import gyp.generator.xcode as xcode_generator
    generator_additional_non_configuration_keys = getattr(xcode_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(xcode_generator,
        'generator_additional_path_sections', [])
    global generator_extra_sources_for_rules
    generator_extra_sources_for_rules = getattr(xcode_generator,
        'generator_extra_sources_for_rules', [])
  elif flavor == 'win':
    exts = gyp.MSVSUtil.TARGET_TYPE_EXT
    default_variables.setdefault('OS', 'win')
    default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable']
    default_variables['STATIC_LIB_PREFIX'] = ''
    default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library']
    default_variables['SHARED_LIB_PREFIX'] = ''
    default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library']

    # Copy additional generator configuration data from VS, which is shared
    # by the Windows Ninja generator.
    import gyp.generator.msvs as msvs_generator
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])

    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
  else:
    operating_system = flavor
    if flavor == 'android':
      operating_system = 'linux'  # Keep this legacy behavior for now.
    default_variables.setdefault('OS', operating_system)
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 os.path.join('$!PRODUCT_DIR', 'lib'))
    default_variables.setdefault('LIB_DIR',
                                 os.path.join('$!PRODUCT_DIR', 'obj'))
def ComputeOutputDir(params):
  """Returns the path from the toplevel_dir to the build output directory."""
  # Where the make generator would put its build files (relative to pwd).
  # Ninja writes nothing there, but honoring it eases migration from make.
  make_dir = os.path.relpath(params['options'].generator_output or '.')
  # Name of the build directory relative to make_dir, e.g. "out".
  out_name = params.get('generator_flags', {}).get('output_dir', 'out')
  return os.path.normpath(os.path.join(make_dir, out_name))
def CalculateGeneratorInputInfo(params):
  """Called by __init__ to initialize generator values based on params."""
  global generator_filelist_paths
  top = params['options'].toplevel_dir
  # Filelists land under e.g. "out/gypfiles" inside the toplevel dir.
  gypfiles_dir = os.path.normpath(
      os.path.join(top, ComputeOutputDir(params), 'gypfiles'))
  generator_filelist_paths = {
      'toplevel': top,
      'qualified_out_dir': gypfiles_dir,
  }
def OpenOutput(path, mode='w'):
  """Open |path| for writing, creating directories if necessary."""
  # Create the parent directory chain first so open() cannot fail on a
  # missing directory.
  gyp.common.EnsureDirExists(path)
  return open(path, mode)
def CommandWithWrapper(cmd, wrappers, prog):
  """Return |prog| prefixed by the wrapper configured for tool |cmd|
  (e.g. a ccache-style launcher), or |prog| unchanged when no wrapper
  is configured."""
  wrapper = wrappers.get(cmd)
  if wrapper:
    return '%s %s' % (wrapper, prog)
  return prog
def GetDefaultConcurrentLinks():
  """Returns a best-guess for a number of concurrent links.

  Honors GYP_LINK_CONCURRENCY when set; otherwise derives a limit from
  available physical memory (linkers are memory-bound). Falls back to 1
  on platforms it cannot probe. Divisions below are Python 2 integer
  floor divisions."""
  pool_size = int(os.environ.get('GYP_LINK_CONCURRENCY', 0))
  if pool_size:
    return pool_size

  if sys.platform in ('win32', 'cygwin'):
    import ctypes

    # Mirrors the Win32 MEMORYSTATUSEX struct for GlobalMemoryStatusEx.
    class MEMORYSTATUSEX(ctypes.Structure):
      _fields_ = [
        ("dwLength", ctypes.c_ulong),
        ("dwMemoryLoad", ctypes.c_ulong),
        ("ullTotalPhys", ctypes.c_ulonglong),
        ("ullAvailPhys", ctypes.c_ulonglong),
        ("ullTotalPageFile", ctypes.c_ulonglong),
        ("ullAvailPageFile", ctypes.c_ulonglong),
        ("ullTotalVirtual", ctypes.c_ulonglong),
        ("ullAvailVirtual", ctypes.c_ulonglong),
        ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
      ]

    stat = MEMORYSTATUSEX()
    stat.dwLength = ctypes.sizeof(stat)
    ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))

    # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
    # on a 64 GB machine.
    mem_limit = max(1, stat.ullTotalPhys / (5 * (2 ** 30)))  # total / 5GB
    hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32)))
    return min(mem_limit, hard_cap)
  elif sys.platform.startswith('linux'):
    if os.path.exists("/proc/meminfo"):
      with open("/proc/meminfo") as meminfo:
        # MemTotal is reported in kB.
        memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
        for line in meminfo:
          match = memtotal_re.match(line)
          if not match:
            continue
          # Allow 8Gb per link on Linux because Gold is quite memory hungry
          return max(1, int(match.group(1)) / (8 * (2 ** 20)))
    return 1
  elif sys.platform == 'darwin':
    try:
      avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
      # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
      # 4GB per ld process allows for some more bloat.
      return max(1, avail_bytes / (4 * (2 ** 30)))  # total / 4GB
    except:
      return 1
  else:
    # TODO(scottmg): Implement this for other platforms.
    return 1
def _GetWinLinkRuleNameSuffix(embed_manifest):
"""Returns the suffix used to select an appropriate linking rule depending on
whether the manifest embedding is enabled."""
return '_embed' if embed_manifest else ''
def _AddWinLinkRules(master_ninja, embed_manifest):
  """Adds link rules for Windows platform to |master_ninja|.

  Emits 'solink', 'solink_module' and 'link' rules, each suffixed per
  |embed_manifest| (see _GetWinLinkRuleNameSuffix), all wrapped with
  gyp-win-tool's manifest handling."""
  # Wraps |ldcmd| with the gyp-win-tool manifest step for an exe/dll.
  def FullLinkCommand(ldcmd, out, binary_type):
    # Win32 resource id the manifest is embedded under (1=exe, 2=dll).
    resource_name = {
      'exe': '1',
      'dll': '2',
    }[binary_type]
    return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
           '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
           '$manifests' % {
               'python': sys.executable,
               'out': out,
               'ldcmd': ldcmd,
               'resname': resource_name,
               'embed': embed_manifest }
  rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
  use_separate_mspdbsrv = (
      int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0)
  dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper()
  dllcmd = ('%s gyp-win-tool link-wrapper $arch %s '
            '$ld /nologo $implibflag /DLL /OUT:$binary '
            '@$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
  dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll')
  master_ninja.rule('solink' + rule_name_suffix,
                    description=dlldesc, command=dllcmd,
                    rspfile='$binary.rsp',
                    rspfile_content='$libs $in_newline $ldflags',
                    restat=True,
                    pool='link_pool')
  master_ninja.rule('solink_module' + rule_name_suffix,
                    description=dlldesc, command=dllcmd,
                    rspfile='$binary.rsp',
                    rspfile_content='$libs $in_newline $ldflags',
                    restat=True,
                    pool='link_pool')
  # Note that ldflags goes at the end so that it has the option of
  # overriding default settings earlier in the command line.
  exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s '
             '$ld /nologo /OUT:$binary @$binary.rsp' %
             (sys.executable, use_separate_mspdbsrv))
  exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe')
  master_ninja.rule('link' + rule_name_suffix,
                    description='LINK%s $binary' % rule_name_suffix.upper(),
                    command=exe_cmd,
                    rspfile='$binary.rsp',
                    rspfile_content='$in_newline $libs $ldflags',
                    pool='link_pool')
def GenerateOutputForConfig(target_list, target_dicts, data, params,
                            config_name):
  """Write build.ninja plus per-target subninja files for one configuration.

  Emits toolchain variables, the per-flavor rule set, then one subninja per
  target, and finally the short-name/empty/'all' phony targets."""
  options = params['options']
  flavor = gyp.common.GetFlavor(params)
  generator_flags = params.get('generator_flags', {})

  # build_dir: relative path from source root to our output files.
  # e.g. "out/Debug"
  build_dir = os.path.normpath(
      os.path.join(ComputeOutputDir(params), config_name))

  toplevel_build = os.path.join(options.toplevel_dir, build_dir)

  master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
  master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)

  # Put build-time support tools in out/{config_name}.
  gyp.common.CopyTool(flavor, toplevel_build)

  # Grab make settings for CC/CXX.
  # The rules are
  # - The priority from low to high is gcc/g++, the 'make_global_settings' in
  #   gyp, the environment variable.
  # - If there is no 'make_global_settings' for CC.host/CXX.host or
  #   'CC_host'/'CXX_host' enviroment variable, cc_host/cxx_host should be set
  #   to cc/cxx.
  if flavor == 'win':
    ar = 'lib.exe'
    # cc and cxx must be set to the correct architecture by overriding with one
    # of cl_x86 or cl_x64 below.
    cc = 'UNSET'
    cxx = 'UNSET'
    ld = 'link.exe'
    ld_host = '$ld'
  else:
    ar = 'ar'
    cc = 'cc'
    cxx = 'c++'
    ld = '$cc'
    ldxx = '$cxx'
    ld_host = '$cc_host'
    ldxx_host = '$cxx_host'

  ar_host = 'ar'
  cc_host = None
  cxx_host = None
  cc_host_global_setting = None
  cxx_host_global_setting = None
  clang_cl = None
  nm = 'nm'
  nm_host = 'nm'
  readelf = 'readelf'
  readelf_host = 'readelf'

  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings = data[build_file].get('make_global_settings', [])
  build_to_root = gyp.common.InvertRelativePath(build_dir,
                                                options.toplevel_dir)
  # wrappers maps tool keys (e.g. 'CC') to launcher commands (e.g. ccache).
  wrappers = {}
  for key, value in make_global_settings:
    if key == 'AR':
      ar = os.path.join(build_to_root, value)
    if key == 'AR.host':
      ar_host = os.path.join(build_to_root, value)
    if key == 'CC':
      cc = os.path.join(build_to_root, value)
      if cc.endswith('clang-cl'):
        clang_cl = cc
    if key == 'CXX':
      cxx = os.path.join(build_to_root, value)
    if key == 'CC.host':
      cc_host = os.path.join(build_to_root, value)
      cc_host_global_setting = value
    if key == 'CXX.host':
      cxx_host = os.path.join(build_to_root, value)
      cxx_host_global_setting = value
    if key == 'LD':
      ld = os.path.join(build_to_root, value)
    if key == 'LD.host':
      ld_host = os.path.join(build_to_root, value)
    if key == 'NM':
      nm = os.path.join(build_to_root, value)
    if key == 'NM.host':
      nm_host = os.path.join(build_to_root, value)
    if key == 'READELF':
      readelf = os.path.join(build_to_root, value)
    if key == 'READELF.host':
      readelf_host = os.path.join(build_to_root, value)
    if key.endswith('_wrapper'):
      wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)

  # Support wrappers from environment variables too.
  for key, value in os.environ.iteritems():
    if key.lower().endswith('_wrapper'):
      key_prefix = key[:-len('_wrapper')]
      key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
      wrappers[key_prefix] = os.path.join(build_to_root, value)

  if flavor == 'win':
    configs = [target_dicts[qualified_target]['configurations'][config_name]
               for qualified_target in target_list]
    shared_system_includes = None
    if not generator_flags.get('ninja_use_custom_environment_files', 0):
      shared_system_includes = \
          gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
              configs, generator_flags)
    cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
        toplevel_build, generator_flags, shared_system_includes, OpenOutput)
    for arch, path in cl_paths.iteritems():
      if clang_cl:
        # If we have selected clang-cl, use that instead.
        path = clang_cl
      command = CommandWithWrapper('CC', wrappers,
                                   QuoteShellArgument(path, 'win'))
      if clang_cl:
        # Use clang-cl to cross-compile for x86 or x86_64.
        command += (' -m32' if arch == 'x86' else ' -m64')
      master_ninja.variable('cl_' + arch, command)

  cc = GetEnvironFallback(['CC_target', 'CC'], cc)
  master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
  cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
  master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))

  if flavor == 'win':
    master_ninja.variable('ld', ld)
    master_ninja.variable('idl', 'midl.exe')
    master_ninja.variable('ar', ar)
    master_ninja.variable('rc', 'rc.exe')
    master_ninja.variable('ml_x86', 'ml.exe')
    master_ninja.variable('ml_x64', 'ml64.exe')
    master_ninja.variable('mt', 'mt.exe')
  else:
    master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
    master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
    master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar))
    if flavor != 'mac':
      # Mac does not use readelf/nm for .TOC generation, so avoiding polluting
      # the master ninja with extra unused variables.
      master_ninja.variable(
          'nm', GetEnvironFallback(['NM_target', 'NM'], nm))
      master_ninja.variable(
          'readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf))

  if generator_supports_multiple_toolsets:
    if not cc_host:
      cc_host = cc
    if not cxx_host:
      cxx_host = cxx

    master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host))
    master_ninja.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host))
    master_ninja.variable('readelf_host',
                          GetEnvironFallback(['READELF_host'], readelf_host))
    cc_host = GetEnvironFallback(['CC_host'], cc_host)
    cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)

    # The environment variable could be used in 'make_global_settings', like
    # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here.
    if '$(CC)' in cc_host and cc_host_global_setting:
      cc_host = cc_host_global_setting.replace('$(CC)', cc)
    if '$(CXX)' in cxx_host and cxx_host_global_setting:
      cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
    master_ninja.variable('cc_host',
                          CommandWithWrapper('CC.host', wrappers, cc_host))
    master_ninja.variable('cxx_host',
                          CommandWithWrapper('CXX.host', wrappers, cxx_host))
    if flavor == 'win':
      master_ninja.variable('ld_host', ld_host)
    else:
      master_ninja.variable('ld_host', CommandWithWrapper(
          'LINK', wrappers, ld_host))
      master_ninja.variable('ldxx_host', CommandWithWrapper(
          'LINK', wrappers, ldxx_host))

  master_ninja.newline()

  master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks())
  master_ninja.newline()

  deps = 'msvc' if flavor == 'win' else 'gcc'

  # Compile rules: gcc-style everywhere except Windows.
  if flavor != 'win':
    master_ninja.rule(
      'cc',
      description='CC $out',
      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
              '$cflags_pch_c -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'cc_s',
      description='CC $out',
      command=('$cc $defines $includes $cflags $cflags_c '
              '$cflags_pch_c -c $in -o $out'))
    master_ninja.rule(
      'cxx',
      description='CXX $out',
      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
              '$cflags_pch_cc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
  else:
    # TODO(scottmg) Separate pdb names is a test to see if it works around
    # http://crbug.com/142362. It seems there's a race between the creation of
    # the .pdb by the precompiled header step for .cc and the compilation of
    # .c files. This should be handled by mspdbsrv, but rarely errors out with
    #   c1xx : fatal error C1033: cannot open program database
    # By making the rules target separate pdb files this might be avoided.
    cc_command = ('ninja -t msvc -e $arch ' +
                  '-- '
                  '$cc /nologo /showIncludes /FC '
                  '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
    cxx_command = ('ninja -t msvc -e $arch ' +
                   '-- '
                   '$cxx /nologo /showIncludes /FC '
                   '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
    master_ninja.rule(
      'cc',
      description='CC $out',
      command=cc_command,
      rspfile='$out.rsp',
      rspfile_content='$defines $includes $cflags $cflags_c',
      deps=deps)
    master_ninja.rule(
      'cxx',
      description='CXX $out',
      command=cxx_command,
      rspfile='$out.rsp',
      rspfile_content='$defines $includes $cflags $cflags_cc',
      deps=deps)
    master_ninja.rule(
      'idl',
      description='IDL $in',
      command=('%s gyp-win-tool midl-wrapper $arch $outdir '
               '$tlb $h $dlldata $iid $proxy $in '
               '$midl_includes $idlflags' % sys.executable))
    master_ninja.rule(
      'rc',
      description='RC $in',
      # Note: $in must be last otherwise rc.exe complains.
      command=('%s gyp-win-tool rc-wrapper '
               '$arch $rc $defines $resource_includes $rcflags /fo$out $in' %
               sys.executable))
    master_ninja.rule(
      'asm',
      description='ASM $out',
      command=('%s gyp-win-tool asm-wrapper '
               '$arch $asm $defines $includes $asmflags /c /Fo $out $in' %
               sys.executable))

  # Link/archive rules: posix (non-mac, non-win) first.
  if flavor != 'mac' and flavor != 'win':
    master_ninja.rule(
      'alink',
      description='AR $out',
      command='rm -f $out && $ar rcs $arflags $out $in')
    master_ninja.rule(
      'alink_thin',
      description='AR $out',
      command='rm -f $out && $ar rcsT $arflags $out $in')

    # This allows targets that only need to depend on $lib's API to declare an
    # order-only dependency on $lib.TOC and avoid relinking such downstream
    # dependencies when $lib changes only in non-public ways.
    # The resulting string leaves an uninterpolated %{suffix} which
    # is used in the final substitution below.
    mtime_preserving_solink_base = (
        'if [ ! -e $lib -o ! -e $lib.TOC ]; then '
        '%(solink)s && %(extract_toc)s > $lib.TOC; else '
        '%(solink)s && %(extract_toc)s > $lib.tmp && '
        'if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; '
        'fi; fi'
        % { 'solink':
              '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
            'extract_toc':
              ('{ $readelf -d $lib | grep SONAME ; '
               '$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})

    master_ninja.rule(
      'solink',
      description='SOLINK $lib',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
      rspfile='$link_file_list',
      rspfile_content=
          '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_module',
      description='SOLINK(module) $lib',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
      rspfile='$link_file_list',
      rspfile_content='-Wl,--start-group $in -Wl,--end-group $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'link',
      description='LINK $out',
      command=('$ld $ldflags -o $out '
               '-Wl,--start-group $in -Wl,--end-group $solibs $libs'),
      pool='link_pool')
  elif flavor == 'win':
    master_ninja.rule(
        'alink',
        description='LIB $out',
        command=('%s gyp-win-tool link-wrapper $arch False '
                 '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' %
                 sys.executable),
        rspfile='$out.rsp',
        rspfile_content='$in_newline $libflags')
    _AddWinLinkRules(master_ninja, embed_manifest=True)
    _AddWinLinkRules(master_ninja, embed_manifest=False)
  else:
    # Mac-only compile and link rules.
    master_ninja.rule(
      'objc',
      description='OBJC $out',
      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
               '$cflags_pch_objc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'objcxx',
      description='OBJCXX $out',
      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
               '$cflags_pch_objcc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'alink',
      description='LIBTOOL-STATIC $out, POSTBUILDS',
      command='rm -f $out && '
              './gyp-mac-tool filter-libtool libtool $libtool_flags '
              '-static -o $out $in'
              '$postbuilds')
    master_ninja.rule(
      'lipo',
      description='LIPO $out, POSTBUILDS',
      command='rm -f $out && lipo -create $in -output $out$postbuilds')
    master_ninja.rule(
      'solipo',
      description='SOLIPO $out, POSTBUILDS',
      command=(
          'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&'
          '%(extract_toc)s > $lib.TOC'
          % { 'extract_toc':
                '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
                'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}))

    # Record the public interface of $lib in $lib.TOC. See the corresponding
    # comment in the posix section above for details.
    solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
    mtime_preserving_solink_base = (
        'if [ ! -e $lib -o ! -e $lib.TOC ] || '
             # Always force dependent targets to relink if this library
             # reexports something. Handling this correctly would require
             # recursive TOC dumping but this is rare in practice, so punt.
             'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
          '%(solink)s && %(extract_toc)s > $lib.TOC; '
        'else '
          '%(solink)s && %(extract_toc)s > $lib.tmp && '
          'if ! cmp -s $lib.tmp $lib.TOC; then '
            'mv $lib.tmp $lib.TOC ; '
          'fi; '
        'fi'
        % { 'solink': solink_base,
            'extract_toc':
              '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
              'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})

    solink_suffix = '@$link_file_list$postbuilds'
    master_ninja.rule(
      'solink',
      description='SOLINK $lib, POSTBUILDS',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': solink_suffix,
                                              'type': '-shared'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_notoc',
      description='SOLINK $lib, POSTBUILDS',
      restat=True,
      command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_module',
      description='SOLINK(module) $lib, POSTBUILDS',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': solink_suffix,
                                              'type': '-bundle'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_module_notoc',
      description='SOLINK(module) $lib, POSTBUILDS',
      restat=True,
      command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'link',
      description='LINK $out, POSTBUILDS',
      command=('$ld $ldflags -o $out '
               '$in $solibs $libs$postbuilds'),
      pool='link_pool')
    master_ninja.rule(
      'preprocess_infoplist',
      description='PREPROCESS INFOPLIST $out',
      command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
               'plutil -convert xml1 $out $out'))
    master_ninja.rule(
      'copy_infoplist',
      description='COPY INFOPLIST $in',
      command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys')
    master_ninja.rule(
      'merge_infoplist',
      description='MERGE INFOPLISTS $in',
      command='$env ./gyp-mac-tool merge-info-plist $out $in')
    master_ninja.rule(
      'compile_xcassets',
      description='COMPILE XCASSETS $in',
      command='$env ./gyp-mac-tool compile-xcassets $keys $in')
    master_ninja.rule(
      'mac_tool',
      description='MACTOOL $mactool_cmd $in',
      command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary')
    master_ninja.rule(
      'package_framework',
      description='PACKAGE FRAMEWORK $out, POSTBUILDS',
      command='./gyp-mac-tool package-framework $out $version$postbuilds '
              '&& touch $out')
  # Flavor-neutral utility rules.
  if flavor == 'win':
    master_ninja.rule(
      'stamp',
      description='STAMP $out',
      command='%s gyp-win-tool stamp $out' % sys.executable)
    master_ninja.rule(
      'copy',
      description='COPY $in $out',
      command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable)
  else:
    master_ninja.rule(
      'stamp',
      description='STAMP $out',
      command='${postbuilds}touch $out')
    master_ninja.rule(
      'copy',
      description='COPY $in $out',
      command='rm -rf $out && cp -af $in $out')
  master_ninja.newline()

  all_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list,
                                        target_dicts,
                                        os.path.normpath(build_file)):
      all_targets.add(target)
  all_outputs = set()

  # target_outputs is a map from qualified target name to a Target object.
  target_outputs = {}
  # target_short_names is a map from target short name to a list of Target
  # objects.
  target_short_names = {}

  # short name of targets that were skipped because they didn't contain anything
  # interesting.
  # NOTE: there may be overlap between this an non_empty_target_names.
  empty_target_names = set()

  # Set of non-empty short target names.
  # NOTE: there may be overlap between this an empty_target_names.
  non_empty_target_names = set()

  for qualified_target in target_list:
    # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
    build_file, name, toolset = \
        gyp.common.ParseQualifiedTarget(qualified_target)

    this_make_global_settings = data[build_file].get('make_global_settings', [])
    assert make_global_settings == this_make_global_settings, (
        "make_global_settings needs to be the same for all targets. %s vs. %s" %
        (this_make_global_settings, make_global_settings))

    spec = target_dicts[qualified_target]
    if flavor == 'mac':
      gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)

    # If build_file is a symlink, we must not follow it because there's a chance
    # it could point to a path above toplevel_dir, and we cannot correctly deal
    # with that case at the moment.
    build_file = gyp.common.RelativePath(build_file, options.toplevel_dir,
                                         False)

    qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name,
                                                           toolset)
    hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()

    base_path = os.path.dirname(build_file)
    obj = 'obj'
    if toolset != 'target':
      obj += '.' + toolset
    output_file = os.path.join(obj, base_path, name + '.ninja')

    # Buffer the subninja in memory so empty targets produce no file.
    ninja_output = StringIO()
    writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir,
                         ninja_output,
                         toplevel_build, output_file,
                         flavor, toplevel_dir=options.toplevel_dir)

    target = writer.WriteSpec(spec, config_name, generator_flags)

    if ninja_output.tell() > 0:
      # Only create files for ninja files that actually have contents.
      with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
        ninja_file.write(ninja_output.getvalue())
      ninja_output.close()
      master_ninja.subninja(output_file)

    if target:
      if name != target.FinalOutput() and spec['toolset'] == 'target':
        target_short_names.setdefault(name, []).append(target)
      target_outputs[qualified_target] = target
      if qualified_target in all_targets:
        all_outputs.add(target.FinalOutput())
      non_empty_target_names.add(name)
    else:
      empty_target_names.add(name)

  if target_short_names:
    # Write a short name to build this target. This benefits both the
    # "build chrome" case as well as the gyp tests, which expect to be
    # able to run actions and build libraries by their short name.
    master_ninja.newline()
    master_ninja.comment('Short names for targets.')
    for short_name in target_short_names:
      master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
                                               target_short_names[short_name]])

  # Write phony targets for any empty targets that weren't written yet. As
  # short names are not necessarily unique only do this for short names that
  # haven't already been output for another target.
  empty_target_names = empty_target_names - non_empty_target_names
  if empty_target_names:
    master_ninja.newline()
    master_ninja.comment('Empty targets (output for completeness).')
    for name in sorted(empty_target_names):
      master_ninja.build(name, 'phony')

  if all_outputs:
    master_ninja.newline()
    master_ninja.build('all', 'phony', list(all_outputs))
    master_ninja.default(generator_flags.get('default_target', 'all'))

  master_ninja_file.close()
def PerformBuild(data, configurations, params):
  """Runs ninja once per configuration to build everything.
  Args:
    data: unused here; part of the generator PerformBuild contract.
    configurations: list of configuration names to build.
    params: generator params; params['options'].toplevel_dir locates out/.
  """
  options = params['options']
  for config in configurations:
    # Each configuration has its own build directory under out/.
    builddir = os.path.join(options.toplevel_dir, 'out', config)
    arguments = ['ninja', '-C', builddir]
    print 'Building [%s]: %s' % (config, arguments)
    # check_call raises CalledProcessError if ninja exits non-zero.
    subprocess.check_call(arguments)
def CallGenerateOutputForConfig(arglist):
  """Multiprocessing worker entry point: generate output for one config.
  arglist is a (target_list, target_dicts, data, params, config_name)
  tuple, packed so this function can be driven by Pool.map.
  """
  # Ignore the interrupt signal so that the parent process catches it and
  # kills all multiprocessing children.
  signal.signal(signal.SIGINT, signal.SIG_IGN)
  GenerateOutputForConfig(*arglist)
def GenerateOutput(target_list, target_dicts, data, params):
  """Generates ninja build files for every configuration.
  Dispatches either to a single configuration (when generator_flags
  specifies one) or to all configurations, optionally in parallel.
  """
  # Update target_dicts for iOS device builds.
  target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
      target_dicts)
  user_config = params.get('generator_flags', {}).get('config', None)
  if gyp.common.GetFlavor(params) == 'win':
    # Windows needs target sharding and large-PDB shim insertion first.
    target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
    target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
        target_list, target_dicts, generator_default_variables)
  if user_config:
    GenerateOutputForConfig(target_list, target_dicts, data, params,
                            user_config)
  else:
    # All targets share the same configuration names; read them off the first.
    config_names = target_dicts[target_list[0]]['configurations'].keys()
    if params['parallel']:
      try:
        pool = multiprocessing.Pool(len(config_names))
        arglists = []
        for config_name in config_names:
          arglists.append(
              (target_list, target_dicts, data, params, config_name))
        pool.map(CallGenerateOutputForConfig, arglists)
      except KeyboardInterrupt, e:
        # Workers ignore SIGINT (see CallGenerateOutputForConfig); the
        # parent terminates them here on Ctrl-C.
        pool.terminate()
        raise e
    else:
      for config_name in config_names:
        GenerateOutputForConfig(target_list, target_dicts, data, params,
                                config_name)
|
albertomurillo/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/checkpoint/checkpoint_access_rule_facts.py
|
30
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# Standard Ansible module metadata consumed by ansible-doc and tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'network'}
DOCUMENTATION = """
---
module: checkpoint_access_rule_facts
short_description: Get access rules objects facts on Checkpoint over Web Services API
description:
- Get access rules objects facts on Checkpoint devices.
All operations are performed over Web Services API.
version_added: "2.8"
author: "Ansible by Red Hat (@rcarrillocruz)"
options:
name:
description:
- Name of the access rule. If not provided, UID is required.
type: str
uid:
description:
- UID of the access rule. If not provided, name is required.
type: str
layer:
description:
- Layer the access rule is attached to.
required: True
type: str
"""
EXAMPLES = """
- name: Get access rule facts
checkpoint_access_rule_facts:
layer: Network
name: "Drop attacker"
"""
RETURN = """
ansible_facts:
description: The checkpoint access rule object facts.
returned: always.
type: list
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.six.moves.urllib.error import HTTPError
import json
def get_access_rule(module, connection):
    """Query a Checkpoint access rule over the Web Services API.

    Looks the rule up by UID when given, otherwise by name; the layer is
    always required by the API.

    Args:
        module: AnsibleModule providing 'name', 'uid' and 'layer' params.
        connection: persistent connection with a send_request(path, payload)
            method.

    Returns:
        (code, response) tuple as returned by the connection.
    """
    name = module.params['name']
    uid = module.params['uid']
    layer = module.params['layer']
    if uid:
        payload = {'uid': uid, 'layer': layer}
    elif name:
        payload = {'name': name, 'layer': layer}
    else:
        # Bug fix: previously neither-provided fell through with `payload`
        # unbound, raising UnboundLocalError. Fail with a clear message
        # instead (fail_json exits the module).
        module.fail_json(msg='Either name or uid must be provided')
    code, response = connection.send_request('/web_api/show-access-rule',
                                             payload)
    return code, response
def main():
    """Module entry point: look up one access rule and return it as facts."""
    argument_spec = dict(
        name=dict(type='str'),
        uid=dict(type='str'),
        layer=dict(type='str', required=True),
    )
    module = AnsibleModule(argument_spec=argument_spec)
    # NOTE(review): relies on the private _socket_path attribute to open the
    # persistent connection — confirm against the connection plugin contract.
    connection = Connection(module._socket_path)
    code, response = get_access_rule(module, connection)
    if code == 200:
        module.exit_json(ansible_facts=dict(checkpoint_access_rules=response))
    else:
        module.fail_json(msg='Checkpoint device returned error {0} with message {1}'.format(code, response))
# Ansible executes the module file as a script.
if __name__ == '__main__':
    main()
|
raspberrypi-tw/camera-python
|
refs/heads/master
|
07_face_detection/image_face_detect.py
|
1
|
#!/usr/bin/python
#+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#|R|a|s|p|b|e|r|r|y|P|i|.|c|o|m|.|t|w|
#+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
# image_face_detect.py
# Face detect from image
#
# Author : Shantnu
# Date : 06/06/2017
# Origin : https://github.com/shantnu/FaceDetect/blob/master/face_detect.py
# Usage : python image_face_detect.py abba.png haarcascade_frontalface_default.xml
import cv2
import sys
# Get user supplied values
imagePath = sys.argv[1]  # path of the image file to scan
cascPath = sys.argv[2]   # path of the Haar cascade XML file
# Create the haar cascade
faceCascade = cv2.CascadeClassifier(cascPath)
# Read the image
# NOTE(review): cv2.imread returns None for unreadable paths, which would
# make cvtColor raise — assumes imagePath is a valid image file.
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
# Detect faces in the image
faces = faceCascade.detectMultiScale(
    gray,
    scaleFactor=1.1,   # image pyramid shrink factor per detection scale
    minNeighbors=5,    # neighbor rectangles required to keep a detection
    minSize=(30, 30),  # ignore candidate faces smaller than 30x30 px
    # NOTE(review): the cv2.cv namespace is from the OpenCV 1.x/2.x API and
    # was removed in OpenCV 3 — confirm the installed OpenCV version.
    flags = cv2.cv.CV_HAAR_SCALE_IMAGE
)
print "Found {0} faces!".format(len(faces))
# Draw a rectangle around the faces
for (x, y, w, h) in faces:
    cv2.rectangle(image, (x, y), (x+w, y+h), (0, 255, 0), 2)
cv2.imshow("preview", image)
cv2.waitKey(0)  # block until any key is pressed
cv2.destroyAllWindows()
|
grevutiu-gabriel/sympy
|
refs/heads/master
|
sympy/matrices/tests/test_densesolve.py
|
80
|
from sympy.matrices.densesolve import LU_solve, rref_solve, cholesky_solve
from sympy import Dummy
from sympy import QQ
def test_LU_solve():
    """LU-decomposition solve of a 3x3 rational linear system."""
    x, y, z = (Dummy(label) for label in 'xyz')
    coefficients = [[QQ(2), QQ(-1), QQ(-2)],
                    [QQ(-4), QQ(6), QQ(3)],
                    [QQ(-4), QQ(-2), QQ(8)]]
    unknowns = [[x], [y], [z]]
    rhs = [[QQ(-1)], [QQ(13)], [QQ(-6)]]
    assert LU_solve(coefficients, unknowns, rhs, QQ) == [
        [QQ(2, 1)], [QQ(3, 1)], [QQ(1, 1)]]
def test_cholesky_solve():
    """Cholesky solve of a symmetric positive-definite 3x3 system."""
    x, y, z = (Dummy(label) for label in 'xyz')
    spd_matrix = [[QQ(25), QQ(15), QQ(-5)],
                  [QQ(15), QQ(18), QQ(0)],
                  [QQ(-5), QQ(0), QQ(11)]]
    unknowns = [[x], [y], [z]]
    rhs = [[QQ(2)], [QQ(3)], [QQ(1)]]
    assert cholesky_solve(spd_matrix, unknowns, rhs, QQ) == [
        [QQ(-1, 225)], [QQ(23, 135)], [QQ(4, 45)]]
def test_rref_solve():
    """Reduced-row-echelon-form solve of the same SPD 3x3 system."""
    x, y, z = (Dummy(label) for label in 'xyz')
    spd_matrix = [[QQ(25), QQ(15), QQ(-5)],
                  [QQ(15), QQ(18), QQ(0)],
                  [QQ(-5), QQ(0), QQ(11)]]
    unknowns = [[x], [y], [z]]
    rhs = [[QQ(2)], [QQ(3)], [QQ(1)]]
    assert rref_solve(spd_matrix, unknowns, rhs, QQ) == [
        [QQ(-1, 225)], [QQ(23, 135)], [QQ(4, 45)]]
|
wiltonlazary/arangodb
|
refs/heads/devel
|
3rdParty/V8/V8-5.0.71.39/buildtools/checkdeps/checkdeps.py
|
4
|
#!/usr/bin/env python
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that files include headers from allowed directories.
Checks DEPS files in the source tree for rules, and applies those rules to
"#include" and "import" directives in the .cpp and .java source files.
Any source file including something not permitted by the DEPS files will fail.
See builddeps.py for a detailed description of the DEPS format.
"""
import os
import optparse
import re
import sys
import cpp_checker
import java_checker
import results
from builddeps import DepsBuilder
from rules import Rule, Rules
def _IsTestFile(filename):
"""Does a rudimentary check to try to skip test files; this could be
improved but is good enough for now.
"""
return re.match('(test|mock|dummy)_.*|.*_[a-z]*test\.(cc|mm|java)', filename)
class DepsChecker(DepsBuilder):
  """Parses include_rules from DEPS files and verifies files in the
  source tree against them.
  """
  def __init__(self,
               base_directory=None,
               verbose=False,
               being_tested=False,
               ignore_temp_rules=False,
               skip_tests=False,
               resolve_dotdot=False):
    """Creates a new DepsChecker.
    Args:
      base_directory: OS-compatible path to root of checkout, e.g. C:\chr\src.
      verbose: Set to true for debug output.
      being_tested: Set to true to ignore the DEPS file at tools/checkdeps/DEPS.
      ignore_temp_rules: Ignore rules that start with Rule.TEMP_ALLOW ("!").
      skip_tests: Set to true to skip checking test files (best effort).
      resolve_dotdot: Resolve leading ../ in include paths before checking.
    """
    DepsBuilder.__init__(
        self, base_directory, verbose, being_tested, ignore_temp_rules)
    self._skip_tests = skip_tests
    self._resolve_dotdot = resolve_dotdot
    # Formatter may be swapped by callers (see main()) for other outputs.
    self.results_formatter = results.NormalResultsFormatter(verbose)
  def Report(self):
    """Prints a report of results, and returns an exit code for the process."""
    if self.results_formatter.GetResults():
      self.results_formatter.PrintResults()
      return 1
    print '\nSUCCESS\n'
    return 0
  def CheckDirectory(self, start_dir):
    """Checks all relevant source files in the specified directory and
    its subdirectories for compliance with DEPS rules throughout the
    tree (starting at |self.base_directory|). |start_dir| must be a
    subdirectory of |self.base_directory|.
    On completion, self.results_formatter has the results of
    processing, and calling Report() will print a report of results.
    """
    java = java_checker.JavaChecker(self.base_directory, self.verbose)
    cpp = cpp_checker.CppChecker(
        self.verbose, self._resolve_dotdot, self.base_directory)
    # Map each handled file extension to the checker responsible for it.
    checkers = dict(
        (extension, checker)
        for checker in [java, cpp] for extension in checker.EXTENSIONS)
    for rules, file_paths in self.GetAllRulesAndFiles(start_dir):
      for full_name in file_paths:
        if self._skip_tests and _IsTestFile(os.path.basename(full_name)):
          continue
        file_extension = os.path.splitext(full_name)[1]
        # Files with no registered checker (by extension) are ignored.
        if not file_extension in checkers:
          continue
        checker = checkers[file_extension]
        file_status = checker.CheckFile(rules, full_name)
        if file_status.HasViolations():
          self.results_formatter.AddError(file_status)
  def CheckAddedCppIncludes(self, added_includes):
    """This is used from PRESUBMIT.py to check new #include statements added in
    the change being presubmit checked.
    Args:
      added_includes: ((file_path, (include_line, include_line, ...), ...)
    Return:
      A list of tuples, (bad_file_path, rule_type, rule_description)
      where rule_type is one of Rule.DISALLOW or Rule.TEMP_ALLOW and
      rule_description is human-readable. Empty if no problems.
    """
    cpp = cpp_checker.CppChecker(self.verbose)
    problems = []
    for file_path, include_lines in added_includes:
      if not cpp.IsCppFile(file_path):
        continue
      rules_for_file = self.GetDirectoryRules(os.path.dirname(file_path))
      # No rules for this directory means nothing to enforce.
      if not rules_for_file:
        continue
      for line in include_lines:
        is_include, violation = cpp.CheckLine(
            rules_for_file, line, file_path, True)
        if not violation:
          continue
        rule_type = violation.violated_rule.allow
        # Explicitly allowed includes are not problems.
        if rule_type == Rule.ALLOW:
          continue
        violation_text = results.NormalResultsFormatter.FormatViolation(
            violation, self.verbose)
        problems.append((file_path, rule_type, violation_text))
    return problems
def PrintUsage():
  """Prints the command-line usage summary for direct invocation."""
  print """Usage: python checkdeps.py [--root <root>] [tocheck]
  --root ROOT Specifies the repository root. This defaults to "../../.."
              relative to the script file. This will be correct given the
              normal location of the script in "<root>/tools/checkdeps".
  --(others) There are a few lesser-used options; run with --help to show them.
  tocheck Specifies the directory, relative to root, to check. This defaults
          to "." so it checks everything.
Examples:
  python checkdeps.py
  python checkdeps.py --root c:\\source chrome"""
def main():
  """Command-line driver: parses options, runs the check, returns exit code."""
  option_parser = optparse.OptionParser()
  option_parser.add_option(
      '', '--root',
      default='', dest='base_directory',
      help='Specifies the repository root. This defaults '
           'to "../../.." relative to the script file, which '
           'will normally be the repository root.')
  option_parser.add_option(
      '', '--ignore-temp-rules',
      action='store_true', dest='ignore_temp_rules', default=False,
      help='Ignore !-prefixed (temporary) rules.')
  option_parser.add_option(
      '', '--generate-temp-rules',
      action='store_true', dest='generate_temp_rules', default=False,
      help='Print rules to temporarily allow files that fail '
           'dependency checking.')
  option_parser.add_option(
      '', '--count-violations',
      action='store_true', dest='count_violations', default=False,
      help='Count #includes in violation of intended rules.')
  option_parser.add_option(
      '', '--skip-tests',
      action='store_true', dest='skip_tests', default=False,
      help='Skip checking test files (best effort).')
  option_parser.add_option(
      '-v', '--verbose',
      action='store_true', default=False,
      help='Print debug logging')
  option_parser.add_option(
      '', '--json',
      help='Path to JSON output file')
  option_parser.add_option(
      '', '--resolve-dotdot',
      action='store_true', dest='resolve_dotdot', default=False,
      help='resolve leading ../ in include directive paths relative '
           'to the file perfoming the inclusion.')
  options, args = option_parser.parse_args()
  deps_checker = DepsChecker(options.base_directory,
                             verbose=options.verbose,
                             ignore_temp_rules=options.ignore_temp_rules,
                             skip_tests=options.skip_tests,
                             resolve_dotdot=options.resolve_dotdot)
  base_directory = deps_checker.base_directory # Default if needed, normalized
  # Figure out which directory we have to check.
  start_dir = base_directory
  if len(args) == 1:
    # Directory specified. Start here. It's supposed to be relative to the
    # base directory.
    start_dir = os.path.abspath(os.path.join(base_directory, args[0]))
  elif len(args) >= 2 or (options.generate_temp_rules and
                          options.count_violations):
    # More than one argument, or incompatible flags, we don't handle this.
    PrintUsage()
    return 1
  if not start_dir.startswith(deps_checker.base_directory):
    print 'Directory to check must be a subdirectory of the base directory,'
    print 'but %s is not a subdirectory of %s' % (start_dir, base_directory)
    return 1
  print 'Using base directory:', base_directory
  print 'Checking:', start_dir
  # --generate-temp-rules / --count-violations select alternative formatters;
  # --json may additionally wrap whichever formatter was chosen.
  if options.generate_temp_rules:
    deps_checker.results_formatter = results.TemporaryRulesFormatter()
  elif options.count_violations:
    deps_checker.results_formatter = results.CountViolationsFormatter()
  if options.json:
    deps_checker.results_formatter = results.JSONResultsFormatter(
        options.json, deps_checker.results_formatter)
  deps_checker.CheckDirectory(start_dir)
  return deps_checker.Report()
# Script entry point; the process exit status mirrors Report()'s result.
if '__main__' == __name__:
  sys.exit(main())
|
daviddoria/PointGraphsPhase1
|
refs/heads/PointGraphsPhase1
|
Examples/VisualizationAlgorithms/Python/ExtractGeometry.py
|
15
|
#!/usr/bin/env python
# This example shows how to extract a piece of a dataset using an
# implicit function. In this case the implicit function is formed by
# the boolean combination of two ellipsoids.
import vtk
# Here we create two ellipsoidal implicit functions and boolean them
# together to form a "cross" shaped implicit function.
quadric = vtk.vtkQuadric()
quadric.SetCoefficients(.5, 1, .2, 0, .1, 0, 0, .2, 0, 0)
# Sample the implicit quadric onto a 50x50x50 grid; normals are not needed
# for geometry extraction, so skip computing them.
sample = vtk.vtkSampleFunction()
sample.SetSampleDimensions(50, 50, 50)
sample.SetImplicitFunction(quadric)
sample.ComputeNormalsOff()
# Two spheres, each squashed by a non-uniform scale, form the two ellipsoids.
trans = vtk.vtkTransform()
trans.Scale(1, .5, .333)
sphere = vtk.vtkSphere()
sphere.SetRadius(0.25)
sphere.SetTransform(trans)
trans2 = vtk.vtkTransform()
trans2.Scale(.25, .5, 1.0)
sphere2 = vtk.vtkSphere()
sphere2.SetRadius(0.25)
sphere2.SetTransform(trans2)
union = vtk.vtkImplicitBoolean()
union.AddFunction(sphere)
union.AddFunction(sphere2)
union.SetOperationType(0) #union
# Here is where it gets interesting. The implicit function is used to
# extract those cells completely inside the function. They are then
# shrunk to help show what was extracted.
extract = vtk.vtkExtractGeometry()
extract.SetInputConnection(sample.GetOutputPort())
extract.SetImplicitFunction(union)
shrink = vtk.vtkShrinkFilter()
shrink.SetInputConnection(extract.GetOutputPort())
shrink.SetShrinkFactor(0.5)
dataMapper = vtk.vtkDataSetMapper()
dataMapper.SetInputConnection(shrink.GetOutputPort())
dataActor = vtk.vtkActor()
dataActor.SetMapper(dataMapper)
# The outline gives context to the original data.
outline = vtk.vtkOutlineFilter()
outline.SetInputConnection(sample.GetOutputPort())
outlineMapper = vtk.vtkPolyDataMapper()
outlineMapper.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(outlineMapper)
outlineProp = outlineActor.GetProperty()
outlineProp.SetColor(0, 0, 0)
# The usual rendering stuff is created.
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer, set the background and size
ren.AddActor(outlineActor)
ren.AddActor(dataActor)
ren.SetBackground(1, 1, 1)
renWin.SetSize(500, 500)
ren.ResetCamera()
ren.GetActiveCamera().Zoom(1.5)
iren.Initialize()
renWin.Render()
# Hand control to the interactor's event loop; blocks until window close.
iren.Start()
|
gnina/scripts
|
refs/heads/master
|
bootstrap.py
|
1
|
#!/usr/bin/env python3
import predict
import sklearn.metrics
import argparse, sys
import os
import numpy as np
import glob
import re
import matplotlib.pyplot as plt
def calc_auc(predictions):
    """Compute the ROC AUC from prediction lines.

    Each line is expected to look like "<score> <label> ..." where the
    score is the model output and the label the true class.

    Args:
        predictions: iterable of whitespace-separated prediction strings.

    Returns:
        The ROC AUC as computed by sklearn (float).
    """
    y_true = []
    y_score = []
    for line in predictions:
        # split() without an argument tolerates repeated spaces, tabs and
        # the trailing newline, unlike the original split(" ") which
        # produced empty fields (and float('') crashes) on double spaces.
        values = line.split()
        y_true.append(float(values[1]))
        y_score.append(float(values[0]))
    auc = sklearn.metrics.roc_auc_score(y_true, y_score)
    return auc
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='bootstrap(sampling with replacement) test')
    parser.add_argument('-m','--model',type=str,required=True,help="Model template. Must use TESTFILE with unshuffled, unbalanced input")
    parser.add_argument('-w','--weights',type=str,required=True,help="Model weights (.caffemodel)")
    parser.add_argument('-i','--input',type=str,required=True,help="Input .types file to predict")
    parser.add_argument('-g','--gpu',type=int,help='Specify GPU to run on',default=-1)
    parser.add_argument('-o','--output',type=str,default='',help='Output file name,default= predict_[model]_[input]')
    parser.add_argument('--iterations',type=int,default=1000,help="number of times to bootstrap")
    parser.add_argument('-k','--keep',action='store_true',default=False,help="Don't delete prototxt files")
    parser.add_argument('-n', '--number',action='store_true',default=False,help="if true uses caffemodel/input as is. if false uses all folds")
    parser.add_argument('--max_score',action='store_true',default=False,help="take max score per ligand as its score")
    parser.add_argument('--notcalc_predictions', type=str, default='',help='file of predictions')
    args = parser.parse_args()
    # Derive a default output name from the model and input file names.
    if args.output == '':
        output = 'bootstrap_%s_%s'%(args.model, args.input)
    else:
        output = args.output
    outname=output
    predictions=[]
    if args.notcalc_predictions=='':
        cm = args.weights
        ts = args.input
        if not args.number:
            # Expand the given fold into a glob over all folds: replace the
            # fold digit in the caffemodel name and mark the .types name
            # with a [NUMBER] placeholder to be filled per fold below.
            foldnum = re.search('.[0-9]_iter',cm).group()
            cm=cm.replace(foldnum, '.[0-9]_iter')
            foldnum = re.search('[0-9].types',ts).group()
            ts=ts.replace(foldnum, '[NUMBER].types')
        for caffemodel in glob.glob(cm):
            testset = ts
            if not args.number:
                # Pair each caffemodel with the test set of the same fold.
                num = re.search('.[0-9]_iter',caffemodel).group()
                num=re.search(r'\d+', num).group()
                testset = ts.replace('[NUMBER]',num)
            args.input = testset
            args.weights = caffemodel
            predictions.extend(predict.predict_lines(args))
    elif args.notcalc_predictions != '':
        # Predictions were precomputed; just read them in.
        for line in open(args.notcalc_predictions).readlines():
            predictions.append(line)
    all_aucs=[]
    # Bootstrap: resample the predictions with replacement and recompute
    # the AUC for each resample to estimate its variability.
    for _ in range(args.iterations):
        sample = np.random.choice(predictions,len(predictions), replace=True)
        all_aucs.append(calc_auc(sample))
    mean=np.mean(all_aucs)
    std_dev = np.std(all_aucs)
    txt = 'mean: %.2f standard deviation: %.2f'%(mean,std_dev)
    print(txt)
    output = open(output, 'w')
    output.writelines('%.2f\n' %auc for auc in all_aucs)
    output.write(txt)
    output.close()
    # Box plot of the bootstrapped AUC distribution.
    plt.figure()
    plt.boxplot(all_aucs,0,'rs',0)
    plt.title('%s AUCs'%args.output, fontsize=22)
    plt.xlabel('AUC(%s)'%txt, fontsize=18)
    plt.savefig('%s_plot.pdf'%outname,bbox_inches='tight')
|
alvin319/CarnotKE
|
refs/heads/master
|
jyhton/bugtests/test342p/doimp.py
|
13
|
# Load module 'test342m' dynamically via __import__ and re-export its
# ``kind`` attribute from this module.
kind = __import__('test342m').kind
|
kyuridenamida/atcoder-tools
|
refs/heads/master
|
tests/resources/test_tester/test_run_single_test_decimal_mixed/test_decimal.py
|
2
|
#!/usr/bin/python3
# Read one decimal number from stdin and print it shifted by 0.001,
# followed by the literal verdict string "POSSIBLE".
x = float(input())
print(x + 0.001, "POSSIBLE")
|
pywkm/earthpic
|
refs/heads/master
|
earthpic/earth.py
|
1
|
#!/usr/bin/env python3
"""
Main console script. Call with '-h' argument for help.
"""
import argparse
import logging
import sched
import time
from datetime import datetime, timedelta
import pytz
from pathlib import Path
from earthpic.himawari8 import EarthPhoto
from earthpic.utils import (
set_wallpaper,
scheduled_downloading,
info_console_handler,
debug_console_handler,
)
logger = logging.getLogger(__name__)
# Default to 20 minutes in the past: the newest satellite image is not
# published immediately, so "now" would usually be unavailable.
start_time = datetime.now(pytz.utc) - timedelta(minutes=20)
default_date = start_time.strftime('%Y-%m-%d')
default_time = start_time.strftime('%H:%M')
# Command-line interface; consumed by main() below.
parser = argparse.ArgumentParser(
    prog='earthpic',
    # Fixed typo in user-facing text: "satelite" -> "satellite".
    description='Download satellite Earth photo(s)',
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
    '-d',
    '--date',
    metavar='DATE',
    default=default_date,
    help='date of the photo [format: YYYY-MM-DD]',
)
parser.add_argument(
    '-t',
    '--time',
    metavar='TIME',
    default=default_time,
    # Fixed typo in user-facing text: "fotmat" -> "format".
    help='time of the photo [format: hh:mm]',
)
parser.add_argument(
    '-s',
    '--scale',
    type=int,
    metavar='SCALE',
    choices=(1, 2, 4, 8, 16, 20),
    default=2,
    help='scale of the photo, choices: 1, 2, 4, 8, 16 or 20'
)
parser.add_argument(
    '-w',
    '--wallpaper',
    help='set downloaded image as desktop wallpaper',
    action='store_true',
)
parser.add_argument(
    '-v',
    '--verbose',
    help='increase output verbosity',
    action='store_true',
)
parser.add_argument(
    '--debug',
    action='store_true',
    help=argparse.SUPPRESS,
)
parser.add_argument(
    '-l',
    '--last',
    type=int,
    metavar='N_PHOTOS',
    default=1,
    help='process N photos to given date'
)
parser.add_argument(
    '-b',
    '--batch',
    help='run batch downloading photos. To stop, press ctrl+c.',
    action='store_true',
)
default_path = str(Path('.') / 'images')
parser.add_argument(
    '-p',
    '--path',
    metavar='PATH',
    default=default_path,
    help='path where images will be saved',
)
def main():
    """Parse CLI args, download photo(s) and optionally set the wallpaper."""
    args = parser.parse_args()
    # --debug outranks --verbose; each attaches a console log handler.
    if args.debug:
        logging.getLogger().addHandler(debug_console_handler)
    elif args.verbose:
        logging.getLogger().addHandler(info_console_handler)
    logger.debug(args)
    # Downloading big images needs confirmation
    if args.scale > 4:
        answer = input(
            'Are you sure you want to download photo of that size '
            '({0}x{0}={1} tiles)?'.format(args.scale, args.scale ** 2) +
            ' [Y/n] '
        )
        if answer.lower() in ('n', 'no'):
            return
    earth_photo = EarthPhoto(args.scale, args.path)
    date_string = '{} {}'.format(args.date, args.time)
    date = datetime.strptime(date_string, '%Y-%m-%d %H:%M')
    date = pytz.utc.localize(date)
    # Clamp future requests to the newest plausibly-available image time.
    if date > start_time:
        date = start_time
    logger.debug(' date: {}'.format(date))
    logger.debug('start_date: {}'.format(start_time))
    if args.batch:
        if args.last > 1:
            print("Don't use batch option when defining '--last N' option")
            return
        # Batch mode: keep downloading on a schedule until Ctrl+C.
        scheduler = sched.scheduler(time.time, time.sleep)
        scheduler.enter(
            0,  # 0s ie. instant execution
            1,
            scheduled_downloading,
            (earth_photo, date, args.wallpaper, scheduler),
        )
        try:
            scheduler.run()
        except KeyboardInterrupt:
            return
    # One-shot mode: fetch the last N photos at 10-minute intervals,
    # ending at the requested date.
    for delta in range(args.last-1, -1, -1):
        image_path = earth_photo.download(date - timedelta(minutes=10 * delta))
        logger.debug('image_path: {}'.format(image_path))
        if args.wallpaper and image_path:
            set_wallpaper(image_path)
# Script entry point.
if __name__ == "__main__":
    main()
|
concerned3rdparty/jirafe-python
|
refs/heads/master
|
jirafe/models/BaseProduct.py
|
1
|
#!/usr/bin/env python
"""
Copyright 2014 Jirafe, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class BaseProduct:
    """Base product resource model.

    NOTE: This class is auto generated by the swagger code generator
    program. Do not edit the class manually.
    """

    def __init__(self):
        # Maps each attribute name to its declared swagger type.
        self.swaggerTypes = dict(id='str', code='str', name='str')
        # All attributes start out unset until populated from API data.
        self.id = None
        self.code = None
        self.name = None
|
tailorian/Sick-Beard
|
refs/heads/ThePirateBay
|
lib/unidecode/x0c9.py
|
253
|
# Romanization table for the Hangul syllables covered by this page.
# Every entry is a fixed (initial+medial) base followed by one of the 28
# possible syllable finals, in Unicode codepoint order, so the 256 strings
# are generated instead of listed one by one.  The slice drops the first
# four combinations (the 'ju' block begins in the previous page) and stops
# after 'jjaed' (the 'jjae' block continues in the next page).
data = tuple(
    base + final
    for base in ('ju', 'jweo', 'jwe', 'jwi', 'jyu',
                 'jeu', 'jyi', 'ji', 'jja', 'jjae')
    for final in ('', 'g', 'gg', 'gs', 'n', 'nj', 'nh', 'd', 'l', 'lg',
                  'lm', 'lb', 'ls', 'lt', 'lp', 'lh', 'm', 'b', 'bs', 's',
                  'ss', 'ng', 'j', 'c', 'k', 't', 'p', 'h')
)[4:260]
|
DBrianKimmel/PyHouse
|
refs/heads/develop
|
Project/src/Modules/Core/Utilities/_test/test_user_validation.py
|
1
|
"""
@name: PyHouse/src/Modules.Core.Utilities._test/test_user_validation.py
@author: D. Brian Kimmel
@contact: D.BrianKimmel@gmail.com
@copyright: (c) 2015-2017 by D. Brian Kimmel
@license: MIT License
@note: Created on Oct 30, 2015
@Summary:
"""
# Import system type stuff
from twisted.trial import unittest
class Test(unittest.TestCase):
    """Placeholder test case for the user-validation utilities.
    All hooks are currently no-ops; real assertions are yet to be written.
    """
    def setUp(self):
        # Nothing to prepare yet.
        pass
    def tearDown(self):
        # Nothing to clean up yet.
        pass
    def testName(self):
        # TODO: add real user-validation assertions.
        pass
# ## END DBK
|
floresconlimon/qutebrowser
|
refs/heads/master
|
tests/unit/utils/usertypes/test_timer.py
|
8
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2015 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Tests for Timer."""
from qutebrowser.utils import usertypes
import pytest
from PyQt5.QtCore import QObject
class Parent(QObject):
    """Class for test_parent()."""
    # No behavior of its own; exists only to act as a real QObject parent.
    pass
def test_parent():
    """A Timer constructed with a parent must report that parent back."""
    owner = Parent()
    timer = usertypes.Timer(owner)
    assert timer.parent() is owner
def test_named():
    """A named Timer exposes its name via _name, objectName and repr."""
    timer = usertypes.Timer(name='foobar')
    assert timer.objectName() == 'foobar'
    assert timer._name == 'foobar'
    assert repr(timer) == "<qutebrowser.utils.usertypes.Timer name='foobar'>"
def test_unnamed():
    """A Timer without a name falls back to 'unnamed' (empty objectName)."""
    timer = usertypes.Timer()
    assert timer._name == 'unnamed'
    assert not timer.objectName()
    assert repr(timer) == "<qutebrowser.utils.usertypes.Timer name='unnamed'>"
def test_set_interval_overflow():
    """setInterval must raise OverflowError for values beyond the C range."""
    timer = usertypes.Timer()
    with pytest.raises(OverflowError):
        timer.setInterval(2 ** 64)
def test_start_overflow():
    """start must raise OverflowError for values beyond the C range."""
    timer = usertypes.Timer()
    with pytest.raises(OverflowError):
        timer.start(2 ** 64)
def test_timeout_start(qtbot):
    """The timeout signal fires after start(interval)."""
    timer = usertypes.Timer()
    # raising=True makes qtbot fail the test if the signal never arrives.
    with qtbot.waitSignal(timer.timeout, timeout=3000, raising=True):
        timer.start(200)
def test_timeout_set_interval(qtbot):
    """The timeout signal fires after setInterval() followed by start()."""
    timer = usertypes.Timer()
    # raising=True makes qtbot fail the test if the signal never arrives.
    with qtbot.waitSignal(timer.timeout, timeout=3000, raising=True):
        timer.setInterval(200)
        timer.start()
|
craynot/django
|
refs/heads/master
|
tests/check_framework/models.py
|
396
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class SimpleModel(models.Model):
    """Minimal model: one integer field plus an explicitly assigned manager."""
    # Plain integer column with no options.
    field = models.IntegerField()
    # Manager bound under a non-default attribute name.
    manager = models.manager.Manager()
|
grap/OpenUpgrade
|
refs/heads/8.0
|
addons/marketing_crm/__init__.py
|
378
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-TODAY OpenERP SA (http://www.openerp.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import models
|
maykinmedia/django-mailer-2
|
refs/heads/master
|
django_mailer/testapp/tests/engine.py
|
28
|
from django.test import TestCase
from django_mailer import engine, settings
from django_mailer.lockfile import FileLock
from StringIO import StringIO
import logging
import time
class LockTest(TestCase):
    """
    Tests for Django Mailer trying to send mail when the lock is already in
    place.
    """
    def setUp(self):
        """Capture django_mailer's log output and pre-acquire the send lock."""
        # Create somewhere to store the log debug output.
        self.output = StringIO()
        # Create a log handler which can capture the log debug output.
        self.handler = logging.StreamHandler(self.output)
        self.handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter('%(message)s')
        self.handler.setFormatter(formatter)
        # Add the log handler.
        logger = logging.getLogger('django_mailer')
        logger.addHandler(self.handler)
        # Set the LOCK_WAIT_TIMEOUT to the default value.
        self.original_timeout = settings.LOCK_WAIT_TIMEOUT
        settings.LOCK_WAIT_TIMEOUT = 0
        # Use a test lock-file name in case something goes wrong, then emulate
        # that the lock file has already been acquired by another process.
        self.original_lock_path = engine.LOCK_PATH
        engine.LOCK_PATH += '.mailer-test'
        self.lock = FileLock(engine.LOCK_PATH)
        # A different unique_name makes the lock look foreign-owned to engine.
        self.lock.unique_name += '.mailer_test'
        self.lock.acquire(0)
    def tearDown(self):
        """Undo the logging, settings and lock-file changes made in setUp."""
        # Remove the log handler.
        logger = logging.getLogger('django_mailer')
        logger.removeHandler(self.handler)
        # Revert the LOCK_WAIT_TIMEOUT to its original value.
        settings.LOCK_WAIT_TIMEOUT = self.original_timeout
        # Revert the lock file unique name
        engine.LOCK_PATH = self.original_lock_path
        self.lock.release()
    def test_locked(self):
        """send_all() must exit cleanly, with and without a wait timeout."""
        # Acquire the lock so that send_all will fail.
        engine.send_all()
        self.output.seek(0)
        self.assertEqual(self.output.readlines()[-1].strip(),
                         'Lock already in place. Exiting.')
        # Try with a timeout.
        settings.LOCK_WAIT_TIMEOUT = .1
        engine.send_all()
        self.output.seek(0)
        self.assertEqual(self.output.readlines()[-1].strip(),
                         'Waiting for the lock timed out. Exiting.')
    def test_locked_timeoutbug(self):
        """Regression: a lock attempt that takes zero time must still exit."""
        # We want to emulate the lock acquiring taking no time, so the next
        # three calls to time.time() always return 0 (then set it back to the
        # real function).
        original_time = time.time
        global time_call_count
        time_call_count = 0
        def fake_time():
            # Patched-in replacement for time.time(); restores the real
            # function after the third call.
            global time_call_count
            time_call_count = time_call_count + 1
            if time_call_count >= 3:
                time.time = original_time
            return 0
        time.time = fake_time
        try:
            engine.send_all()
            self.output.seek(0)
            self.assertEqual(self.output.readlines()[-1].strip(),
                             'Lock already in place. Exiting.')
        finally:
            time.time = original_time
|
aurelijusb/arangodb
|
refs/heads/devel
|
3rdParty/V8-4.3.61/third_party/python_26/Lib/distutils/command/install_headers.py
|
53
|
"""distutils.command.install_headers
Implements the Distutils 'install_headers' command, to install C/C++ header
files to the Python include directory."""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id: install_headers.py 61000 2008-02-23 17:40:11Z christian.heimes $"
from distutils.core import Command
class install_headers(Command):
    """Install the distribution's C/C++ header files into the Python
    include directory."""

    description = "install C/C++ header files"

    user_options = [
        ('install-dir=', 'd', "directory to install header files to"),
        ('force', 'f', "force installation (overwrite existing files)"),
    ]

    boolean_options = ['force']

    def initialize_options(self):
        # No defaults here; finalize_options pulls them from 'install'.
        self.install_dir = None
        self.force = 0
        self.outfiles = []

    def finalize_options(self):
        # Inherit install_dir/force from the parent 'install' command.
        self.set_undefined_options(
            'install',
            ('install_headers', 'install_dir'),
            ('force', 'force'),
        )

    def run(self):
        """Copy every declared header into the install directory."""
        headers = self.distribution.headers
        if not headers:
            return
        self.mkpath(self.install_dir)
        for header in headers:
            out, _ = self.copy_file(header, self.install_dir)
            self.outfiles.append(out)

    def get_inputs(self):
        return self.distribution.headers or []

    def get_outputs(self):
        return self.outfiles
|
mdanielwork/intellij-community
|
refs/heads/master
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/__init__.py
|
30
|
from ._pkg1_0 import *
from ._pkg1_1 import *
|
ataylor32/django
|
refs/heads/master
|
tests/template_tests/filter_tests/test_stringformat.py
|
345
|
from django.template.defaultfilters import stringformat
from django.test import SimpleTestCase
from django.utils.safestring import mark_safe
from ..utils import setup
class StringformatTests(SimpleTestCase):
    """
    Notice that escaping is applied *after* any filters, so the string
    formatting here only needs to deal with pre-escaped characters.
    """
    @setup({'stringformat01':
        '{% autoescape off %}.{{ a|stringformat:"5s" }}. .{{ b|stringformat:"5s" }}.{% endautoescape %}'})
    def test_stringformat01(self):
        """With autoescape off, plain and mark_safe values render identically."""
        output = self.engine.render_to_string('stringformat01', {'a': 'a<b', 'b': mark_safe('a<b')})
        self.assertEqual(output, '. a<b. . a<b.')
    @setup({'stringformat02': '.{{ a|stringformat:"5s" }}. .{{ b|stringformat:"5s" }}.'})
    def test_stringformat02(self):
        """With autoescape on, only the non-mark_safe value is escaped after
        formatting, so 'a<b' becomes 'a&lt;b' while the safe string passes
        through untouched.  (The previous expected value '. a<b. . a<b.'
        contradicted autoescaping of the unsafe 'a' variable.)"""
        output = self.engine.render_to_string('stringformat02', {'a': 'a<b', 'b': mark_safe('a<b')})
        self.assertEqual(output, '. a&lt;b. . a<b.')
class FunctionTests(SimpleTestCase):
    # Direct unit tests for the stringformat filter function itself.
    def test_format(self):
        """A valid printf-style spec ('03d') is applied to the value."""
        self.assertEqual(stringformat(1, '03d'), '001')
    def test_invalid(self):
        """An invalid format spec yields the empty string, not an error."""
        self.assertEqual(stringformat(1, 'z'), '')
|
FireWRT/OpenWrt-Firefly-Libraries
|
refs/heads/master
|
staging_dir/host/lib/python3.4/config-3.4/python-config.py
|
2
|
#!/home/wbj/project/mt7621/firewrt_build_all_app/FireWRT-OpenWrt-Firefly-FireWRT-SDK/staging_dir/host/bin/python3.4
# -*- python -*-
# Keep this script in sync with python-config.sh.in
import getopt
import os
import sys
import sysconfig
# Long options recognised on the command line (each exposed as --<name>).
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
              'ldflags', 'extension-suffix', 'help', 'abiflags', 'configdir']
def exit_with_usage(code=1):
    """Print a usage summary to stderr and terminate with *code*."""
    options = '|'.join('--' + name for name in valid_opts)
    print("Usage: {0} [{1}]".format(sys.argv[0], options), file=sys.stderr)
    sys.exit(code)
# Parse the command line; any unrecognised option is a usage error.
try:
    opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
    exit_with_usage()
if not opts:
    exit_with_usage()
# Shortcuts into this interpreter's build-time configuration.
pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var
opt_flags = [flag for (flag, val) in opts]
if '--help' in opt_flags:
    exit_with_usage(code=0)
# Print one line of output per requested flag, in the order given.
for opt in opt_flags:
    if opt == '--prefix':
        print(sysconfig.get_config_var('prefix'))
    elif opt == '--exec-prefix':
        print(sysconfig.get_config_var('exec_prefix'))
    elif opt in ('--includes', '--cflags'):
        # --cflags is --includes plus the configured compiler flags.
        flags = ['-I' + sysconfig.get_path('include'),
                 '-I' + sysconfig.get_path('platinclude')]
        if opt == '--cflags':
            flags.extend(getvar('CFLAGS').split())
        print(' '.join(flags))
    elif opt in ('--libs', '--ldflags'):
        libs = ['-lpython' + pyver + sys.abiflags]
        libs += getvar('LIBS').split()
        libs += getvar('SYSLIBS').split()
        # add the prefix/lib/pythonX.Y/config dir, but only if there is no
        # shared library in prefix/lib/.
        if opt == '--ldflags':
            if not getvar('Py_ENABLE_SHARED'):
                libs.insert(0, '-L' + getvar('LIBPL'))
            if not getvar('PYTHONFRAMEWORK'):
                libs.extend(getvar('LINKFORSHARED').split())
        print(' '.join(libs))
    elif opt == '--extension-suffix':
        print(sysconfig.get_config_var('EXT_SUFFIX'))
    elif opt == '--abiflags':
        print(sys.abiflags)
    elif opt == '--configdir':
        print(sysconfig.get_config_var('LIBPL'))
|
HousekeepLtd/django
|
refs/heads/master
|
tests/many_to_one/tests.py
|
88
|
import datetime
from copy import deepcopy
from django.core.exceptions import FieldError, MultipleObjectsReturned
from django.db import models, transaction
from django.test import TestCase
from django.utils import six
from django.utils.translation import ugettext_lazy
from .models import (
Article, Category, Child, First, Parent, Record, Relation, Reporter,
School, Student, Third, ToFieldChild,
)
class ManyToOneTests(TestCase):
    """Behaviour of ForeignKey (many-to-one) relations: creation, assignment,
    forward/reverse querying, deletion cascade and the related-object cache."""
    def setUp(self):
        """Create two Reporters and one Article written by the first."""
        # Create a few Reporters.
        self.r = Reporter(first_name='John', last_name='Smith', email='john@example.com')
        self.r.save()
        self.r2 = Reporter(first_name='Paul', last_name='Jones', email='paul@example.com')
        self.r2.save()
        # Create an Article.
        self.a = Article(id=None, headline="This is a test",
                         pub_date=datetime.date(2005, 7, 27), reporter=self.r)
        self.a.save()
    def test_get(self):
        """Forward access from an Article to its Reporter."""
        # Article objects have access to their related Reporter objects.
        r = self.a.reporter
        self.assertEqual(r.id, self.r.id)
        # These are strings instead of unicode strings because that's what was used in
        # the creation of this reporter (and we haven't refreshed the data from the
        # database, which always returns unicode strings).
        self.assertEqual((r.first_name, self.r.last_name), ('John', 'Smith'))
    def test_create(self):
        """An Article can be created from a reporter_id (int or string)."""
        # You can also instantiate an Article by passing the Reporter's ID
        # instead of a Reporter object.
        a3 = Article(id=None, headline="Third article",
                     pub_date=datetime.date(2005, 7, 27), reporter_id=self.r.id)
        a3.save()
        self.assertEqual(a3.reporter.id, self.r.id)
        # Similarly, the reporter ID can be a string.
        a4 = Article(id=None, headline="Fourth article",
                     pub_date=datetime.date(2005, 7, 27), reporter_id=str(self.r.id))
        a4.save()
        self.assertEqual(repr(a4.reporter), "<Reporter: John Smith>")
    def test_add(self):
        """article_set.create()/add() attach articles to a reporter."""
        # Create an Article via the Reporter object.
        new_article = self.r.article_set.create(headline="John's second story",
                                                pub_date=datetime.date(2005, 7, 29))
        self.assertEqual(repr(new_article), "<Article: John's second story>")
        self.assertEqual(new_article.reporter.id, self.r.id)
        # Create a new article, and add it to the article set.
        new_article2 = Article(headline="Paul's story", pub_date=datetime.date(2006, 1, 17))
        msg = "<Article: Paul's story> instance isn't saved. Use bulk=False or save the object first."
        with self.assertRaisesMessage(ValueError, msg):
            self.r.article_set.add(new_article2)
        self.r.article_set.add(new_article2, bulk=False)
        self.assertEqual(new_article2.reporter.id, self.r.id)
        self.assertQuerysetEqual(self.r.article_set.all(),
                                 [
                                     "<Article: John's second story>",
                                     "<Article: Paul's story>",
                                     "<Article: This is a test>",
                                 ])
        # Add the same article to a different article set - check that it moves.
        self.r2.article_set.add(new_article2)
        self.assertEqual(new_article2.reporter.id, self.r2.id)
        self.assertQuerysetEqual(self.r2.article_set.all(), ["<Article: Paul's story>"])
        # Adding an object of the wrong type raises TypeError.
        with transaction.atomic():
            with six.assertRaisesRegex(self, TypeError,
                                       "'Article' instance expected, got <Reporter.*"):
                self.r.article_set.add(self.r2)
        self.assertQuerysetEqual(self.r.article_set.all(),
                                 [
                                     "<Article: John's second story>",
                                     "<Article: This is a test>",
                                 ])
    def test_set(self):
        """article_set.set() replaces a reporter's article set."""
        new_article = self.r.article_set.create(headline="John's second story",
                                                pub_date=datetime.date(2005, 7, 29))
        new_article2 = self.r2.article_set.create(headline="Paul's story",
                                                  pub_date=datetime.date(2006, 1, 17))
        # Assign the article to the reporter.
        new_article2.reporter = self.r
        new_article2.save()
        self.assertEqual(repr(new_article2.reporter), "<Reporter: John Smith>")
        self.assertEqual(new_article2.reporter.id, self.r.id)
        self.assertQuerysetEqual(self.r.article_set.all(), [
            "<Article: John's second story>",
            "<Article: Paul's story>",
            "<Article: This is a test>",
        ])
        self.assertQuerysetEqual(self.r2.article_set.all(), [])
        # Set the article back again.
        self.r2.article_set.set([new_article, new_article2])
        self.assertQuerysetEqual(self.r.article_set.all(), ["<Article: This is a test>"])
        self.assertQuerysetEqual(self.r2.article_set.all(),
                                 [
                                     "<Article: John's second story>",
                                     "<Article: Paul's story>",
                                 ])
        # Funny case - because the ForeignKey cannot be null,
        # existing members of the set must remain.
        self.r.article_set.set([new_article])
        self.assertQuerysetEqual(self.r.article_set.all(),
                                 [
                                     "<Article: John's second story>",
                                     "<Article: This is a test>",
                                 ])
        self.assertQuerysetEqual(self.r2.article_set.all(), ["<Article: Paul's story>"])
    def test_assign(self):
        """Direct descriptor assignment of the reverse relation."""
        new_article = self.r.article_set.create(headline="John's second story",
                                                pub_date=datetime.date(2005, 7, 29))
        new_article2 = self.r2.article_set.create(headline="Paul's story",
                                                  pub_date=datetime.date(2006, 1, 17))
        # Assign the article to the reporter directly using the descriptor.
        new_article2.reporter = self.r
        new_article2.save()
        self.assertEqual(repr(new_article2.reporter), "<Reporter: John Smith>")
        self.assertEqual(new_article2.reporter.id, self.r.id)
        self.assertQuerysetEqual(self.r.article_set.all(), [
            "<Article: John's second story>",
            "<Article: Paul's story>",
            "<Article: This is a test>",
        ])
        self.assertQuerysetEqual(self.r2.article_set.all(), [])
        # Set the article back again using set descriptor.
        self.r2.article_set = [new_article, new_article2]
        self.assertQuerysetEqual(self.r.article_set.all(), ["<Article: This is a test>"])
        self.assertQuerysetEqual(self.r2.article_set.all(),
                                 [
                                     "<Article: John's second story>",
                                     "<Article: Paul's story>",
                                 ])
        # Funny case - assignment notation can only go so far; because the
        # ForeignKey cannot be null, existing members of the set must remain.
        self.r.article_set = [new_article]
        self.assertQuerysetEqual(self.r.article_set.all(),
                                 [
                                     "<Article: John's second story>",
                                     "<Article: This is a test>",
                                 ])
        self.assertQuerysetEqual(self.r2.article_set.all(), ["<Article: Paul's story>"])
        # Reporter cannot be null - there should not be a clear or remove method
        self.assertFalse(hasattr(self.r2.article_set, 'remove'))
        self.assertFalse(hasattr(self.r2.article_set, 'clear'))
    def test_selects(self):
        """Forward filtering across the FK, including multi-level lookups."""
        self.r.article_set.create(headline="John's second story",
                                  pub_date=datetime.date(2005, 7, 29))
        self.r2.article_set.create(headline="Paul's story",
                                   pub_date=datetime.date(2006, 1, 17))
        # Reporter objects have access to their related Article objects.
        self.assertQuerysetEqual(self.r.article_set.all(), [
            "<Article: John's second story>",
            "<Article: This is a test>",
        ])
        self.assertQuerysetEqual(self.r.article_set.filter(headline__startswith='This'),
                                 ["<Article: This is a test>"])
        self.assertEqual(self.r.article_set.count(), 2)
        self.assertEqual(self.r2.article_set.count(), 1)
        # Get articles by id
        self.assertQuerysetEqual(Article.objects.filter(id__exact=self.a.id),
                                 ["<Article: This is a test>"])
        self.assertQuerysetEqual(Article.objects.filter(pk=self.a.id),
                                 ["<Article: This is a test>"])
        # Query on an article property
        self.assertQuerysetEqual(Article.objects.filter(headline__startswith='This'),
                                 ["<Article: This is a test>"])
        # The API automatically follows relationships as far as you need.
        # Use double underscores to separate relationships.
        # This works as many levels deep as you want. There's no limit.
        # Find all Articles for any Reporter whose first name is "John".
        self.assertQuerysetEqual(Article.objects.filter(reporter__first_name__exact='John'),
                                 [
                                     "<Article: John's second story>",
                                     "<Article: This is a test>",
                                 ])
        # Check that implied __exact also works
        self.assertQuerysetEqual(Article.objects.filter(reporter__first_name='John'),
                                 [
                                     "<Article: John's second story>",
                                     "<Article: This is a test>",
                                 ])
        # Query twice over the related field.
        self.assertQuerysetEqual(
            Article.objects.filter(reporter__first_name__exact='John',
                                   reporter__last_name__exact='Smith'),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        # The underlying query only makes one join when a related table is referenced twice.
        queryset = Article.objects.filter(reporter__first_name__exact='John',
                                          reporter__last_name__exact='Smith')
        self.assertNumQueries(1, list, queryset)
        self.assertEqual(queryset.query.get_compiler(queryset.db).as_sql()[0].count('INNER JOIN'), 1)
        # The automatically joined table has a predictable name.
        self.assertQuerysetEqual(
            Article.objects.filter(reporter__first_name__exact='John').extra(
                where=["many_to_one_reporter.last_name='Smith'"]),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        # ... and should work fine with the unicode that comes out of forms.Form.cleaned_data
        self.assertQuerysetEqual(
            (Article.objects
                .filter(reporter__first_name__exact='John')
                .extra(where=["many_to_one_reporter.last_name='%s'" % 'Smith'])),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        # Find all Articles for a Reporter.
        # Use direct ID check, pk check, and object comparison
        self.assertQuerysetEqual(
            Article.objects.filter(reporter__id__exact=self.r.id),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        self.assertQuerysetEqual(
            Article.objects.filter(reporter__pk=self.r.id),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        self.assertQuerysetEqual(
            Article.objects.filter(reporter=self.r.id),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        self.assertQuerysetEqual(
            Article.objects.filter(reporter=self.r),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
        self.assertQuerysetEqual(
            Article.objects.filter(reporter__in=[self.r.id, self.r2.id]).distinct(),
            [
                "<Article: John's second story>",
                "<Article: Paul's story>",
                "<Article: This is a test>",
            ])
        self.assertQuerysetEqual(
            Article.objects.filter(reporter__in=[self.r, self.r2]).distinct(),
            [
                "<Article: John's second story>",
                "<Article: Paul's story>",
                "<Article: This is a test>",
            ])
        # You can also use a queryset instead of a literal list of instances.
        # The queryset must be reduced to a list of values using values(),
        # then converted into a query
        self.assertQuerysetEqual(
            Article.objects.filter(
                reporter__in=Reporter.objects.filter(first_name='John').values('pk').query
            ).distinct(),
            [
                "<Article: John's second story>",
                "<Article: This is a test>",
            ])
    def test_reverse_selects(self):
        """Reverse filtering from Reporter across the FK, with distinct()."""
        a3 = Article.objects.create(id=None, headline="Third article",
                                    pub_date=datetime.date(2005, 7, 27), reporter_id=self.r.id)
        Article.objects.create(id=None, headline="Fourth article",
                               pub_date=datetime.date(2005, 7, 27), reporter_id=str(self.r.id))
        # Reporters can be queried
        self.assertQuerysetEqual(Reporter.objects.filter(id__exact=self.r.id),
                                 ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(Reporter.objects.filter(pk=self.r.id),
                                 ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(Reporter.objects.filter(first_name__startswith='John'),
                                 ["<Reporter: John Smith>"])
        # Reporters can query in opposite direction of ForeignKey definition
        self.assertQuerysetEqual(Reporter.objects.filter(article__id__exact=self.a.id),
                                 ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(Reporter.objects.filter(article__pk=self.a.id),
                                 ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(Reporter.objects.filter(article=self.a.id),
                                 ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(Reporter.objects.filter(article=self.a),
                                 ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__in=[self.a.id, a3.id]).distinct(),
            ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__in=[self.a.id, a3]).distinct(),
            ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__in=[self.a, a3]).distinct(),
            ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__headline__startswith='T'),
            ["<Reporter: John Smith>", "<Reporter: John Smith>"],
            ordered=False
        )
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__headline__startswith='T').distinct(),
            ["<Reporter: John Smith>"])
        # Counting in the opposite direction works in conjunction with distinct()
        self.assertEqual(
            Reporter.objects.filter(article__headline__startswith='T').count(), 2)
        self.assertEqual(
            Reporter.objects.filter(article__headline__startswith='T').distinct().count(), 1)
        # Queries can go round in circles.
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__reporter__first_name__startswith='John'),
            [
                "<Reporter: John Smith>",
                "<Reporter: John Smith>",
                "<Reporter: John Smith>",
            ],
            ordered=False
        )
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__reporter__first_name__startswith='John').distinct(),
            ["<Reporter: John Smith>"])
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__reporter__exact=self.r).distinct(),
            ["<Reporter: John Smith>"])
        # Check that implied __exact also works.
        self.assertQuerysetEqual(
            Reporter.objects.filter(article__reporter=self.r).distinct(),
            ["<Reporter: John Smith>"])
        # It's possible to use values() calls across many-to-one relations.
        # (Note, too, that we clear the ordering here so as not to drag the
        # 'headline' field into the columns being used to determine uniqueness)
        d = {'reporter__first_name': 'John', 'reporter__last_name': 'Smith'}
        self.assertEqual([d],
                         list(Article.objects.filter(reporter=self.r).distinct().order_by()
                              .values('reporter__first_name', 'reporter__last_name')))
    def test_select_related(self):
        """select_related().dates() with same-date articles by different FKs."""
        # Check that Article.objects.select_related().dates() works properly when
        # there are multiple Articles with the same date but different foreign-key
        # objects (Reporters).
        r1 = Reporter.objects.create(first_name='Mike', last_name='Royko', email='royko@suntimes.com')
        r2 = Reporter.objects.create(first_name='John', last_name='Kass', email='jkass@tribune.com')
        Article.objects.create(headline='First', pub_date=datetime.date(1980, 4, 23), reporter=r1)
        Article.objects.create(headline='Second', pub_date=datetime.date(1980, 4, 23), reporter=r2)
        self.assertEqual(list(Article.objects.select_related().dates('pub_date', 'day')),
                         [
                             datetime.date(1980, 4, 23),
                             datetime.date(2005, 7, 27),
                         ])
        self.assertEqual(list(Article.objects.select_related().dates('pub_date', 'month')),
                         [
                             datetime.date(1980, 4, 1),
                             datetime.date(2005, 7, 1),
                         ])
        self.assertEqual(list(Article.objects.select_related().dates('pub_date', 'year')),
                         [
                             datetime.date(1980, 1, 1),
                             datetime.date(2005, 1, 1),
                         ])
    def test_delete(self):
        """Deleting a Reporter cascades to its Articles; JOIN deletes work."""
        self.r.article_set.create(headline="John's second story",
                                  pub_date=datetime.date(2005, 7, 29))
        self.r2.article_set.create(headline="Paul's story",
                                   pub_date=datetime.date(2006, 1, 17))
        Article.objects.create(id=None, headline="Third article",
                               pub_date=datetime.date(2005, 7, 27), reporter_id=self.r.id)
        Article.objects.create(id=None, headline="Fourth article",
                               pub_date=datetime.date(2005, 7, 27), reporter_id=str(self.r.id))
        # If you delete a reporter, his articles will be deleted.
        self.assertQuerysetEqual(Article.objects.all(),
                                 [
                                     "<Article: Fourth article>",
                                     "<Article: John's second story>",
                                     "<Article: Paul's story>",
                                     "<Article: Third article>",
                                     "<Article: This is a test>",
                                 ])
        self.assertQuerysetEqual(Reporter.objects.order_by('first_name'),
                                 [
                                     "<Reporter: John Smith>",
                                     "<Reporter: Paul Jones>",
                                 ])
        self.r2.delete()
        self.assertQuerysetEqual(Article.objects.all(),
                                 [
                                     "<Article: Fourth article>",
                                     "<Article: John's second story>",
                                     "<Article: Third article>",
                                     "<Article: This is a test>",
                                 ])
        self.assertQuerysetEqual(Reporter.objects.order_by('first_name'),
                                 ["<Reporter: John Smith>"])
        # You can delete using a JOIN in the query.
        Reporter.objects.filter(article__headline__startswith='This').delete()
        self.assertQuerysetEqual(Reporter.objects.all(), [])
        self.assertQuerysetEqual(Article.objects.all(), [])
    def test_explicit_fk(self):
        """get_or_create/filter/get with an explicit reporter_id value."""
        # Create a new Article with get_or_create using an explicit value
        # for a ForeignKey.
        a2, created = Article.objects.get_or_create(id=None,
                                                    headline="John's second test",
                                                    pub_date=datetime.date(2011, 5, 7),
                                                    reporter_id=self.r.id)
        self.assertTrue(created)
        self.assertEqual(a2.reporter.id, self.r.id)
        # You can specify filters containing the explicit FK value.
        self.assertQuerysetEqual(
            Article.objects.filter(reporter_id__exact=self.r.id),
            [
                "<Article: John's second test>",
                "<Article: This is a test>",
            ])
        # Create an Article by Paul for the same date.
        a3 = Article.objects.create(id=None, headline="Paul's commentary",
                                    pub_date=datetime.date(2011, 5, 7),
                                    reporter_id=self.r2.id)
        self.assertEqual(a3.reporter.id, self.r2.id)
        # Get should respect explicit foreign keys as well.
        self.assertRaises(MultipleObjectsReturned,
                          Article.objects.get, reporter_id=self.r.id)
        self.assertEqual(repr(a3),
                         repr(Article.objects.get(reporter_id=self.r2.id,
                                                  pub_date=datetime.date(2011, 5, 7))))
    def test_deepcopy_and_circular_references(self):
        """deepcopy() of a model with a self-referencing cached query."""
        # Regression for #12876 -- Model methods that include queries that
        # recursive don't cause recursion depth problems under deepcopy.
        self.r.cached_query = Article.objects.filter(reporter=self.r)
        self.assertEqual(repr(deepcopy(self.r)), "<Reporter: John Smith>")
    def test_manager_class_caching(self):
        """The related-manager class is created once and reused."""
        r1 = Reporter.objects.create(first_name='Mike')
        r2 = Reporter.objects.create(first_name='John')
        # Same twice
        self.assertIs(r1.article_set.__class__, r1.article_set.__class__)
        # Same as each other
        self.assertIs(r1.article_set.__class__, r2.article_set.__class__)
    def test_create_relation_with_ugettext_lazy(self):
        """A lazy translation string can be saved through a related create()."""
        reporter = Reporter.objects.create(first_name='John',
                                           last_name='Smith',
                                           email='john.smith@example.com')
        lazy = ugettext_lazy('test')
        reporter.article_set.create(headline=lazy,
                                    pub_date=datetime.date(2011, 6, 10))
        notlazy = six.text_type(lazy)
        article = reporter.article_set.get()
        self.assertEqual(article.headline, notlazy)
    def test_values_list_exception(self):
        """values_list() with an unknown related field names the valid choices."""
        expected_message = "Cannot resolve keyword 'notafield' into field. Choices are: %s"
        self.assertRaisesMessage(FieldError,
                                 expected_message % ', '.join(sorted(f.name for f in Reporter._meta.get_fields())),
                                 Article.objects.values_list,
                                 'reporter__notafield')
        self.assertRaisesMessage(
            FieldError,
            expected_message % ', '.join(['EXTRA'] + sorted(f.name for f in Article._meta.get_fields())),
            Article.objects.extra(select={'EXTRA': 'EXTRA_SELECT'}).values_list,
            'notafield'
        )
    def test_fk_assignment_and_related_object_cache(self):
        """Caching behaviour of FK assignment and None/wrong-type handling."""
        # Tests of ForeignKey assignment and the related-object cache (see #6886).
        p = Parent.objects.create(name="Parent")
        c = Child.objects.create(name="Child", parent=p)
        # Look up the object again so that we get a "fresh" object.
        c = Child.objects.get(name="Child")
        p = c.parent
        # Accessing the related object again returns the exactly same object.
        self.assertIs(c.parent, p)
        # But if we kill the cache, we get a new object.
        del c._parent_cache
        self.assertIsNot(c.parent, p)
        # Assigning a new object results in that object getting cached immediately.
        p2 = Parent.objects.create(name="Parent 2")
        c.parent = p2
        self.assertIs(c.parent, p2)
        # Assigning None succeeds if field is null=True.
        p.bestchild = None
        self.assertIsNone(p.bestchild)
        # bestchild should still be None after saving.
        p.save()
        self.assertIsNone(p.bestchild)
        # bestchild should still be None after fetching the object again.
        p = Parent.objects.get(name="Parent")
        self.assertIsNone(p.bestchild)
        # Assigning None fails: Child.parent is null=False.
        self.assertRaises(ValueError, setattr, c, "parent", None)
        # You also can't assign an object of the wrong type here
        self.assertRaises(ValueError, setattr, c, "parent", First(id=1, second=1))
        # Nor can you explicitly assign None to Child.parent during object
        # creation (regression for #9649).
        self.assertRaises(ValueError, Child, name='xyzzy', parent=None)
        self.assertRaises(ValueError, Child.objects.create, name='xyzzy', parent=None)
        # Creation using keyword argument should cache the related object.
        p = Parent.objects.get(name="Parent")
        c = Child(parent=p)
        self.assertIs(c.parent, p)
        # Creation using keyword argument and unsaved related instance (#8070).
        p = Parent()
        msg = "save() prohibited to prevent data loss due to unsaved related object 'parent'."
        with self.assertRaisesMessage(ValueError, msg):
            Child.objects.create(parent=p)
        msg = "save() prohibited to prevent data loss due to unsaved related object 'parent'."
        with self.assertRaisesMessage(ValueError, msg):
            ToFieldChild.objects.create(parent=p)
        # Creation using attname keyword argument and an id will cause the
        # related object to be fetched.
        p = Parent.objects.get(name="Parent")
        c = Child(parent_id=p.id)
        self.assertIsNot(c.parent, p)
        self.assertEqual(c.parent, p)
    def test_multiple_foreignkeys(self):
        """Multiple FKs to the same model, and FK-to-self restrictions."""
        # Test of multiple ForeignKeys to the same model (bug #7125).
        c1 = Category.objects.create(name='First')
        c2 = Category.objects.create(name='Second')
        c3 = Category.objects.create(name='Third')
        r1 = Record.objects.create(category=c1)
        r2 = Record.objects.create(category=c1)
        r3 = Record.objects.create(category=c2)
        r4 = Record.objects.create(category=c2)
        r5 = Record.objects.create(category=c3)
        Relation.objects.create(left=r1, right=r2)
        Relation.objects.create(left=r3, right=r4)
        Relation.objects.create(left=r1, right=r3)
        Relation.objects.create(left=r5, right=r2)
        Relation.objects.create(left=r3, right=r2)
        q1 = Relation.objects.filter(left__category__name__in=['First'], right__category__name__in=['Second'])
        self.assertQuerysetEqual(q1, ["<Relation: First - Second>"])
        q2 = Category.objects.filter(record__left_set__right__category__name='Second').order_by('name')
        self.assertQuerysetEqual(q2, ["<Category: First>", "<Category: Second>"])
        p = Parent.objects.create(name="Parent")
        c = Child.objects.create(name="Child", parent=p)
        self.assertRaises(ValueError, Child.objects.create, name="Grandchild", parent=c)
    def test_fk_instantiation_outside_model(self):
        """A ForeignKey can be instantiated standalone and interrogated."""
        # Regression for #12190 -- Should be able to instantiate a FK outside
        # of a model, and interrogate its related field.
        cat = models.ForeignKey(Category, models.CASCADE)
        self.assertEqual('id', cat.remote_field.get_related_field().name)
    def test_relation_unsaved(self):
        """Reverse managers on unsaved instances don't hit the database."""
        # Test that the <field>_set manager does not join on Null value fields (#17541)
        Third.objects.create(name='Third 1')
        Third.objects.create(name='Third 2')
        th = Third(name="testing")
        # The object isn't saved an thus the relation field is null - we won't even
        # execute a query in this case.
        with self.assertNumQueries(0):
            self.assertEqual(th.child_set.count(), 0)
        th.save()
        # Now the model is saved, so we will need to execute an query.
        with self.assertNumQueries(1):
            self.assertEqual(th.child_set.count(), 0)
    def test_related_object(self):
        """FK traversal uses the base manager unless use_for_related_fields."""
        public_school = School.objects.create(is_public=True)
        public_student = Student.objects.create(school=public_school)
        private_school = School.objects.create(is_public=False)
        private_student = Student.objects.create(school=private_school)
        # Only one school is available via all() due to the custom default manager.
        self.assertQuerysetEqual(
            School.objects.all(),
            ["<School: School object>"]
        )
        self.assertEqual(public_student.school, public_school)
        # Make sure the base manager is used so that an student can still access
        # its related school even if the default manager doesn't normally
        # allow it.
        self.assertEqual(private_student.school, private_school)
        # If the manager is marked "use_for_related_fields", it'll get used instead
        # of the "bare" queryset. Usually you'd define this as a property on the class,
        # but this approximates that in a way that's easier in tests.
        School.objects.use_for_related_fields = True
        try:
            private_student = Student.objects.get(pk=private_student.pk)
            self.assertRaises(School.DoesNotExist, lambda: private_student.school)
        finally:
            School.objects.use_for_related_fields = False
    def test_hasattr_related_object(self):
        """Missing related objects raise an AttributeError subclass."""
        # The exception raised on attribute access when a related object
        # doesn't exist should be an instance of a subclass of `AttributeError`
        # refs #21563
        self.assertFalse(hasattr(Article(), 'reporter'))
|
mishravikas/geonode-permissions
|
refs/heads/master
|
geonode/api/api.py
|
1
|
from django.conf.urls import url
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
from avatar.templatetags.avatar_tags import avatar_url
from guardian.shortcuts import get_objects_for_user
from geonode.base.models import TopicCategory
from geonode.layers.models import Layer
from geonode.maps.models import Map
from geonode.documents.models import Document
from geonode.groups.models import GroupProfile
from taggit.models import Tag
from tastypie import fields
from tastypie.resources import ModelResource
from tastypie.constants import ALL
from tastypie.utils import trailing_slash
# Maps the 'type' query-string value to the model class used for faceting.
FILTER_TYPES = {
    'layer': Layer,
    'map': Map,
    'document': Document
}
class TypeFilteredResource(ModelResource):
    """ Common resource used to apply faceting to categories and keywords
    based on the type passed as query parameter in the form
    type:layer/map/document"""
    # Facet count; subclasses fill it in via dehydrate_count().
    count = fields.IntegerField()
    # Model class selected by the 'type' filter; None means no type filtering.
    type_filter = None
    def dehydrate_count(self, bundle):
        """Abstract hook: subclasses must compute the facet count."""
        raise Exception('dehydrate_count not implemented in the child class')
    def build_filters(self, filters=None):
        """Record the requested type filter, then delegate to tastypie.

        Fix: use ``None`` instead of a mutable ``{}`` default argument so a
        single dict is not shared (and potentially mutated) across calls.
        """
        if filters is None:
            filters = {}
        orm_filters = super(TypeFilteredResource, self).build_filters(filters)
        # Membership test directly on the dict (equivalent to .keys()).
        if 'type' in filters and filters['type'] in FILTER_TYPES:
            self.type_filter = FILTER_TYPES[filters['type']]
        else:
            self.type_filter = None
        return orm_filters
class TagResource(TypeFilteredResource):
    """Tags api"""

    def dehydrate_count(self, bundle):
        """Count taggings of this keyword, honouring the current type filter
        and, unless SKIP_PERMS_FILTER is set, the requester's view permission
        on the tagged resources."""
        tagged = bundle.obj.taggit_taggeditem_items
        if self.type_filter:
            ctype = ContentType.objects.get_for_model(self.type_filter)
            tagged = tagged.filter(content_type=ctype)
        if settings.SKIP_PERMS_FILTER:
            return tagged.count()
        visible = get_objects_for_user(
            bundle.request.user,
            'base.view_resourcebase').values_list(
            'id',
            flat=True)
        return tagged.filter(object_id__in=visible).count()

    class Meta:
        queryset = Tag.objects.all()
        resource_name = 'keywords'
        allowed_methods = ['get']
        filtering = {
            'slug': ALL,
        }
class TopicCategoryResource(TypeFilteredResource):
    """Category api"""

    def dehydrate_count(self, bundle):
        """Count resources in this category, honouring the type filter and,
        unless SKIP_PERMS_FILTER is set, the requester's view permission."""
        if self.type_filter:
            resources = bundle.obj.resourcebase_set.instance_of(self.type_filter)
        else:
            resources = bundle.obj.resourcebase_set.all()
        if settings.SKIP_PERMS_FILTER:
            return resources.count()
        permitted = get_objects_for_user(
            bundle.request.user,
            'base.view_resourcebase').values_list(
            'id',
            flat=True)
        return resources.filter(id__in=permitted).count()

    class Meta:
        queryset = TopicCategory.objects.all()
        resource_name = 'categories'
        allowed_methods = ['get']
        filtering = {
            'identifier': ALL,
        }
class GroupResource(ModelResource):
    """Groups api"""

    detail_url = fields.CharField()
    member_count = fields.IntegerField()
    manager_count = fields.IntegerField()

    def dehydrate_member_count(self, bundle):
        """Number of members in this group."""
        group = bundle.obj
        return group.member_queryset().count()

    def dehydrate_manager_count(self, bundle):
        """Number of managers of this group."""
        group = bundle.obj
        return group.get_managers().count()

    def dehydrate_detail_url(self, bundle):
        """URL of the group's detail page."""
        return reverse('group_detail', args=[bundle.obj.slug])

    class Meta:
        queryset = GroupProfile.objects.all()
        resource_name = 'groups'
        allowed_methods = ['get']
        filtering = {
            'name': ALL
        }
        ordering = ['title', 'last_modified']
class ProfileResource(ModelResource):
    """Profile api"""
    avatar_100 = fields.CharField(null=True)
    profile_detail_url = fields.CharField()
    email = fields.CharField(default='')
    layers_count = fields.IntegerField(default=0)
    maps_count = fields.IntegerField(default=0)
    documents_count = fields.IntegerField(default=0)
    current_user = fields.BooleanField(default=False)
    activity_stream_url = fields.CharField(null=True)

    def build_filters(self, filters=None):
        """adds filtering by group functionality"""
        # The original default was a mutable dict (``filters={}``), which is
        # evaluated once and shared between calls; ``None`` + normalization is
        # the safe, backward-compatible equivalent.
        if filters is None:
            filters = {}
        orm_filters = super(ProfileResource, self).build_filters(filters)
        if 'group' in filters:
            orm_filters['group'] = filters['group']
        return orm_filters

    def apply_filters(self, request, applicable_filters):
        """filter by group if applicable by group functionality"""
        # 'group' is a virtual filter handled here, not by the ORM layer.
        group = applicable_filters.pop('group', None)
        semi_filtered = super(
            ProfileResource,
            self).apply_filters(
            request,
            applicable_filters)
        if group is not None:
            semi_filtered = semi_filtered.filter(
                groupmember__group__slug=group)
        return semi_filtered

    def dehydrate_email(self, bundle):
        """Expose the e-mail address only to authenticated requesters."""
        email = ''
        if bundle.request.user.is_authenticated():
            email = bundle.obj.email
        return email

    def dehydrate_layers_count(self, bundle):
        """Number of layers attached to this profile."""
        return bundle.obj.resourcebase_set.instance_of(Layer).count()

    def dehydrate_maps_count(self, bundle):
        """Number of maps attached to this profile."""
        return bundle.obj.resourcebase_set.instance_of(Map).count()

    def dehydrate_documents_count(self, bundle):
        """Number of documents attached to this profile."""
        return bundle.obj.resourcebase_set.instance_of(Document).count()

    def dehydrate_avatar_100(self, bundle):
        """URL of the profile's 100px avatar."""
        return avatar_url(bundle.obj, 100)

    def dehydrate_profile_detail_url(self, bundle):
        """Canonical URL of the profile page."""
        return bundle.obj.get_absolute_url()

    def dehydrate_current_user(self, bundle):
        """True when the serialized profile is the requesting user."""
        return bundle.request.user.username == bundle.obj.username

    def dehydrate_activity_stream_url(self, bundle):
        """URL of the actstream activity feed for this profile."""
        return reverse(
            'actstream_actor',
            kwargs={
                'content_type_id': ContentType.objects.get_for_model(
                    bundle.obj).pk,
                'object_id': bundle.obj.pk})

    def prepend_urls(self):
        """Expose /profiles/search when Haystack search is enabled."""
        if settings.HAYSTACK_SEARCH:
            return [
                url(r"^(?P<resource_name>%s)/search%s$" % (
                    self._meta.resource_name, trailing_slash()
                ),
                    self.wrap_view('get_search'), name="api_get_search"),
            ]
        else:
            return []

    class Meta:
        queryset = get_user_model().objects.exclude(username='AnonymousUser')
        resource_name = 'profiles'
        allowed_methods = ['get']
        ordering = ['name']
        excludes = ['is_staff', 'password', 'is_superuser',
                    'is_active', 'date_joined', 'last_login']
        filtering = {
            'username': ALL,
        }
|
Sapphirine/Sapphirine-find_people_like_you
|
refs/heads/master
|
restapi/use_followings/mutal_friend_score.py
|
1
|
# NOTE(review): Python 2 script (uses the `print` statement near the end);
# it also hard-codes a developer-local input directory -- parameterize before
# reuse.
from pyspark.mllib.regression import LabeledPoint
from pyspark.mllib.tree import DecisionTree
from pyspark.mllib.linalg import SparseVector
from pyspark import SparkContext
from operator import add
import time
import numpy
from pyspark.mllib.linalg import Vectors
import pyspark.mllib.clustering as cl
import os
sc = SparkContext("local", "Myapp")
#####Read input files
#Get the all the file names
filenames = next(os.walk("/Users/panpan/Desktop/linkedin/followings/group8"))[2]
#save the all files to variable <files>
# Each file is assumed to hold one comma-separated line of friend ids --
# only the first line is read.
files=list()
for filename in filenames:
    f=open("/Users/panpan/Desktop/linkedin/followings/group8/%s" %filename,"r")
    files.append(f.readline())
#initialize mutual_list
# mutual_list[i][j] = similarity score between user i and user j.
mutual_list=numpy.zeros((len(filenames),len(filenames)))
#pick two users each time, and calculate their common freinds
for i in range(0,len(files)):
    if i+1>=len(files):
        continue
    for j in range(i,len(files)):
        # NOTE(review): j starts at i, so the diagonal mutual_list[i][i]
        # stores each user's self-similarity (~1.0); confirm whether
        # range(i+1, ...) was intended.
        file_1 =files[i].split(",")
        file_2 =files[j].split(",")
        file1 =sc.parallelize(file_1)
        file2 =sc.parallelize(file_2)
        #common friends of the two users
        file_12=file1.intersection(file2)
        mutual=len(file_12.collect())
        #define a way to cauculate how much percent they are similar to each other
        # score = 1/2 * |common| * (1/|friends_1| + 1/|friends_2|), symmetric.
        mutual_proportion=1.0/2*mutual*(1.0/len(file_1)+1.0/len(file_2))
        mutual_list[i][j]=mutual_list[j][i]=mutual_proportion
###Cluster the models
# K-means over the per-user similarity rows (4 clusters).
model = cl.KMeans.train(sc.parallelize(mutual_list), 4, maxIterations=10, runs=30, initializationMode="random",
                        seed=50, initializationSteps=5, epsilon=1e-4)
for i in range(0,len(mutual_list)):
    print model.predict(mutual_list[i])
#further optimization on parameter needed
|
Azure/azure-sdk-for-python
|
refs/heads/sync-eng/common-js-nightly-docs-2-1768-ForTestPipeline
|
sdk/communication/azure-communication-chat/samples/chat_client_sample.py
|
1
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: chat_client_sample.py
DESCRIPTION:
These samples demonstrate create a chat client, get a chat thread client,
create a chat thread, get a chat thread by id, list chat threads, delete
a chat thread by id.
You need to use azure.communication.configuration module to get user access
token and user identity before run this sample
USAGE:
python chat_client_sample.py
Set the environment variables with your own values before running the sample:
1) AZURE_COMMUNICATION_SERVICE_ENDPOINT - Communication Service endpoint url
2) TOKEN - the user access token, from token_response.token
3) USER_ID - the user id, from token_response.identity
"""
import os
class ChatClientSamples(object):
    """Runnable samples for azure.communication.chat's ChatClient.

    NOTE: setup happens at class-definition time (a live user and token are
    created as soon as the module is imported). The ``[START ...]``/``[END ...]``
    markers are extracted into published documentation -- keep them intact.
    """
    from azure.communication.identity import CommunicationIdentityClient
    connection_string = os.environ.get("COMMUNICATION_SAMPLES_CONNECTION_STRING", None)
    if not connection_string:
        raise ValueError("Set COMMUNICATION_SAMPLES_CONNECTION_STRING env before run this sample.")
    # Create a throwaway user + chat-scoped token for the samples below;
    # clean_up() deletes the user again.
    identity_client = CommunicationIdentityClient.from_connection_string(connection_string)
    user = identity_client.create_user()
    tokenresponse = identity_client.get_token(user, scopes=["chat"])
    token = tokenresponse.token
    endpoint = os.environ.get("AZURE_COMMUNICATION_SERVICE_ENDPOINT", None)
    if not endpoint:
        raise ValueError("Set AZURE_COMMUNICATION_SERVICE_ENDPOINT env before run this sample.")
    # Thread id produced by create_thread() and consumed by the later samples.
    _thread_id = None

    def create_chat_client(self):
        """Demonstrate constructing a ChatClient from endpoint + token."""
        token = self.token
        endpoint = self.endpoint
        # [START create_chat_client]
        from azure.communication.chat import ChatClient, CommunicationTokenCredential
        # set `endpoint` to an existing ACS endpoint
        chat_client = ChatClient(endpoint, CommunicationTokenCredential(token))
        # [END create_chat_client]

    def create_thread(self):
        """Demonstrate creating a chat thread (plain and idempotent)."""
        token = self.token
        endpoint = self.endpoint
        user = self.user
        # [START create_thread]
        from datetime import datetime
        from azure.communication.chat import(
            ChatClient,
            ChatParticipant,
            CommunicationUserIdentifier,
            CommunicationTokenCredential
        )
        # set `endpoint` to an existing ACS endpoint
        chat_client = ChatClient(endpoint, CommunicationTokenCredential(token))
        topic = "test topic"
        participants = [ChatParticipant(
            identifier=user,
            display_name='name',
            share_history_time=datetime.utcnow()
        )]
        # creates a new chat_thread everytime
        create_chat_thread_result = chat_client.create_chat_thread(topic, thread_participants=participants)
        # creates a new chat_thread if not exists
        idempotency_token = 'b66d6031-fdcc-41df-8306-e524c9f226b8' # unique identifier
        create_chat_thread_result_w_repeatability_id = chat_client.create_chat_thread(
            topic,
            thread_participants=participants,
            idempotency_token=idempotency_token
        )
        # [END create_thread]
        self._thread_id = create_chat_thread_result.chat_thread.id
        print("thread created, id: " + self._thread_id)

    def get_chat_thread_client(self):
        """Demonstrate obtaining a ChatThreadClient for an existing thread."""
        token = self.token
        endpoint = self.endpoint
        thread_id = self._thread_id
        # [START get_chat_thread_client]
        from azure.communication.chat import ChatClient, CommunicationTokenCredential
        # set `endpoint` to an existing ACS endpoint
        chat_client = ChatClient(endpoint, CommunicationTokenCredential(token))
        # set `thread_id` to an existing chat thread id
        chat_thread_client = chat_client.get_chat_thread_client(thread_id)
        # [END get_chat_thread_client]
        print("get_chat_thread_client succeeded with thread id: ", chat_thread_client.thread_id)

    def list_threads(self):
        """Demonstrate listing recent chat threads page by page."""
        token = self.token
        endpoint = self.endpoint
        # [START list_threads]
        from azure.communication.chat import ChatClient, CommunicationTokenCredential
        from datetime import datetime, timedelta
        # set `endpoint` to an existing ACS endpoint
        chat_client = ChatClient(endpoint, CommunicationTokenCredential(token))
        start_time = datetime.utcnow() - timedelta(days=2)
        chat_threads = chat_client.list_chat_threads(results_per_page=5, start_time=start_time)
        print("list_threads succeeded with results_per_page is 5, and were created since 2 days ago.")
        for chat_thread_item_page in chat_threads.by_page():
            for chat_thread_item in chat_thread_item_page:
                print("thread id:", chat_thread_item.id)
        # [END list_threads]

    def delete_thread(self):
        """Demonstrate deleting a chat thread by id."""
        token = self.token
        endpoint = self.endpoint
        thread_id = self._thread_id
        # [START delete_thread]
        from azure.communication.chat import ChatClient, CommunicationTokenCredential
        # set `endpoint` to an existing ACS endpoint
        chat_client = ChatClient(endpoint, CommunicationTokenCredential(token))
        # set `thread_id` to an existing chat thread id
        chat_client.delete_chat_thread(thread_id)
        # [END delete_thread]
        print("delete_thread succeeded")

    def clean_up(self):
        """Delete the user that was created at class-definition time."""
        print("cleaning up: deleting created user.")
        self.identity_client.delete_user(self.user)
if __name__ == '__main__':
    # Run the sample flow in order; delete_thread/clean_up release the
    # live resources created by the earlier steps.
    sample = ChatClientSamples()
    sample.create_chat_client()
    sample.create_thread()
    sample.get_chat_thread_client()
    sample.list_threads()
    sample.delete_thread()
    sample.clean_up()
|
pelikanchik/edx-platform
|
refs/heads/master
|
lms/djangoapps/dashboard/management/commands/git_add_course.py
|
9
|
"""
Script for importing courseware from git/xml into a mongo modulestore
"""
import os
import re
import StringIO
import subprocess
import logging
from django.core import management
from django.core.management.base import BaseCommand, CommandError
from django.utils.translation import ugettext as _
import dashboard.git_import
from dashboard.git_import import GitImportError
from dashboard.models import CourseImportLog
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.xml import XMLModuleStore
log = logging.getLogger(__name__)
class Command(BaseCommand):
    """
    Pull a git repo and import into the mongo based content database.
    """
    help = _('Import the specified git repository into the '
             'modulestore and directory')

    def handle(self, *args, **options):
        """Check inputs and run the command"""
        # BUG FIX: `modulestore` is a factory function; the original tested
        # `isinstance(modulestore, XMLModuleStore)` -- the function object
        # itself -- which is always False, so the guard never fired. Call the
        # factory to inspect the active store instance.
        if isinstance(modulestore(), XMLModuleStore):
            raise CommandError('This script requires a mongo module store')
        if len(args) < 1:
            raise CommandError('This script requires at least one argument, '
                               'the git URL')
        if len(args) > 2:
            raise CommandError('This script requires no more than two '
                               'arguments')
        # Optional second argument: target repo directory name.
        rdir_arg = None
        if len(args) > 1:
            rdir_arg = args[1]
        try:
            dashboard.git_import.add_repo(args[0], rdir_arg)
        except GitImportError as ex:
            # Surface import failures as a management-command error.
            raise CommandError(str(ex))
|
josherick/bokeh
|
refs/heads/master
|
sphinx/source/docs/tutorials/solutions/image.py
|
23
|
from __future__ import division
import numpy as np
from bokeh.plotting import figure, output_file, show, VBox
# NOTE: if you do have numba installed, uncomment out this import,
# and the 'autojit' lines below (the example will run more quickly).
#from numba import autojit
# These functions generate the Mandelbrot set image. Don't worry if
# you are not familiar with them. The import thing is just to know
# that they create a 2D array of numbers that we can colormap.
#@autojit
def mandel(x, y, max_iters):
    """
    Given the real and imaginary parts of a complex number,
    determine if it is a candidate for membership in the Mandelbrot
    set given a fixed number of iterations.

    Returns the iteration index at which |z|^2 first reaches 4, or
    *max_iters* if the orbit stays bounded for every iteration.
    """
    c = complex(x, y)
    z = complex(0.0, 0.0)
    iteration = 0
    while iteration < max_iters:
        z = z * z + c
        # Squared-magnitude escape test (identical to |z| >= 2, without sqrt).
        if (z.real * z.real + z.imag * z.imag) >= 4:
            return iteration
        iteration += 1
    return max_iters
#@autojit
def create_fractal(min_x, max_x, min_y, max_y, image, iters):
    """Fill the 2D array *image* in place with Mandelbrot escape counts for
    the rectangle [min_x, max_x] x [min_y, max_y], using *iters* iterations
    per pixel."""
    height, width = image.shape[0], image.shape[1]
    step_x = (max_x - min_x) / width
    step_y = (max_y - min_y) / height
    for col in range(width):
        re = min_x + col * step_x
        for row in range(height):
            im = min_y + row * step_y
            # Rows index the imaginary axis, columns the real axis.
            image[row, col] = mandel(re, im, iters)
# Define the bounding coordinates to generate the Mandelbrot image in. You
# can play around with these.
min_x = -2.0
max_x = 1.0
min_y = -1.0
max_y = 1.0
# Use the functions above to create a scalar image (2D array of numbers)
# uint8 is enough: values are escape counts in [0, 20].
img = np.zeros((1024, 1536), dtype = np.uint8)
create_fractal(min_x, max_x, min_y, max_y, img, 20)
# EXERCISE: output static HTML file
output_file("image.html")
# create a figure, setting the x and y ranges to the appropriate data bounds
p1 = figure(title="Mandelbrot", plot_width=900, plot_height=600,
            x_range = [min_x, max_x], y_range = [min_y, max_y])
# EXERCISE: Fill in the missing parameters to use the `image` renderer to
# display the Mandelbrot image color mapped with the palette 'Spectral11'
#
# NOTE: the `image` renderer can display many images at once, so it takes
# **lists** of images, coordinates, and palettes. Remember to supply sequences
# for these parameters, even if you are just supplying one.
p1.image(image=[img],         # image data
         x=[min_x],           # lower left x coord
         y=[min_y],           # lower left y coord
         dw=[max_x-min_x],    # *data space* width of image
         dh=[max_y-min_y],    # *data space* height of image
         palette="Spectral11", # palette name
)
# create a new figure
p2 = figure(title="RGBA image", x_range = [0,10], y_range = [0,10])
# We can also use the `image_rgba` renderer to display RGBA images that
# we have color mapped ourselves.
N = 20
# View the uint32 buffer as N x N x 4 bytes so each channel can be set
# individually (little-endian: byte 0 = red ... byte 3 = alpha).
img = np.empty((N,N), dtype=np.uint32)
view = img.view(dtype=np.uint8).reshape((N, N, 4))
for i in range(N):
    for j in range(N):
        view[i, j, 0] = int(i/N*255) # red
        view[i, j, 1] = 158          # green
        view[i, j, 2] = int(j/N*255) # blue
        view[i, j, 3] = 255          # alpha
# EXERCISE: use `image_rgba` to display the image above. Use the following
# cordinates: (x0,y0) = (0,0) and (x1,y1) = (10,10). Remember to set the
# x_range and y_range explicitly as above.
p2.image_rgba(image=[img], x=[0], y=[0], dw=[10], dh=[10])
# show the plots arrayed in a VBox
show(VBox(p1, p2))
|
saurabh6790/medsynaptic-app
|
refs/heads/develop
|
stock/report/stock_ageing/stock_ageing.py
|
29
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import date_diff
def execute(filters=None):
    """Build the Stock Ageing report and return (columns, rows).

    Items with an empty FIFO queue (no remaining stock) are skipped.
    """
    to_date = filters["to_date"]
    rows = []
    for item_code, info in get_fifo_queue(filters).items():
        queue = info["fifo_queue"]
        if not queue:
            continue
        details = info["details"]
        rows.append([
            item_code,
            details.item_name,
            details.description,
            details.item_group,
            details.brand,
            get_average_age(queue, to_date),
            # Oldest batch sits at the head of the FIFO queue,
            # the newest at the tail.
            date_diff(to_date, queue[0][1]),
            date_diff(to_date, queue[-1][1]),
            details.stock_uom,
        ])
    return get_columns(), rows
def get_average_age(fifo_queue, to_date):
    """Quantity-weighted average age (in days) of the batches in *fifo_queue*.

    Each batch is a [qty, posting_date] pair; returns 0.0 for zero total qty.
    """
    weighted_age = 0.0
    total_qty = 0.0
    for qty, posting_date in fifo_queue:
        weighted_age += date_diff(to_date, posting_date) * qty
        total_qty += qty
    if not total_qty:
        return 0.0
    return weighted_age / total_qty
def get_columns():
    """Column definitions for the Stock Ageing report
    (``Label:Fieldtype/Options:Width`` format)."""
    return [
        "Item Code:Link/Item:100",
        "Item Name::100",
        "Description::200",
        "Item Group:Link/Item Group:100",
        "Brand:Link/Brand:100",
        "Average Age:Float:100",
        "Earliest:Int:80",
        "Latest:Int:80",
        "UOM:Link/UOM:100",
    ]
def get_fifo_queue(filters):
    """Replay stock ledger entries into per-item FIFO queues.

    Returns {item_code: {"details": row, "fifo_queue": [[qty, posting_date], ...]}}
    where each queue entry is a remaining receipt batch, oldest first.
    """
    item_details = {}
    for d in get_stock_ledger_entries(filters):
        item_details.setdefault(d.name, {"details": d, "fifo_queue": []})
        fifo_queue = item_details[d.name]["fifo_queue"]
        if d.actual_qty > 0:
            # Receipt: append a new batch at the tail of the queue.
            fifo_queue.append([d.actual_qty, d.posting_date])
        else:
            # Issue: consume batches from the head of the queue.
            qty_to_pop = abs(d.actual_qty)
            while qty_to_pop:
                # Sentinel [0, None] when the queue is empty (issue exceeds
                # available stock); the else-branch then zeroes qty_to_pop.
                batch = fifo_queue[0] if fifo_queue else [0, None]
                if 0 < batch[0] <= qty_to_pop:
                    # Batch fully consumed (exactly or not enough):
                    # remove it and keep consuming.
                    qty_to_pop -= batch[0]
                    fifo_queue.pop(0)
                else:
                    # Batch has more than needed: shrink it in place and stop.
                    batch[0] -= qty_to_pop
                    qty_to_pop = 0
    return item_details
def get_stock_ledger_entries(filters):
    """Fetch stock ledger entries up to filters["to_date"], joined with item
    master data, ordered chronologically (required for FIFO replay).

    The item/warehouse conditions are built from the same *filters* dict that
    supplies the query parameters, so only parameterized placeholders are
    interpolated -- no raw values enter the SQL string.
    """
    return webnotes.conn.sql("""select
        item.name, item.item_name, item_group, brand, description, item.stock_uom,
        actual_qty, posting_date
        from `tabStock Ledger Entry` sle,
            (select name, item_name, description, stock_uom, brand, item_group
                from `tabItem` {item_conditions}) item
        where item_code = item.name and
            company = %(company)s and
            posting_date <= %(to_date)s
            {sle_conditions}
            order by posting_date, posting_time, sle.name"""\
        .format(item_conditions=get_item_conditions(filters),
            sle_conditions=get_sle_conditions(filters)), filters, as_dict=True)
def get_item_conditions(filters):
    """Build the `where ...` clause for the tabItem sub-query from the
    optional item_code/brand filters; returns "" when neither is set.
    Only parameter placeholders are emitted, never raw values."""
    clauses = []
    for key, clause in (("item_code", "item_code=%(item_code)s"),
                        ("brand", "brand=%(brand)s")):
        if filters.get(key):
            clauses.append(clause)
    if not clauses:
        return ""
    return "where {}".format(" and ".join(clauses))
def get_sle_conditions(filters):
    """Build the extra `and ...` clause for the stock-ledger query from the
    optional warehouse filter; returns "" when it is not set."""
    clauses = []
    if filters.get("warehouse"):
        clauses.append("warehouse=%(warehouse)s")
    if not clauses:
        return ""
    return "and {}".format(" and ".join(clauses))
|
camilonova/django
|
refs/heads/master
|
django/contrib/messages/views.py
|
131
|
from django.contrib import messages
class SuccessMessageMixin:
    """
    Add a success message on successful form submission.
    """
    success_message = ''

    def form_valid(self, form):
        """Delegate to the parent, then queue the success message (if any)."""
        response = super().form_valid(form)
        message = self.get_success_message(form.cleaned_data)
        if message:
            messages.success(self.request, message)
        return response

    def get_success_message(self, cleaned_data):
        """Interpolate the cleaned form data into ``success_message``."""
        return self.success_message % cleaned_data
|
Foxfanmedium/python_training
|
refs/heads/master
|
OnlineCoursera/mail_ru/Python_1/env/Lib/site-packages/ipython_genutils/tests/test_tempdir.py
|
20
|
import os
from ..tempdir import NamedFileInTemporaryDirectory
from ..tempdir import TemporaryWorkingDirectory
def test_named_file_in_temporary_directory():
    """Inside the context the named file is open and on disk; afterwards it
    is closed and removed along with its directory."""
    with NamedFileInTemporaryDirectory('filename') as handle:
        path = handle.name
        assert not handle.closed
        assert os.path.exists(path)
        handle.write(b'test')
    assert handle.closed
    assert not os.path.exists(path)
def test_temporary_working_directory():
    """Inside the context the cwd is the temp dir; afterwards the dir is
    gone and the cwd restored."""
    with TemporaryWorkingDirectory() as tmpdir:
        assert os.path.exists(tmpdir)
        assert os.path.realpath(os.curdir) == os.path.realpath(tmpdir)
    assert not os.path.exists(tmpdir)
    assert os.path.abspath(os.curdir) != tmpdir
|
kenshay/ImageScript
|
refs/heads/master
|
ProgramData/SystemFiles/Python/Lib/site-packages/pip-9.0.1-py2.7.egg/pip/_vendor/requests/packages/chardet/universaldetector.py
|
1775
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import sys
import codecs
from .latin1prober import Latin1Prober # windows-1252
from .mbcsgroupprober import MBCSGroupProber # multi-byte character sets
from .sbcsgroupprober import SBCSGroupProber # single-byte character sets
from .escprober import EscCharSetProber # ISO-2122, etc.
import re
# Results with confidence below this value are discarded at close().
MINIMUM_THRESHOLD = 0.20
# Input-state machine values for UniversalDetector._mInputState.
ePureAscii = 0   # only 7-bit bytes seen so far
eEscAscii = 1    # escape sequences seen (ISO-2022-style / HZ)
eHighbyte = 2    # at least one byte with the high bit set seen
class UniversalDetector:
    """Charset auto-detector: feed() it chunks of bytes, then call close()
    to obtain {'encoding': name-or-None, 'confidence': float}."""

    def __init__(self):
        # Any byte with the high bit set => not pure ASCII.
        self._highBitDetector = re.compile(b'[\x80-\xFF]')
        # ESC byte or '~{' => escape-style encoding (ISO-2022 family / HZ).
        self._escDetector = re.compile(b'(\033|~{)')
        self._mEscCharSetProber = None
        self._mCharSetProbers = []
        self.reset()

    def reset(self):
        """Reset all state so the detector can be reused for new data."""
        self.result = {'encoding': None, 'confidence': 0.0}
        self.done = False
        self._mStart = True
        self._mGotData = False
        self._mInputState = ePureAscii
        # Last byte of the previous chunk, kept so escape sequences that
        # straddle a chunk boundary are still seen.
        self._mLastChar = b''
        if self._mEscCharSetProber:
            self._mEscCharSetProber.reset()
        for prober in self._mCharSetProbers:
            prober.reset()

    def feed(self, aBuf):
        """Feed a chunk of bytes; may set self.done/self.result early when
        a BOM or a prober gives a definitive answer."""
        if self.done:
            return
        aLen = len(aBuf)
        if not aLen:
            return
        if not self._mGotData:
            # If the data starts with BOM, we know it is UTF
            if aBuf[:3] == codecs.BOM_UTF8:
                # EF BB BF  UTF-8 with BOM
                self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0}
            elif aBuf[:4] == codecs.BOM_UTF32_LE:
                # FF FE 00 00  UTF-32, little-endian BOM
                self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
            elif aBuf[:4] == codecs.BOM_UTF32_BE:
                # 00 00 FE FF  UTF-32, big-endian BOM
                self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
            elif aBuf[:4] == b'\xFE\xFF\x00\x00':
                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
                self.result = {
                    'encoding': "X-ISO-10646-UCS-4-3412",
                    'confidence': 1.0
                }
            elif aBuf[:4] == b'\x00\x00\xFF\xFE':
                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
                self.result = {
                    'encoding': "X-ISO-10646-UCS-4-2143",
                    'confidence': 1.0
                }
            elif aBuf[:2] == codecs.BOM_LE:
                # FF FE  UTF-16, little endian BOM
                self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
            elif aBuf[:2] == codecs.BOM_BE:
                # FE FF  UTF-16, big endian BOM
                self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}
        self._mGotData = True
        if self.result['encoding'] and (self.result['confidence'] > 0.0):
            self.done = True
            return
        # Escalate the input state: pure ASCII -> escape-style or high-byte.
        # Once escalated it never goes back down.
        if self._mInputState == ePureAscii:
            if self._highBitDetector.search(aBuf):
                self._mInputState = eHighbyte
            elif ((self._mInputState == ePureAscii) and
                    self._escDetector.search(self._mLastChar + aBuf)):
                self._mInputState = eEscAscii
        self._mLastChar = aBuf[-1:]
        if self._mInputState == eEscAscii:
            # Escape-sequence encodings: a single dedicated prober.
            if not self._mEscCharSetProber:
                self._mEscCharSetProber = EscCharSetProber()
            if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
                self.result = {'encoding': self._mEscCharSetProber.get_charset_name(),
                               'confidence': self._mEscCharSetProber.get_confidence()}
                self.done = True
        elif self._mInputState == eHighbyte:
            # High-byte data: run multi-byte, single-byte and latin-1 probers
            # in parallel (created lazily on first use).
            if not self._mCharSetProbers:
                self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
                                         Latin1Prober()]
            for prober in self._mCharSetProbers:
                if prober.feed(aBuf) == constants.eFoundIt:
                    self.result = {'encoding': prober.get_charset_name(),
                                   'confidence': prober.get_confidence()}
                    self.done = True
                    break

    def close(self):
        """Finish detection and return self.result (best guess so far).

        Returns None on the early no-data/already-done paths, mirroring the
        original control flow.
        """
        if self.done:
            return
        if not self._mGotData:
            if constants._debug:
                sys.stderr.write('no data received!\n')
            return
        self.done = True
        if self._mInputState == ePureAscii:
            # Only 7-bit bytes ever seen: plain ASCII with full confidence.
            self.result = {'encoding': 'ascii', 'confidence': 1.0}
            return self.result
        if self._mInputState == eHighbyte:
            # Pick the prober with the highest confidence, if it clears
            # the minimum threshold.
            proberConfidence = None
            maxProberConfidence = 0.0
            maxProber = None
            for prober in self._mCharSetProbers:
                if not prober:
                    continue
                proberConfidence = prober.get_confidence()
                if proberConfidence > maxProberConfidence:
                    maxProberConfidence = proberConfidence
                    maxProber = prober
            if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
                self.result = {'encoding': maxProber.get_charset_name(),
                               'confidence': maxProber.get_confidence()}
                return self.result
        if constants._debug:
            sys.stderr.write('no probers hit minimum threshhold\n')
            for prober in self._mCharSetProbers[0].mProbers:
                if not prober:
                    continue
                sys.stderr.write('%s confidence = %s\n' %
                                 (prober.get_charset_name(),
                                  prober.get_confidence()))
|
FusionSP/android_external_skia
|
refs/heads/lp5.1
|
platform_tools/android/bin/adb_list_devices.py
|
153
|
#!/usr/bin/python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" adb_list_devices: list information about attached Android devices. """
import os
import re
import shlex
import subprocess
import sys
# This file, which resides on every Android device, contains a great deal of
# information about the device, as `key=value` lines.
INFO_FILE = '/system/build.prop'
# Default set of properties to query about a device.
DEFAULT_PROPS_TO_GET = ['ro.product.device', 'ro.build.version.release',
                        'ro.build.type']
def GetDeviceInfo(adb, serial, props_to_get):
  """ Return a list of values (or "<Unknown>" if no value can be found) for the
  given set of properties for the device with the given serial number.

  adb: path to the ADB program.
  serial: serial number of the target device.
  props_to_get: list of strings indicating which properties to determine.

  Raises an Exception when the adb invocation fails.
  """
  device_proc = subprocess.Popen([adb, '-s', serial, 'shell', 'cat',
                                  INFO_FILE], stdout=subprocess.PIPE)
  # BUG FIX: the original called wait() before reading stdout, which can
  # deadlock once the child fills the OS pipe buffer. communicate() drains
  # the pipe while waiting.
  output, _ = device_proc.communicate()
  if device_proc.returncode != 0:
    raise Exception('Could not query device with serial number %s.' % serial)
  device_info = []
  for prop in props_to_get:
    # Property values in build.prop are `key=value` with no spaces.
    match = re.search(r'%s=(\S+)' % prop, output)
    device_info.append(match.group(1) if match else '<Unknown>')
  return device_info
def PrintPrettyTable(data, file=None):
  """ Print out the given data in a nicely-spaced format. This function scans
  the list multiple times and uses extra memory, so don't use it for big data
  sets.

  data: list of lists of strings, where each list represents a row of data.
    This table is assumed to be rectangular; if the length of any list differs
    some of the output may not get printed.
  file: file-like object into which the table should be written. If none is
    provided, the table is written to stdout.
  """
  out = file if file else sys.stdout
  # First pass: widest cell per column.
  widths = [0] * len(data[0])
  for row in data:
    widths = [max(current, len(cell)) for current, cell in zip(widths, row)]
  # Second pass: emit each cell left-justified with one trailing space.
  for row in data:
    for cell, width in zip(row, widths):
      out.write(cell.ljust(width + 1))
    out.write('\n')
def FindADB(hint=None):
  """ Attempt to find the ADB program using the following sequence of steps.
  Returns the path to ADB if it can be found, or None otherwise.
  1. If a hint was provided, is it a valid path to ADB?
  2. Is ADB in PATH?
  3. Is there an environment variable for ADB?
  4. If the ANDROID_SDK_ROOT variable is set, try to find ADB in the SDK
     directory.

  hint: string indicating a possible path to ADB.
  """
  def _works(candidate):
    """ Best-effort check that `candidate version` runs successfully. """
    # BUG FIX: the original let Popen raise when the candidate path did not
    # exist (steps 1 and 2 were unguarded), so a bad hint crashed instead of
    # falling through to the next step as the docstring promises. The bare
    # `except:` clauses of steps 3 and 4 are narrowed here as well.
    try:
      return subprocess.Popen([candidate, 'version'],
                              stdout=subprocess.PIPE).wait() == 0
    except (OSError, ValueError, TypeError):
      # OSError: missing/non-executable path; TypeError/ValueError: candidate
      # is None or malformed.
      return False

  # 1. If a hint was provided, does it point to ADB?
  if hint:
    if os.path.basename(hint) == 'adb':
      adb = hint
    else:
      adb = os.path.join(hint, 'adb')
    if _works(adb):
      return adb
  # 2. Is 'adb' in our PATH?
  if _works('adb'):
    return 'adb'
  # 3. Is there an environment variable for ADB?
  if _works(os.environ.get('ADB')):
    return os.environ.get('ADB')
  # 4. If ANDROID_SDK_ROOT is set, try to find ADB in the SDK directory.
  sdk_dir = os.environ.get('ANDROID_SDK_ROOT')
  if sdk_dir:
    adb = os.path.join(sdk_dir, 'platform-tools', 'adb')
    if _works(adb):
      return adb
  return None
def main(argv):
  """ Print out information about connected Android devices. By default, print
  the serial number, status, device name, OS version, and build type of each
  device. If any arguments are supplied on the command line, print the serial
  number and status for each device along with values for those arguments
  interpreted as properties.
  """
  props = argv[1:] if len(argv) > 1 else DEFAULT_PROPS_TO_GET
  adb = FindADB()
  if not adb:
    raise Exception('Could not find ADB!')
  proc = subprocess.Popen([adb, 'devices'], stdout=subprocess.PIPE)
  if proc.wait() != 0:
    raise Exception('Failure in ADB: could not find attached devices.')
  # First row is the header; one row per attached device follows.
  table = [['Serial', 'Status'] + props]
  for raw_line in proc.stdout:
    line = raw_line.rstrip()
    if line == 'List of devices attached' or line == '':
      continue
    fields = shlex.split(line)
    serial, status = fields[0], fields[1]
    row = [serial, status]
    row.extend(GetDeviceInfo(adb, serial, props))
    table.append(row)
  PrintPrettyTable(table)
# Entry point: exit status is main()'s return value (None maps to 0).
if __name__ == '__main__':
  sys.exit(main(sys.argv))
|
John-Boik/Principled-Societies-Project
|
refs/heads/master
|
leddaApp/static/brython/src/Lib/encodings/cp1140.py
|
37
|
""" Python Character Mapping Codec cp1140 generated from 'python-mappings/CP1140.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless codec: each character is mapped through the generated
    # encoding/decoding tables defined at module level.

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding is stateless, so incremental encoding needs no
    # buffering; `final` is ignored.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Single-byte charmap decoding is stateless; `final` is ignored.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream support comes entirely from the Codec/StreamWriter bases.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream support comes entirely from the Codec/StreamReader bases.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo the codecs registry uses for the 'cp1140' name."""
    return codecs.CodecInfo(
        name='cp1140',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x9c' # 0x04 -> CONTROL
'\t' # 0x05 -> HORIZONTAL TABULATION
'\x86' # 0x06 -> CONTROL
'\x7f' # 0x07 -> DELETE
'\x97' # 0x08 -> CONTROL
'\x8d' # 0x09 -> CONTROL
'\x8e' # 0x0A -> CONTROL
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x9d' # 0x14 -> CONTROL
'\x85' # 0x15 -> CONTROL
'\x08' # 0x16 -> BACKSPACE
'\x87' # 0x17 -> CONTROL
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x92' # 0x1A -> CONTROL
'\x8f' # 0x1B -> CONTROL
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
'\x80' # 0x20 -> CONTROL
'\x81' # 0x21 -> CONTROL
'\x82' # 0x22 -> CONTROL
'\x83' # 0x23 -> CONTROL
'\x84' # 0x24 -> CONTROL
'\n' # 0x25 -> LINE FEED
'\x17' # 0x26 -> END OF TRANSMISSION BLOCK
'\x1b' # 0x27 -> ESCAPE
'\x88' # 0x28 -> CONTROL
'\x89' # 0x29 -> CONTROL
'\x8a' # 0x2A -> CONTROL
'\x8b' # 0x2B -> CONTROL
'\x8c' # 0x2C -> CONTROL
'\x05' # 0x2D -> ENQUIRY
'\x06' # 0x2E -> ACKNOWLEDGE
'\x07' # 0x2F -> BELL
'\x90' # 0x30 -> CONTROL
'\x91' # 0x31 -> CONTROL
'\x16' # 0x32 -> SYNCHRONOUS IDLE
'\x93' # 0x33 -> CONTROL
'\x94' # 0x34 -> CONTROL
'\x95' # 0x35 -> CONTROL
'\x96' # 0x36 -> CONTROL
'\x04' # 0x37 -> END OF TRANSMISSION
'\x98' # 0x38 -> CONTROL
'\x99' # 0x39 -> CONTROL
'\x9a' # 0x3A -> CONTROL
'\x9b' # 0x3B -> CONTROL
'\x14' # 0x3C -> DEVICE CONTROL FOUR
'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE
'\x9e' # 0x3E -> CONTROL
'\x1a' # 0x3F -> SUBSTITUTE
' ' # 0x40 -> SPACE
'\xa0' # 0x41 -> NO-BREAK SPACE
'\xe2' # 0x42 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe4' # 0x43 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe0' # 0x44 -> LATIN SMALL LETTER A WITH GRAVE
'\xe1' # 0x45 -> LATIN SMALL LETTER A WITH ACUTE
'\xe3' # 0x46 -> LATIN SMALL LETTER A WITH TILDE
'\xe5' # 0x47 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe7' # 0x48 -> LATIN SMALL LETTER C WITH CEDILLA
'\xf1' # 0x49 -> LATIN SMALL LETTER N WITH TILDE
'\xa2' # 0x4A -> CENT SIGN
'.' # 0x4B -> FULL STOP
'<' # 0x4C -> LESS-THAN SIGN
'(' # 0x4D -> LEFT PARENTHESIS
'+' # 0x4E -> PLUS SIGN
'|' # 0x4F -> VERTICAL LINE
'&' # 0x50 -> AMPERSAND
'\xe9' # 0x51 -> LATIN SMALL LETTER E WITH ACUTE
'\xea' # 0x52 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0x53 -> LATIN SMALL LETTER E WITH DIAERESIS
'\xe8' # 0x54 -> LATIN SMALL LETTER E WITH GRAVE
'\xed' # 0x55 -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0x56 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0x57 -> LATIN SMALL LETTER I WITH DIAERESIS
'\xec' # 0x58 -> LATIN SMALL LETTER I WITH GRAVE
'\xdf' # 0x59 -> LATIN SMALL LETTER SHARP S (GERMAN)
'!' # 0x5A -> EXCLAMATION MARK
'$' # 0x5B -> DOLLAR SIGN
'*' # 0x5C -> ASTERISK
')' # 0x5D -> RIGHT PARENTHESIS
';' # 0x5E -> SEMICOLON
'\xac' # 0x5F -> NOT SIGN
'-' # 0x60 -> HYPHEN-MINUS
'/' # 0x61 -> SOLIDUS
'\xc2' # 0x62 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc4' # 0x63 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc0' # 0x64 -> LATIN CAPITAL LETTER A WITH GRAVE
'\xc1' # 0x65 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc3' # 0x66 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc5' # 0x67 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc7' # 0x68 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xd1' # 0x69 -> LATIN CAPITAL LETTER N WITH TILDE
'\xa6' # 0x6A -> BROKEN BAR
',' # 0x6B -> COMMA
'%' # 0x6C -> PERCENT SIGN
'_' # 0x6D -> LOW LINE
'>' # 0x6E -> GREATER-THAN SIGN
'?' # 0x6F -> QUESTION MARK
'\xf8' # 0x70 -> LATIN SMALL LETTER O WITH STROKE
'\xc9' # 0x71 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xca' # 0x72 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
'\xcb' # 0x73 -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\xc8' # 0x74 -> LATIN CAPITAL LETTER E WITH GRAVE
'\xcd' # 0x75 -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0x76 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0x77 -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\xcc' # 0x78 -> LATIN CAPITAL LETTER I WITH GRAVE
'`' # 0x79 -> GRAVE ACCENT
':' # 0x7A -> COLON
'#' # 0x7B -> NUMBER SIGN
'@' # 0x7C -> COMMERCIAL AT
"'" # 0x7D -> APOSTROPHE
'=' # 0x7E -> EQUALS SIGN
'"' # 0x7F -> QUOTATION MARK
'\xd8' # 0x80 -> LATIN CAPITAL LETTER O WITH STROKE
'a' # 0x81 -> LATIN SMALL LETTER A
'b' # 0x82 -> LATIN SMALL LETTER B
'c' # 0x83 -> LATIN SMALL LETTER C
'd' # 0x84 -> LATIN SMALL LETTER D
'e' # 0x85 -> LATIN SMALL LETTER E
'f' # 0x86 -> LATIN SMALL LETTER F
'g' # 0x87 -> LATIN SMALL LETTER G
'h' # 0x88 -> LATIN SMALL LETTER H
'i' # 0x89 -> LATIN SMALL LETTER I
'\xab' # 0x8A -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbb' # 0x8B -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xf0' # 0x8C -> LATIN SMALL LETTER ETH (ICELANDIC)
'\xfd' # 0x8D -> LATIN SMALL LETTER Y WITH ACUTE
'\xfe' # 0x8E -> LATIN SMALL LETTER THORN (ICELANDIC)
'\xb1' # 0x8F -> PLUS-MINUS SIGN
'\xb0' # 0x90 -> DEGREE SIGN
'j' # 0x91 -> LATIN SMALL LETTER J
'k' # 0x92 -> LATIN SMALL LETTER K
'l' # 0x93 -> LATIN SMALL LETTER L
'm' # 0x94 -> LATIN SMALL LETTER M
'n' # 0x95 -> LATIN SMALL LETTER N
'o' # 0x96 -> LATIN SMALL LETTER O
'p' # 0x97 -> LATIN SMALL LETTER P
'q' # 0x98 -> LATIN SMALL LETTER Q
'r' # 0x99 -> LATIN SMALL LETTER R
'\xaa' # 0x9A -> FEMININE ORDINAL INDICATOR
'\xba' # 0x9B -> MASCULINE ORDINAL INDICATOR
'\xe6' # 0x9C -> LATIN SMALL LIGATURE AE
'\xb8' # 0x9D -> CEDILLA
'\xc6' # 0x9E -> LATIN CAPITAL LIGATURE AE
'\u20ac' # 0x9F -> EURO SIGN
'\xb5' # 0xA0 -> MICRO SIGN
'~' # 0xA1 -> TILDE
's' # 0xA2 -> LATIN SMALL LETTER S
't' # 0xA3 -> LATIN SMALL LETTER T
'u' # 0xA4 -> LATIN SMALL LETTER U
'v' # 0xA5 -> LATIN SMALL LETTER V
'w' # 0xA6 -> LATIN SMALL LETTER W
'x' # 0xA7 -> LATIN SMALL LETTER X
'y' # 0xA8 -> LATIN SMALL LETTER Y
'z' # 0xA9 -> LATIN SMALL LETTER Z
'\xa1' # 0xAA -> INVERTED EXCLAMATION MARK
'\xbf' # 0xAB -> INVERTED QUESTION MARK
'\xd0' # 0xAC -> LATIN CAPITAL LETTER ETH (ICELANDIC)
'\xdd' # 0xAD -> LATIN CAPITAL LETTER Y WITH ACUTE
'\xde' # 0xAE -> LATIN CAPITAL LETTER THORN (ICELANDIC)
'\xae' # 0xAF -> REGISTERED SIGN
'^' # 0xB0 -> CIRCUMFLEX ACCENT
'\xa3' # 0xB1 -> POUND SIGN
'\xa5' # 0xB2 -> YEN SIGN
'\xb7' # 0xB3 -> MIDDLE DOT
'\xa9' # 0xB4 -> COPYRIGHT SIGN
'\xa7' # 0xB5 -> SECTION SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xbc' # 0xB7 -> VULGAR FRACTION ONE QUARTER
'\xbd' # 0xB8 -> VULGAR FRACTION ONE HALF
'\xbe' # 0xB9 -> VULGAR FRACTION THREE QUARTERS
'[' # 0xBA -> LEFT SQUARE BRACKET
']' # 0xBB -> RIGHT SQUARE BRACKET
'\xaf' # 0xBC -> MACRON
'\xa8' # 0xBD -> DIAERESIS
'\xb4' # 0xBE -> ACUTE ACCENT
'\xd7' # 0xBF -> MULTIPLICATION SIGN
'{' # 0xC0 -> LEFT CURLY BRACKET
'A' # 0xC1 -> LATIN CAPITAL LETTER A
'B' # 0xC2 -> LATIN CAPITAL LETTER B
'C' # 0xC3 -> LATIN CAPITAL LETTER C
'D' # 0xC4 -> LATIN CAPITAL LETTER D
'E' # 0xC5 -> LATIN CAPITAL LETTER E
'F' # 0xC6 -> LATIN CAPITAL LETTER F
'G' # 0xC7 -> LATIN CAPITAL LETTER G
'H' # 0xC8 -> LATIN CAPITAL LETTER H
'I' # 0xC9 -> LATIN CAPITAL LETTER I
'\xad' # 0xCA -> SOFT HYPHEN
'\xf4' # 0xCB -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf6' # 0xCC -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf2' # 0xCD -> LATIN SMALL LETTER O WITH GRAVE
'\xf3' # 0xCE -> LATIN SMALL LETTER O WITH ACUTE
'\xf5' # 0xCF -> LATIN SMALL LETTER O WITH TILDE
'}' # 0xD0 -> RIGHT CURLY BRACKET
'J' # 0xD1 -> LATIN CAPITAL LETTER J
'K' # 0xD2 -> LATIN CAPITAL LETTER K
'L' # 0xD3 -> LATIN CAPITAL LETTER L
'M' # 0xD4 -> LATIN CAPITAL LETTER M
'N' # 0xD5 -> LATIN CAPITAL LETTER N
'O' # 0xD6 -> LATIN CAPITAL LETTER O
'P' # 0xD7 -> LATIN CAPITAL LETTER P
'Q' # 0xD8 -> LATIN CAPITAL LETTER Q
'R' # 0xD9 -> LATIN CAPITAL LETTER R
'\xb9' # 0xDA -> SUPERSCRIPT ONE
'\xfb' # 0xDB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xDC -> LATIN SMALL LETTER U WITH DIAERESIS
'\xf9' # 0xDD -> LATIN SMALL LETTER U WITH GRAVE
'\xfa' # 0xDE -> LATIN SMALL LETTER U WITH ACUTE
'\xff' # 0xDF -> LATIN SMALL LETTER Y WITH DIAERESIS
'\\' # 0xE0 -> REVERSE SOLIDUS
'\xf7' # 0xE1 -> DIVISION SIGN
'S' # 0xE2 -> LATIN CAPITAL LETTER S
'T' # 0xE3 -> LATIN CAPITAL LETTER T
'U' # 0xE4 -> LATIN CAPITAL LETTER U
'V' # 0xE5 -> LATIN CAPITAL LETTER V
'W' # 0xE6 -> LATIN CAPITAL LETTER W
'X' # 0xE7 -> LATIN CAPITAL LETTER X
'Y' # 0xE8 -> LATIN CAPITAL LETTER Y
'Z' # 0xE9 -> LATIN CAPITAL LETTER Z
'\xb2' # 0xEA -> SUPERSCRIPT TWO
'\xd4' # 0xEB -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd6' # 0xEC -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd2' # 0xED -> LATIN CAPITAL LETTER O WITH GRAVE
'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd5' # 0xEF -> LATIN CAPITAL LETTER O WITH TILDE
'0' # 0xF0 -> DIGIT ZERO
'1' # 0xF1 -> DIGIT ONE
'2' # 0xF2 -> DIGIT TWO
'3' # 0xF3 -> DIGIT THREE
'4' # 0xF4 -> DIGIT FOUR
'5' # 0xF5 -> DIGIT FIVE
'6' # 0xF6 -> DIGIT SIX
'7' # 0xF7 -> DIGIT SEVEN
'8' # 0xF8 -> DIGIT EIGHT
'9' # 0xF9 -> DIGIT NINE
'\xb3' # 0xFA -> SUPERSCRIPT THREE
'\xdb' # 0xFB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xFC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE
'\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE
'\x9f' # 0xFF -> CONTROL
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
|
geraldoandradee/pytest
|
refs/heads/master
|
doc/ja/example/multipython.py
|
3
|
"""
module containing a parametrized tests testing cross-python
serialization via the pickle module.
"""
import py, pytest
pythonlist = ['python2.4', 'python2.5', 'python2.6', 'python2.7', 'python2.8']
def pytest_generate_tests(metafunc):
    """Schedule every ``python1``/``python2`` argument over ``pythonlist``.

    Only the parameter combinations are declared here; the actual lookup of
    each interpreter happens later in the python1/python2 factories
    (``indirect=True``).
    """
    interpreter_args = (name for name in metafunc.fixturenames
                        if name in ("python1", "python2"))
    for fixture_name in interpreter_args:
        metafunc.parametrize(fixture_name, pythonlist, indirect=True)
@pytest.mark.parametrize("obj", [42, {}, {1:3},])
def test_basic_objects(python1, python2, obj):
    # Cross-interpreter round trip: pickle with python1, unpickle with
    # python2 and compare in that interpreter's process.
    python1.dumps(obj)
    python2.load_and_is_true("obj == %s" % obj)
def pytest_funcarg__python1(request):
    """Build the 'dumping' interpreter; its pickle goes to a tmpdir file."""
    pickle_path = request.getfuncargvalue("tmpdir").join("data.pickle")
    return Python(request.param, pickle_path)
def pytest_funcarg__python2(request):
    """Build the 'loading' interpreter; it reads python1's pickle file."""
    dumper = request.getfuncargvalue("python1")
    return Python(request.param, dumper.picklefile)
class Python:
    """Handle to one external Python interpreter, used to run pickle
    dump/load snippets out of process."""
    def __init__(self, version, picklefile):
        self.pythonpath = py.path.local.sysfind(version)
        if not self.pythonpath:
            # Interpreter not installed on this machine: skip, don't fail.
            py.test.skip("%r not found" %(version,))
        self.picklefile = picklefile
    def dumps(self, obj):
        # Write a small script next to the pickle file and execute it with
        # the target interpreter; %r embeds the literal repr of obj (only
        # repr-round-trippable objects are supported).
        dumpfile = self.picklefile.dirpath("dump.py")
        dumpfile.write(py.code.Source("""
            import pickle
            f = open(%r, 'wb')
            s = pickle.dump(%r, f)
            f.close()
        """ % (str(self.picklefile), obj)))
        py.process.cmdexec("%s %s" %(self.pythonpath, dumpfile))
    def load_and_is_true(self, expression):
        # Unpickle in the (possibly different) interpreter and evaluate the
        # expression there; a falsy result exits 1, which cmdexec raises on.
        loadfile = self.picklefile.dirpath("load.py")
        loadfile.write(py.code.Source("""
            import pickle
            f = open(%r, 'rb')
            obj = pickle.load(f)
            f.close()
            res = eval(%r)
            if not res:
                raise SystemExit(1)
        """ % (str(self.picklefile), expression)))
        print (loadfile)
        py.process.cmdexec("%s %s" %(self.pythonpath, loadfile))
|
ASCrookes/django
|
refs/heads/master
|
django/core/serializers/xml_serializer.py
|
124
|
"""
XML serializer.
"""
from __future__ import unicode_literals
from collections import OrderedDict
from xml.dom import pulldom
from xml.sax import handler
from xml.sax.expatreader import ExpatParser as _ExpatParser
from django.apps import apps
from django.conf import settings
from django.core.serializers import base
from django.db import DEFAULT_DB_ALIAS, models
from django.utils.encoding import smart_text
from django.utils.xmlutils import (
SimplerXMLGenerator, UnserializableContentError,
)
class Serializer(base.Serializer):
    """
    Serializes a QuerySet to XML.
    """
    def indent(self, level):
        # Only emit newline + indentation when the 'indent' option was given;
        # otherwise the whole document stays on one line.
        if self.options.get('indent') is not None:
            self.xml.ignorableWhitespace('\n' + ' ' * self.options.get('indent') * level)
    def start_serialization(self):
        """
        Start serialization -- open the XML document and the root element.
        """
        self.xml = SimplerXMLGenerator(self.stream, self.options.get("encoding", settings.DEFAULT_CHARSET))
        self.xml.startDocument()
        self.xml.startElement("django-objects", {"version": "1.0"})
    def end_serialization(self):
        """
        End serialization -- end the document.
        """
        self.indent(0)
        self.xml.endElement("django-objects")
        self.xml.endDocument()
    def start_object(self, obj):
        """
        Called as each object is handled.
        """
        if not hasattr(obj, "_meta"):
            raise base.SerializationError("Non-model object (%s) encountered during serialization" % type(obj))
        self.indent(1)
        # Deferred instances are dynamically created subclasses; report the
        # real model they proxy for.
        model = obj._meta.proxy_for_model if obj._deferred else obj.__class__
        attrs = OrderedDict([("model", smart_text(model._meta))])
        # The pk attribute is omitted when natural primary keys are in use
        # and the object can produce a natural key.
        if not self.use_natural_primary_keys or not hasattr(obj, 'natural_key'):
            obj_pk = obj._get_pk_val()
            if obj_pk is not None:
                attrs['pk'] = smart_text(obj_pk)
        self.xml.startElement("object", attrs)
    def end_object(self, obj):
        """
        Called after handling all fields for an object.
        """
        self.indent(1)
        self.xml.endElement("object")
    def handle_field(self, obj, field):
        """
        Called to handle each field on an object (except for ForeignKeys and
        ManyToManyFields)
        """
        self.indent(2)
        self.xml.startElement("field", OrderedDict([
            ("name", field.name),
            ("type", field.get_internal_type()),
        ]))
        # Get a "string version" of the object's data.
        if getattr(obj, field.name) is not None:
            try:
                self.xml.characters(field.value_to_string(obj))
            except UnserializableContentError:
                raise ValueError("%s.%s (pk:%s) contains unserializable characters" % (
                    obj.__class__.__name__, field.name, obj._get_pk_val()))
        else:
            # NULL values are encoded as an empty <None/> subelement; the
            # deserializer checks for it by tag name.
            self.xml.addQuickElement("None")
        self.xml.endElement("field")
    def handle_fk_field(self, obj, field):
        """
        Called to handle a ForeignKey (we need to treat them slightly
        differently from regular fields).
        """
        self._start_relational_field(field)
        related_att = getattr(obj, field.get_attname())
        if related_att is not None:
            if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
                related = getattr(obj, field.name)
                # If related object has a natural key, use it
                related = related.natural_key()
                # Iterable natural keys are rolled out as subelements
                for key_value in related:
                    self.xml.startElement("natural", {})
                    self.xml.characters(smart_text(key_value))
                    self.xml.endElement("natural")
            else:
                self.xml.characters(smart_text(related_att))
        else:
            self.xml.addQuickElement("None")
        self.xml.endElement("field")
    def handle_m2m_field(self, obj, field):
        """
        Called to handle a ManyToManyField. Related objects are only
        serialized as references to the object's PK (i.e. the related *data*
        is not dumped, just the relation).
        """
        # Only auto-created through models are handled here; explicit
        # (user-defined) through models are serialized as objects of their own.
        if field.remote_field.through._meta.auto_created:
            self._start_relational_field(field)
            if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
                # If the objects in the m2m have a natural key, use it
                def handle_m2m(value):
                    natural = value.natural_key()
                    # Iterable natural keys are rolled out as subelements
                    self.xml.startElement("object", {})
                    for key_value in natural:
                        self.xml.startElement("natural", {})
                        self.xml.characters(smart_text(key_value))
                        self.xml.endElement("natural")
                    self.xml.endElement("object")
            else:
                def handle_m2m(value):
                    self.xml.addQuickElement("object", attrs={
                        'pk': smart_text(value._get_pk_val())
                    })
            for relobj in getattr(obj, field.name).iterator():
                handle_m2m(relobj)
            self.xml.endElement("field")
    def _start_relational_field(self, field):
        """
        Helper to output the <field> element for relational fields
        """
        self.indent(2)
        self.xml.startElement("field", OrderedDict([
            ("name", field.name),
            ("rel", field.remote_field.__class__.__name__),
            ("to", smart_text(field.remote_field.model._meta)),
        ]))
class Deserializer(base.Deserializer):
    """
    Deserialize XML.
    """
    def __init__(self, stream_or_string, **options):
        super(Deserializer, self).__init__(stream_or_string, **options)
        # pulldom yields streaming parse events; nodes are only expanded into
        # full DOM subtrees one <object> at a time (see __next__).
        self.event_stream = pulldom.parse(self.stream, self._make_parser())
        self.db = options.pop('using', DEFAULT_DB_ALIAS)
        self.ignore = options.pop('ignorenonexistent', False)
    def _make_parser(self):
        """Create a hardened XML parser (no custom/external entities)."""
        return DefusedExpatParser()
    def __next__(self):
        # Advance to the next <object> element and deserialize it.
        for event, node in self.event_stream:
            if event == "START_ELEMENT" and node.nodeName == "object":
                self.event_stream.expandNode(node)
                return self._handle_object(node)
        raise StopIteration
    def _handle_object(self, node):
        """
        Convert an <object> node to a DeserializedObject.
        """
        # Look up the model using the model loading mechanism. If this fails,
        # bail.
        Model = self._get_model_from_node(node, "model")
        # Start building a data dictionary from the object.
        data = {}
        if node.hasAttribute('pk'):
            data[Model._meta.pk.attname] = Model._meta.pk.to_python(
                node.getAttribute('pk'))
        # Also start building a dict of m2m data (this is saved as
        # {m2m_accessor_attribute : [list_of_related_objects]})
        m2m_data = {}
        field_names = {f.name for f in Model._meta.get_fields()}
        # Deserialize each field.
        for field_node in node.getElementsByTagName("field"):
            # If the field is missing the name attribute, bail (are you
            # sensing a pattern here?)
            field_name = field_node.getAttribute("name")
            if not field_name:
                raise base.DeserializationError("<field> node is missing the 'name' attribute")
            # Get the field from the Model. This will raise a
            # FieldDoesNotExist if, well, the field doesn't exist, which will
            # be propagated correctly unless ignorenonexistent=True is used.
            if self.ignore and field_name not in field_names:
                continue
            field = Model._meta.get_field(field_name)
            # As is usually the case, relation fields get the special treatment.
            if field.remote_field and isinstance(field.remote_field, models.ManyToManyRel):
                m2m_data[field.name] = self._handle_m2m_field_node(field_node, field)
            elif field.remote_field and isinstance(field.remote_field, models.ManyToOneRel):
                data[field.attname] = self._handle_fk_field_node(field_node, field)
            else:
                # A <None/> subelement marks a NULL value (see the serializer).
                if field_node.getElementsByTagName('None'):
                    value = None
                else:
                    value = field.to_python(getInnerText(field_node).strip())
                data[field.name] = value
        obj = base.build_instance(Model, data, self.db)
        # Return a DeserializedObject so that the m2m data has a place to live.
        return base.DeserializedObject(obj, m2m_data)
    def _handle_fk_field_node(self, node, field):
        """
        Handle a <field> node for a ForeignKey
        """
        # Check if there is a child node named 'None', returning None if so.
        if node.getElementsByTagName('None'):
            return None
        else:
            if hasattr(field.remote_field.model._default_manager, 'get_by_natural_key'):
                keys = node.getElementsByTagName('natural')
                if keys:
                    # If there are 'natural' subelements, it must be a natural key
                    field_value = [getInnerText(k).strip() for k in keys]
                    obj = field.remote_field.model._default_manager.db_manager(self.db).get_by_natural_key(*field_value)
                    obj_pk = getattr(obj, field.remote_field.field_name)
                    # If this is a natural foreign key to an object that
                    # has a FK/O2O as the foreign key, use the FK value
                    if field.remote_field.model._meta.pk.remote_field:
                        obj_pk = obj_pk.pk
                else:
                    # Otherwise, treat like a normal PK
                    field_value = getInnerText(node).strip()
                    obj_pk = field.remote_field.model._meta.get_field(field.remote_field.field_name).to_python(field_value)
                return obj_pk
            else:
                field_value = getInnerText(node).strip()
                return field.remote_field.model._meta.get_field(field.remote_field.field_name).to_python(field_value)
    def _handle_m2m_field_node(self, node, field):
        """
        Handle a <field> node for a ManyToManyField.
        """
        # Choose the per-<object> conversion strategy once, then apply it to
        # every related-object reference below.
        if hasattr(field.remote_field.model._default_manager, 'get_by_natural_key'):
            def m2m_convert(n):
                keys = n.getElementsByTagName('natural')
                if keys:
                    # If there are 'natural' subelements, it must be a natural key
                    field_value = [getInnerText(k).strip() for k in keys]
                    obj_pk = field.remote_field.model._default_manager.db_manager(self.db).get_by_natural_key(*field_value).pk
                else:
                    # Otherwise, treat like a normal PK value.
                    obj_pk = field.remote_field.model._meta.pk.to_python(n.getAttribute('pk'))
                return obj_pk
        else:
            m2m_convert = lambda n: field.remote_field.model._meta.pk.to_python(n.getAttribute('pk'))
        return [m2m_convert(c) for c in node.getElementsByTagName("object")]
    def _get_model_from_node(self, node, attr):
        """
        Helper to look up a model from a <object model=...> or a <field
        rel=... to=...> node.
        """
        model_identifier = node.getAttribute(attr)
        if not model_identifier:
            raise base.DeserializationError(
                "<%s> node is missing the required '%s' attribute"
                % (node.nodeName, attr))
        try:
            return apps.get_model(model_identifier)
        except (LookupError, TypeError):
            raise base.DeserializationError(
                "<%s> node has invalid model identifier: '%s'"
                % (node.nodeName, model_identifier))
def getInnerText(node):
    """
    Recursively collect and concatenate all the text content of a DOM node.
    """
    pieces = []
    for child in node.childNodes:
        kind = child.nodeType
        if kind in (child.TEXT_NODE, child.CDATA_SECTION_NODE):
            pieces.append(child.data)
        elif kind == child.ELEMENT_NODE:
            pieces.append(getInnerText(child))
        # Other node types (comments, processing instructions) carry no text.
    return "".join(pieces)
# Below code based on Christian Heimes' defusedxml
class DefusedExpatParser(_ExpatParser):
    """
    An expat parser hardened against XML bomb attacks.
    Forbids DTDs, external entity references
    """
    def __init__(self, *args, **kwargs):
        _ExpatParser.__init__(self, *args, **kwargs)
        # Never fetch external general or parameter entities.
        self.setFeature(handler.feature_external_ges, False)
        self.setFeature(handler.feature_external_pes, False)
    def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
        raise DTDForbidden(name, sysid, pubid)
    def entity_decl(self, name, is_parameter_entity, value, base,
                    sysid, pubid, notation_name):
        raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
    def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
        # expat 1.2
        raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)
    def external_entity_ref_handler(self, context, base, sysid, pubid):
        raise ExternalReferenceForbidden(context, base, sysid, pubid)
    def reset(self):
        # reset() recreates the underlying expat parser, so the forbidding
        # handlers must be re-installed on every reset.
        _ExpatParser.reset(self)
        parser = self._parser
        parser.StartDoctypeDeclHandler = self.start_doctype_decl
        parser.EntityDeclHandler = self.entity_decl
        parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
        parser.ExternalEntityRefHandler = self.external_entity_ref_handler
class DefusedXmlException(ValueError):
    """Base exception for refusals raised by the hardened parser."""
    def __repr__(self):
        # repr mirrors str so log output stays readable either way.
        return str(self)
class DTDForbidden(DefusedXmlException):
    """Document type definition is forbidden."""
    def __init__(self, name, sysid, pubid):
        super(DTDForbidden, self).__init__()
        self.name = name
        self.sysid = sysid
        self.pubid = pubid
    def __str__(self):
        return "DTDForbidden(name='{}', system_id={!r}, public_id={!r})".format(
            self.name, self.sysid, self.pubid)
class EntitiesForbidden(DefusedXmlException):
    """Entity definition is forbidden."""
    def __init__(self, name, value, base, sysid, pubid, notation_name):
        super(EntitiesForbidden, self).__init__()
        self.name = name
        self.value = value
        self.base = base
        self.sysid = sysid
        self.pubid = pubid
        self.notation_name = notation_name
    def __str__(self):
        return "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})".format(
            self.name, self.sysid, self.pubid)
class ExternalReferenceForbidden(DefusedXmlException):
    """Resolving an external reference is forbidden."""
    def __init__(self, context, base, sysid, pubid):
        super(ExternalReferenceForbidden, self).__init__()
        self.context = context
        self.base = base
        self.sysid = sysid
        self.pubid = pubid
    def __str__(self):
        return "ExternalReferenceForbidden(system_id='{}', public_id={})".format(
            self.sysid, self.pubid)
|
lcpt/xc
|
refs/heads/master
|
python_modules/postprocess/reports/report_material.py
|
1
|
# -*- coding: utf-8 -*-
__author__= "Ana Ortega (AO_O) "
__copyright__= "Copyright 2016, AO_O"
__license__= "GPL"
__version__= "3.0"
__email__= "ana.ortega@ciccp.es "
import matplotlib.pyplot as plt
from postprocess.reports import graph_material
class ConcreteReport(object):
'''Constructs LaTex tables for depicting porperties of concrete
materials.
:ivar concrDiag: concrete material (of type concrete01 or concrete02)
'''
def __init__(self,concrDiag):
self.concr=concrDiag
def tableCompress(self):
''' returns a LaTeX table that depicts the concrete compressive
properties.
'''
retval='\\multicolumn{2}{c}{Compressive properties} \\\\ \n '
retval+='\\hline \n '
retval+='$f_{c}$ & ' + str(self.concr.fpc*1e-6) + ' MPa \\\\ \n '
retval+='$\\epsilon_{c0}$ & ' + str(self.concr.epsc0*1e3) + ' $\\permil$ \\\\ \n '
retval+='$f_{cu}$ & ' + str(self.concr.fpcu*1e-6) + ' MPa \\\\ \n '
retval+='$\\epsilon_{cu}$ & ' + str(self.concr.epscu*1e3) + ' $\\permil$ \\\\ \n '
retval+='$E_{c0}$ & ' + str(self.concr.getInitialTangent()*1e-9) + ' GPa \\\\ \n '
return retval
def tableTens(self):
''' returns a LaTeX table that depicts the concrete tensile
properties.
'''
retval='\\multicolumn{2}{c}{Tensile properties} \\\\ \n '
retval+='\\hline \n '
retval+='$f_{ct}$ & ' + str(round(self.concr.ft*1e-6,2)) + ' MPa \\\\ \n '
return retval
def tableTensStiff(self,paramTensStiffness):
'''returns a LaTeX table that depicts the concrete tension
stiffening properties (concrete of type concrete02).
:param paramTensStiffness: instance of the object
concrete_base.paramTensStiffness that defines the parameters
of tension stiffness
'''
retval='\\multicolumn{2}{c}{Tension-stiffening} \\\\ \n '
retval+='\\hline \n '
retval+='$\\nu$ & ' + str(paramTensStiffness.nu())+' \\\\ \n '
retval+='$\\rho_{eff}$ & ' + str(paramTensStiffness.ro*1e2) +'\% \\\\ \n '
retval+='\\multicolumn{2}{l}{exponential curve $\\alpha$=' + str(round(paramTensStiffness.alfa(),3)) +'} \\\\ \n '
retval+='\\multicolumn{2}{l}{slope regression line:' + str(round(paramTensStiffness.regresLine()['slope']*1e-9,2)) +' } \\\\ \n '
retval+='\\multicolumn{2}{l}{inters. point (onset carcking):} \\\\ \n '
retval+='\\multicolumn{2}{c}{($\\epsilon$='+str(round(paramTensStiffness.pointOnsetCracking()['eps_ct']*1e2,2)) +'\\%, $\\sigma$='+ str(round(paramTensStiffness.pointOnsetCracking()['ft']*1e-6,2)) + 'MPa)} \\\\ \n '
return retval
def report_concrete02(concrDiag,paramTensStiffening,grTitle,grFileName,texFileName):
    '''Write a standalone LaTeX document that depicts the concrete
    compressive, tensile and tension-stiffening properties as well as its
    stress-strain graphic.

    :param concrDiag: concrete material of type concrete02
    :param paramTensStiffening: instance of the object
           concrete_base.paramTensStiffness that defines the parameters
           of tension stiffness.
    :param grTitle: title for the graphic
    :param grFileName: name of the graphics file (without extension)
    :param texFileName: name of the LaTeX file
    '''
    # Stress-strain diagram, saved in both raster (jpeg) and vector (eps)
    # form so the LaTeX document can be compiled with either toolchain.
    grph= graph_material.UniaxialMaterialDiagramGraphic(epsMin=concrDiag.epscu,epsMax=paramTensStiffening.eps_y,title=grTitle)
    grph.setupGraphic(plt,concrDiag)
    grph.savefig(plt,grFileName+'.jpeg')
    grph.savefig(plt,grFileName+'.eps')
    concrRep= ConcreteReport(concrDiag)
    # \includegraphics needs the bare file name: the .tex file and the image
    # are assumed to live in the same directory.
    grFileNameNoPath=grFileName.split('/')[-1]
    # 'with' guarantees the file is closed even if a write fails.
    with open(texFileName,'w') as f:
        f.write('\\documentclass{article} \n')
        f.write('\\usepackage{graphicx} \n')
        f.write('\\usepackage{multirow} \n')
        f.write('\\usepackage{wasysym} \n')
        f.write('\\usepackage{gensymb} \n')
        f.write('\\begin{document} \n')
        f.write('\\begin{table} \n')
        f.write('\\begin{center} \n')
        f.write('\\begin{tabular}{ll} \n')
        f.write('\\begin{minipage}{95mm} \n')
        f.write('\\vspace{2mm} \n')
        f.write('\\begin{center} \n')
        f.write('\\includegraphics[width=90mm]{'+grFileNameNoPath+'} \n')
        f.write('\\end{center} \n')
        f.write('\\vspace{1pt} \n')
        f.write('\\end{minipage} & \n')
        f.write('\\begin{tabular}{lr} \n')
        f.write(concrRep.tableCompress())
        f.write(concrRep.tableTens())
        f.write(concrRep.tableTensStiff(paramTensStiffening))
        f.write('\\end{tabular} \n')
        f.write('\\end{tabular} \n')
        f.write('\\end{center} \n')
        f.write('\\end{table} \n')
        f.write('\\end{document} \n')
    return
|
indictranstech/biggift-erpnext
|
refs/heads/develop
|
erpnext/selling/doctype/customer/test_customer.py
|
12
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from frappe.test_runner import make_test_records
from erpnext.exceptions import CustomerFrozen
test_ignore = ["Price List"]
test_records = frappe.get_test_records('Customer')
class TestCustomer(unittest.TestCase):
    """Integration tests for the Customer doctype: party details lookup,
    renaming, frozen-customer validation and duplicate-name handling."""
    def test_party_details(self):
        from erpnext.accounts.party import get_party_details
        # Expected party details for _Test Customer once its primary
        # contact and address test records exist.
        to_check = {
            'selling_price_list': None,
            'customer_group': '_Test Customer Group',
            'contact_designation': None,
            'customer_address': '_Test Address-Office',
            'contact_department': None,
            'contact_email': 'test_contact_customer@example.com',
            'contact_mobile': None,
            'sales_team': [],
            'contact_display': '_Test Contact For _Test Customer',
            'contact_person': '_Test Contact For _Test Customer-_Test Customer',
            'territory': u'_Test Territory',
            'contact_phone': '+91 0000000000',
            'customer_name': '_Test Customer'
        }
        make_test_records("Address")
        make_test_records("Contact")
        frappe.db.set_value("Contact", "_Test Contact For _Test Customer-_Test Customer",
            "is_primary_contact", 1)
        details = get_party_details("_Test Customer")
        # dict.items() works on both Python 2 and 3 (iteritems() is py2-only
        # and breaks under py3).
        for key, value in to_check.items():
            self.assertEqual(value, details.get(key))
    def test_rename(self):
        # Start from a clean slate: drop comments left over from prior runs.
        for name in ("_Test Customer 1", "_Test Customer 1 Renamed"):
            frappe.db.sql("""delete from `tabComment` where comment_doctype=%s and comment_docname=%s""",
                ("Customer", name))
        comment = frappe.new_doc("Comment")
        comment.update({
            "comment": "Test Comment for Rename",
            "comment_doctype": "Customer",
            "comment_docname": "_Test Customer 1"
        })
        comment.insert()
        frappe.rename_doc("Customer", "_Test Customer 1", "_Test Customer 1 Renamed")
        self.assertTrue(frappe.db.exists("Customer", "_Test Customer 1 Renamed"))
        self.assertFalse(frappe.db.exists("Customer", "_Test Customer 1"))
        # test that comment gets renamed
        self.assertEqual(frappe.db.get_value("Comment",
            {"comment_doctype": "Customer", "comment_docname": "_Test Customer 1 Renamed"}), comment.name)
        # Rename back so other tests see the original name.
        frappe.rename_doc("Customer", "_Test Customer 1 Renamed", "_Test Customer 1")
    def test_freezed_customer(self):
        make_test_records("Customer")
        make_test_records("Item")
        # Saving a sales order against a frozen customer must be rejected.
        frappe.db.set_value("Customer", "_Test Customer", "is_frozen", 1)
        from erpnext.selling.doctype.sales_order.test_sales_order import make_sales_order
        so = make_sales_order(do_not_save= True)
        self.assertRaises(CustomerFrozen, so.save)
        # Unfreezing lifts the restriction.
        frappe.db.set_value("Customer", "_Test Customer", "is_frozen", 0)
        so.save()
    def test_duplicate_customer(self):
        if not frappe.db.get_value("Customer", "_Test Customer 1"):
            test_customer_1 = frappe.get_doc({
                "customer_group": "_Test Customer Group",
                "customer_name": "_Test Customer 1",
                "customer_type": "Individual",
                "doctype": "Customer",
                "territory": "_Test Territory"
            }).insert(ignore_permissions=True)
        else:
            test_customer_1 = frappe.get_doc("Customer", "_Test Customer 1")
        duplicate_customer = frappe.get_doc({
            "customer_group": "_Test Customer Group",
            "customer_name": "_Test Customer 1",
            "customer_type": "Individual",
            "doctype": "Customer",
            "territory": "_Test Territory"
        }).insert(ignore_permissions=True)
        # The framework de-duplicates names with a " - <n>" suffix while
        # keeping customer_name identical.
        self.assertEqual("_Test Customer 1", test_customer_1.name)
        self.assertEqual("_Test Customer 1 - 1", duplicate_customer.name)
        self.assertEqual(test_customer_1.customer_name, duplicate_customer.customer_name)
|
craigderington/studentloan5
|
refs/heads/master
|
studentloan5/Lib/site-packages/django/conf/locale/zh_TW/formats.py
|
634
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'Y年n月j日'  # 2016年9月5日
TIME_FORMAT = 'H:i'  # 20:45
DATETIME_FORMAT = 'Y年n月j日 H:i'  # 2016年9月5日 20:45
YEAR_MONTH_FORMAT = 'Y年n月'  # 2016年9月
MONTH_DAY_FORMAT = 'm月j日'  # 9月5日
SHORT_DATE_FORMAT = 'Y年n月j日'  # 2016年9月5日
SHORT_DATETIME_FORMAT = 'Y年n月j日 H:i'  # 2016年9月5日 20:45
FIRST_DAY_OF_WEEK = 1  # 星期一 (Monday)
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# NOTE(review): '%n' and '%j' are not standard strftime codes ('%m'/'%d'
# would be) — the CJK input formats below likely never match; confirm
# against upstream Django before relying on them.
DATE_INPUT_FORMATS = (
    '%Y/%m/%d',  # '2016/09/05'
    '%Y-%m-%d',  # '2016-09-05'
    '%Y年%n月%j日',  # '2016年9月5日'
)
TIME_INPUT_FORMATS = (
    '%H:%M',  # '20:45'
    '%H:%M:%S',  # '20:45:29'
    '%H:%M:%S.%f',  # '20:45:29.000200'
)
DATETIME_INPUT_FORMATS = (
    '%Y/%m/%d %H:%M',  # '2016/09/05 20:45'
    '%Y-%m-%d %H:%M',  # '2016-09-05 20:45'
    '%Y年%n月%j日 %H:%M',  # '2016年9月5日 20:45'
    '%Y/%m/%d %H:%M:%S',  # '2016/09/05 20:45:29'
    '%Y-%m-%d %H:%M:%S',  # '2016-09-05 20:45:29'
    '%Y年%n月%j日 %H:%M:%S',  # '2016年9月5日 20:45:29'
    '%Y/%m/%d %H:%M:%S.%f',  # '2016/09/05 20:45:29.000200'
    '%Y-%m-%d %H:%M:%S.%f',  # '2016-09-05 20:45:29.000200'
    # Typo fixed: the minutes field read '%n' instead of '%M'.
    '%Y年%n月%j日 %H:%M:%S.%f',  # '2016年9月5日 20:45:29.000200'
)
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ''
NUMBER_GROUPING = 4
|
m-kuhn/QGIS
|
refs/heads/master
|
python/plugins/processing/algs/gdal/PointsAlongLines.py
|
5
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
PointsAlongLines.py
---------------------
Date                 : January 2015
Copyright : (C) 2015 by Giovanni Manghi
Email : giovanni dot manghi at naturalgis dot pt
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Giovanni Manghi'
__date__ = 'January 2015'
__copyright__ = '(C) 2015, Giovanni Manghi'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.core import (QgsProcessingException,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterString,
QgsProcessingParameterNumber,
QgsProcessingParameterVectorDestination,
QgsProcessingParameterDefinition,
QgsProcessing)
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools.system import isWindows
class PointsAlongLines(GdalAlgorithm):
    """GDAL/OGR Processing algorithm that creates one point per input line.

    Wraps ogr2ogr with a SQLite-dialect query that calls
    ST_Line_Interpolate_Point to place a point at a given fraction of each
    line's length.
    """

    INPUT = 'INPUT'
    GEOMETRY = 'GEOMETRY'
    DISTANCE = 'DISTANCE'
    OPTIONS = 'OPTIONS'
    OUTPUT = 'OUTPUT'

    def __init__(self):
        super().__init__()

    def initAlgorithm(self, config=None):
        """Declare the algorithm's parameters and its output."""
        # Only line layers make sense as input here.
        self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
                                                              self.tr('Input layer'),
                                                              [QgsProcessing.TypeVectorLine]))
        self.addParameter(QgsProcessingParameterString(self.GEOMETRY,
                                                       self.tr('Geometry column name'),
                                                       defaultValue='geometry'))
        # A fraction of the line length in [0, 1], not an absolute distance.
        self.addParameter(QgsProcessingParameterNumber(self.DISTANCE,
                                                       self.tr('Distance from line start represented as fraction of line length'),
                                                       type=QgsProcessingParameterNumber.Double,
                                                       minValue=0,
                                                       maxValue=1,
                                                       defaultValue=0.5))
        # Raw ogr2ogr creation options; advanced/optional.
        options_param = QgsProcessingParameterString(self.OPTIONS,
                                                     self.tr('Additional creation options'),
                                                     defaultValue='',
                                                     optional=True)
        options_param.setFlags(options_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(options_param)
        self.addParameter(QgsProcessingParameterVectorDestination(self.OUTPUT,
                                                                  self.tr('Points along lines'),
                                                                  QgsProcessing.TypeVectorPoint))

    def name(self):
        return 'pointsalonglines'

    def displayName(self):
        return self.tr('Points along lines')

    def group(self):
        return self.tr('Vector geoprocessing')

    def groupId(self):
        return 'vectorgeoprocessing'

    def commandName(self):
        return 'ogr2ogr'

    def getConsoleCommands(self, parameters, context, feedback, executing=True):
        """Build the ogr2ogr invocation; returns [program, argument string]."""
        source = self.parameterAsSource(parameters, self.INPUT, context)
        if source is None:
            raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT))
        fields = source.fields()
        ogrLayer, layerName = self.getOgrCompatibleSource(self.INPUT, parameters, context, feedback, executing)
        distance = self.parameterAsDouble(parameters, self.DISTANCE, context)
        geometry = self.parameterAsString(parameters, self.GEOMETRY, context)
        outFile = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
        self.setOutputValue(self.OUTPUT, outFile)
        options = self.parameterAsString(parameters, self.OPTIONS, context)
        output, outputFormat = GdalUtils.ogrConnectionStringAndFormat(outFile, context)
        # If the layer has any attribute besides the geometry column, carry
        # all columns through the SELECT (',*'); otherwise select the
        # interpolated point only.
        other_fields = []
        for f in fields:
            if f.name() == geometry:
                continue
            other_fields.append(f.name())
        if other_fields:
            other_fields = ',*'
        else:
            other_fields = ''
        # Argument order matters to ogr2ogr: destination first, then source,
        # then the dialect/SQL switches.
        arguments = []
        arguments.append(output)
        arguments.append(ogrLayer)
        arguments.append('-dialect')
        arguments.append('sqlite')
        arguments.append('-sql')
        sql = "SELECT ST_Line_Interpolate_Point({}, {}) AS {}{} FROM '{}'".format(geometry, distance, geometry, other_fields, layerName)
        arguments.append(sql)
        if options:
            arguments.append(options)
        if outputFormat:
            arguments.append('-f {}'.format(outputFormat))
        return ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]
|
tuxfux-hlp-notes/python-batches
|
refs/heads/master
|
archieves/batch-56/modules/sheets/lib/python2.7/_weakrefset.py
|
4
|
/usr/lib/python2.7/_weakrefset.py
|
Anonymouslemming/ansible
|
refs/heads/devel
|
test/units/plugins/test_plugins.py
|
37
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.compat.tests import BUILTINS, unittest
from ansible.compat.tests.mock import mock_open, patch, MagicMock
from ansible.plugins import MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE, PluginLoader
class TestErrors(unittest.TestCase):
    """Unit tests for PluginLoader path handling and config parsing."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    @patch.object(PluginLoader, '_get_paths')
    def test_print_paths(self, get_paths_mock):
        # print_paths() must join the loader's paths with the OS separator.
        fake_paths = ['/path/one', '/path/two', '/path/three']
        get_paths_mock.return_value = fake_paths
        loader = PluginLoader('foo', 'foo', '', 'test_plugins')
        self.assertEqual(loader.print_paths(), os.pathsep.join(fake_paths))

    def test_plugins__get_package_paths_no_package(self):
        # No package name configured -> no package paths.
        loader = PluginLoader('test', '', 'test', 'test_plugin')
        self.assertEqual(loader._get_package_paths(), [])

    def test_plugins__get_package_paths_with_package(self):
        # _get_package_paths() resolves the package via __import__ and then
        # reads __file__ off the innermost module, so fake the whole import
        # chain and patch the builtin to hand back our mock.
        fake_import = MagicMock()
        bar_mod = MagicMock()
        bam_mod = MagicMock()
        bam_mod.__file__ = '/path/to/my/foo/bar/bam/__init__.py'
        bar_mod.bam = bam_mod
        fake_import.return_value.bar = bar_mod
        loader = PluginLoader('test', 'foo.bar.bam', 'test', 'test_plugin')
        with patch('{0}.__import__'.format(BUILTINS), fake_import):
            self.assertEqual(loader._get_package_paths(), ['/path/to/my/foo/bar/bam'])

    def test_plugins__get_paths(self):
        # A pre-populated _paths cache is returned verbatim.
        loader = PluginLoader('test', '', 'test', 'test_plugin')
        loader._paths = ['/path/one', '/path/two']
        self.assertEqual(loader._get_paths(), ['/path/one', '/path/two'])

    def assertPluginLoaderConfigBecomes(self, arg, expected):
        # Helper: a loader built with *arg* must end up with config == *expected*.
        loader = PluginLoader('test', '', arg, 'test_plugin')
        self.assertEqual(loader.config, expected)

    def test_plugin__init_config_list(self):
        config = ['/one', '/two']
        self.assertPluginLoaderConfigBecomes(config, config)

    def test_plugin__init_config_str(self):
        self.assertPluginLoaderConfigBecomes('test', ['test'])

    def test_plugin__init_config_none(self):
        self.assertPluginLoaderConfigBecomes(None, [])
|
sanguinariojoe/FreeCAD
|
refs/heads/master
|
src/Mod/Path/PathScripts/PathVcarve.py
|
6
|
# -*- coding: utf-8 -*-
# ***************************************************************************
# * *
# * Copyright (c) 2020 sliptonic <shopinthewoods@gmail.com> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import FreeCAD
import Part
import Path
import PathScripts.PathEngraveBase as PathEngraveBase
import PathScripts.PathLog as PathLog
import PathScripts.PathOp as PathOp
import PathScripts.PathUtils as PathUtils
import PathScripts.PathGeom as PathGeom
import PathScripts.PathPreferences as PathPreferences
import traceback
import math
from PySide import QtCore
__doc__ = "Class and implementation of Path Vcarve operation"
PRIMARY = 0
SECONDARY = 1
EXTERIOR1 = 2
EXTERIOR2 = 3
COLINEAR = 4
TWIN = 5
PathLog.setLevel(PathLog.Level.INFO, PathLog.thisModule())
# PathLog.trackModule(PathLog.thisModule())
# Qt translation handling
def translate(context, text, disambig=None):
    """Thin wrapper around Qt's translate() for this module's UI strings."""
    return QtCore.QCoreApplication.translate(context, text, disambig)
VD = []
Vertex = {}
_sorting = 'global'
def _collectVoronoiWires(vd):
    """Collect the PRIMARY-colored edges of Voronoi diagram *vd* into wires.

    Returns a list of wires, each a list of edges oriented head-to-tail.
    Also refreshes the module-level Vertex map (vertex index -> incident
    primary edges) as a side effect.
    """
    edges = [e for e in vd.Edges if e.Color == PRIMARY]
    # Build vertex-index -> list of incident edges.
    vertex = {}
    for e in edges:
        for v in e.Vertices:
            i = v.Index
            j = vertex.get(i, [])
            j.append(e)
            vertex[i] = j
    # Mirror the local incidence map into the module-level Vertex dict.
    Vertex.clear()
    for v in vertex:
        Vertex[v] = vertex[v]
    # knots are the start and end points of a wire
    knots = [i for i in vertex if len(vertex[i]) == 1]
    knots.extend([i for i in vertex if len(vertex[i]) > 2])
    if len(knots) == 0:
        # No endpoints or junctions: the graph is one closed loop, so pick
        # an arbitrary populated vertex to break it open.
        for i in vertex:
            if len(vertex[i]) > 0:
                knots.append(i)
                break

    def consume(v, edge):
        # Remove *edge* from v's incidence list; True when v is exhausted.
        vertex[v] = [e for e in vertex[v] if e.Index != edge.Index]
        return len(vertex[v]) == 0

    def traverse(vStart, edge, edges):
        # Append *edge* (or its Twin, to keep a consistent direction) and
        # return the far vertex, or None when the walk cannot continue.
        if vStart == edge.Vertices[0].Index:
            vEnd = edge.Vertices[1].Index
            edges.append(edge)
        else:
            vEnd = edge.Vertices[0].Index
            edges.append(edge.Twin)
        consume(vStart, edge)
        if consume(vEnd, edge):
            return None
        return vEnd
    wires = []
    while knots:
        # Walk from the first remaining knot until hitting a dead end,
        # consuming edges as we go.
        we = []
        vFirst = knots[0]
        vStart = vFirst
        vLast = vFirst
        if len(vertex[vStart]):
            while vStart is not None:
                vLast = vStart
                edges = vertex[vStart]
                if len(edges) > 0:
                    edge = edges[0]
                    vStart = traverse(vStart, edge, we)
                else:
                    vStart = None
            wires.append(we)
        # Drop knots whose incident edges are fully consumed.
        if len(vertex[vFirst]) == 0:
            knots = [v for v in knots if v != vFirst]
        if len(vertex[vLast]) == 0:
            knots = [v for v in knots if v != vLast]
    return wires
def _sortVoronoiWires(wires, start=FreeCAD.Vector(0, 0, 0)):
    """Greedily order *wires* to minimize travel distance from *start*.

    Repeatedly picks the wire whose begin or end point lies closest to the
    current position; when the end point is closer the wire is reversed
    (via each edge's Twin) so cutting always proceeds away from the current
    position.  Returns the re-ordered (and possibly reversed) wires.

    The default Vector is created once at import time; it is only read
    here, never mutated, so the shared default is safe.
    """
    def closestTo(origin, points):
        # Return (key, distance) of the entry in *points* closest to
        # *origin*.  The distance is computed once per candidate (the
        # previous version called distanceToPoint twice per accepted one).
        best_key = None
        best_dist = None
        for i in points:
            dist = origin.distanceToPoint(points[i])
            if best_dist is None or best_dist > dist:
                best_dist = dist
                best_key = i
        return (best_key, best_dist)

    begin = {}
    end = {}
    for i, w in enumerate(wires):
        begin[i] = w[0].Vertices[0].toPoint()
        end[i] = w[-1].Vertices[1].toPoint()

    result = []
    while begin:
        (bIdx, bLen) = closestTo(start, begin)
        (eIdx, eLen) = closestTo(start, end)
        if bLen < eLen:
            # Wire bIdx already starts near us - keep its orientation.
            result.append(wires[bIdx])
            start = end[bIdx]
            del begin[bIdx]
            del end[bIdx]
        else:
            # Wire eIdx ends near us - traverse it backwards via edge twins.
            result.append([e.Twin for e in reversed(wires[eIdx])])
            start = begin[eIdx]
            del begin[eIdx]
            del end[eIdx]
    return result
class _Geometry(object):
'''POD class so the limits only have to be calculated once.'''
def __init__(self, zStart, zStop, zScale):
self.start = zStart
self.stop = zStop
self.scale = zScale
@classmethod
def FromTool(cls, tool, zStart, zFinal):
rMax = float(tool.Diameter) / 2.0
rMin = float(tool.TipDiameter) / 2.0
toolangle = math.tan(math.radians(tool.CuttingEdgeAngle.Value / 2.0))
zScale = 1.0 / toolangle
zStop = zStart - rMax * zScale
zOff = rMin * zScale
return _Geometry(zStart + zOff, max(zStop + zOff, zFinal), zScale)
@classmethod
def FromObj(cls, obj, model):
zStart = model.Shape.BoundBox.ZMax
finalDepth = obj.FinalDepth.Value
return cls.FromTool(obj.ToolController.Tool, zStart, finalDepth)
def _calculate_depth(MIC, geom):
    """Map a maximum inscribed circle radius (MIC) to a cutting depth.

    The depth is measured down from geom.start and clamped so it never
    goes below geom.stop.
    """
    cut = round(MIC / geom.scale, 4)
    depth = geom.start - cut
    PathLog.debug('zStart value: {} depth: {}'.format(geom.start, depth))
    return depth if depth > geom.stop else geom.stop
def _getPartEdge(edge, depths):
    """Convert a flat voronoi *edge* into a 3D Part edge.

    Each endpoint is lowered to the depth dictated by its clearance
    (distance to the boundary) and the *depths* geometry limits.
    """
    distances = edge.getDistances()
    return edge.toShape(_calculate_depth(distances[0], depths),
                        _calculate_depth(distances[1], depths))
class ObjectVcarve(PathEngraveBase.ObjectOp):
    '''Proxy class for Vcarve operation.'''

    def opFeatures(self, obj):
        '''opFeatures(obj) ... return all standard features and edges based geometries'''
        return PathOp.FeatureTool | PathOp.FeatureHeights | PathOp.FeatureDepths | PathOp.FeatureBaseFaces | PathOp.FeatureCoolant

    def setupAdditionalProperties(self, obj):
        '''setupAdditionalProperties(obj) ... add the BaseShapes property when missing.

        Also invoked on document restore so older documents get upgraded.
        '''
        if not hasattr(obj, 'BaseShapes'):
            obj.addProperty("App::PropertyLinkList", "BaseShapes", "Path",
                    QtCore.QT_TRANSLATE_NOOP("PathVcarve",
                        "Additional base objects to be engraved"))
        obj.setEditorMode('BaseShapes', 2)  # hide

    def initOperation(self, obj):
        '''initOperation(obj) ... create vcarve specific properties.'''
        obj.addProperty("App::PropertyFloat", "Discretize", "Path",
                QtCore.QT_TRANSLATE_NOOP("PathVcarve",
                    "The deflection value for discretizing arcs"))
        obj.addProperty("App::PropertyFloat", "Colinear", "Path",
                QtCore.QT_TRANSLATE_NOOP("PathVcarve",
                    "Cutoff for removing colinear segments (degrees). \
                     default=10.0."))
        obj.addProperty("App::PropertyFloat", "Tolerance", "Path",
                QtCore.QT_TRANSLATE_NOOP("PathVcarve", ""))
        obj.Colinear = 10.0
        obj.Discretize = 0.01
        obj.Tolerance = PathPreferences.defaultGeometryTolerance()
        self.setupAdditionalProperties(obj)

    def opOnDocumentRestored(self, obj):
        # upgrade: older documents may lack the BaseShapes property
        self.setupAdditionalProperties(obj)

    def _getPartEdges(self, obj, vWire, geom):
        '''_getPartEdges(obj, vWire, geom) ... 3D edges for one voronoi wire.'''
        edges = []
        for e in vWire:
            edges.append(_getPartEdge(e, geom))
        return edges

    def buildPathMedial(self, obj, faces):
        '''constructs a medial axis path using openvoronoi'''

        def insert_many_wires(vd, wires):
            # Discretize each wire into a closed polygon and feed its
            # segments to the voronoi builder.
            for wire in wires:
                PathLog.debug('discretize value: {}'.format(obj.Discretize))
                pts = wire.discretize(QuasiDeflection=obj.Discretize)
                ptv = [FreeCAD.Vector(p.x, p.y) for p in pts]
                ptv.append(ptv[0])
                for i in range(len(pts)):
                    vd.addSegment(ptv[i], ptv[i + 1])

        def cutWire(edges):
            # Emit g-code for one wire: rapid to the first point at
            # SafeHeight, plunge at the vertical feed, then engrave.
            path = []
            path.append(Path.Command("G0 Z{}".format(obj.SafeHeight.Value)))
            e = edges[0]
            p = e.valueAt(e.FirstParameter)
            path.append(Path.Command("G0 X{} Y{} Z{}".format(p.x, p.y, obj.SafeHeight.Value)))
            hSpeed = obj.ToolController.HorizFeed.Value
            vSpeed = obj.ToolController.VertFeed.Value
            path.append(Path.Command("G1 X{} Y{} Z{} F{}".format(p.x, p.y, p.z, vSpeed)))
            for e in edges:
                path.extend(PathGeom.cmdsForEdge(e, hSpeed=hSpeed, vSpeed=vSpeed))
            return path

        VD.clear()
        voronoiWires = []
        for f in faces:
            vd = Path.Voronoi()
            insert_many_wires(vd, f.Wires)
            vd.construct()
            # Classify edges so only the interior medial axis stays PRIMARY.
            for e in vd.Edges:
                e.Color = PRIMARY if e.isPrimary() else SECONDARY
            vd.colorExterior(EXTERIOR1)
            vd.colorExterior(EXTERIOR2,
                             lambda v: not f.isInside(v.toPoint(f.BoundBox.ZMin),
                                                      obj.Tolerance, True))
            vd.colorColinear(COLINEAR, obj.Colinear)
            vd.colorTwins(TWIN)
            wires = _collectVoronoiWires(vd)
            if _sorting != 'global':
                wires = _sortVoronoiWires(wires)
            voronoiWires.extend(wires)
            VD.append((f, vd, wires))
        if _sorting == 'global':
            voronoiWires = _sortVoronoiWires(voronoiWires)

        geom = _Geometry.FromObj(obj, self.model[0])
        pathlist = []
        pathlist.append(Path.Command("(starting)"))
        for w in voronoiWires:
            pWire = self._getPartEdges(obj, w, geom)
            if pWire:
                # NOTE(review): this appends to the *last* face's wire list
                # from the loop above (also referenced from VD); it looks
                # unintentional but is kept for behavioral compatibility.
                wires.append(pWire)
                pathlist.extend(cutWire(pWire))
        self.commandlist = pathlist

    def opExecute(self, obj):
        '''opExecute(obj) ... process engraving operation'''
        PathLog.track()
        if not hasattr(obj.ToolController.Tool, "CuttingEdgeAngle"):
            PathLog.error(translate("Path_Vcarve", "VCarve requires an engraving cutter with CuttingEdgeAngle"))
            # Bug fix: must abort here — the attribute is dereferenced right
            # below and would raise AttributeError for such tools.
            return
        if obj.ToolController.Tool.CuttingEdgeAngle >= 180.0:
            PathLog.error(translate("Path_Vcarve", "Engraver Cutting Edge Angle must be < 180 degrees."))
            return
        try:
            # Gather candidate faces: explicit base shapes, selected
            # sub-elements, or — as fallback — all faces of 2D base models.
            faces = []
            for base in obj.BaseShapes:
                faces.extend(base.Shape.Faces)
            for base in obj.Base:
                for sub in base[1]:
                    shape = getattr(base[0].Shape, sub)
                    if isinstance(shape, Part.Face):
                        faces.append(shape)
            if not faces:
                for model in self.model:
                    if model.isDerivedFrom('Sketcher::SketchObject') or model.isDerivedFrom('Part::Part2DObject'):
                        faces.extend(model.Shape.Faces)
            if faces:
                self.buildPathMedial(obj, faces)
            else:
                PathLog.error(translate('PathVcarve', 'The Job Base Object has no engraveable element. Engraving operation will produce no output.'))
        except Exception as e:
            # Deliberate best effort: keep the document usable and report the
            # failure instead of propagating it into the GUI.
            # PathLog.error(e)
            # traceback.print_exc()
            PathLog.error(translate('PathVcarve', 'Error processing Base object. Engraving operation will produce no output.'))
            # raise e

    def opUpdateDepths(self, obj, ignoreErrors=False):
        '''updateDepths(obj) ... engraving is always done at the top most z-value'''
        job = PathUtils.findParentJob(obj)
        self.opSetDefaultValues(obj, job)

    def opSetDefaultValues(self, obj, job):
        '''opSetDefaultValues(obj) ... set depths for vcarving'''
        if PathOp.FeatureDepths & self.opFeatures(obj):
            if job and len(job.Model.Group) > 0:
                bb = job.Proxy.modelBoundBox(job)
                obj.OpStartDepth = bb.ZMax
                obj.OpFinalDepth = job.Stock.Shape.BoundBox.ZMin
            else:
                # No model to measure — fall back to a token final depth.
                obj.OpFinalDepth = -0.1

    def isToolSupported(self, obj, tool):
        '''isToolSupported(obj, tool) ... returns True if v-carve op can work with tool.'''
        return hasattr(tool, 'Diameter') and hasattr(tool, 'CuttingEdgeAngle') and hasattr(tool, 'TipDiameter')
def SetupProperties():
    '''SetupProperties() ... list of properties this op contributes.'''
    # NOTE(review): initOperation() also creates 'Colinear' and 'Tolerance';
    # they are not listed here — confirm whether that is intentional.
    return ["Discretize"]
def Create(name, obj=None):
    '''Create(name) ... Creates and returns a Vcarve operation.'''
    if obj is None:
        obj = FreeCAD.ActiveDocument.addObject("Path::FeaturePython", name)
    # Attach the operation proxy to the FeaturePython object.
    ObjectVcarve(obj, name)
    return obj
|
jacquerie/inspire-next
|
refs/heads/master
|
inspirehep/modules/search/api.py
|
2
|
# -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014-2017 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
from __future__ import absolute_import, division, print_function
import logging
from flask import request
from flask_security import current_user
from elasticsearch import RequestError
from elasticsearch_dsl.query import Q
from invenio_search.api import DefaultFilter, RecordsSearch
from invenio_search import current_search_client as es
from inspirehep.modules.records.permissions import (
all_restricted_collections,
user_collections
)
from .query_factory import inspire_query_factory
logger = logging.getLogger(__name__)
IQ = inspire_query_factory()
class SearchMixin(object):
    """Mixin that adds helper functions to ElasticSearch DSL classes."""

    def query_from_iq(self, query_string):
        """Initialize ES DSL object using INSPIRE query parser.

        :param query_string: Query string as a user would input in INSPIRE's search box.
        :type query_string: string
        :returns: Elasticsearch DSL search class
        """
        parsed = IQ(query_string, self)
        return self.query(parsed)

    def get_source(self, uuid, **kwargs):
        """Get source from a given uuid.

        Mimics the behaviour of the low level ES client's get_source.

        :param uuid: uuid of document to be retrieved.
        :type uuid: UUID
        :returns: dict
        """
        return es.get_source(
            id=uuid,
            index=self.Meta.index,
            doc_type=self.Meta.doc_types,
            **kwargs
        )

    def mget(self, uuids, **kwargs):
        """Get sources for a list of uuids.

        :param uuids: uuids of documents to be retrieved.
        :type uuids: list of strings representing uuids
        :returns: list of JSON documents (empty when ES rejects the request)
        """
        try:
            response = es.mget(
                body={'ids': uuids},
                index=self.Meta.index,
                doc_type=self.Meta.doc_types,
                **kwargs
            )
        except RequestError:
            return []
        return [hit['_source'] for hit in response['docs']]
def inspire_filter():
    """Filter applied to all queries.

    Matches the requested collection ('cc' parameter, defaulting to
    Literature) and excludes every restricted collection the current
    user may not see.  Superusers see all restricted collections.
    """
    if not request:
        return None
    selected = request.values.get('cc', 'Literature')
    roles = {role.name for role in current_user.roles}
    visible = all_restricted_collections if 'superuser' in roles else user_collections
    query = Q('match', _collections=selected)
    for restricted in list(all_restricted_collections - visible):
        query = query & ~Q('match', _collections=restricted)
    return query
class LiteratureSearch(RecordsSearch, SearchMixin):
    """Elasticsearch-dsl specialized class to search in Literature database."""

    class Meta:
        # ES index / document type this search targets.
        index = 'records-hep'
        doc_types = 'hep'
        # Restrict every query according to the current user's collections.
        default_filter = DefaultFilter(inspire_filter)

    def default_fields(self):
        """What fields to use when no keyword is specified."""
        return ['_all']


class AuthorsSearch(RecordsSearch, SearchMixin):
    """Elasticsearch-dsl specialized class to search in Authors database."""

    class Meta:
        index = 'records-authors'
        doc_types = 'authors'

    def default_fields(self):
        """What fields to use when no keyword is specified."""
        return ['_all']


class DataSearch(RecordsSearch, SearchMixin):
    """Elasticsearch-dsl specialized class to search in Data database."""

    class Meta:
        index = 'records-data'
        doc_types = 'data'

    def default_fields(self):
        """What fields to use when no keyword is specified."""
        return ['_all']


class ConferencesSearch(RecordsSearch, SearchMixin):
    """Elasticsearch-dsl specialized class to search in Conferences database."""

    class Meta:
        index = 'records-conferences'
        doc_types = 'conferences'

    def default_fields(self):
        """What fields to use when no keyword is specified."""
        return ['_all']


class JobsSearch(RecordsSearch, SearchMixin):
    """Elasticsearch-dsl specialized class to search in Jobs database."""

    class Meta:
        index = 'records-jobs'
        doc_types = 'jobs'

    def default_fields(self):
        """What fields to use when no keyword is specified."""
        return ['_all']


class InstitutionsSearch(RecordsSearch, SearchMixin):
    """Elasticsearch-dsl specialized class to search in Institutions database."""

    class Meta:
        index = 'records-institutions'
        doc_types = 'institutions'

    def default_fields(self):
        """What fields to use when no keyword is specified."""
        return ['_all']


class ExperimentsSearch(RecordsSearch, SearchMixin):
    """Elasticsearch-dsl specialized class to search in Experiments database."""

    class Meta:
        index = 'records-experiments'
        doc_types = 'experiments'

    def default_fields(self):
        """What fields to use when no keyword is specified."""
        return ['_all']


class JournalsSearch(RecordsSearch, SearchMixin):
    """Elasticsearch-dsl specialized class to search in Journals database."""

    class Meta:
        index = 'records-journals'
        doc_types = 'journals'

    def default_fields(self):
        """What fields to use when no keyword is specified."""
        return ['_all']
|
endorphinl/horizon
|
refs/heads/master
|
openstack_dashboard/dashboards/project/networks/forms.py
|
41
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class UpdateNetwork(forms.SelfHandlingForm):
    """Self-handling form that renames a network and toggles its admin state."""
    name = forms.CharField(label=_("Name"), required=False)
    # Carried through the form for context; never shown to the user.
    tenant_id = forms.CharField(widget=forms.HiddenInput)
    # Read-only so the user can see which network is being edited.
    network_id = forms.CharField(label=_("ID"),
                                 widget=forms.TextInput(
                                     attrs={'readonly': 'readonly'}))
    admin_state = forms.ChoiceField(choices=[(True, _('UP')),
                                             (False, _('DOWN'))],
                                    required=False,
                                    label=_("Admin State"))
    failure_url = 'horizon:project:networks:index'

    def handle(self, request, data):
        """Apply the edits via the Neutron API.

        Returns the updated network on success; on failure reports the
        error and redirects to the network index page.
        """
        try:
            # ChoiceField values are posted as strings, hence the
            # comparison with the string 'True' rather than the bool.
            params = {'admin_state_up': (data['admin_state'] == 'True'),
                      'name': data['name']}
            network = api.neutron.network_update(request,
                                                 data['network_id'],
                                                 **params)
            msg = _('Network %s was successfully updated.') % data['name']
            LOG.debug(msg)
            messages.success(request, msg)
            return network
        except Exception:
            msg = _('Failed to update network %s') % data['name']
            LOG.info(msg)
            redirect = reverse(self.failure_url)
            exceptions.handle(request, msg, redirect=redirect)
|
brownnrl/moneyguru
|
refs/heads/master
|
core/gui/graph.py
|
2
|
# Copyright 2019 Virgil Dupras
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
from datetime import date
from math import ceil, floor, log10
from .geometry import Point, Rect
from ..model._ccore import inc_date
from ..model.date import RepeatType
from .chart import Chart
# A graph is a chart or drawing that shows the relationship between changing things.
# For the code, a Graph is a Chart with x and y axis.
class FontID:
    # Identifiers the platform view layer maps to concrete fonts.
    Title = 1
    AxisLabel = 2
class PenID:
    # Identifiers the platform view layer maps to concrete pens (line styles).
    Axis = 1
    AxisOverlay = 2
class GraphContext:
    """Coordinate transform parameters for drawing a graph.

    Holds the x/y scaling factors and the translation applied to every
    drawn point and rect.
    """

    def __init__(self, xfactor, yfactor, xoffset, yoffset):
        self.xfactor = xfactor
        self.yfactor = yfactor
        self.xoffset = xoffset
        self.yoffset = yoffset

    def trpoint(self, p):
        """Translate point *p* by the context's offset."""
        px, py = p
        return Point(px + self.xoffset, py + self.yoffset)

    def trpoints(self, pts):
        """Translate every point in *pts*."""
        return [self.trpoint(point) for point in pts]

    def trrect(self, r):
        """Translate rect *r* by the context's offset; its size is unchanged."""
        rx, ry, rw, rh = r
        return Rect(rx + self.xoffset, ry + self.yoffset, rw, rh)
class Graph(Chart):
PADDING = 16
TITLE_PADDING = 4
TICKMARKS_LENGTH = 5
XLABELS_PADDING = 3
YLABELS_PADDING = 8
YAXIS_EXTRA_SPACE_ON_NEGATIVE = 3
# --- Private
def _offset_xpos(self, xpos):
    # Map an ordinal-date x value into graph-local coordinates; the offset
    # is set by compute_x_axis() to avoid huge painter translations in Qt.
    return xpos - self._xoffset
# --- Public
def compute_x_axis(self, min_date=None, max_date=None):
    """Compute xmin/xmax, tickmarks and labels for the date (x) axis."""
    # By default, xmin and xmax are determined by date range's start and end, but you can
    # override that by specifying min_date and max_date.
    date_range = self.document.date_range
    if min_date is None:
        min_date = date_range.start
    if max_date is None:
        max_date = date_range.end
    # Our X data is based on ordinal date values, which can be quite big. On Qt, we get some
    # weird overflow problem when translating our painter by this large offset. Therefore, it's
    # better to offset this X value in the model.
    self._xoffset = min_date.toordinal()
    self.xmin = self._offset_xpos(min_date.toordinal())
    self.xmax = self._offset_xpos(max_date.toordinal() + 1)
    tick = date_range.start
    self.xtickmarks = [self._offset_xpos(tick.toordinal())]
    self.xlabels = []
    days = date_range.days
    # Year granularity for multi-year ranges, otherwise month granularity
    # with a short month name when many labels must fit.
    if days > 366:
        tick_format = '%Y'
        inc_type = RepeatType.Yearly
        tick = date(tick.year, 1, 1)
    else:
        inc_type = RepeatType.Monthly
        tick = date(tick.year, tick.month, 1)
        tick_format = '%b' if days > 150 else '%B'
    while tick < date_range.end:
        newtick = inc_date(tick, inc_type, 1)
        # 'tick' might be lower than xmin. ensure that it's not (for label pos)
        tick = tick if tick > date_range.start else date_range.start
        # Center each label between its tick and the next one.
        tick_pos = self._offset_xpos(tick.toordinal()) + (newtick - tick).days / 2
        self.xlabels.append(dict(text=tick.strftime(tick_format), pos=tick_pos))
        tick = newtick
        self.xtickmarks.append(self._offset_xpos(tick.toordinal()))
def compute_y_axis(self):
    """Compute ymin/ymax, tickmarks and labels for the value (y) axis."""
    ymin, ymax = self.yrange()
    if ymin >= ymax: # max must always be higher than min
        ymax = ymin + 1
    ydelta = float(ymax - ymin)
    # our minimum yfactor is 100 or else the graphs are too squeezed with low datapoints
    yfactor = max(100, 10 ** int(log10(ydelta)))

    # We add/remove 0.05 so that datapoints being exactly on yfactors get some wiggle room.
    def adjust(amount, by):
        # Nudge *amount* by *by*, but never across zero (clamp to 0 instead).
        if amount == 0:
            return 0
        result = amount + by
        return result if (amount > 0) == (result > 0) else 0

    ymin = int(yfactor * floor(adjust(ymin/yfactor, -0.05)))
    ymax = int(yfactor * ceil(adjust(ymax/yfactor, 0.05)))
    ydelta = ymax - ymin
    # Choose a tick step so there is a readable number of gridlines: a
    # fifth, a half, or the whole of the magnitude factor.
    ydelta_msd = ydelta // yfactor
    if ydelta_msd == 1:
        ystep = yfactor // 5
    elif ydelta_msd < 5:
        ystep = yfactor // 2
    else:
        ystep = yfactor
    self.ymin = ymin
    self.ymax = ymax
    self.ytickmarks = list(range(ymin, ymax + 1, ystep))
    self.ylabels = [dict(text=str(x), pos=x) for x in self.ytickmarks]
def compute(self):
    """Recompute axes and data in dependency order."""
    # The order in which we compute axis and data is important. We start with the xaxis because
    # this is what will give us our xoffset, which is then used in compute_data() to offset
    # our data points. Then, we compute data before the yaxis because we need the data to know
    # how big our yaxis is.
    self.compute_x_axis()
    self.compute_data()
    self.compute_y_axis()
def draw_graph(self, context):
    """Hook for subclasses: draw the graph's data before the axes."""
    pass

def draw_graph_after_axis(self, context):
    """Hook for subclasses: draw overlays after the axes have been drawn."""
    pass
    def draw_chart(self):
        """Draw the whole chart: data, axes, tickmarks, labels and title.

        Computes a GraphContext mapping data coordinates to view coordinates,
        then delegates data drawing to draw_graph()/draw_graph_after_axis().
        No-ops when compute() has not run yet.
        """
        if not hasattr(self, 'xmax'): # we haven't computed yet
            return
        view_rect = Rect(0, 0, *self.view_size)
        data_width = self.xmax - self.xmin
        data_height = self.ymax - self.ymin
        # Measure label/title text to know how much room the graph itself has.
        y_labels_width = max(self.view.text_size(label['text'], FontID.AxisLabel)[0] for label in self.ylabels)
        labels_height = self.view.text_size('', FontID.AxisLabel)[1]
        title = "{} ({})".format(self.title, self.currency)
        title_width, title_height = self.view.text_size(title, FontID.Title)
        titley = view_rect.h - self.TITLE_PADDING - title_height
        graphx = y_labels_width + self.PADDING
        graphy = labels_height + self.PADDING
        graph_width = view_rect.w - graphx - self.PADDING
        graph_height = view_rect.h - graphy - title_height - self.TITLE_PADDING
        graph_rect = Rect(graphx, graphy, graph_width, graph_height)
        # Scale factors from data units to pixels.
        xfactor = graph_width / data_width
        yfactor = graph_height / data_height
        graph_left = round(self.xmin * xfactor)
        graph_bottom = round(self.ymin * yfactor)
        if graph_bottom < 0:
            # We have a graph with negative values and we need some extra space to draw the lowest values
            graph_bottom -= self.YAXIS_EXTRA_SPACE_ON_NEGATIVE
        graph_top = round(self.ymax * yfactor)
        xoffset = graph_rect.left
        yoffset = -(graph_bottom - graph_rect.y)
        context = GraphContext(xfactor, yfactor, xoffset, yoffset)
        self.draw_graph(context)
        # X/Y axis
        p1 = context.trpoint(Point(0, graph_bottom))
        p2 = context.trpoint(Point(graph_width, graph_bottom))
        p3 = context.trpoint(Point(0, graph_top))
        self.view.draw_line(p1, p2, PenID.Axis)
        self.view.draw_line(p1, p3, PenID.Axis)
        if graph_bottom < 0:
            # Extra horizontal line at y=0 when negative values are shown.
            p1 = context.trpoint(Point(0, 0))
            p2 = context.trpoint(Point(graph_width, 0))
            self.view.draw_line(p1, p2, PenID.Axis)
        # X tickmarks
        tickBottomY = graph_bottom - self.TICKMARKS_LENGTH
        for tickPos in self.xtickmarks:
            tickX = tickPos * xfactor
            p1 = context.trpoint(Point(tickX, graph_bottom))
            p2 = context.trpoint(Point(tickX, tickBottomY))
            self.view.draw_line(p1, p2, PenID.Axis)
        # Y tickmarks
        tickLeftX = graph_left - self.TICKMARKS_LENGTH
        for tickPos in self.ytickmarks:
            tickY = tickPos * yfactor
            p1 = context.trpoint(Point(graph_left, tickY))
            p2 = context.trpoint(Point(tickLeftX, tickY))
            self.view.draw_line(p1, p2, PenID.Axis)
        # X Labels (centered under their position)
        labelY = graph_bottom - labels_height - self.XLABELS_PADDING
        for label in self.xlabels:
            labelText = label['text']
            labelWidth = self.view.text_size(labelText, FontID.AxisLabel)[0]
            labelX = (label['pos'] * xfactor) - (labelWidth / 2)
            text_rect = context.trrect(Rect(labelX, labelY, labelWidth, labels_height))
            self.view.draw_text(labelText, text_rect, FontID.AxisLabel)
        # Y Labels (right-aligned against the axis)
        for label in self.ylabels:
            labelText = label['text']
            labelWidth = self.view.text_size(labelText, FontID.AxisLabel)[0]
            labelX = graph_left - self.YLABELS_PADDING - labelWidth
            labelY = (label['pos'] * yfactor) - (labels_height / 2)
            text_rect = context.trrect(Rect(labelX, labelY, labelWidth, labels_height))
            self.view.draw_text(labelText, text_rect, FontID.AxisLabel)
        # Title
        self.view.draw_text(title, Rect(0, titley, view_rect.w, title_height), FontID.Title)
        self.draw_graph_after_axis(context)
def draw_axis_overlay_x(self, context):
graph_bottom = round(self.ymin * context.yfactor)
if graph_bottom < 0:
graph_bottom -= self.YAXIS_EXTRA_SPACE_ON_NEGATIVE
graph_top = round(self.ymax * context.yfactor)
for tickPos in self.xtickmarks[:-1]:
tickX = tickPos * context.xfactor
p1 = context.trpoint(Point(tickX, graph_bottom))
p2 = context.trpoint(Point(tickX, graph_top))
self.view.draw_line(p1, p2, PenID.AxisOverlay)
def draw_axis_overlay_y(self, context):
graph_left = round(self.xmin * context.xfactor)
graph_right = round(self.xmax * context.xfactor)
for tickPos in self.ytickmarks[:-1]:
tickY = tickPos * context.yfactor
p1 = context.trpoint(Point(graph_left, tickY))
p2 = context.trpoint(Point(graph_right, tickY))
self.view.draw_line(p1, p2, PenID.AxisOverlay)
|
nolouch/shadowsocks
|
refs/heads/master
|
setup.py
|
2
|
import codecs
from setuptools import setup


# PyPI renders the project page from the README contents.
with codecs.open('README.rst', encoding='utf-8') as readme:
    long_description = readme.read()


# Trove classifiers describing supported interpreters and the license.
CLASSIFIERS = [
    'License :: OSI Approved :: MIT License',
    'Programming Language :: Python :: 2',
    'Programming Language :: Python :: 2.6',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.3',
    'Programming Language :: Python :: 3.4',
    'Programming Language :: Python :: Implementation :: CPython',
    'Programming Language :: Python :: Implementation :: PyPy',
    'Topic :: Internet :: Proxy Servers',
]

setup(
    name="shadowsocks",
    version="2.6.5",
    license='MIT',
    description="A fast tunnel proxy that help you get through firewalls",
    author='clowwindy',
    author_email='clowwindy42@gmail.com',
    url='https://github.com/shadowsocks/shadowsocks',
    packages=['shadowsocks', 'shadowsocks.crypto'],
    package_data={
        'shadowsocks': ['README.rst', 'LICENSE']
    },
    install_requires=[],
    # Console entry points: client (sslocal) and server (ssserver).
    entry_points="""
    [console_scripts]
    sslocal = shadowsocks.local:main
    ssserver = shadowsocks.server:main
    """,
    classifiers=CLASSIFIERS,
    long_description=long_description,
)
|
khkaminska/scikit-learn
|
refs/heads/master
|
sklearn/utils/testing.py
|
71
|
"""Testing utilities."""
# Copyright (c) 2011, 2012
# Authors: Pietro Berkes,
# Andreas Muller
# Mathieu Blondel
# Olivier Grisel
# Arnaud Joly
# Denis Engemann
# License: BSD 3 clause
import os
import inspect
import pkgutil
import warnings
import sys
import re
import platform
import scipy as sp
import scipy.io
from functools import wraps
try:
# Python 2
from urllib2 import urlopen
from urllib2 import HTTPError
except ImportError:
# Python 3+
from urllib.request import urlopen
from urllib.error import HTTPError
import tempfile
import shutil
import os.path as op
import atexit
# WindowsError only exist on Windows
try:
WindowsError
except NameError:
WindowsError = None
import sklearn
from sklearn.base import BaseEstimator
from sklearn.externals import joblib
# Conveniently import all assertions in one place.
from nose.tools import assert_equal
from nose.tools import assert_not_equal
from nose.tools import assert_true
from nose.tools import assert_false
from nose.tools import assert_raises
from nose.tools import raises
from nose import SkipTest
from nose import with_setup
from numpy.testing import assert_almost_equal
from numpy.testing import assert_array_equal
from numpy.testing import assert_array_almost_equal
from numpy.testing import assert_array_less
import numpy as np
from sklearn.base import (ClassifierMixin, RegressorMixin, TransformerMixin,
ClusterMixin)
__all__ = ["assert_equal", "assert_not_equal", "assert_raises",
"assert_raises_regexp", "raises", "with_setup", "assert_true",
"assert_false", "assert_almost_equal", "assert_array_equal",
"assert_array_almost_equal", "assert_array_less",
"assert_less", "assert_less_equal",
"assert_greater", "assert_greater_equal"]
# Backport: nose only gained assert_in/assert_not_in in nose 1.0.0, so build
# equivalents on top of assert_true/assert_false for older versions.
try:
    from nose.tools import assert_in, assert_not_in
except ImportError:
    # Nose < 1.0.0

    def assert_in(x, container):
        # Membership assertion mirroring nose's message format.
        assert_true(x in container, msg="%r in %r" % (x, container))

    def assert_not_in(x, container):
        # Negative membership assertion.
        assert_false(x in container, msg="%r in %r" % (x, container))
# Backport of nose's assert_raises_regex for environments where it is missing.
try:
    from nose.tools import assert_raises_regex
except ImportError:
    # for Python 2

    def assert_raises_regex(expected_exception, expected_regexp,
                            callable_obj=None, *args, **kwargs):
        """Helper function to check for message patterns in exceptions"""
        not_raised = False
        try:
            callable_obj(*args, **kwargs)
            not_raised = True
        except expected_exception as e:
            error_message = str(e)
            if not re.compile(expected_regexp).search(error_message):
                raise AssertionError("Error message should match pattern "
                                     "%r. %r does not." %
                                     (expected_regexp, error_message))
        # Checked outside the except clause so the AssertionError above is
        # not swallowed by a nested handler.
        if not_raised:
            raise AssertionError("%s not raised by %s" %
                                 (expected_exception.__name__,
                                  callable_obj.__name__))

# assert_raises_regexp is deprecated in Python 3.4 in favor of
# assert_raises_regex, but keep backward compat in scikit-learn with
# the old name for now
assert_raises_regexp = assert_raises_regex
def _assert_less(a, b, msg=None):
message = "%r is not lower than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a < b, message
def _assert_greater(a, b, msg=None):
message = "%r is not greater than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a > b, message
def assert_less_equal(a, b, msg=None):
    """Fail unless ``a <= b``; appends *msg* to the failure message."""
    detail = "%r is not lower than or equal to %r" % (a, b)
    if msg is not None:
        detail = detail + ": " + msg
    assert a <= b, detail
def assert_greater_equal(a, b, msg=None):
    """Fail unless ``a >= b``; appends *msg* to the failure message."""
    detail = "%r is not greater than or equal to %r" % (a, b)
    if msg is not None:
        detail = detail + ": " + msg
    assert a >= b, detail
def assert_warns(warning_class, func, *args, **kw):
    """Test that a certain warning occurs.

    Parameters
    ----------
    warning_class : the warning class
        The class to test for, e.g. UserWarning.

    func : callable
        Callable object to trigger warnings.

    *args : the positional arguments to `func`.

    **kw : the keyword arguments to `func`

    Returns
    -------
    result : the return value of `func`
    """
    # very important to avoid uncontrolled state propagation
    clean_warning_registry()
    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        # Trigger a warning.
        result = func(*args, **kw)
        if hasattr(np, 'VisibleDeprecationWarning'):
            # Filter out numpy-specific warnings in numpy >= 1.9
            w = [e for e in w
                 if e.category is not np.VisibleDeprecationWarning]

        # Verify some things
        if not len(w) > 0:
            raise AssertionError("No warning raised when calling %s"
                                 % func.__name__)

        # BUGFIX: accept subclasses of warning_class, consistently with
        # assert_warns_message below; the previous exact `is` comparison
        # rejected legitimate subclass warnings. Also fixed the garbled
        # failure message ("%s( is %s)").
        found = any(issubclass(warning.category, warning_class)
                    for warning in w)
        if not found:
            raise AssertionError("%s did not give warning: %s (warnings "
                                 "recorded: %s)"
                                 % (func.__name__, warning_class, w))
    return result
def assert_warns_message(warning_class, message, func, *args, **kw):
    """Test that a certain warning occurs and with a certain message.

    Parameters
    ----------
    warning_class : the warning class
        The class to test for, e.g. UserWarning.

    message : str | callable
        The entire message or a substring to test for. If callable,
        it takes a string as argument and will trigger an assertion error
        if it returns `False`.

    func : callable
        Callable object to trigger warnings.

    *args : the positional arguments to `func`.

    **kw : the keyword arguments to `func`.

    Returns
    -------
    result : the return value of `func`
    """
    # very important to avoid uncontrolled state propagation
    clean_warning_registry()
    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        if hasattr(np, 'VisibleDeprecationWarning'):
            # Let's not catch the numpy internal DeprecationWarnings
            warnings.simplefilter('ignore', np.VisibleDeprecationWarning)
        # Trigger a warning.
        result = func(*args, **kw)
        # Verify some things
        if not len(w) > 0:
            raise AssertionError("No warning raised when calling %s"
                                 % func.__name__)

        # Subclasses of warning_class also count as matches.
        found = [issubclass(warning.category, warning_class) for warning in w]
        if not any(found):
            raise AssertionError("No warning raised for %s with class "
                                 "%s"
                                 % (func.__name__, warning_class))

        message_found = False
        # Checks the message of all warnings belong to warning_class
        for index in [i for i, x in enumerate(found) if x]:
            # substring will match, the entire message with typo won't
            msg = w[index].message  # For Python 3 compatibility
            msg = str(msg.args[0] if hasattr(msg, 'args') else msg)
            if callable(message):  # add support for certain tests
                check_in_message = message
            else:
                check_in_message = lambda msg: message in msg

            if check_in_message(msg):
                message_found = True
                break

        if not message_found:
            raise AssertionError("Did not receive the message you expected "
                                 "('%s') for <%s>, got: '%s'"
                                 % (message, func.__name__, msg))

    return result
# To remove when we support numpy 1.7
def assert_no_warnings(func, *args, **kw):
    """Call ``func(*args, **kw)`` and fail if any warning is caught.

    numpy's VisibleDeprecationWarning (numpy >= 1.9) is ignored.
    Returns the return value of `func`.
    """
    # XXX: once we may depend on python >= 2.6, this can be replaced by the
    # warnings module context manager.
    # very important to avoid uncontrolled state propagation
    clean_warning_registry()
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')

        result = func(*args, **kw)
        if hasattr(np, 'VisibleDeprecationWarning'):
            # Filter out numpy-specific warnings in numpy >= 1.9
            w = [e for e in w
                 if e.category is not np.VisibleDeprecationWarning]

        if len(w) > 0:
            raise AssertionError("Got warnings when calling %s: %s"
                                 % (func.__name__, w))
    return result
def ignore_warnings(obj=None):
    """ Context manager and decorator to ignore warnings

    Note. Using this (in both variants) will clear all warnings
    from all python modules loaded. In case you need to test
    cross-module-warning-logging this is not your tool of choice.

    Examples
    --------
    >>> with ignore_warnings():
    ...     warnings.warn('buhuhuhu')

    >>> def nasty_warn():
    ...    warnings.warn('buhuhuhu')
    ...    print(42)

    >>> ignore_warnings(nasty_warn)()
    42
    """
    # Used as a decorator around a callable -> wrap it; otherwise act as a
    # context manager.
    return _ignore_warnings(obj) if callable(obj) else _IgnoreWarnings()
def _ignore_warnings(fn):
    """Decorator to catch and hide warnings without visual nesting.

    The wrapped function runs inside a recording ``catch_warnings`` block
    with an 'always' filter, so its warnings are captured and discarded.
    """
    @wraps(fn)
    def wrapper(*args, **kwargs):
        # very important to avoid uncontrolled state propagation
        clean_warning_registry()
        with warnings.catch_warnings(record=True):
            warnings.simplefilter('always')
            return fn(*args, **kwargs)
        # BUGFIX: the original had an unreachable `w[:] = []` after the
        # return; catch_warnings(record=True) already discards the recorded
        # log when the context exits, so no explicit cleanup is needed.

    return wrapper
class _IgnoreWarnings(object):
    """Improved and simplified Python warnings context manager

    Copied from Python 2.7.5 and modified as required.
    """

    def __init__(self):
        """Create the manager.

        Warnings are always recorded into ``self.log`` while the context is
        active, and the log is cleared again on exit.
        """
        self._record = True
        self._module = sys.modules['warnings']
        self._entered = False
        self.log = []

    def __repr__(self):
        args = []
        if self._record:
            args.append("record=True")
        if self._module is not sys.modules['warnings']:
            args.append("module=%r" % self._module)
        name = type(self).__name__
        return "%s(%s)" % (name, ", ".join(args))

    def __enter__(self):
        clean_warning_registry()  # be safe and not propagate state + chaos
        warnings.simplefilter('always')
        if self._entered:
            raise RuntimeError("Cannot enter %r twice" % self)
        self._entered = True
        # Save the global filter list and showwarning hook so __exit__ can
        # restore them exactly.
        self._filters = self._module.filters
        self._module.filters = self._filters[:]
        self._showwarning = self._module.showwarning
        if self._record:
            self.log = []

            def showwarning(*args, **kwargs):
                # Collect instead of printing.
                self.log.append(warnings.WarningMessage(*args, **kwargs))
            self._module.showwarning = showwarning
            return self.log
        else:
            return None

    def __exit__(self, *exc_info):
        if not self._entered:
            raise RuntimeError("Cannot exit %r without entering first" % self)
        self._module.filters = self._filters
        self._module.showwarning = self._showwarning
        # Discard everything recorded while the context was active.
        self.log[:] = []
        clean_warning_registry()  # be safe and not propagate state + chaos
# Prefer nose's assert_less/assert_greater (nose >= 1.0); fall back to the
# local implementations above on older versions.
try:
    from nose.tools import assert_less
except ImportError:
    assert_less = _assert_less

try:
    from nose.tools import assert_greater
except ImportError:
    assert_greater = _assert_greater
def _assert_allclose(actual, desired, rtol=1e-7, atol=0,
err_msg='', verbose=True):
actual, desired = np.asanyarray(actual), np.asanyarray(desired)
if np.allclose(actual, desired, rtol=rtol, atol=atol):
return
msg = ('Array not equal to tolerance rtol=%g, atol=%g: '
'actual %s, desired %s') % (rtol, atol, actual, desired)
raise AssertionError(msg)
# numpy gained assert_allclose only in 1.5; use the minimal local fallback
# on older versions.
if hasattr(np.testing, 'assert_allclose'):
    assert_allclose = np.testing.assert_allclose
else:
    assert_allclose = _assert_allclose
def assert_raise_message(exceptions, message, function, *args, **kwargs):
    """Check that ``function(*args, **kwargs)`` raises and mentions *message*.

    Parameters
    ----------
    exceptions : exception or tuple of exception
        Exception type(s) expected to be raised.

    message : str
        Substring that must appear in the raised exception's message.

    function : callable
        Callable object expected to raise.

    *args : the positional arguments to `function`.

    **kwargs : the keyword arguments to `function`.
    """
    try:
        function(*args, **kwargs)
    except exceptions as e:
        observed = str(e)
        if message not in observed:
            raise AssertionError("Error message does not include the expected"
                                 " string: %r. Observed error message: %r" %
                                 (message, observed))
    else:
        # Nothing was raised: build a readable list of the expected names.
        if isinstance(exceptions, tuple):
            names = " or ".join(e.__name__ for e in exceptions)
        else:
            names = exceptions.__name__
        raise AssertionError("%s not raised by %s" %
                             (names, function.__name__))
def fake_mldata(columns_dict, dataname, matfile, ordering=None):
    """Create a fake mldata data set.

    Parameters
    ----------
    columns_dict : dict, keys=str, values=ndarray
        Contains data as columns_dict[column_name] = array of data.

    dataname : string
        Name of data set. (Kept for API compatibility; not used in the
        serialized output.)

    matfile : string or file object
        The file name string or the file-like object of the output file.

    ordering : list, default None
        List of column_names, determines the ordering in the data set.

    Notes
    -----
    This function transposes all arrays, while fetch_mldata only transposes
    'data', keep that into account in the tests.
    """
    datasets = dict(columns_dict)

    # transpose all variables
    for name in datasets:
        datasets[name] = datasets[name].T

    if ordering is None:
        ordering = sorted(list(datasets.keys()))
    # NOTE: setting up this array is tricky, because of the way Matlab
    # re-packages 1D arrays
    # np.empty instead of sp.empty: the scipy top-level NumPy aliases
    # (scipy.empty and friends) are deprecated and removed in recent scipy.
    datasets['mldata_descr_ordering'] = np.empty((1, len(ordering)),
                                                 dtype='object')
    for i, name in enumerate(ordering):
        datasets['mldata_descr_ordering'][0, i] = name

    scipy.io.savemat(matfile, datasets, oned_as='column')
class mock_mldata_urlopen(object):
    """Callable that mocks the urlopen function to fake requests to mldata.

    ``mock_datasets`` is a dictionary of {dataset_name: data_dict}, or
    {dataset_name: (data_dict, ordering)}. ``data_dict`` itself is a
    dictionary of {column_name: data_array}, and ``ordering`` is a list of
    column_names determining the ordering in the data set (see
    `fake_mldata` for details).

    Requesting a dataset whose name is in mock_datasets returns a fake
    matfile built in a BytesIO object; any other name raises HTTPError 404.
    """

    def __init__(self, mock_datasets):
        self.mock_datasets = mock_datasets

    def __call__(self, urlname):
        from io import BytesIO
        dataset_name = urlname.split('/')[-1]
        if dataset_name not in self.mock_datasets:
            raise HTTPError(urlname, 404, dataset_name + " is not available",
                            [], None)
        dataset = self.mock_datasets[dataset_name]
        ordering = None
        if isinstance(dataset, tuple):
            dataset, ordering = dataset
        matfile = BytesIO()
        fake_mldata(dataset, '_' + dataset_name, matfile, ordering)
        matfile.seek(0)
        return matfile
def install_mldata_mock(mock_datasets):
    # Replace sklearn.datasets.mldata's urlopen with a mock serving
    # `mock_datasets` (see mock_mldata_urlopen) instead of the network.
    # Lazy import to avoid mutually recursive imports
    from sklearn import datasets
    datasets.mldata.urlopen = mock_mldata_urlopen(mock_datasets)
def uninstall_mldata_mock():
    # Restore the genuine urlopen in sklearn.datasets.mldata.
    # Lazy import to avoid mutually recursive imports
    from sklearn import datasets
    datasets.mldata.urlopen = urlopen
# Estimator names excluded from all_estimators() by default, split by reason.
# Meta estimators need another estimator to be instantiated.
META_ESTIMATORS = ["OneVsOneClassifier",
                   "OutputCodeClassifier", "OneVsRestClassifier", "RFE",
                   "RFECV", "BaseEnsemble"]
# estimators that there is no way to default-construct sensibly
OTHER = ["Pipeline", "FeatureUnion", "GridSearchCV",
         "RandomizedSearchCV"]
# some strange ones
DONT_TEST = ['SparseCoder', 'EllipticEnvelope', 'DictVectorizer',
             'LabelBinarizer', 'LabelEncoder',
             'MultiLabelBinarizer', 'TfidfTransformer',
             'TfidfVectorizer', 'IsotonicRegression',
             'OneHotEncoder', 'RandomTreesEmbedding',
             'FeatureHasher', 'DummyClassifier', 'DummyRegressor',
             'TruncatedSVD', 'PolynomialFeatures',
             'GaussianRandomProjectionHash', 'HashingVectorizer',
             'CheckingClassifier', 'PatchExtractor', 'CountVectorizer',
             # GradientBoosting base estimators, maybe should
             # exclude them in another way
             'ZeroEstimator', 'ScaledLogOddsEstimator',
             'QuantileEstimator', 'MeanEstimator',
             'LogOddsEstimator', 'PriorProbabilityEstimator',
             '_SigmoidCalibration', 'VotingClassifier']
def all_estimators(include_meta_estimators=False,
                   include_other=False, type_filter=None,
                   include_dont_test=False):
    """Get a list of all estimators from sklearn.

    This function crawls the module and gets all classes that inherit
    from BaseEstimator. Classes that are defined in test-modules are not
    included.
    By default meta_estimators such as GridSearchCV are also not included.

    Parameters
    ----------
    include_meta_estimators : boolean, default=False
        Whether to include meta-estimators that can be constructed using
        an estimator as their first argument. These are currently
        BaseEnsemble, OneVsOneClassifier, OutputCodeClassifier,
        OneVsRestClassifier, RFE, RFECV.

    include_other : boolean, default=False
        Whether to include meta-estimators that are somehow special and can
        not be default-constructed sensibly. These are currently
        Pipeline, FeatureUnion and GridSearchCV

    include_dont_test : boolean, default=False
        Whether to include "special" label estimator or test processors.

    type_filter : string, list of string, or None, default=None
        Which kind of estimators should be returned. If None, no filter is
        applied and all estimators are returned.  Possible values are
        'classifier', 'regressor', 'cluster' and 'transformer' to get
        estimators only of these specific types, or a list of these to
        get the estimators that fit at least one of the types.

    Returns
    -------
    estimators : list of tuples
        List of (name, class), where ``name`` is the class name as string
        and ``class`` is the actual type of the class.
    """
    def is_abstract(c):
        # A class still carrying unimplemented abstractmethods is abstract.
        if not(hasattr(c, '__abstractmethods__')):
            return False
        if not len(c.__abstractmethods__):
            return False
        return True

    all_classes = []
    # get parent folder
    path = sklearn.__path__
    # Import every sklearn submodule (ignoring import errors) and collect
    # all classes it defines or re-exports.
    for importer, modname, ispkg in pkgutil.walk_packages(
            path=path, prefix='sklearn.', onerror=lambda x: None):
        if ".tests." in modname:
            continue
        module = __import__(modname, fromlist="dummy")
        classes = inspect.getmembers(module, inspect.isclass)
        all_classes.extend(classes)

    all_classes = set(all_classes)

    estimators = [c for c in all_classes
                  if (issubclass(c[1], BaseEstimator)
                      and c[0] != 'BaseEstimator')]
    # get rid of abstract base classes
    estimators = [c for c in estimators if not is_abstract(c[1])]

    if not include_dont_test:
        estimators = [c for c in estimators if not c[0] in DONT_TEST]

    if not include_other:
        estimators = [c for c in estimators if not c[0] in OTHER]
    # possibly get rid of meta estimators
    if not include_meta_estimators:
        estimators = [c for c in estimators if not c[0] in META_ESTIMATORS]
    if type_filter is not None:
        if not isinstance(type_filter, list):
            type_filter = [type_filter]
        else:
            type_filter = list(type_filter)  # copy
        filtered_estimators = []
        filters = {'classifier': ClassifierMixin,
                   'regressor': RegressorMixin,
                   'transformer': TransformerMixin,
                   'cluster': ClusterMixin}
        for name, mixin in filters.items():
            if name in type_filter:
                # Remove recognized names so leftovers can be reported below.
                type_filter.remove(name)
                filtered_estimators.extend([est for est in estimators
                                            if issubclass(est[1], mixin)])
        estimators = filtered_estimators
        if type_filter:
            raise ValueError("Parameter type_filter must be 'classifier', "
                             "'regressor', 'transformer', 'cluster' or None, got"
                             " %s." % repr(type_filter))

    # drop duplicates, sort for reproducibility
    return sorted(set(estimators))
def set_random_state(estimator, random_state=0):
    """Set ``random_state`` on *estimator* when it exposes that parameter."""
    if "random_state" in estimator.get_params():
        estimator.set_params(random_state=random_state)
def if_matplotlib(func):
    """Test decorator that skips test if matplotlib not installed. """
    @wraps(func)
    def run_test(*args, **kwargs):
        try:
            import matplotlib
            # NOTE(review): the `warn` kwarg was removed from matplotlib.use
            # in matplotlib 3.x -- confirm the supported matplotlib range.
            matplotlib.use('Agg', warn=False)
            # this fails if no $DISPLAY specified
            import matplotlib.pyplot as plt
            plt.figure()
        except ImportError:
            raise SkipTest('Matplotlib not available.')
        else:
            return func(*args, **kwargs)
    return run_test
def if_not_mac_os(versions=('10.7', '10.8', '10.9'),
                  message='Multi-process bug in Mac OS X >= 10.7 '
                          '(see issue #636)'):
    """Test decorator that skips test if OS is Mac OS X and its
    major version is one of ``versions``.

    Deprecated; prefer if_safe_multiprocessing_with_blas.
    """
    warnings.warn("if_not_mac_os is deprecated in 0.17 and will be removed"
                  " in 0.19: use the safer and more generic"
                  " if_safe_multiprocessing_with_blas instead",
                  DeprecationWarning)
    mac_version, _, _ = platform.mac_ver()
    skip = '.'.join(mac_version.split('.')[:2]) in versions

    def decorator(func):
        if skip:
            # On affected versions the decorated test is replaced by a stub
            # raising SkipTest; `func` is intentionally shadowed here.
            @wraps(func)
            def func(*args, **kwargs):
                raise SkipTest(message)
        return func
    return decorator
def if_safe_multiprocessing_with_blas(func):
    """Decorator for tests involving both BLAS calls and multiprocessing.

    Under Python < 3.4 and POSIX (e.g. Linux or OSX), using multiprocessing
    in conjunction with some implementation of BLAS (or other libraries that
    manage an internal posix thread pool) can cause a crash or a freeze of
    the Python process.

    Under Python 3.4 and later, joblib uses the forkserver mode of
    multiprocessing, which does not trigger this problem.

    In practice all known packaged distributions (from Linux distros or
    Anaconda) of BLAS under Linux seem to be safe, so this problem seems to
    only impact OSX users.

    This wrapper makes it possible to skip tests that can possibly cause
    this crash under OSX.
    """
    @wraps(func)
    def run_test(*args, **kwargs):
        unsafe = (sys.platform == 'darwin'
                  and sys.version_info[:2] < (3, 4))
        if unsafe:
            raise SkipTest(
                "Possible multi-process bug with some BLAS under Python < 3.4")
        return func(*args, **kwargs)
    return run_test
def clean_warning_registry():
    """Reset warning filters and clear every module's __warningregistry__."""
    warnings.resetwarnings()
    registry_attr = "__warningregistry__"
    for module_name, module in list(sys.modules.items()):
        # six.moves' lazy modules misbehave on attribute access; skip them.
        if 'six.moves' in module_name:
            continue
        registry = getattr(module, registry_attr, None)
        if registry is not None:
            registry.clear()
def check_skip_network():
    """Raise SkipTest when network tests are disabled via the environment."""
    skip_flag = os.environ.get('SKLEARN_SKIP_NETWORK_TESTS', 0)
    if int(skip_flag):
        raise SkipTest("Text tutorial requires large dataset download")
def check_skip_travis():
    """Raise SkipTest when the TRAVIS env var indicates a Travis CI run."""
    on_travis = os.environ.get('TRAVIS') == "true"
    if on_travis:
        raise SkipTest("This test needs to be skipped on Travis")
def _delete_folder(folder_path, warn=False):
"""Utility function to cleanup a temporary folder if still existing.
Copy from joblib.pool (for independance)"""
try:
if os.path.exists(folder_path):
# This can fail under windows,
# but will succeed when called by atexit
shutil.rmtree(folder_path)
except WindowsError:
if warn:
warnings.warn("Could not delete temporary folder %s" % folder_path)
class TempMemmap(object):
    """Context manager dumping ``data`` into a temporary folder via joblib
    and yielding it back loaded with the given ``mmap_mode`` ('r' default).
    The folder is removed on exit (with an atexit fallback)."""

    def __init__(self, data, mmap_mode='r'):
        # mmap_mode is forwarded to joblib.load (e.g. 'r', 'r+').
        self.temp_folder = tempfile.mkdtemp(prefix='sklearn_testing_')
        self.mmap_mode = mmap_mode
        self.data = data

    def __enter__(self):
        fpath = op.join(self.temp_folder, 'data.pkl')
        joblib.dump(self.data, fpath)
        data_read_only = joblib.load(fpath, mmap_mode=self.mmap_mode)
        # atexit fallback: deletion in __exit__ can fail while the memmap
        # still holds the file open, so retry at interpreter shutdown.
        atexit.register(lambda: _delete_folder(self.temp_folder, warn=True))
        return data_read_only

    def __exit__(self, exc_type, exc_val, exc_tb):
        _delete_folder(self.temp_folder)
# Nose setup-function decorators running the corresponding skip check
# before each decorated test.
with_network = with_setup(check_skip_network)
with_travis = with_setup(check_skip_travis)
|
grgbr/linux-iio
|
refs/heads/iio-rpi-4.7
|
tools/perf/scripts/python/call-graph-from-postgresql.py
|
758
|
#!/usr/bin/python2
# call-graph-from-postgresql.py: create call-graph from postgresql database
# Copyright (c) 2014, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
# To use this script you will need to have exported data using the
# export-to-postgresql.py script. Refer to that script for details.
#
# Following on from the example in the export-to-postgresql.py script, a
# call-graph can be displayed for the pt_example database like this:
#
# python tools/perf/scripts/python/call-graph-from-postgresql.py pt_example
#
# Note this script supports connecting to remote databases by setting hostname,
# port, username, password, and dbname e.g.
#
# python tools/perf/scripts/python/call-graph-from-postgresql.py "hostname=myhost username=myuser password=mypassword dbname=pt_example"
#
# The result is a GUI window with a tree representing a context-sensitive
# call-graph. Expanding a couple of levels of the tree and adjusting column
# widths to suit will display something like:
#
# Call Graph: pt_example
# Call Path Object Count Time(ns) Time(%) Branch Count Branch Count(%)
# v- ls
# v- 2638:2638
# v- _start ld-2.19.so 1 10074071 100.0 211135 100.0
# |- unknown unknown 1 13198 0.1 1 0.0
# >- _dl_start ld-2.19.so 1 1400980 13.9 19637 9.3
# >- _d_linit_internal ld-2.19.so 1 448152 4.4 11094 5.3
# v-__libc_start_main@plt ls 1 8211741 81.5 180397 85.4
# >- _dl_fixup ld-2.19.so 1 7607 0.1 108 0.1
# >- __cxa_atexit libc-2.19.so 1 11737 0.1 10 0.0
# >- __libc_csu_init ls 1 10354 0.1 10 0.0
# |- _setjmp libc-2.19.so 1 0 0.0 4 0.0
# v- main ls 1 8182043 99.6 180254 99.9
#
# Points to note:
# The top level is a command name (comm)
# The next level is a thread (pid:tid)
# Subsequent levels are functions
# 'Count' is the number of calls
# 'Time' is the elapsed time until the function returns
# Percentages are relative to the level above
# 'Branch Count' is the total number of branches for that function and all
# functions that it calls
import sys
from PySide.QtCore import *
from PySide.QtGui import *
from PySide.QtSql import *
from decimal import *
class TreeItem():
    def __init__(self, db, row, parent_item):
        # db: open Qt SQL database handle; row: this item's index within its
        # parent; parent_item: parent TreeItem, or None for the root.
        self.db = db
        self.row = row
        self.parent_item = parent_item
        # Children are loaded lazily; this flag records whether the query ran.
        self.query_done = False;
        self.child_count = 0
        self.child_items = []
        # One string per display column (see columnHeader()).
        self.data = ["", "", "", "", "", "", ""]
        self.comm_id = 0
        self.thread_id = 0
        self.call_path_id = 1
        self.branch_count = 0
        self.time = 0
        if not parent_item:
            self.setUpRoot()
    def setUpRoot(self):
        # Root level: create one child per command (comm) in the database.
        self.query_done = True
        query = QSqlQuery(self.db)
        ret = query.exec_('SELECT id, comm FROM comms')
        if not ret:
            raise Exception("Query failed: " + query.lastError().text())
        while query.next():
            # Skip the null/zero comm id row.
            if not query.value(0):
                continue
            child_item = TreeItem(self.db, self.child_count, self)
            self.child_items.append(child_item)
            self.child_count += 1
            child_item.setUpLevel1(query.value(0), query.value(1))
    def setUpLevel1(self, comm_id, comm):
        # Command level: label is the comm string; one child per thread that
        # ran this command.
        self.query_done = True;
        self.comm_id = comm_id
        self.data[0] = comm
        self.child_items = []
        self.child_count = 0
        query = QSqlQuery(self.db)
        ret = query.exec_('SELECT thread_id, ( SELECT pid FROM threads WHERE id = thread_id ), ( SELECT tid FROM threads WHERE id = thread_id ) FROM comm_threads WHERE comm_id = ' + str(comm_id))
        if not ret:
            raise Exception("Query failed: " + query.lastError().text())
        while query.next():
            child_item = TreeItem(self.db, self.child_count, self)
            self.child_items.append(child_item)
            self.child_count += 1
            child_item.setUpLevel2(comm_id, query.value(0), query.value(1), query.value(2))
    def setUpLevel2(self, comm_id, thread_id, pid, tid):
        # Thread level: label is "pid:tid"; children are filled lazily by
        # selectCalls() on first access.
        self.comm_id = comm_id
        self.thread_id = thread_id
        self.data[0] = str(pid) + ":" + str(tid)
    def getChildItem(self, row):
        # Child at position `row`; no bounds check (callers use childCount()).
        return self.child_items[row]
    def getParentItem(self):
        # Parent TreeItem (None for the root item).
        return self.parent_item
    def getRow(self):
        # This item's index within its parent's children.
        return self.row
def timePercent(self, b):
if not self.time:
return "0.0"
x = (b * Decimal(100)) / self.time
return str(x.quantize(Decimal('.1'), rounding=ROUND_HALF_UP))
def branchPercent(self, b):
if not self.branch_count:
return "0.0"
x = (b * Decimal(100)) / self.branch_count
return str(x.quantize(Decimal('.1'), rounding=ROUND_HALF_UP))
    def addChild(self, call_path_id, name, dso, count, time, branch_count):
        # Append a function-level child populated with aggregated stats.
        child_item = TreeItem(self.db, self.child_count, self)
        child_item.comm_id = self.comm_id
        child_item.thread_id = self.thread_id
        child_item.call_path_id = call_path_id
        child_item.branch_count = branch_count
        child_item.time = time
        child_item.data[0] = name
        if dso == "[kernel.kallsyms]":
            dso = "[kernel]"
        child_item.data[1] = dso
        child_item.data[2] = str(count)
        child_item.data[3] = str(time)
        # Percentages are relative to this (parent) item's totals.
        child_item.data[4] = self.timePercent(time)
        child_item.data[5] = str(branch_count)
        child_item.data[6] = self.branchPercent(branch_count)
        self.child_items.append(child_item)
        self.child_count += 1
def selectCalls(self):
    """Query the calls table and build aggregated child rows.

    Rows are ordered by call_path_id, so consecutive rows with the same
    call path are folded into a single child whose count, time and branch
    count are summed.  Afterwards the totals are propagated to this item
    (the top level has no time/branch count of its own) and the children's
    percentage columns are recomputed.

    Raises Exception when the SQL query fails.
    """
    self.query_done = True
    query = QSqlQuery(self.db)
    # BUG FIX: the final fragment needs a leading space -- without it the
    # statement read "...thread_id = <id>ORDER BY call_path_id", which is
    # invalid SQL.
    ret = query.exec_('SELECT id, call_path_id, branch_count, call_time, return_time, '
        '( SELECT name FROM symbols WHERE id = ( SELECT symbol_id FROM call_paths WHERE id = call_path_id ) ), '
        '( SELECT short_name FROM dsos WHERE id = ( SELECT dso_id FROM symbols WHERE id = ( SELECT symbol_id FROM call_paths WHERE id = call_path_id ) ) ), '
        '( SELECT ip FROM call_paths where id = call_path_id ) '
        'FROM calls WHERE parent_call_path_id = ' + str(self.call_path_id) + ' AND comm_id = ' + str(self.comm_id) + ' AND thread_id = ' + str(self.thread_id) +
        ' ORDER BY call_path_id')
    if not ret:
        raise Exception("Query failed: " + query.lastError().text())
    last_call_path_id = 0
    name = ""
    dso = ""
    count = 0
    branch_count = 0
    total_branch_count = 0
    time = 0
    total_time = 0
    while query.next():
        if query.value(1) == last_call_path_id:
            # Same call path as the previous row: accumulate.
            count += 1
            branch_count += query.value(2)
            time += query.value(4) - query.value(3)
        else:
            # New call path: flush the accumulated group first.
            if count:
                self.addChild(last_call_path_id, name, dso, count, time, branch_count)
            last_call_path_id = query.value(1)
            name = query.value(5)
            dso = query.value(6)
            count = 1
            total_branch_count += branch_count
            total_time += time
            branch_count = query.value(2)
            time = query.value(4) - query.value(3)
    # Flush the final group.
    if count:
        self.addChild(last_call_path_id, name, dso, count, time, branch_count)
        total_branch_count += branch_count
        total_time += time
    # Top level does not have time or branch count, so fix that here
    if total_branch_count > self.branch_count:
        self.branch_count = total_branch_count
        if self.branch_count:
            for child_item in self.child_items:
                child_item.data[6] = self.branchPercent(child_item.branch_count)
    if total_time > self.time:
        self.time = total_time
        if self.time:
            for child_item in self.child_items:
                child_item.data[4] = self.timePercent(child_item.time)
def childCount(self):
    """Return the number of children, lazily running the calls query first."""
    if not self.query_done:
        self.selectCalls()
    return self.child_count
def columnCount(self):
    """The tree always exposes seven display columns."""
    return 7
def columnHeader(self, column):
    """Return the header label for *column* (0..6)."""
    return ("Call Path", "Object", "Count ", "Time (ns) ", "Time (%) ",
            "Branch Count ", "Branch Count (%) ")[column]
def getData(self, column):
    """Return the display value stored for *column*."""
    values = self.data
    return values[column]
class TreeModel(QAbstractItemModel):
    """Qt item model exposing the TreeItem hierarchy to a tree view."""

    def __init__(self, db, parent=None):
        super(TreeModel, self).__init__(parent)
        self.db = db
        # Invisible root item; its children are the top-level (comm) rows.
        self.root = TreeItem(db, 0, None)

    def columnCount(self, parent):
        # Column count is fixed for every row; delegate to the root item.
        return self.root.columnCount()

    def rowCount(self, parent):
        # An invalid parent index means the root of the tree.
        if parent.isValid():
            parent_item = parent.internalPointer()
        else:
            parent_item = self.root
        return parent_item.childCount()

    def headerData(self, section, orientation, role):
        if role == Qt.TextAlignmentRole:
            # Columns 2..6 hold numbers and are right-aligned.
            if section > 1:
                return Qt.AlignRight
        if role != Qt.DisplayRole:
            return None
        if orientation != Qt.Horizontal:
            return None
        return self.root.columnHeader(section)

    def parent(self, child):
        child_item = child.internalPointer()
        if child_item is self.root:
            return QModelIndex()
        parent_item = child_item.getParentItem()
        # NOTE(review): for top-level items this creates an index wrapping
        # the root item instead of returning QModelIndex(); Qt convention is
        # an invalid parent index for top-level rows -- confirm the view
        # renders correctly with this behaviour.
        return self.createIndex(parent_item.getRow(), 0, parent_item)

    def index(self, row, column, parent):
        if parent.isValid():
            parent_item = parent.internalPointer()
        else:
            parent_item = self.root
        child_item = parent_item.getChildItem(row)
        return self.createIndex(row, column, child_item)

    def data(self, index, role):
        if role == Qt.TextAlignmentRole:
            # Numeric columns (2..6) are right-aligned.
            if index.column() > 1:
                return Qt.AlignRight
        if role != Qt.DisplayRole:
            return None
        index_item = index.internalPointer()
        return index_item.getData(index.column())
class MainWindow(QMainWindow):
    """Top-level window presenting the call-graph tree for one database."""

    def __init__(self, db, dbname, parent=None):
        super(MainWindow, self).__init__(parent)
        self.setObjectName("MainWindow")
        self.setWindowTitle("Call Graph: " + dbname)
        self.move(100, 100)
        self.resize(800, 600)
        # Use a stock information icon for the window.
        self.setWindowIcon(self.style().standardIcon(QStyle.SP_MessageBoxInformation))
        self.model = TreeModel(db)
        self.view = QTreeView()
        self.view.setModel(self.model)
        self.setCentralWidget(self.view)
if __name__ == '__main__':
    # Expect the database connection description as the single argument.
    if (len(sys.argv) < 2):
        print >> sys.stderr, "Usage is: call-graph-from-postgresql.py <database name>"
        raise Exception("Too few arguments")
    dbname = sys.argv[1]
    db = QSqlDatabase.addDatabase('QPSQL')
    # The argument may be a space-separated list of key=value connection
    # options (hostname, port, username, password, dbname); a bare word is
    # taken as the database name itself.
    opts = dbname.split()
    for opt in opts:
        if '=' in opt:
            opt = opt.split('=')
            if opt[0] == 'hostname':
                db.setHostName(opt[1])
            elif opt[0] == 'port':
                db.setPort(int(opt[1]))
            elif opt[0] == 'username':
                db.setUserName(opt[1])
            elif opt[0] == 'password':
                db.setPassword(opt[1])
            elif opt[0] == 'dbname':
                dbname = opt[1]
        else:
            dbname = opt
    db.setDatabaseName(dbname)
    if not db.open():
        raise Exception("Failed to open database " + dbname + " error: " + db.lastError().text())
    # Standard Qt event loop: build the window, run, then clean up.
    app = QApplication(sys.argv)
    window = MainWindow(db, dbname)
    window.show()
    err = app.exec_()
    db.close()
    sys.exit(err)
|
RodolfoQuispeC/CPproblemsSolved
|
refs/heads/master
|
pizza.py
|
1
|
def gcd(n, m):
    """Greatest common divisor of n and m via the Euclidean algorithm."""
    while m:
        n, m = m, n % m
    return n
# Read two integers (Python 2 input() evaluates the typed expression).
n=input()
m=input()
GCD= gcd(n, m)
# Prints ((n*m)/GCD)/n, which simplifies to m/GCD (lcm(n, m) divided by n).
# NOTE(review): presumably the contest problem asks for lcm/n; confirm
# against the problem statement, since (n*m)/GCD alone would be the LCM.
print ((n*m)/GCD)/n
|
jcpowermac/ansible
|
refs/heads/devel
|
test/integration/targets/aws_lambda/files/mini_lambda.py
|
139
|
from __future__ import print_function
import json
import os
def handler(event, context):
    """Lambda entry point: return a greeting for the name in *event*.

    A missing "name" key raises KeyError, which surfaces as an
    Amazon-chosen Lambda failure -- that can be acceptable here.
    """
    # Echo the incoming event into the CloudWatch log for debugging.
    print("got event:\n" + json.dumps(event))
    name = event["name"]
    # EXTRA_MESSAGE lets the greeting be customised through the lambda's
    # environment without uploading new code.
    extra = os.environ.get("EXTRA_MESSAGE")
    if extra:
        greeting = "hello {0}. {1}".format(name, extra)
    else:
        greeting = "hello " + name
    return {"message": greeting}
def main():
    """Manual test driver; never called when deployed as a Lambda."""
    print(handler({"name": "james"}, None))


if __name__ == '__main__':
    main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.