repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
sbidoul/pip | refs/heads/main | tests/unit/test_wheel.py | 4 | """Tests for wheel binary packages and .dist-info."""
import csv
import logging
import os
import textwrap
from email import message_from_string
from unittest.mock import patch
import pytest
from pip._vendor.packaging.requirements import Requirement
from pip._internal.exceptions import InstallationError
from pip._internal.locations import get_scheme
from pip._internal.models.direct_url import (
DIRECT_URL_METADATA_NAME,
ArchiveInfo,
DirectUrl,
)
from pip._internal.models.scheme import Scheme
from pip._internal.operations.build.wheel_legacy import get_legacy_build_wheel_path
from pip._internal.operations.install import wheel
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.misc import hash_file
from pip._internal.utils.unpacking import unpack_file
from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
from tests.lib import DATA_DIR, assert_paths_equal
from tests.lib.wheel import make_wheel
def call_get_legacy_build_wheel_path(caplog, names):
    """Invoke get_legacy_build_wheel_path() with canned test arguments.

    The caplog fixture is accepted (and unused here) so callers can pass
    it through uniformly; log inspection happens in the individual tests.
    """
    return get_legacy_build_wheel_path(
        names=names,
        temp_dir='/tmp/abcd',
        name='pendulum',
        command_args=['arg1', 'arg2'],
        command_output='output line 1\noutput line 2\n',
    )
def test_get_legacy_build_wheel_path(caplog):
    """A single built file resolves to its full path, with no logging."""
    result = call_get_legacy_build_wheel_path(caplog, names=['name'])
    assert_paths_equal(result, '/tmp/abcd/name')
    assert not caplog.records
def test_get_legacy_build_wheel_path__no_names(caplog):
    """When no files were created, None is returned and a warning logged."""
    caplog.set_level(logging.INFO)
    result = call_get_legacy_build_wheel_path(caplog, names=[])
    assert result is None
    assert len(caplog.records) == 1
    warning = caplog.records[0]
    assert warning.levelname == 'WARNING'
    expected_lines = [
        "Legacy build of wheel for 'pendulum' created no files.",
        "Command arguments: arg1 arg2",
        'Command output: [use --verbose to show]',
    ]
    assert warning.message.splitlines() == expected_lines
def test_get_legacy_build_wheel_path__multiple_names(caplog):
    """With several files created, the first name in sorted order wins
    and a warning lists all candidates."""
    caplog.set_level(logging.INFO)
    # Deliberately pass the names in non-sorted order.
    result = call_get_legacy_build_wheel_path(
        caplog, names=['name2', 'name1'],
    )
    assert_paths_equal(result, '/tmp/abcd/name1')
    assert len(caplog.records) == 1
    warning = caplog.records[0]
    assert warning.levelname == 'WARNING'
    expected_lines = [
        "Legacy build of wheel for 'pendulum' created more than one file.",
        "Filenames (choosing first): ['name1', 'name2']",
        "Command arguments: arg1 arg2",
        'Command output: [use --verbose to show]',
    ]
    assert warning.message.splitlines() == expected_lines
@pytest.mark.parametrize(
    "console_scripts",
    [
        "pip = pip._internal.main:pip",
        "pip:pip = pip._internal.main:pip",
        "進入點 = 套件.模組:函式",
    ],
)
def test_get_entrypoints(console_scripts):
    """Console scripts are parsed out of entry_points.txt; entries in
    other sections are ignored."""
    entry_points_text = """
        [console_scripts]
        {}
        [section]
        common:one = module:func
        common:two = module:other_func
    """.format(console_scripts)
    wheel_zip = make_wheel(
        "simple",
        "0.1.0",
        extra_metadata_files={
            "entry_points.txt": entry_points_text,
        },
    ).as_zipfile()
    distribution = pkg_resources_distribution_for_wheel(
        wheel_zip, "simple", "<in memory>"
    )
    # The parametrized script spec should round-trip into the console map.
    name, target = console_scripts.split(' = ')
    assert wheel.get_entrypoints(distribution) == ({name: target}, {})
def test_get_entrypoints_no_entrypoints():
    """A wheel without entry_points.txt yields empty script maps."""
    wheel_zip = make_wheel("simple", "0.1.0").as_zipfile()
    distribution = pkg_resources_distribution_for_wheel(
        wheel_zip, "simple", "<in memory>"
    )
    assert wheel.get_entrypoints(distribution) == ({}, {})
@pytest.mark.parametrize("outrows, expected", [
    # Rows are sorted into deterministic order.
    ([
        ('', '', 'a'),
        ('', '', ''),
    ], [
        ('', '', ''),
        ('', '', 'a'),
    ]),
    ([
        # Include an int to check avoiding the following error:
        # > TypeError: '<' not supported between instances of 'str' and 'int'
        ('', '', 1),
        ('', '', ''),
    ], [
        ('', '', ''),
        ('', '', '1'),
    ]),
    ([
        # Test the normalization correctly encode everything for csv.writer().
        ('😉', '', 1),
        ('', '', ''),
    ], [
        ('', '', ''),
        ('😉', '', '1'),
    ]),
])
def test_normalized_outrows(outrows, expected):
    """_normalized_outrows() sorts RECORD rows and stringifies each field."""
    actual = wheel._normalized_outrows(outrows)
    assert actual == expected
def call_get_csv_rows_for_installed(tmpdir, text):
    """Write ``text`` to a temp RECORD-style file, parse it as CSV, and
    run get_csv_rows_for_installed() over the rows."""
    record_path = tmpdir.joinpath('temp.txt')
    record_path.write_text(text)
    with open(record_path, **wheel.csv_io_kwargs('r')) as f:
        record_rows = list(csv.reader(f))
    # Test that an installed file appearing in RECORD has its filename
    # updated in the new RECORD file.
    return wheel.get_csv_rows_for_installed(
        record_rows,
        installed={'a': 'z'},
        changed=set(),
        generated=[],
        lib_dir='/lib/dir',
    )
def test_get_csv_rows_for_installed(tmpdir, caplog):
    """Well-formed RECORD rows pass through, with installed paths renamed."""
    record_text = textwrap.dedent("""\
        a,b,c
        d,e,f
    """)
    outrows = call_get_csv_rows_for_installed(tmpdir, record_text)
    assert outrows == [
        ('z', 'b', 'c'),
        ('d', 'e', 'f'),
    ]
    # No malformed lines, so nothing should have been logged.
    assert len(caplog.records) == 0
def test_get_csv_rows_for_installed__long_lines(tmpdir, caplog):
    """RECORD rows with more than three elements are truncated, and a
    warning is logged for each offending line."""
    record_text = textwrap.dedent("""\
        a,b,c,d
        e,f,g
        h,i,j,k
    """)
    outrows = call_get_csv_rows_for_installed(tmpdir, record_text)
    assert outrows == [
        ('z', 'b', 'c'),
        ('e', 'f', 'g'),
        ('h', 'i', 'j'),
    ]
    messages = [rec.message for rec in caplog.records]
    assert messages == [
        "RECORD line has more than three elements: ['a', 'b', 'c', 'd']",
        "RECORD line has more than three elements: ['h', 'i', 'j', 'k']",
    ]
@pytest.mark.parametrize("text,expected", [
    ("Root-Is-Purelib: true", True),
    ("Root-Is-Purelib: false", False),
    # Any value other than (case-insensitive) "true" counts as False.
    ("Root-Is-Purelib: hello", False),
    # A missing header defaults to False.
    ("", False),
    # Header name and value are matched case-insensitively.
    ("root-is-purelib: true", True),
    ("root-is-purelib: True", True),
])
def test_wheel_root_is_purelib(text, expected):
    """wheel_root_is_purelib() reads Root-Is-Purelib from WHEEL metadata."""
    assert wheel.wheel_root_is_purelib(message_from_string(text)) == expected
class TestWheelFile:
    """Tests for unpacking wheel archives."""

    def test_unpack_wheel_no_flatten(self, tmpdir):
        """unpack_file() keeps the wheel's top-level layout intact."""
        wheel_file = os.path.join(
            DATA_DIR, 'packages', 'meta-1.0-py2.py3-none-any.whl'
        )
        unpack_file(wheel_file, tmpdir)
        assert os.path.isdir(os.path.join(tmpdir, 'meta-1.0.dist-info'))
class TestInstallUnpackedWheel:
    """
    Tests for moving files from wheel src to scheme paths
    """

    def prep(self, data, tmpdir):
        """Build a 'sample' wheel on disk and compute install locations.

        Sets self.name/wheelpath/req/src/dest/scheme plus the expected
        source and destination .dist-info paths used by the assertions.
        """
        # Since Path implements __add__, os.path.join returns a Path object.
        # Passing Path objects to interfaces expecting str (like
        # `compileall.compile_file`) can cause failures, so we normalize it
        # to a string here.
        tmpdir = str(tmpdir)
        self.name = 'sample'
        self.wheelpath = make_wheel(
            "sample",
            "1.2.0",
            metadata_body=textwrap.dedent(
                """
                A sample Python project
                =======================
                ...
                """
            ),
            metadata_updates={
                "Requires-Dist": ["peppercorn"],
            },
            extra_files={
                "sample/__init__.py": textwrap.dedent(
                    '''
                    __version__ = '1.2.0'
                    def main():
                        """Entry point for the application script"""
                        print("Call your main application code here")
                    '''
                ),
                "sample/package_data.dat": "some data",
            },
            extra_metadata_files={
                "DESCRIPTION.rst": textwrap.dedent(
                    """
                    A sample Python project
                    =======================
                    ...
                    """
                ),
                "top_level.txt": "sample\n",
                # Deliberately include an empty directory so tests can
                # check it is not installed.
                "empty_dir/empty_dir/": "",
            },
            extra_data_files={
                "data/my_data/data_file": "some data",
            },
            entry_points={
                "console_scripts": ["sample = sample:main"],
                "gui_scripts": ["sample2 = sample:main"],
            },
        ).save_to_dir(tmpdir)
        self.req = Requirement('sample')
        self.src = os.path.join(tmpdir, 'src')
        self.dest = os.path.join(tmpdir, 'dest')
        self.scheme = Scheme(
            purelib=os.path.join(self.dest, 'lib'),
            platlib=os.path.join(self.dest, 'lib'),
            headers=os.path.join(self.dest, 'headers'),
            scripts=os.path.join(self.dest, 'bin'),
            data=os.path.join(self.dest, 'data'),
        )
        self.src_dist_info = os.path.join(
            self.src, 'sample-1.2.0.dist-info')
        self.dest_dist_info = os.path.join(
            self.scheme.purelib, 'sample-1.2.0.dist-info')

    def assert_permission(self, path, mode):
        """Assert ``path`` has at least the permission bits in ``mode``."""
        target_mode = os.stat(path).st_mode & 0o777
        assert (target_mode & mode) == mode, oct(target_mode)

    def assert_installed(self, expected_permission):
        """Check the sample wheel's files landed in the scheme paths."""
        # lib
        assert os.path.isdir(
            os.path.join(self.scheme.purelib, 'sample'))
        # dist-info
        metadata = os.path.join(self.dest_dist_info, 'METADATA')
        self.assert_permission(metadata, expected_permission)
        record = os.path.join(self.dest_dist_info, 'RECORD')
        self.assert_permission(record, expected_permission)
        # data files
        data_file = os.path.join(self.scheme.data, 'my_data', 'data_file')
        assert os.path.isfile(data_file)
        # package data
        pkg_data = os.path.join(
            self.scheme.purelib, 'sample', 'package_data.dat')
        assert os.path.isfile(pkg_data)

    def test_std_install(self, data, tmpdir):
        """A plain install places files with default 0o644 permissions."""
        self.prep(data, tmpdir)
        wheel.install_wheel(
            self.name,
            self.wheelpath,
            scheme=self.scheme,
            req_description=str(self.req),
        )
        self.assert_installed(0o644)

    @pytest.mark.parametrize("user_mask, expected_permission", [
        (0o27, 0o640)
    ])
    def test_std_install_with_custom_umask(self, data, tmpdir,
                                           user_mask, expected_permission):
        """Test that the files created after install honor the permissions
        set when the user sets a custom umask"""
        prev_umask = os.umask(user_mask)
        try:
            self.prep(data, tmpdir)
            wheel.install_wheel(
                self.name,
                self.wheelpath,
                scheme=self.scheme,
                req_description=str(self.req),
            )
            self.assert_installed(expected_permission)
        finally:
            # Always restore the process umask so other tests are unaffected.
            os.umask(prev_umask)

    def test_std_install_requested(self, data, tmpdir):
        """requested=True creates a REQUESTED marker in .dist-info."""
        self.prep(data, tmpdir)
        wheel.install_wheel(
            self.name,
            self.wheelpath,
            scheme=self.scheme,
            req_description=str(self.req),
            requested=True,
        )
        self.assert_installed(0o644)
        requested_path = os.path.join(self.dest_dist_info, 'REQUESTED')
        assert os.path.isfile(requested_path)

    def test_std_install_with_direct_url(self, data, tmpdir):
        """Test that install_wheel creates direct_url.json metadata when
        provided with a direct_url argument. Also test that the RECORDS
        file contains an entry for direct_url.json in that case.
        Note direct_url.url is intentionally different from wheelpath,
        because wheelpath is typically the result of a local build.
        """
        self.prep(data, tmpdir)
        direct_url = DirectUrl(
            url="file:///home/user/archive.tgz",
            info=ArchiveInfo(),
        )
        wheel.install_wheel(
            self.name,
            self.wheelpath,
            scheme=self.scheme,
            req_description=str(self.req),
            direct_url=direct_url,
        )
        direct_url_path = os.path.join(
            self.dest_dist_info, DIRECT_URL_METADATA_NAME
        )
        self.assert_permission(direct_url_path, 0o644)
        # Read back as bytes and decode explicitly so the on-disk encoding
        # is checked, not just the logical content.
        with open(direct_url_path, 'rb') as f:
            expected_direct_url_json = direct_url.to_json()
            direct_url_json = f.read().decode("utf-8")
            assert direct_url_json == expected_direct_url_json
        # check that the direct_url file is part of RECORDS
        with open(os.path.join(self.dest_dist_info, "RECORD")) as f:
            assert DIRECT_URL_METADATA_NAME in f.read()

    def test_install_prefix(self, data, tmpdir):
        """A --prefix-style scheme places bin/ and data under root+prefix."""
        prefix = os.path.join(os.path.sep, 'some', 'path')
        self.prep(data, tmpdir)
        scheme = get_scheme(
            self.name,
            user=False,
            home=None,
            root=tmpdir,
            isolated=False,
            prefix=prefix,
        )
        wheel.install_wheel(
            self.name,
            self.wheelpath,
            scheme=scheme,
            req_description=str(self.req),
        )
        bin_dir = 'Scripts' if WINDOWS else 'bin'
        assert os.path.exists(os.path.join(tmpdir, 'some', 'path', bin_dir))
        assert os.path.exists(os.path.join(tmpdir, 'some', 'path', 'my_data'))

    def test_dist_info_contains_empty_dir(self, data, tmpdir):
        """
        Test that empty dirs are not installed
        """
        # e.g. https://github.com/pypa/pip/issues/1632#issuecomment-38027275
        self.prep(data, tmpdir)
        wheel.install_wheel(
            self.name,
            self.wheelpath,
            scheme=self.scheme,
            req_description=str(self.req),
        )
        self.assert_installed(0o644)
        assert not os.path.isdir(
            os.path.join(self.dest_dist_info, 'empty_dir'))

    @pytest.mark.parametrize(
        "path",
        ["/tmp/example", "../example", "./../example"]
    )
    def test_wheel_install_rejects_bad_paths(self, data, tmpdir, path):
        """Wheels containing absolute or parent-escaping paths are rejected."""
        self.prep(data, tmpdir)
        wheel_path = make_wheel(
            "simple", "0.1.0", extra_files={path: "example contents\n"}
        ).save_to_dir(tmpdir)
        with pytest.raises(InstallationError) as e:
            wheel.install_wheel(
                "simple",
                str(wheel_path),
                scheme=self.scheme,
                req_description="simple",
            )
        exc_text = str(e.value)
        assert os.path.basename(wheel_path) in exc_text
        assert "example" in exc_text

    @pytest.mark.xfail(strict=True)
    @pytest.mark.parametrize(
        "entrypoint", ["hello = hello", "hello = hello:"]
    )
    @pytest.mark.parametrize(
        "entrypoint_type", ["console_scripts", "gui_scripts"]
    )
    def test_invalid_entrypoints_fail(
        self, data, tmpdir, entrypoint, entrypoint_type
    ):
        """Malformed entry point specs should cause installation to fail.

        NOTE(review): marked strict xfail — pip does not currently reject
        these specs; the test documents the desired behavior.
        """
        self.prep(data, tmpdir)
        wheel_path = make_wheel(
            "simple", "0.1.0", entry_points={entrypoint_type: [entrypoint]}
        ).save_to_dir(tmpdir)
        with pytest.raises(InstallationError) as e:
            wheel.install_wheel(
                "simple",
                str(wheel_path),
                scheme=self.scheme,
                req_description="simple",
            )
        exc_text = str(e.value)
        assert os.path.basename(wheel_path) in exc_text
        assert entrypoint in exc_text
class TestMessageAboutScriptsNotOnPATH:
    """Tests for wheel.message_about_scripts_not_on_PATH()."""

    # Extra warning expected whenever a PATH entry starts with "~".
    tilde_warning_msg = (
        "NOTE: The current PATH contains path(s) starting with `~`, "
        "which may not be expanded by all applications."
    )

    def _template(self, paths, scripts):
        """Run message_about_scripts_not_on_PATH(scripts) with PATH set
        to the joined ``paths``."""
        with patch.dict('os.environ', {'PATH': os.pathsep.join(paths)}):
            return wheel.message_about_scripts_not_on_PATH(scripts)

    def test_no_script(self):
        # No scripts at all -> no message.
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=[]
        )
        assert retval is None

    def test_single_script__single_dir_not_on_PATH(self):
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/c/d/foo']
        )
        assert retval is not None
        assert "--no-warn-script-location" in retval
        assert "foo is installed in '/c/d'" in retval
        assert self.tilde_warning_msg not in retval

    def test_two_script__single_dir_not_on_PATH(self):
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/c/d/foo', '/c/d/baz']
        )
        assert retval is not None
        assert "--no-warn-script-location" in retval
        # Script names are listed sorted within a directory.
        assert "baz and foo are installed in '/c/d'" in retval
        assert self.tilde_warning_msg not in retval

    def test_multi_script__multi_dir_not_on_PATH(self):
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/c/d/foo', '/c/d/bar', '/c/d/baz', '/a/b/c/spam']
        )
        assert retval is not None
        assert "--no-warn-script-location" in retval
        assert "bar, baz and foo are installed in '/c/d'" in retval
        assert "spam is installed in '/a/b/c'" in retval
        assert self.tilde_warning_msg not in retval

    def test_multi_script_all__multi_dir_not_on_PATH(self):
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=[
                '/c/d/foo', '/c/d/bar', '/c/d/baz',
                '/a/b/c/spam', '/a/b/c/eggs'
            ]
        )
        assert retval is not None
        assert "--no-warn-script-location" in retval
        assert "bar, baz and foo are installed in '/c/d'" in retval
        assert "eggs and spam are installed in '/a/b/c'" in retval
        assert self.tilde_warning_msg not in retval

    def test_two_script__single_dir_on_PATH(self):
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/a/b/foo', '/a/b/baz']
        )
        assert retval is None

    def test_multi_script__multi_dir_on_PATH(self):
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/a/b/foo', '/a/b/bar', '/a/b/baz', '/c/d/bin/spam']
        )
        assert retval is None

    def test_multi_script__single_dir_on_PATH(self):
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/a/b/foo', '/a/b/bar', '/a/b/baz']
        )
        assert retval is None

    def test_single_script__single_dir_on_PATH(self):
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/a/b/foo']
        )
        assert retval is None

    def test_PATH_check_case_insensitive_on_windows(self):
        # On Windows path comparison ignores case (and both separators).
        retval = self._template(
            paths=['C:\\A\\b'],
            scripts=['c:\\a\\b\\c', 'C:/A/b/d']
        )
        if WINDOWS:
            assert retval is None
        else:
            assert retval is not None
            assert self.tilde_warning_msg not in retval

    def test_trailing_ossep_removal(self):
        # A trailing separator on a PATH entry must not defeat the match.
        retval = self._template(
            paths=[os.path.join('a', 'b', '')],
            scripts=[os.path.join('a', 'b', 'c')]
        )
        assert retval is None

    def test_missing_PATH_env_treated_as_empty_PATH_env(self, monkeypatch):
        # An unset PATH must behave the same as PATH="".
        scripts = ['a/b/foo']
        monkeypatch.delenv('PATH')
        retval_missing = wheel.message_about_scripts_not_on_PATH(scripts)
        monkeypatch.setenv('PATH', '')
        retval_empty = wheel.message_about_scripts_not_on_PATH(scripts)
        assert retval_missing == retval_empty

    def test_no_script_tilde_in_path(self):
        # Tilde entries alone do not trigger a message when no scripts exist.
        retval = self._template(
            paths=['/a/b', '/c/d/bin', '~/e', '/f/g~g'],
            scripts=[]
        )
        assert retval is None

    def test_multi_script_all_tilde__multi_dir_not_on_PATH(self):
        # A PATH entry starting with "~" adds the tilde warning.
        retval = self._template(
            paths=['/a/b', '/c/d/bin', '~e/f'],
            scripts=[
                '/c/d/foo', '/c/d/bar', '/c/d/baz',
                '/a/b/c/spam', '/a/b/c/eggs', '/e/f/tilde'
            ]
        )
        assert retval is not None
        assert "--no-warn-script-location" in retval
        assert "bar, baz and foo are installed in '/c/d'" in retval
        assert "eggs and spam are installed in '/a/b/c'" in retval
        assert "tilde is installed in '/e/f'" in retval
        assert self.tilde_warning_msg in retval

    def test_multi_script_all_tilde_not_at_start__multi_dir_not_on_PATH(self):
        # A "~" not at the start of a PATH entry must NOT trigger the
        # tilde warning.
        retval = self._template(
            paths=['/e/f~f', '/c/d/bin'],
            scripts=[
                '/c/d/foo', '/c/d/bar', '/c/d/baz',
                '/e/f~f/c/spam', '/e/f~f/c/eggs'
            ]
        )
        assert retval is not None
        assert "--no-warn-script-location" in retval
        assert "bar, baz and foo are installed in '/c/d'" in retval
        assert "eggs and spam are installed in '/e/f~f/c'" in retval
        assert self.tilde_warning_msg not in retval
class TestWheelHashCalculators:
    """Tests for hash_file() and wheel.rehash()."""

    def prep(self, tmpdir):
        """Create a sparse file of NUL bytes with known precomputed hashes."""
        self.test_file = tmpdir.joinpath("hash.file")
        # Want this big enough to trigger the internal read loops.
        self.test_file_len = 2 * 1024 * 1024
        with open(str(self.test_file), "w") as fp:
            fp.truncate(self.test_file_len)
        self.test_file_hash = (
            '5647f05ec18958947d32874eeb788fa396a05d0bab7c1b71f112ceb7e9b31eee'
        )
        self.test_file_hash_encoded = (
            'sha256=VkfwXsGJWJR9ModO63iPo5agXQurfBtx8RLOt-mzHu4'
        )

    def test_hash_file(self, tmpdir):
        """hash_file() returns a hash object plus the byte count read."""
        self.prep(tmpdir)
        digest, length = hash_file(self.test_file)
        assert length == self.test_file_len
        assert digest.hexdigest() == self.test_file_hash

    def test_rehash(self, tmpdir):
        """rehash() returns the RECORD-style encoded digest and the
        length as a string."""
        self.prep(tmpdir)
        encoded, length = wheel.rehash(self.test_file)
        assert length == str(self.test_file_len)
        assert encoded == self.test_file_hash_encoded
|
davgibbs/django | refs/heads/master | django/templatetags/i18n.py | 219 | from __future__ import unicode_literals
import sys
from django.conf import settings
from django.template import Library, Node, TemplateSyntaxError, Variable
from django.template.base import TOKEN_TEXT, TOKEN_VAR, render_value_in_context
from django.template.defaulttags import token_kwargs
from django.utils import six, translation
from django.utils.safestring import SafeData, mark_safe
# Registry for this template-tag library ({% load i18n %}).
register = Library()
class GetAvailableLanguagesNode(Node):
    """Node for {% get_available_languages as var %}: stores a list of
    (code, translated name) pairs from settings.LANGUAGES."""

    def __init__(self, variable):
        self.variable = variable

    def render(self, context):
        context[self.variable] = [
            (code, translation.ugettext(name))
            for code, name in settings.LANGUAGES
        ]
        return ''
class GetLanguageInfoNode(Node):
    """Node for {% get_language_info for code as var %}: stores the
    language info dict for the resolved language code."""

    def __init__(self, lang_code, variable):
        self.lang_code = lang_code
        self.variable = variable

    def render(self, context):
        code = self.lang_code.resolve(context)
        context[self.variable] = translation.get_language_info(code)
        return ''
class GetLanguageInfoListNode(Node):
    """Node for {% get_language_info_list for sequence as var %}: stores
    a list of language info dicts for the given codes/sequences."""

    def __init__(self, languages, variable):
        self.languages = languages
        self.variable = variable

    def get_language_info(self, language):
        # ``language`` is either a language code string or a sequence
        # with the language code as its first item
        if len(language[0]) > 1:
            # language[0] has more than one character, so ``language`` must
            # itself be a sequence like ('de', 'German'): use its code item.
            return translation.get_language_info(language[0])
        else:
            # language[0] is a single character, so ``language`` is taken
            # to be the language code string itself.
            return translation.get_language_info(str(language))

    def render(self, context):
        langs = self.languages.resolve(context)
        context[self.variable] = [self.get_language_info(lang) for lang in langs]
        return ''
class GetCurrentLanguageNode(Node):
    """Node for {% get_current_language as var %}: stores the active
    language code."""

    def __init__(self, variable):
        self.variable = variable

    def render(self, context):
        current = translation.get_language()
        context[self.variable] = current
        return ''
class GetCurrentLanguageBidiNode(Node):
    """Node for {% get_current_language_bidi as var %}: stores True when
    the active language is written right-to-left."""

    def __init__(self, variable):
        self.variable = variable

    def render(self, context):
        bidi = translation.get_language_bidi()
        context[self.variable] = bidi
        return ''
class TranslateNode(Node):
    """Node for {% trans %}: translates a literal or variable, optionally
    with a message context and/or storing the result via 'as var'."""

    def __init__(self, filter_expression, noop, asvar=None,
                 message_context=None):
        self.noop = noop
        self.asvar = asvar
        self.message_context = message_context
        self.filter_expression = filter_expression
        # A plain string means the tag received a literal; wrap it in a
        # quoted Variable so it flows through the same resolution path
        # as a real template variable.
        if isinstance(self.filter_expression.var, six.string_types):
            self.filter_expression.var = Variable("'%s'" %
                                                  self.filter_expression.var)

    def render(self, context):
        # Translation happens during variable resolution unless noop was set.
        self.filter_expression.var.translate = not self.noop
        if self.message_context:
            self.filter_expression.var.message_context = (
                self.message_context.resolve(context))
        output = self.filter_expression.resolve(context)
        value = render_value_in_context(output, context)
        # Restore percent signs. Percent signs in template text are doubled
        # so they are not interpreted as string format flags.
        is_safe = isinstance(value, SafeData)
        value = value.replace('%%', '%')
        value = mark_safe(value) if is_safe else value
        if self.asvar:
            # 'as var' form: store the value and emit nothing.
            context[self.asvar] = value
            return ''
        else:
            return value
class BlockTranslateNode(Node):
    """Node for {% blocktrans %}: translates a block of text, with
    optional pluralization, 'with' context variables, message context,
    whitespace trimming, and 'asvar' storage."""

    def __init__(self, extra_context, singular, plural=None, countervar=None,
                 counter=None, message_context=None, trimmed=False, asvar=None):
        self.extra_context = extra_context
        self.singular = singular
        self.plural = plural
        self.countervar = countervar
        self.counter = counter
        self.message_context = message_context
        self.trimmed = trimmed
        self.asvar = asvar

    def render_token_list(self, tokens):
        """Build the gettext message string from the block's tokens.

        Returns (msg, vars): msg uses %(name)s placeholders and vars lists
        the variable names encountered.
        """
        result = []
        vars = []
        for token in tokens:
            if token.token_type == TOKEN_TEXT:
                # Double literal percent signs so they survive the final
                # %-formatting pass in render().
                result.append(token.contents.replace('%', '%%'))
            elif token.token_type == TOKEN_VAR:
                result.append('%%(%s)s' % token.contents)
                vars.append(token.contents)
        msg = ''.join(result)
        if self.trimmed:
            msg = translation.trim_whitespace(msg)
        return msg, vars

    def render(self, context, nested=False):
        if self.message_context:
            message_context = self.message_context.resolve(context)
        else:
            message_context = None
        tmp_context = {}
        for var, val in self.extra_context.items():
            tmp_context[var] = val.resolve(context)
        # Update() works like a push(), so corresponding context.pop() is at
        # the end of function
        context.update(tmp_context)
        singular, vars = self.render_token_list(self.singular)
        if self.plural and self.countervar and self.counter:
            # Plural form: pick singular/plural via (n|u)gettext with the
            # resolved counter value, which is also exposed in the context.
            count = self.counter.resolve(context)
            context[self.countervar] = count
            plural, plural_vars = self.render_token_list(self.plural)
            if message_context:
                result = translation.npgettext(message_context, singular,
                                               plural, count)
            else:
                result = translation.ungettext(singular, plural, count)
            vars.extend(plural_vars)
        else:
            if message_context:
                result = translation.pgettext(message_context, singular)
            else:
                result = translation.ugettext(singular)
        default_value = context.template.engine.string_if_invalid

        def render_value(key):
            # Resolve a placeholder variable, falling back to the engine's
            # string_if_invalid the same way missing variables do.
            if key in context:
                val = context[key]
            else:
                val = default_value % key if '%s' in default_value else default_value
            return render_value_in_context(val, context)

        data = {v: render_value(v) for v in vars}
        context.pop()
        try:
            result = result % data
        except (KeyError, ValueError):
            if nested:
                # Either string is malformed, or it's a bug
                raise TemplateSyntaxError("'blocktrans' is unable to format "
                    "string returned by gettext: %r using %r" % (result, data))
            # The translated string is broken; re-render untranslated
            # (language None) so the user still gets output.
            with translation.override(None):
                result = self.render(context, nested=True)
        if self.asvar:
            context[self.asvar] = result
            return ''
        else:
            return result
class LanguageNode(Node):
    """Node for {% language %}...{% endlanguage %}: renders its body with
    the given language activated."""

    def __init__(self, nodelist, language):
        self.nodelist = nodelist
        self.language = language

    def render(self, context):
        active_language = self.language.resolve(context)
        with translation.override(active_language):
            return self.nodelist.render(context)
@register.tag("get_available_languages")
def do_get_available_languages(parser, token):
    """
    Store a list of available languages in the context.

    Usage::

        {% get_available_languages as languages %}
        {% for language in languages %}
        ...
        {% endfor %}

    This pulls the LANGUAGES setting from your settings file (or the
    default settings) and puts it into the named variable.
    """
    # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments
    args = token.contents.split()
    if len(args) == 3 and args[1] == 'as':
        return GetAvailableLanguagesNode(args[2])
    raise TemplateSyntaxError("'get_available_languages' requires 'as variable' (got %r)" % args)
@register.tag("get_language_info")
def do_get_language_info(parser, token):
    """
    Store the language information dictionary for the given language code
    in a context variable.

    Usage::

        {% get_language_info for LANGUAGE_CODE as l %}
        {{ l.code }}
        {{ l.name }}
        {{ l.name_translated }}
        {{ l.name_local }}
        {{ l.bidi|yesno:"bi-directional,uni-directional" }}
    """
    args = token.split_contents()
    well_formed = len(args) == 5 and args[1] == 'for' and args[3] == 'as'
    if not well_formed:
        raise TemplateSyntaxError("'%s' requires 'for string as variable' (got %r)" % (args[0], args[1:]))
    return GetLanguageInfoNode(parser.compile_filter(args[2]), args[4])
@register.tag("get_language_info_list")
def do_get_language_info_list(parser, token):
    """
    Store a list of language information dictionaries for the given
    language codes in a context variable. The codes can be given as a
    list of strings or as a settings.LANGUAGES style list (or any
    sequence of sequences whose first items are language codes).

    Usage::

        {% get_language_info_list for LANGUAGES as langs %}
        {% for l in langs %}
        {{ l.code }}
        {{ l.name }}
        {{ l.name_translated }}
        {{ l.name_local }}
        {{ l.bidi|yesno:"bi-directional,uni-directional" }}
        {% endfor %}
    """
    args = token.split_contents()
    well_formed = len(args) == 5 and args[1] == 'for' and args[3] == 'as'
    if not well_formed:
        raise TemplateSyntaxError("'%s' requires 'for sequence as variable' (got %r)" % (args[0], args[1:]))
    return GetLanguageInfoListNode(parser.compile_filter(args[2]), args[4])
@register.filter
def language_name(lang_code):
    """Return the English name of the given language code."""
    return translation.get_language_info(lang_code)['name']


@register.filter
def language_name_translated(lang_code):
    """Return the language's name translated into the active language."""
    english_name = translation.get_language_info(lang_code)['name']
    return translation.ugettext(english_name)


@register.filter
def language_name_local(lang_code):
    """Return the language's name written in the language itself."""
    return translation.get_language_info(lang_code)['name_local']


@register.filter
def language_bidi(lang_code):
    """Return True if the language is written right-to-left."""
    return translation.get_language_info(lang_code)['bidi']
@register.tag("get_current_language")
def do_get_current_language(parser, token):
    """
    Store the current language in the context.

    Usage::

        {% get_current_language as language %}

    This fetches the currently active language and puts its value into
    the ``language`` context variable.
    """
    # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments
    args = token.contents.split()
    if len(args) == 3 and args[1] == 'as':
        return GetCurrentLanguageNode(args[2])
    raise TemplateSyntaxError("'get_current_language' requires 'as variable' (got %r)" % args)
@register.tag("get_current_language_bidi")
def do_get_current_language_bidi(parser, token):
    """
    Store the current language layout in the context.

    Usage::

        {% get_current_language_bidi as bidi %}

    This fetches the currently active language's layout and puts its
    value into the ``bidi`` context variable: True indicates
    right-to-left layout, otherwise left-to-right.
    """
    # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments
    args = token.contents.split()
    if len(args) == 3 and args[1] == 'as':
        return GetCurrentLanguageBidiNode(args[2])
    raise TemplateSyntaxError("'get_current_language_bidi' requires 'as variable' (got %r)" % args)
@register.tag("trans")
def do_translate(parser, token):
    """
    Mark a string for translation and translate it for the current language.

    Usage::

        {% trans "this is a test" %}

    This marks the string for translation so it will be pulled out by
    mark-messages.py into the .po files and runs the string through the
    translation engine.

    There is a second form::

        {% trans "this is a test" noop %}

    This only marks for translation, but returns the string unchanged.
    Use it when you need to store values into forms that should be
    translated later on.

    You can use variables instead of constant strings to translate stuff
    you marked somewhere else::

        {% trans variable %}

    This just tries to translate the contents of the variable
    ``variable``. Make sure that the string in there is something that
    is in the .po file.

    It is possible to store the translated string into a variable::

        {% trans "this is a test" as var %}
        {{ var }}

    Contextual translations are also supported::

        {% trans "this is a test" context "greeting" %}

    This is equivalent to calling pgettext instead of (u)gettext.
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError("'%s' takes at least one argument" % bits[0])
    message_string = parser.compile_filter(bits[1])
    remaining = bits[2:]
    noop = False
    asvar = None
    message_context = None
    # Track options already consumed so duplicates can be rejected.
    seen = set()
    # 'as'/'noop' as a context value almost certainly indicates a
    # malformed tag, so they are rejected explicitly below.
    invalid_context = {'as', 'noop'}
    while remaining:
        option = remaining.pop(0)
        if option in seen:
            raise TemplateSyntaxError(
                "The '%s' option was specified more than once." % option,
            )
        elif option == 'noop':
            noop = True
        elif option == 'context':
            try:
                value = remaining.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the context option." % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            if value in invalid_context:
                raise TemplateSyntaxError(
                    "Invalid argument '%s' provided to the '%s' tag for the context option" % (value, bits[0]),
                )
            message_context = parser.compile_filter(value)
        elif option == 'as':
            try:
                value = remaining.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the as option." % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            asvar = value
        else:
            raise TemplateSyntaxError(
                "Unknown argument for '%s' tag: '%s'. The only options "
                "available are 'noop', 'context' \"xxx\", and 'as VAR'." % (
                    bits[0], option,
                )
            )
        seen.add(option)
    return TranslateNode(message_string, noop, asvar, message_context)
@register.tag("blocktrans")
def do_block_translate(parser, token):
    """
    Translate a block of text with parameters.

    Usage::

        {% blocktrans with bar=foo|filter boo=baz|filter %}
        This is {{ bar }} and {{ boo }}.
        {% endblocktrans %}

    Additionally, this supports pluralization::

        {% blocktrans count count=var|length %}
        There is {{ count }} object.
        {% plural %}
        There are {{ count }} objects.
        {% endblocktrans %}

    This is much like ngettext, only in template syntax.

    The "var as value" legacy format is still supported::

        {% blocktrans with foo|filter as bar and baz|filter as boo %}
        {% blocktrans count var|length as count %}

    The translated string can be stored in a variable using `asvar`::

        {% blocktrans with bar=foo|filter boo=baz|filter asvar var %}
        This is {{ bar }} and {{ boo }}.
        {% endblocktrans %}
        {{ var }}

    Contextual translations are supported::

        {% blocktrans with bar=foo|filter context "greeting" %}
        This is {{ bar }}.
        {% endblocktrans %}

    This is equivalent to calling pgettext/npgettext instead of
    (u)gettext/(u)ngettext.
    """
    bits = token.split_contents()
    options = {}
    remaining_bits = bits[1:]
    asvar = None
    # Parse the tag's options into the ``options`` dict, rejecting
    # duplicates.
    while remaining_bits:
        option = remaining_bits.pop(0)
        if option in options:
            raise TemplateSyntaxError('The %r option was specified more '
                                      'than once.' % option)
        if option == 'with':
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if not value:
                raise TemplateSyntaxError('"with" in %r tag needs at least '
                                          'one keyword argument.' % bits[0])
        elif option == 'count':
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if len(value) != 1:
                raise TemplateSyntaxError('"count" in %r tag expected exactly '
                                          'one keyword argument.' % bits[0])
        elif option == "context":
            try:
                value = remaining_bits.pop(0)
                value = parser.compile_filter(value)
            except Exception:
                msg = (
                    '"context" in %r tag expected '
                    'exactly one argument.') % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
        elif option == "trimmed":
            value = True
        elif option == "asvar":
            try:
                value = remaining_bits.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the asvar option." % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            asvar = value
        else:
            raise TemplateSyntaxError('Unknown argument for %r tag: %r.' %
                                      (bits[0], option))
        options[option] = value
    # Unpack the single countervar=counter pair, if given.
    if 'count' in options:
        countervar, counter = list(options['count'].items())[0]
    else:
        countervar, counter = None, None
    if 'context' in options:
        message_context = options['context']
    else:
        message_context = None
    extra_context = options.get('with', {})
    trimmed = options.get("trimmed", False)
    # Collect the body tokens: everything up to {% plural %} belongs to
    # the singular form, the rest (when counting) to the plural form.
    singular = []
    plural = []
    while parser.tokens:
        token = parser.next_token()
        if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
            singular.append(token)
        else:
            break
    if countervar and counter:
        if token.contents.strip() != 'plural':
            raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags inside it")
        while parser.tokens:
            token = parser.next_token()
            if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
                plural.append(token)
            else:
                break
    # The closing tag must follow immediately; any other block tag is an
    # error inside blocktrans.
    if token.contents.strip() != 'endblocktrans':
        raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags (seen %r) inside it" % token.contents)
    return BlockTranslateNode(extra_context, singular, plural, countervar,
                              counter, message_context, trimmed=trimmed,
                              asvar=asvar)
@register.tag
def language(parser, token):
    """
    Enable the given language just for this block.

    Usage::

        {% language "de" %}
            This is {{ bar }} and {{ boo }}.
        {% endlanguage %}
    """
    args = token.split_contents()
    # Exactly one argument (the language) is accepted after the tag name.
    if len(args) != 2:
        raise TemplateSyntaxError("'%s' takes one argument (language)" % args[0])
    lang_expr = parser.compile_filter(args[1])
    body = parser.parse(('endlanguage',))
    # Consume the {% endlanguage %} token itself.
    parser.delete_first_token()
    return LanguageNode(body, lang_expr)
|
rvmoura96/projeto-almoxarifado | refs/heads/master | myvenv/Lib/site-packages/pip/_vendor/requests/packages/chardet/chardistribution.py | 2754 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .euctwfreq import (EUCTWCharToFreqOrder, EUCTW_TABLE_SIZE,
EUCTW_TYPICAL_DISTRIBUTION_RATIO)
from .euckrfreq import (EUCKRCharToFreqOrder, EUCKR_TABLE_SIZE,
EUCKR_TYPICAL_DISTRIBUTION_RATIO)
from .gb2312freq import (GB2312CharToFreqOrder, GB2312_TABLE_SIZE,
GB2312_TYPICAL_DISTRIBUTION_RATIO)
from .big5freq import (Big5CharToFreqOrder, BIG5_TABLE_SIZE,
BIG5_TYPICAL_DISTRIBUTION_RATIO)
from .jisfreq import (JISCharToFreqOrder, JIS_TABLE_SIZE,
JIS_TYPICAL_DISTRIBUTION_RATIO)
from .compat import wrap_ord
# Tunable thresholds for the distribution analysers below.
ENOUGH_DATA_THRESHOLD = 1024  # chars after which got_enough_data() returns True
SURE_YES = 0.99  # confidence ceiling: we never claim to be 100% sure
SURE_NO = 0.01  # confidence returned when too little data has been seen
MINIMUM_DATA_THRESHOLD = 3  # minimum frequent-char count before computing confidence
class CharDistributionAnalysis:
    """Base class: character frequency-distribution analysis for 2-byte
    encodings. Subclasses supply the frequency table and the byte-pair to
    frequency-order mapping (get_order)."""

    def __init__(self):
        # Lookup table mapping char order (from get_order()) to frequency
        # order; set by subclasses.
        self._mCharToFreqOrder = None
        self._mTableSize = None  # size of the table above; set by subclasses
        # Language-dependent constant used in the confidence computation; see
        # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
        self._mTypicalDistributionRatio = None
        self.reset()

    def reset(self):
        """reset analyser, clear any state"""
        self._mDone = False       # True once detection has reached a conclusion
        self._mTotalChars = 0     # total characters encountered
        self._mFreqChars = 0      # characters whose frequency order is < 512

    def feed(self, aBuf, aCharLen):
        """feed a character with known length"""
        # Only 2-byte characters take part in the distribution analysis.
        order = self.get_order(aBuf) if aCharLen == 2 else -1
        if order < 0:
            return
        self._mTotalChars += 1
        if order < self._mTableSize and self._mCharToFreqOrder[order] < 512:
            self._mFreqChars += 1

    def get_confidence(self):
        """return confidence based on existing data"""
        # Too few characters in our consideration range: negative answer.
        if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
            return SURE_NO
        if self._mTotalChars != self._mFreqChars:
            r = self._mFreqChars / ((self._mTotalChars - self._mFreqChars)
                                    * self._mTypicalDistributionRatio)
            if r < SURE_YES:
                return r
        # Normalise: we never want to report 100% certainty.
        return SURE_YES

    def got_enough_data(self):
        # A conclusion can be drawn before all data is seen: a certain
        # amount of input is enough for charset detection.
        return self._mTotalChars > ENOUGH_DATA_THRESHOLD

    def get_order(self, aBuf):
        # Subclasses convert the raw byte pair into a frequency "order", so
        # that multiple encodings of one language can share one table.
        # The base class recognises nothing.
        return -1
class EUCTWDistributionAnalysis(CharDistributionAnalysis):
    """Distribution analysis configured for EUC-TW."""

    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = EUCTWCharToFreqOrder
        self._mTableSize = EUCTW_TABLE_SIZE
        self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # EUC-TW bytes of interest: lead 0xC4-0xFE, trail 0xA1-0xFE.
        # No validation needed here: the state machine has already done it.
        lead = wrap_ord(aBuf[0])
        if lead < 0xC4:
            return -1
        return 94 * (lead - 0xC4) + wrap_ord(aBuf[1]) - 0xA1
class EUCKRDistributionAnalysis(CharDistributionAnalysis):
    """Distribution analysis configured for EUC-KR."""

    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = EUCKRCharToFreqOrder
        self._mTableSize = EUCKR_TABLE_SIZE
        self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # EUC-KR bytes of interest: lead 0xB0-0xFE, trail 0xA1-0xFE.
        # No validation needed here: the state machine has already done it.
        lead = wrap_ord(aBuf[0])
        if lead < 0xB0:
            return -1
        return 94 * (lead - 0xB0) + wrap_ord(aBuf[1]) - 0xA1
class GB2312DistributionAnalysis(CharDistributionAnalysis):
    """Distribution analysis configured for GB2312."""

    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = GB2312CharToFreqOrder
        self._mTableSize = GB2312_TABLE_SIZE
        self._mTypicalDistributionRatio = GB2312_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # GB2312 bytes of interest: lead 0xB0-0xFE, trail 0xA1-0xFE.
        # No validation needed here: the state machine has already done it.
        lead, trail = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
        if lead >= 0xB0 and trail >= 0xA1:
            return 94 * (lead - 0xB0) + trail - 0xA1
        return -1
class Big5DistributionAnalysis(CharDistributionAnalysis):
    """Distribution analysis configured for Big5."""

    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = Big5CharToFreqOrder
        self._mTableSize = BIG5_TABLE_SIZE
        self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # Big5 bytes of interest: lead 0xA4-0xFE, trail 0x40-0x7E or
        # 0xA1-0xFE. No validation needed: the state machine has done it.
        lead, trail = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
        if lead < 0xA4:
            return -1
        if trail >= 0xA1:
            # High trail range starts after the 63 slots of the low range.
            return 157 * (lead - 0xA4) + trail - 0xA1 + 63
        return 157 * (lead - 0xA4) + trail - 0x40
class SJISDistributionAnalysis(CharDistributionAnalysis):
    """Distribution analysis configured for Shift-JIS."""

    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = JISCharToFreqOrder
        self._mTableSize = JIS_TABLE_SIZE
        self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # Shift-JIS bytes of interest: lead 0x81-0x9F or 0xE0-0xFE, trail
        # 0x40-0x7E or 0x81-0xFE. No validation needed: state machine did it.
        lead, trail = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
        if 0x81 <= lead <= 0x9F:
            base = 188 * (lead - 0x81)
        elif 0xE0 <= lead <= 0xEF:
            base = 188 * (lead - 0xE0 + 31)
        else:
            return -1
        # NOTE(review): trail bytes above 0x7F are rejected here although the
        # comment above mentions the 0x81-0xFE range -- behaviour preserved
        # exactly as in the upstream vendored code.
        if trail > 0x7F:
            return -1
        return base + trail - 0x40
class EUCJPDistributionAnalysis(CharDistributionAnalysis):
    """Distribution analysis configured for EUC-JP."""

    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = JISCharToFreqOrder
        self._mTableSize = JIS_TABLE_SIZE
        self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # EUC-JP bytes of interest: lead 0xA0-0xFE, trail 0xA1-0xFE.
        # No validation needed here: the state machine has already done it.
        # NOTE(review): the guard accepts 0xA0 while the formula subtracts
        # 0xA1, so a 0xA0 lead yields a negative order (later discarded by
        # feed()); preserved as in the upstream vendored code.
        lead = wrap_ord(aBuf[0])
        if lead < 0xA0:
            return -1
        return 94 * (lead - 0xA1) + wrap_ord(aBuf[1]) - 0xA1
|
Transkribus/TranskribusDU | refs/heads/master | usecases/ctdar/DU_CTDAR.py | 1 | # -*- coding: utf-8 -*-
"""
DU task for tagging resolution
graph after the SW re-engineering by JLM during the 2019 summer.
As of June 5th, 2015, this is the exemplary code
Copyright NAVER(C) 2019 Hervé Déjean
Developed for the EU project READ. The READ project has received funding
from the European Union's Horizon 2020 research and innovation programme
under grant agreement No 674943.
"""
import sys, os
# try: #to ease the use without proper Python installation
# import TranskribusDU_version
# except ImportError:
# sys.path.append( os.path.dirname(os.path.dirname( os.path.abspath(sys.argv[0]) )))
# import TranskribusDU_version
# TranskribusDU_version
from common.trace import traceln
from graph.NodeType_PageXml import defaultBBoxDeltaFun
from graph.NodeType_PageXml import NodeType_PageXml_type
from graph.NodeType_PageXml import NodeType_PageXml
from graph.NodeType_PageXml import NodeType_PageXml_type_woText
from tasks.DU_Task_Factory import DU_Task_Factory
from tasks.DU_Task_Features import Features_June19_Simple
from tasks.DU_Task_Features import Features_June19_Simple_Separator
from tasks.DU_Task_Features import Features_June19_Simple_Shift
from tasks.DU_Task_Features import Features_June19_Simple_Separator_Shift
from tasks.DU_Task_Features import Features_June19_Full
from tasks.DU_Task_Features import Features_June19_Full_Separator
from tasks.DU_Task_Features import Features_June19_Full_Shift
from tasks.DU_Task_Features import Features_June19_Full_Separator_Shift
from tasks.DU_Task_Features import FeatureDefinition
from tasks.DU_Task_Features import *
from graph.Graph_Multi_SinglePageXml import Graph_MultiSinglePageXml
from xml_formats.PageXml import PageXml
def getDataToPickle_for_table(doer, mdl, lGraph):
    """
    Task-specific data to pickle when --pkl is used.

    For each node of each graph, we store the node text, its bounding box,
    and the table coordinates read from the parent TableCell element:
        (text, (x1, y1, x2, y2), (row, col, rowSpan, colSpan))
    """
    def _int_attr(domnode, sAttr, default_value=None):
        # Attribute may be missing (None) or non-numeric: use the default.
        try:
            return int(domnode.get(sAttr))
        except (ValueError, TypeError):
            return default_value

    lDataByGraph = []
    for g in lGraph:
        lNodeData = []
        for nd in g.lNode:
            cell = nd.node.getparent()
            lNodeData.append((nd.text,
                              (nd.x1, nd.y1, nd.x2, nd.y2),
                              (_int_attr(cell, "row"),
                               _int_attr(cell, "col"),
                               _int_attr(cell, "rowSpan"),
                               _int_attr(cell, "colSpan"))))
        lDataByGraph.append(lNodeData)
    return lDataByGraph
# ----------------------------------------------------------------------------
class Features_CTDAR(FeatureDefinition):
    """
    Feature configuration for the cTDaR task.

    All features we had historically (some specific to CRF):
    NODE: geometry, neighbor count, text
    EDGE: type, constant 1, geometry, text of source and target nodes
    The features of the edges are shifted by class, apart the 1-hot ones.

    Commented-out entries below are preserved as known alternatives that can
    be re-enabled.
    """
    # NOTE(review): n_QUANTILES appears unused in this class body -- possibly
    # consumed by parent/feature classes; confirm before removing.
    n_QUANTILES = 16
    bShiftEdgeByClass = False  # shift edge features per edge class (see below)
    bSeparator = False  # add graphical-separator features to edges
    def __init__(self):
        FeatureDefinition.__init__(self)
        # NODES: only geometry is active by default.
        self.lNodeFeature = [ \
            ("geometry" , Node_Geometry()) # one can set nQuantile=...
#             , ("neighbor_count" , Node_Neighbour_Count()) # one can set nQuantile=...
#             , ("text" , Node_Text_NGram( 'char' # character n-grams
#                                          , 50 # number of N-grams
#                                          , (1,2) # N
#                                          , False # lowercase?))
#                                          ))
            ]
        node_transformer = FeatureUnion(self.lNodeFeature)
        # EDGES
        # which types of edge can we get??
        # It depends on the type of graph!!
        lEdgeClass = [HorizontalEdge, VerticalEdge]
        # standard set of features, including a constant 1 for CRF
        self.lEdgeFeature = [ \
            ('1hot' , Edge_Type_1Hot(lEdgeClass=lEdgeClass)) # Edge class 1 hot encoded (PUT IT FIRST)
            , ('1' , Edge_1()) # optional constant 1 for CRF
            , ('geom' , Edge_Geometry()) # one can set nQuantile=...
#             , ('src_txt', Edge_Source_Text_NGram( 'char' # character n-grams
#                                                   , 50 # number of N-grams
#                                                   , (1,2) # N
#                                                   , False # lowercase?))
#                                                   ))
#             , ('tgt_txt', Edge_Target_Text_NGram( 'char' # character n-grams
#                                                   , 50 # number of N-grams
#                                                   , (1,2) # N
#                                                   , False # lowercase?))
#                                                   ))
            ]
        # Optionally append separator-based edge features.
        if self.bSeparator:
            self.lEdgeFeature = self.lEdgeFeature + [
                ('sprtr_bool', Separator_boolean())
                , ('sprtr_num' , Separator_num())
                ]
        fu = FeatureUnion(self.lEdgeFeature)
        # you can use directly this union of features!
        edge_transformer = fu
        # OPTIONNALLY, you can have one range of features per type of edge.
        # the 1-hot encoding must be the first part of the union and it will determine
        # by how much the rest of the feature are shifted.
        #
        # IMPORTANT: 1hot is first of union AND the correct number of edge classes
        if self.bShiftEdgeByClass:
            ppl = Pipeline([
                ('fu', fu)
                , ('shifter', EdgeClassShifter(len(lEdgeClass)))
                ])
            edge_transformer = ppl
        self.setTransformers(node_transformer, edge_transformer)
class My_NodeType(NodeType_PageXml_type):
    """
    Node type that extracts the label from the 'structure/type' custom
    attribute of the (parent) TableCell element.
    """
    def __init__(self, sNodeTypeName, lsLabel, lsIgnoredLabel=None, bOther=True, BBoxDeltaFun=defaultBBoxDeltaFun):
        # NOTE(review): super() is called with NodeType_PageXml_type, which
        # SKIPS NodeType_PageXml_type.__init__ and runs its parent's __init__
        # instead -- presumably deliberate; confirm before changing.
        super(NodeType_PageXml_type, self).__init__(sNodeTypeName, lsLabel, lsIgnoredLabel, bOther, BBoxDeltaFun)
    def parseDocNodeLabel(self, graph_node, defaultCls=None):
        """
        Parse and set the graph node label and return it (a string).

        The label is "<sLabelAttr>_<structure type>", where the structure type
        is read from the parent element's 'structure' custom attribute via
        PageXml.getCustomAttr. Nodes without such an attribute get 'type_None'.
        """
        ndParent = graph_node.node.getparent()
        try:
            sLabel = "%s_%s" % ( self.sLabelAttr,
                                 PageXml.getCustomAttr(ndParent, 'structure','type')
                                 )
        # Narrowed from a bare 'except:' which also swallowed SystemExit and
        # KeyboardInterrupt; any failure to read the attribute means no label.
        except Exception:
            sLabel = 'type_None'
        return sLabel
# ----------------------------------------------------------------------------
def main(sys_argv_0, sLabelAttribute, cNodeType=NodeType_PageXml_type_woText):
    """
    Command-line entry point for the cTDaR tagging task.

    :param sys_argv_0: argv[0] of the calling script (used in the usage string)
    :param sLabelAttribute: label attribute name; also used as label prefix
    :param cNodeType: node type class (default: PageXml type without text)
    """
    def getConfiguredGraphClass(_doer):
        """
        Callback for DU_Task_Factory: return a configured graph class.
        """
        DU_GRAPH = Graph_MultiSinglePageXml
        ntClass = cNodeType
        lLabels = ['caption','table-footnote','table-binding','table-runningtext','table-caption','table-header-row','table-header-col','None']
        nt = ntClass(sLabelAttribute  #some short prefix because labels below are prefixed with it
                      , lLabels       # in conjugate, we accept all labels, and None becomes "none"
                      , []
                      , False         # unused
                      , BBoxDeltaFun=lambda v: max(v * 0.066, min(5, v/3))  #we reduce overlap in this way
                      )
        nt.setLabelAttribute(sLabelAttribute)
        nt.setXpathExpr( (".//pc:TextLine"        #how to find the nodes
                        #, "./pc:TextEquiv")       #how to get their text
                        , ".//pc:Unicode")        #how to get their text
                       )
        DU_GRAPH.addNodeType(nt)
        return DU_GRAPH
    # standard command line options for CRF- ECN- GAT-based methods
    usage, parser = DU_Task_Factory.getStandardOptionsParser(sys_argv_0)
    # NOTE(review): "spearators" typo below is in user-visible help text;
    # left unchanged here since it is a runtime string.
    parser.add_option("--separator", dest='bSeparator', action="store_true"
                      , default=False, help="Use the graphical spearators, if any, as edge features.")
    parser.add_option("--text"  , dest='bText'   , action="store_true"
                      , default=False, help="Use textual information if any, as node and edge features.")
    parser.add_option("--edge_vh", "--edge_hv"  , dest='bShift'  , action="store_true"
                      , default=False, help="Shift edge feature by range depending on edge type.")
    traceln("VERSION: %s" % DU_Task_Factory.getVersion())
    # ---
    #parse the command line
    (options, args) = parser.parse_args()
    try:
        sModelDir, sModelName = args
    except Exception as e:
        traceln("Specify a model folder and a model name!")
        DU_Task_Factory.exit(usage, 1, e)
    if options.bText     : traceln(" - using textual data, if any")
    if options.bSeparator: traceln(" - using graphical separators, if any")
    if options.bShift    : traceln(" - shift edge features by edge type")
    # Feature set is currently fixed; the commented logic below previously
    # selected a feature class from the --text/--separator/--edge_* options.
    cFeatureDefinition = Features_CTDAR
#     if options.bText:
#         if options.bSeparator:
#             if options.bShift:
#                 cFeatureDefinition = Features_June19_Full_Separator_Shift
#             else:
#                 cFeatureDefinition = Features_June19_Full_Separator
#         else:
#             if options.bShift:
#                 cFeatureDefinition = Features_June19_Full_Shift
#             else:
#                 # cFeatureDefinition = Features_June19_Full
#                 cFeatureDefinition = Features_BAR_Full
#
#     else:
#         if options.bSeparator:
#             if options.bShift:
#                 cFeatureDefinition = Features_June19_Simple_Separator_Shift
#             else:
#                 cFeatureDefinition = Features_June19_Simple_Separator
#         else:
#             if options.bShift:
#                 cFeatureDefinition = Features_June19_Simple_Shift
#             else:
#                 cFeatureDefinition = Features_June19_Simple
    # === SETTING the graph type (and its node type) a,d the feature extraction pipe
    doer = DU_Task_Factory.getDoer(sModelDir, sModelName
                                   , options = options
                                   , fun_getConfiguredGraphClass= getConfiguredGraphClass
                                   , cFeatureDefinition = cFeatureDefinition
                                   )
    # == LEARNER CONFIGURATION ===
    # setting the learner configuration, in a standard way
    # (from command line options, or from a JSON configuration file)
    dLearnerConfig = doer.getStandardLearnerConfig(options)
    # force a balanced weighting
    dLearnerConfig['balanced'] = True
    # of course, you can put yours here instead.
    doer.setLearnerConfiguration(dLearnerConfig)
    # Pickle per-node (text, bbox, table-coordinates) data alongside the model.
    doer.setAdditionalDataProvider(getDataToPickle_for_table)
    # === CONJUGATE MODE ===
    #doer.setConjugateMode()
    # === GO!! ===
    # act as per specified in the command line (--trn , --fold-run, ...)
    doer.standardDo(options)
    del doer
# ----------------------------------------------------------------------------
if __name__ == "__main__":
    # import better_exceptions
    # better_exceptions.MAX_LENGTH = None
    # Label attribute is "type": labels are read from the TableCell
    # 'structure/type' custom attribute via My_NodeType.
    main(sys.argv[0], "type", My_NodeType)
|
adelton/django | refs/heads/master | django/contrib/contenttypes/migrations/0001_initial.py | 585 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.contenttypes.models
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial schema for django.contrib.contenttypes: one ContentType row per
    # installed model, unique per (app_label, model).
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='ContentType',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('app_label', models.CharField(max_length=100)),
                ('model', models.CharField(max_length=100, verbose_name='python model class name')),
            ],
            options={
                'ordering': ('name',),
                'db_table': 'django_content_type',
                'verbose_name': 'content type',
                'verbose_name_plural': 'content types',
            },
            bases=(models.Model,),
            managers=[
                ('objects', django.contrib.contenttypes.models.ContentTypeManager()),
            ],
        ),
        # A model is uniquely identified by its app label + model name.
        migrations.AlterUniqueTogether(
            name='contenttype',
            unique_together=set([('app_label', 'model')]),
        ),
    ]
|
vmax-feihu/hue | refs/heads/master | desktop/core/ext-py/Django-1.6.10/tests/staticfiles_tests/models.py | 12133432 | |
smi96/django-blog_website | refs/heads/master | lib/python2.7/site-packages/django/conf/locale/fr/__init__.py | 12133432 | |
civisanalytics/ansible | refs/heads/civis | lib/ansible/module_utils/ismount.py | 298 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is based on
# Lib/posixpath.py of cpython
# It is licensed under the PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
#
# 1. This LICENSE AGREEMENT is between the Python Software Foundation
# ("PSF"), and the Individual or Organization ("Licensee") accessing and
# otherwise using this software ("Python") in source or binary form and
# its associated documentation.
#
# 2. Subject to the terms and conditions of this License Agreement, PSF hereby
# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
# analyze, test, perform and/or display publicly, prepare derivative works,
# distribute, and otherwise use Python alone or in any derivative version,
# provided, however, that PSF's License Agreement and PSF's notice of copyright,
# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
# 2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved"
# are retained in Python alone or in any derivative version prepared by Licensee.
#
# 3. In the event Licensee prepares a derivative work that is based on
# or incorporates Python or any part thereof, and wants to make
# the derivative work available to others as provided herein, then
# Licensee hereby agrees to include in any such work a brief summary of
# the changes made to Python.
#
# 4. PSF is making Python available to Licensee on an "AS IS"
# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
# INFRINGE ANY THIRD PARTY RIGHTS.
#
# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
#
# 6. This License Agreement will automatically terminate upon a material
# breach of its terms and conditions.
#
# 7. Nothing in this License Agreement shall be deemed to create any
# relationship of agency, partnership, or joint venture between PSF and
# Licensee. This License Agreement does not grant permission to use PSF
# trademarks or trade name in a trademark sense to endorse or promote
# products or services of Licensee, or any third party.
#
# 8. By copying, installing or otherwise using Python, Licensee
# agrees to be bound by the terms and conditions of this License
# Agreement.
import os
import stat
def ismount(path):
    """Test whether a path is a mount point.

    Clone of os.path.ismount (from cpython Lib/posixpath.py), fixed to solve
    https://github.com/ansible/ansible-modules-core/issues/2186 and to work
    around the non-fixed http://bugs.python.org/issue2466.

    This should be rewritten as soon as python issue 2466 is fixed; probably
    check for the python version and use os.path.ismount if fixed, to remove
    this function replace ismount( -> os.path.ismount( here.

    :param path: path to test
    :return: True if *path* is a mount point, False otherwise (including when
        *path* cannot be stat'ed, e.g. on "permission denied").
    """
    try:
        s1 = os.lstat(path)
    except OSError:
        # the OSError should be handled with more care:
        # it could be a "permission denied" but path is still a mount
        return False
    # A symlink can never be a mount point.
    # (Uses stat.S_ISLNK directly instead of the original os.path.stat.S_ISLNK,
    # which relied on posixpath's undocumented internal 'import stat'.)
    if stat.S_ISLNK(s1.st_mode):
        return False
    parent = os.path.realpath(os.path.join(path, os.path.pardir))
    try:
        s2 = os.lstat(parent)
    except OSError:
        # one should handle the returned OSError with more care to figure
        # out whether this is still a mount
        return False
    if s1.st_dev != s2.st_dev:
        return True     # path/.. on a different device as path
    if s1.st_ino == s2.st_ino:
        return True     # path/.. is the same i-node as path, i.e. path=='/'
    return False
|
AllanNozomu/tecsaladeaula | refs/heads/master | core/migrations/0020_auto__del_field_unit_activity.py | 2 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    # South schema migration: drops the Unit.activity foreign key.
    # The 'models' dict below is South's frozen ORM snapshot (auto-generated);
    # it must not be edited by hand.
    def forwards(self, orm):
        """Apply: drop the Unit.activity column."""
        # Deleting field 'Unit.activity'
        db.delete_column(u'core_unit', 'activity_id')
    def backwards(self, orm):
        """Revert: restore the nullable Unit.activity foreign key."""
        # Adding field 'Unit.activity'
        db.add_column(u'core_unit', 'activity',
                      self.gf('django.db.models.fields.related.ForeignKey')(related_name='units', null=True, to=orm['activities.Activity'], blank=True),
                      keep_default=False)
    models = {
        u'accounts.timtecuser': {
            'Meta': {'object_name': 'TimtecUser'},
            'accepted_terms': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'occupation': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'picture': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'site': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'core.class': {
            'Meta': {'object_name': 'Class'},
            'assistant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'professor_classes'", 'to': u"orm['accounts.TimtecUser']"}),
            'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Course']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'students': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'classes'", 'blank': 'True', 'to': u"orm['accounts.TimtecUser']"})
        },
        u'core.course': {
            'Meta': {'object_name': 'Course'},
            'abstract': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'application': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'home_position': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'home_published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'home_thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'intro_video': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Video']", 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'professors': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'professorcourse_set'", 'symmetrical': 'False', 'through': u"orm['core.CourseProfessor']", 'to': u"orm['accounts.TimtecUser']"}),
            'pronatec': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'publication': ('django.db.models.fields.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'requirement': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
            'start_date': ('django.db.models.fields.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '64'}),
            'structure': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'students': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'studentcourse_set'", 'symmetrical': 'False', 'through': u"orm['core.CourseStudent']", 'to': u"orm['accounts.TimtecUser']"}),
            'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'workload': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'core.courseprofessor': {
            'Meta': {'unique_together': "(('user', 'course'),)", 'object_name': 'CourseProfessor'},
            'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Course']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'role': ('django.db.models.fields.CharField', [], {'default': "'assistant'", 'max_length': '128'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.TimtecUser']"})
        },
        u'core.coursestudent': {
            'Meta': {'unique_together': "(('user', 'course'),)", 'object_name': 'CourseStudent'},
            'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Course']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.TimtecUser']"})
        },
        u'core.emailtemplate': {
            'Meta': {'object_name': 'EmailTemplate'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'subject': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'template': ('django.db.models.fields.TextField', [], {})
        },
        u'core.lesson': {
            'Meta': {'ordering': "['position']", 'object_name': 'Lesson'},
            'course': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lessons'", 'to': u"orm['core.Course']"}),
            'desc': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'notes': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
            'position': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '255', 'populate_from': "'name'", 'unique_with': '()'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'draft'", 'max_length': '64'})
        },
        u'core.professormessage': {
            'Meta': {'object_name': 'ProfessorMessage'},
            'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Course']", 'null': 'True'}),
            'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'message': ('django.db.models.fields.TextField', [], {}),
            'professor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.TimtecUser']"}),
            'subject': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'messages'", 'symmetrical': 'False', 'to': u"orm['accounts.TimtecUser']"})
        },
        u'core.studentprogress': {
            'Meta': {'unique_together': "(('user', 'unit'),)", 'object_name': 'StudentProgress'},
            'complete': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_access': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'unit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'progress'", 'to': u"orm['core.Unit']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.TimtecUser']"})
        },
        u'core.unit': {
            'Meta': {'ordering': "['lesson', 'position']", 'object_name': 'Unit'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lesson': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'units'", 'to': u"orm['core.Lesson']"}),
            'position': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'side_notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
            'video': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Video']", 'null': 'True', 'blank': 'True'})
        },
        u'core.video': {
            'Meta': {'object_name': 'Video'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'youtube_id': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }
    complete_apps = ['core']
petteyg/intellij-community | refs/heads/master | python/testData/inspections/PyMethodOverridingInspection/LessArgumentsPlusDefaults.py | 74 | class B:
def foo(self, arg1, arg2=None):
pass
class C(B):
def foo<warning descr="Signature of method 'C.foo()' does not match signature of base method in class 'B'">(self, arg1=None)</warning>: #fail
pass
|
sorz/sstp-server | refs/heads/master | sstpd/__init__.py | 1 | """A Secure Socket Tunneling Protocol (SSTP) server.
https://github.com/sorz/sstp-server
"""
__version__ = '0.6.0'
def run():
    """Console-script entry point: delegate to the package's main()."""
    # Imported lazily so that importing the package does not pull in the
    # full server implementation.
    from .__main__ import main
    main()
|
mlperf/training_results_v0.5 | refs/heads/master | v0.5.0/google/cloud_v3.8/resnet-tpuv3-8/code/resnet/model/tpu/models/experimental/qanet/preprocess.py | 5 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Preprocess data into TFRecords and construct pretrained embedding set.
If embedding_path is provided, then also filter down the vocab to only words
present in the dataset.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
from six import string_types
from six import text_type
import tensorflow as tf
import data
flags.DEFINE_string('input_path', '', 'Comma separated path to JSON files.')
flags.DEFINE_integer('max_shard_size', 11000, 'Number of examples per shard.')
flags.DEFINE_string('output_path', '/tmp', 'TFRecord path/name prefix. ')
flags.DEFINE_string('embedding_path', '', 'Path to embeddings in GLOVE format.')
FLAGS = flags.FLAGS
def get_tf_example(example):
  """Build a `tf.train.Example` from a tokenized, indexed example dict.

  Args:
    example: dict whose values are scalars or lists of strings/ints.
      Scalars are promoted to single-element lists.

  Returns:
    `tf.train.Example` object corresponding to the example.

  Raises:
    TypeError: if a value's element type is neither string nor int.
  """
  feature = {}
  for key, val in sorted(example.items()):
    # Promote scalars to lists so every value is handled uniformly.
    values = val if isinstance(val, list) else [val]
    first = values[0]
    if isinstance(first, string_types):
      # Transform unicode into bytes if necessary.
      if isinstance(first, text_type):
        values = [each.encode('utf-8') for each in values]
      feature[key] = tf.train.Feature(
          bytes_list=tf.train.BytesList(value=values))
    elif isinstance(first, int):
      feature[key] = tf.train.Feature(
          int64_list=tf.train.Int64List(value=values))
    else:
      raise TypeError('`%s` has an invalid type: %r' % (key, type(first)))
  return tf.train.Example(features=tf.train.Features(feature=feature))
def write_as_tf_records(out_dir, name, examples):
  """Dumps examples as TFRecord files.

  Output is sharded: a new file is started after every
  FLAGS.max_shard_size examples, named '<name>_<shard index padded to
  4 digits>.tfrecord'.

  Args:
    out_dir: Output directory.
    name: Name of this split.
    examples: a `list` of `dict`, where each dict is indexed example.
  """
  tf.gfile.MakeDirs(out_dir)
  writer = None
  counter = 0
  num_shards = 0
  for example in examples:
    if writer is None:
      # Lazily open the next shard file on first write, so an empty
      # `examples` list produces no files at all.
      path = os.path.join(
          out_dir, '{name}_{shards}.tfrecord'.format(
              name=name, shards=str(num_shards).zfill(4)))
      writer = tf.python_io.TFRecordWriter(path)
    tf_example = get_tf_example(example)
    writer.write(tf_example.SerializeToString())
    counter += 1
    if counter == FLAGS.max_shard_size:
      # Shard is full: close it; the next iteration opens a new one.
      counter = 0
      writer.close()
      writer = None
      num_shards += 1
  # Close the final, possibly partial, shard.
  if writer is not None:
    writer.close()
def main(argv):
  """Convert SQuAD JSON inputs into TFRecords and emit a filtered vocab.

  Reads each comma-separated path in FLAGS.input_path, writes one set of
  TFRecord shards per split, and (if FLAGS.embedding_path is set) writes a
  'vocab.vec' file containing only the embeddings for words seen in the
  data, with a fixed 'UNK' entry first.

  Args:
    argv: unused command line arguments.

  Raises:
    ValueError: if an input file is not named <split_name>-v1.1.json.
  """
  del argv  # Unused.
  paths = FLAGS.input_path.split(',')
  tf.logging.info('Loading data from: %s', paths)
  vocab = set()
  for path in paths:
    _, name = os.path.split(path)
    tf.logging.info(name)
    if '-v1.1.json' not in name:
      raise ValueError('Input must be named <split_name>-v1.1.json')
    name = name.split('-')[0]
    generator = data.squad_generator(path=path)
    examples = list(generator)
    write_as_tf_records(FLAGS.output_path, name, examples)
    for example in examples:
      for k in ['question_tokens', 'context_tokens']:
        for word in example[k]:
          # The decode to utf-8 is important to ensure the comparisons occur
          # properly when we filter below.
          vocab.add(word.decode('utf-8'))
    # Free the split's examples before loading the next one.
    del examples
  if FLAGS.embedding_path:
    tf.logging.info('Filtering down embeddings from: %s', FLAGS.embedding_path)
    filtered = data.get_embedding_map(FLAGS.embedding_path, word_subset=vocab)
    ordered = []
    if 'UNK' not in filtered:
      # We add a fixed UNK token to the vocab consisting of all zeros.
      # Get the embedding size by looking at one of the embeddings we already
      # have. next(iter(...)) works on both Python 2 and 3; the previous
      # filtered.keys()[0] breaks on Python 3 where keys() is a view.
      embed_size = len(filtered[next(iter(filtered))])
      ordered.append(('UNK', [0.0] * embed_size))
    else:
      ordered.append(('UNK', filtered['UNK']))
      del filtered['UNK']
    # items() instead of the Python 2-only iteritems(), matching the 2/3
    # compatibility intent shown by the six imports in this file.
    for k, v in filtered.items():
      ordered.append((k, v))
    tf.logging.info('Vocab filtered to %s tokens.', len(filtered))
    tf.logging.info('Writing out vocab.')
    with tf.gfile.Open(os.path.join(FLAGS.output_path, 'vocab.vec'), 'w') as f:
      for k, v in ordered:
        f.write('%s %s\n' % (k, ' '.join(str(x) for x in v)))
if __name__ == '__main__':
app.run(main)
|
guorendong/iridium-browser-ubuntu | refs/heads/ubuntu/precise | build/android/pylib/instrumentation/test_result.py | 87 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from pylib.base import base_test_result
class InstrumentationTestResult(base_test_result.BaseTestResult):
  """Result information for a single instrumentation test."""

  def __init__(self, full_name, test_type, start_date, dur, log=''):
    """Construct an InstrumentationTestResult object.

    Args:
      full_name: Full name of the test, typically 'ClassName#testName'.
      test_type: Type of the test result as defined in ResultType.
      start_date: Date in milliseconds when the test began running.
      dur: Duration of the test run in milliseconds.
      log: A string listing any errors.
    """
    super(InstrumentationTestResult, self).__init__(
        full_name, test_type, dur, log)
    # Split 'Class#test' into class and test components; a name with no
    # '#' separator is used for both.
    name_pieces = full_name.rsplit('#')
    if len(name_pieces) > 1:
      self._test_name = name_pieces[1]
      self._class_name = name_pieces[0]
    else:
      self._class_name = full_name
      self._test_name = full_name
    self._start_date = start_date
|
rafalo1333/kivy | refs/heads/master | kivy/modules/keybinding.py | 81 | '''Keybinding
==========
This module forces the mapping of some keys to functions:
* F11: Rotate the Window through 0, 90, 180 and 270 degrees
* Shift + F11: Switches between portrait and landscape on desktops
* F12: Take a screenshot
Note: this does't work if the application requests the keyboard beforehand.
Usage
-----
For normal module usage, please see the :mod:`~kivy.modules` documentation.
The Keybinding module, however, can also be imported and used just
like a normal python module. This has the added advantage of being
able to activate and deactivate the module programmatically::
from kivy.app import App
from kivy.uix.button import Button
from kivy.modules import keybinding
from kivy.core.window import Window
class Demo(App):
def build(self):
button = Button(text="Hello")
keybinding.start(Window, button)
return button
Demo().run()
To remove the Keybinding, you can do the following::
Keybinding.stop(Window, button)
'''
from kivy.utils import platform
__all__ = ('start', 'stop')
def _on_keyboard_handler(instance, key, scancode, codepoint, modifiers):
if key == 293 and modifiers == []: # F12
instance.screenshot()
elif key == 292 and modifiers == []: # F11
instance.rotation += 90
elif key == 292 and modifiers == ['shift']: # Shift + F11
if platform in ('win', 'linux', 'macosx'):
instance.rotation = 0
w, h = instance.size
w, h = h, w
instance.size = (w, h)
def start(win, ctx):
    '''Activate the keybindings by hooking *win*'s on_keyboard event.

    :param win: Window instance to bind to.
    :param ctx: module context (unused).
    '''
    win.bind(on_keyboard=_on_keyboard_handler)
def stop(win, ctx):
    '''Deactivate the keybindings by unhooking *win*'s on_keyboard event.

    :param win: Window instance to unbind from.
    :param ctx: module context (unused).
    '''
    win.unbind(on_keyboard=_on_keyboard_handler)
|
imply/chuu | refs/heads/master | ppapi/native_client/src/tools/srpcgen.py | 79 | #!/usr/bin/env python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Build "SRPC" interfaces from specifications.
SRPC interfaces consist of one or more interface classes, typically defined
in a set of .srpc files. The specifications are Python dictionaries, with a
top level 'name' element and an 'rpcs' element. The rpcs element is a list
containing a number of rpc methods, each of which has a 'name', an 'inputs',
and an 'outputs' element. These elements are lists of input or output
parameters, which are lists pairs containing a name and type. The set of
types includes all the SRPC basic types.
These SRPC specifications are used to generate a header file and either a
server or client stub file, as determined by the command line flag -s or -c.
"""
import getopt
import sys
import os
COPYRIGHT_AND_AUTOGEN_COMMENT = """\
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING
//
// Automatically generated code. See srpcgen.py
//
// NaCl Simple Remote Procedure Call interface abstractions.
"""
HEADER_INCLUDE_GUARD_START = """\
#ifndef %(include_guard)s
#define %(include_guard)s
"""
HEADER_INCLUDE_GUARD_END = """\
\n\n#endif // %(include_guard)s
"""
HEADER_FILE_INCLUDES = """\
#ifndef __native_client__
#include "native_client/src/include/portability.h"
#endif // __native_client__
%(EXTRA_INCLUDES)s
"""
SOURCE_FILE_INCLUDES = """\
#include "%(srpcgen_h)s"
#ifdef __native_client__
#ifndef UNREFERENCED_PARAMETER
#define UNREFERENCED_PARAMETER(P) do { (void) P; } while (0)
#endif // UNREFERENCED_PARAMETER
#else
#include "native_client/src/include/portability.h"
#endif // __native_client__
%(EXTRA_INCLUDES)s
"""
# For both .cc and .h files.
EXTRA_INCLUDES = [
'#include "native_client/src/shared/srpc/nacl_srpc.h"',
]
types = {'bool': ['b', 'bool', 'u.bval', ''],
'char[]': ['C', 'char*', 'arrays.carr', 'u.count'],
'double': ['d', 'double', 'u.dval', ''],
'double[]': ['D', 'double*', 'arrays.darr', 'u.count'],
'handle': ['h', 'NaClSrpcImcDescType', 'u.hval', ''],
'int32_t': ['i', 'int32_t', 'u.ival', ''],
'int32_t[]': ['I', 'int32_t*', 'arrays.iarr', 'u.count'],
'int64_t': ['l', 'int64_t', 'u.lval', ''],
'int64_t[]': ['L', 'int64_t', 'arrays.larr', 'u.count'],
'PP_Instance': ['i', 'PP_Instance', 'u.ival', ''],
'PP_Module': ['i', 'PP_Module', 'u.ival', ''],
'PP_Resource': ['i', 'PP_Resource', 'u.ival', ''],
'string': ['s', 'const char*', 'arrays.str', ''],
}
def AddInclude(name):
  """Adds an include to the include section of both .cc and .h files.

  Args:
    name: header path to be wrapped in an '#include "..."' line.
  """
  EXTRA_INCLUDES.append('#include "%s"' % name)
def HeaderFileIncludes():
  """Returns the include section of the generated .h file.

  Includes are sorted alphabetically. Note: sorts EXTRA_INCLUDES in place.
  """
  EXTRA_INCLUDES.sort()
  return HEADER_FILE_INCLUDES % {
      'EXTRA_INCLUDES': '\n'.join(EXTRA_INCLUDES),
      }
def SourceFileIncludes(srpcgen_h_file):
  """Returns the include section of the generated .cc file.

  Includes are sorted alphabetically. Note: sorts EXTRA_INCLUDES in place.

  Args:
    srpcgen_h_file: path of the generated header, used in its #include line.
  """
  EXTRA_INCLUDES.sort()
  return SOURCE_FILE_INCLUDES % {
      'EXTRA_INCLUDES': '\n'.join(EXTRA_INCLUDES),
      'srpcgen_h': srpcgen_h_file
      }
def PrintHeaderFileTop(output, include_guard):
  """Prints the header of the .h file including copyright,
  header comment, include guard and includes.

  Args:
    output: writable file object for the generated .h file.
    include_guard: token used as the multiple-inclusion guard.
  """
  print >>output, COPYRIGHT_AND_AUTOGEN_COMMENT
  print >>output, HEADER_INCLUDE_GUARD_START % {'include_guard': include_guard}
  print >>output, HeaderFileIncludes()
def PrintHeaderFileBottom(output, include_guard):
  """Prints the footer of the .h file: closes the include guard.

  Args:
    output: writable file object for the generated .h file.
    include_guard: token used as the multiple-inclusion guard.
  """
  print >>output, HEADER_INCLUDE_GUARD_END % {'include_guard': include_guard}
def PrintSourceFileTop(output, srpcgen_h_file):
  """Prints the header of the .cc file including copyright,
  header comment and includes.

  Args:
    output: writable file object for the generated .cc file.
    srpcgen_h_file: path of the generated header, used in its #include line.
  """
  print >>output, COPYRIGHT_AND_AUTOGEN_COMMENT
  print >>output, SourceFileIncludes(srpcgen_h_file)
def CountName(name):
  """Returns the name of the auxiliary count member used for array types."""
  return '{0}_bytes'.format(name)
def FormatRpcPrototype(is_server, class_name, indent, rpc):
  """Returns a string for the prototype of an individual RPC.

  Args:
    is_server: truthy to emit the server-side prototype ('void', taking
        NaClSrpcRpc/NaClSrpcClosure); falsy for the client side
        ('NaClSrpcError', taking NaClSrpcChannel).
    class_name: text placed before the RPC name (e.g. 'Foo::' or '').
    indent: string prepended to each continuation line.
    rpc: rpc spec dict with 'name', 'inputs' and 'outputs' keys.
  """

  def FormatArgs(is_output, args):
    """Returns a string containing the formatted arguments for an RPC."""

    def FormatArg(is_output, arg):
      """Returns a string containing a formatted argument to an RPC."""
      # Output parameters are passed by pointer.
      if is_output:
        suffix = '* '
      else:
        suffix = ' '
      s = ''
      type_info = types[arg[1]]
      if type_info[3]:
        # Array-typed argument: an auxiliary nacl_abi_size_t count
        # parameter precedes the data pointer.
        s += 'nacl_abi_size_t%s%s, %s %s' % (suffix,
                                             CountName(arg[0]),
                                             type_info[1],
                                             arg[0])
      else:
        s += '%s%s%s' % (type_info[1], suffix, arg[0])
      return s

    s = ''
    for arg in args:
      s += ',\n    %s%s' % (indent, FormatArg(is_output, arg))
    return s

  if is_server:
    ret_type = 'void'
  else:
    ret_type = 'NaClSrpcError'
  s = '%s %s%s(\n' % (ret_type, class_name, rpc['name'])
  # Until SRPC uses RPC/Closure on the client side, these must be different.
  if is_server:
    s += '    %sNaClSrpcRpc* rpc,\n' % indent
    s += '    %sNaClSrpcClosure* done' % indent
  else:
    s += '    %sNaClSrpcChannel* channel' % indent
  s += '%s' % FormatArgs(False, rpc['inputs'])
  s += '%s' % FormatArgs(True, rpc['outputs'])
  s += ')'
  return s
def PrintHeaderFile(output, is_server, guard_name, interface_name, specs):
  """Prints out the header file containing the prototypes for the RPCs.

  Args:
    output: writable file object for the generated .h file.
    is_server: True to declare server stubs, False to declare client proxies.
    guard_name: include-guard token for the header.
    interface_name: name of the class holding the srpc_methods table
        (emitted only for the server).
    specs: list of interface spec dicts ('name' plus 'rpcs').
  """
  PrintHeaderFileTop(output, guard_name)
  s = ''
  # iterate over all the specified interfaces
  if is_server:
    suffix = 'Server'
  else:
    suffix = 'Client'
  for spec in specs:
    class_name = spec['name'] + suffix
    rpcs = spec['rpcs']
    s += 'class %s {\n public:\n' % class_name
    for rpc in rpcs:
      s += '  static %s;\n' % FormatRpcPrototype(is_server, '', '  ', rpc)
    # Private constructor/copy/assignment: the class is a pure namespace
    # of static methods and must never be instantiated.
    s += '\n private:\n  %s();\n' % class_name
    s += '  %s(const %s&);\n' % (class_name, class_name)
    s += '  void operator=(const %s);\n' % class_name
    s += '};  // class %s\n\n' % class_name
  if is_server:
    s += 'class %s {\n' % interface_name
    s += ' public:\n'
    s += '  static NaClSrpcHandlerDesc srpc_methods[];\n'
    s += '};  // class %s' % interface_name
  print >>output, s
  PrintHeaderFileBottom(output, guard_name)
def PrintServerFile(output, header_name, interface_name, specs):
  """Print the server (stub) .cc file.

  Emits one static dispatcher per RPC inside an anonymous namespace, plus
  the srpc_methods table mapping signature strings to those dispatchers.

  Args:
    output: writable file object for the generated .cc file.
    header_name: path of the generated header to #include.
    interface_name: class whose srpc_methods member is defined.
    specs: list of interface spec dicts ('name' plus 'rpcs').
  """

  def FormatDispatchPrototype(indent, rpc):
    """Format the prototype of a dispatcher method."""
    s = '%sstatic void %sDispatcher(\n' % (indent, rpc['name'])
    s += '%s    NaClSrpcRpc* rpc,\n' % indent
    s += '%s    NaClSrpcArg** inputs,\n' % indent
    s += '%s    NaClSrpcArg** outputs,\n' % indent
    s += '%s    NaClSrpcClosure* done\n' % indent
    s += '%s)' % indent
    return s

  def FormatMethodString(rpc):
    """Format the SRPC text string for a single rpc method."""

    def FormatTypes(args):
      # Concatenate the one-character SRPC type codes for the signature.
      s = ''
      for arg in args:
        s += types[arg[1]][0]
      return s

    s = '  { "%s:%s:%s", %sDispatcher },\n' % (rpc['name'],
                                               FormatTypes(rpc['inputs']),
                                               FormatTypes(rpc['outputs']),
                                               rpc['name'])
    return s

  def FormatCall(class_name, indent, rpc):
    """Format a call from a dispatcher method to its stub."""

    def FormatArgs(is_output, args):
      """Format the arguments passed to the stub."""

      def FormatArg(is_output, num, arg):
        """Format an argument passed to a stub."""
        # Outputs are passed by address; inputs by value.
        if is_output:
          prefix = 'outputs[' + str(num) + ']->'
          addr_prefix = '&('
          addr_suffix = ')'
        else:
          prefix = 'inputs[' + str(num) + ']->'
          addr_prefix = ''
          addr_suffix = ''
        type_info = types[arg[1]]
        if type_info[3]:
          # Array-typed argument: pass the count member then the data.
          s = '%s%s%s%s, %s%s' % (addr_prefix,
                                  prefix,
                                  type_info[3],
                                  addr_suffix,
                                  prefix,
                                  type_info[2])
        else:
          s = '%s%s%s%s' % (addr_prefix, prefix, type_info[2], addr_suffix)
        return s
      # end FormatArg

      s = ''
      num = 0
      for arg in args:
        s += ',\n%s    %s' % (indent, FormatArg(is_output, num, arg))
        num += 1
      return s
    # end FormatArgs

    s = '%s::%s(\n%s    rpc,\n' % (class_name, rpc['name'], indent)
    s += '%s    done' % indent
    s += FormatArgs(False, rpc['inputs'])
    s += FormatArgs(True, rpc['outputs'])
    s += '\n%s)' % indent
    return s
  # end FormatCall

  PrintSourceFileTop(output, header_name)
  s = 'namespace {\n\n'
  for spec in specs:
    class_name = spec['name'] + 'Server'
    rpcs = spec['rpcs']
    for rpc in rpcs:
      s += '%s {\n' % FormatDispatchPrototype('', rpc)
      # Silence unused-parameter warnings when an RPC has no inputs/outputs.
      if rpc['inputs'] == []:
        s += '  UNREFERENCED_PARAMETER(inputs);\n'
      if rpc['outputs'] == []:
        s += '  UNREFERENCED_PARAMETER(outputs);\n'
      s += '  %s;\n' % FormatCall(class_name, '  ', rpc)
      s += '}\n\n'
  s += '}  // namespace\n\n'
  s += 'NaClSrpcHandlerDesc %s::srpc_methods[] = {\n' % interface_name
  for spec in specs:
    class_name = spec['name'] + 'Server'
    rpcs = spec['rpcs']
    for rpc in rpcs:
      s += FormatMethodString(rpc)
  # NULL sentinel terminates the method table.
  s += '  { NULL, NULL }\n};\n'
  print >>output, s
def PrintClientFile(output, header_name, specs, thread_check):
  """Prints the client (proxy) .cc file.

  Args:
    output: writable file object for the generated .cc file.
    header_name: path of the generated header to #include.
    specs: list of interface spec dicts ('name' plus 'rpcs').
    thread_check: if True, emit a main-thread VCHECK into each proxy.
  """

  def InstanceInputArg(rpc):
    """Returns the name of the PP_Instance arg or None if there is none."""
    for arg in rpc['inputs']:
      if arg[1] == 'PP_Instance':
        return arg[0]
    return None

  def DeadNexeHandling(rpc, retval):
    """Generates the code necessary to handle death of a nexe during the rpc
    call. This is only possible if a PP_Instance arg is present; otherwise
    no handling code is generated."""
    instance = InstanceInputArg(rpc);
    if instance is not None:
      check = ('  if (%s == NACL_SRPC_RESULT_INTERNAL)\n'
               '    ppapi_proxy::CleanUpAfterDeadNexe(%s);\n')
      return check % (retval, instance)
    return ''  # No handling

  def FormatCall(rpc):
    """Format a call to the generic dispatcher, NaClSrpcInvokeBySignature."""

    def FormatTypes(args):
      """Format the type signature string for either inputs or outputs."""
      s = ''
      for arg in args:
        s += types[arg[1]][0]
      return s

    def FormatArgs(args):
      """Format the arguments for the call to the generic dispatcher."""

      def FormatArg(arg):
        """Format a single argument for the call to the generic dispatcher."""
        s = ''
        type_info = types[arg[1]]
        if type_info[3]:
          # Array-typed argument: the count member is passed first.
          s += '%s, ' % CountName(arg[0])
        s += arg[0]
        return s
      # end FormatArg

      s = ''
      for arg in args:
        s += ',\n      %s' % FormatArg(arg)
      return s
    #end FormatArgs

    s = '(\n      channel,\n      "%s:%s:%s"' % (rpc['name'],
                                                 FormatTypes(rpc['inputs']),
                                                 FormatTypes(rpc['outputs']))
    s += FormatArgs(rpc['inputs'])
    s += FormatArgs(rpc['outputs']) + '\n  )'
    return s
  # end FormatCall

  # We need this to handle dead nexes.
  if header_name.startswith('trusted'):
    AddInclude('native_client/src/shared/ppapi_proxy/browser_globals.h')
  if thread_check:
    AddInclude('native_client/src/shared/ppapi_proxy/plugin_globals.h')
    AddInclude('ppapi/c/ppb_core.h')
    AddInclude('native_client/src/shared/platform/nacl_check.h')
  PrintSourceFileTop(output, header_name)
  s = ''
  for spec in specs:
    class_name = spec['name'] + 'Client'
    rpcs = spec['rpcs']
    for rpc in rpcs:
      # NOTE(review): '' is passed where FormatRpcPrototype expects
      # is_server; it is falsy so the client prototype is produced, but
      # an explicit False would be clearer — confirm before changing.
      s += '%s {\n' % FormatRpcPrototype('', class_name + '::', '', rpc)
      if thread_check and rpc['name'] not in ['PPB_GetInterface',
                                              'PPB_Core_CallOnMainThread']:
        error = '"%s: PPAPI calls are not supported off the main thread\\n"'
        s += '  VCHECK(ppapi_proxy::PPBCoreInterface()->IsMainThread(),\n'
        s += '         (%s,\n' % error
        s += '          __FUNCTION__));\n'
      s += '  NaClSrpcError retval;\n'
      s += '  retval = NaClSrpcInvokeBySignature%s;\n' % FormatCall(rpc)
      if header_name.startswith('trusted'):
        s += DeadNexeHandling(rpc, 'retval')
      s += '  return retval;\n'
      s += '}\n\n'
  print >>output, s
def MakePath(name):
  """Best-effort creation of the directory portion of the path *name*."""
  components = name.split(os.sep)
  directory = os.sep.join(components[:-1])
  try:
    os.makedirs(directory)
  except OSError:
    # The directory already exists (or cannot be created); either way the
    # caller proceeds and any real problem surfaces when the file is opened.
    return
def main(argv):
usage = 'Usage: srpcgen.py <-c | -s> [--include=<name>] [--ppapi]'
usage = usage + ' <iname> <gname> <.h> <.cc> <specs>'
mode = None
ppapi = False
thread_check = False
try:
long_opts = ['include=', 'ppapi', 'thread-check']
opts, pargs = getopt.getopt(argv[1:], 'cs', long_opts)
except getopt.error, e:
print >>sys.stderr, 'Illegal option:', str(e)
print >>sys.stderr, usage
return 1
# Get the class name for the interface.
interface_name = pargs[0]
# Get the name for the token used as a multiple inclusion guard in the header.
include_guard_name = pargs[1]
# Get the name of the header file to be generated.
h_file_name = pargs[2]
MakePath(h_file_name)
# Note we open output files in binary mode so that on Windows the files
# will always get LF line-endings rather than CRLF.
h_file = open(h_file_name, 'wb')
# Get the name of the source file to be generated. Depending upon whether
# -c or -s is generated, this file contains either client or server methods.
cc_file_name = pargs[3]
MakePath(cc_file_name)
cc_file = open(cc_file_name, 'wb')
# The remaining arguments are the spec files to be compiled.
spec_files = pargs[4:]
for opt, val in opts:
if opt == '-c':
mode = 'client'
elif opt == '-s':
mode = 'server'
elif opt == '--include':
h_file_name = val
elif opt == '--ppapi':
ppapi = True
elif opt == '--thread-check':
thread_check = True
if ppapi:
AddInclude("ppapi/c/pp_instance.h")
AddInclude("ppapi/c/pp_module.h")
AddInclude("ppapi/c/pp_resource.h")
# Convert to forward slash paths if needed
h_file_name = "/".join(h_file_name.split("\\"))
# Verify we picked server or client mode
if not mode:
print >>sys.stderr, 'Neither -c nor -s specified'
usage()
return 1
# Combine the rpc specs from spec_files into rpcs.
specs = []
for spec_file in spec_files:
code_obj = compile(open(spec_file, 'r').read(), 'file', 'eval')
specs.append(eval(code_obj))
# Print out the requested files.
if mode == 'client':
PrintHeaderFile(h_file, False, include_guard_name, interface_name, specs)
PrintClientFile(cc_file, h_file_name, specs, thread_check)
elif mode == 'server':
PrintHeaderFile(h_file, True, include_guard_name, interface_name, specs)
PrintServerFile(cc_file, h_file_name, interface_name, specs)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
ceph/gtest | refs/heads/master | scripts/gen_gtest_pred_impl.py | 2538 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""gen_gtest_pred_impl.py v0.1
Generates the implementation of Google Test predicate assertions and
accompanying tests.
Usage:
gen_gtest_pred_impl.py MAX_ARITY
where MAX_ARITY is a positive integer.
The command generates the implementation of up-to MAX_ARITY-ary
predicate assertions, and writes it to file gtest_pred_impl.h in the
directory where the script is. It also generates the accompanying
unit test in file gtest_pred_impl_unittest.cc.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import sys
import time
# Where this script is.
SCRIPT_DIR = os.path.dirname(sys.argv[0])
# Where to store the generated header.
HEADER = os.path.join(SCRIPT_DIR, '../include/gtest/gtest_pred_impl.h')
# Where to store the generated unit test.
UNIT_TEST = os.path.join(SCRIPT_DIR, '../test/gtest_pred_impl_unittest.cc')
def HeaderPreamble(n):
  """Returns the preamble for the header file.

  Args:
    n: the maximum arity of the predicate macros to be generated.

  Returns:
    The license header, file comment and GTEST_ASSERT_ definition as one
    string, with today's date, the generating command and n interpolated.
  """

  # A map that defines the values used in the preamble template.
  DEFS = {
    'today' : time.strftime('%m/%d/%Y'),
    'year' : time.strftime('%Y'),
    'command' : '%s %s' % (os.path.basename(sys.argv[0]), n),
    'n' : n
    }

  return (
      """// Copyright 2006, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// This file is AUTOMATICALLY GENERATED on %(today)s by command
// '%(command)s'.  DO NOT EDIT BY HAND!
//
// Implements a family of generic predicate assertion macros.

#ifndef GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
#define GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_

// Makes sure this header is not included before gtest.h.
#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
# error Do not include gtest_pred_impl.h directly.  Include gtest.h instead.
#endif  // GTEST_INCLUDE_GTEST_GTEST_H_

// This header implements a family of generic predicate assertion
// macros:
//
//   ASSERT_PRED_FORMAT1(pred_format, v1)
//   ASSERT_PRED_FORMAT2(pred_format, v1, v2)
//   ...
//
// where pred_format is a function or functor that takes n (in the
// case of ASSERT_PRED_FORMATn) values and their source expression
// text, and returns a testing::AssertionResult.  See the definition
// of ASSERT_EQ in gtest.h for an example.
//
// If you don't care about formatting, you can use the more
// restrictive version:
//
//   ASSERT_PRED1(pred, v1)
//   ASSERT_PRED2(pred, v1, v2)
//   ...
//
// where pred is an n-ary function or functor that returns bool,
// and the values v1, v2, ..., must support the << operator for
// streaming to std::ostream.
//
// We also define the EXPECT_* variations.
//
// For now we only support predicates whose arity is at most %(n)s.
// Please email googletestframework@googlegroups.com if you need
// support for higher arities.

// GTEST_ASSERT_ is the basic statement to which all of the assertions
// in this file reduce.  Don't use this in your code.

#define GTEST_ASSERT_(expression, on_failure) \\
  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\
  if (const ::testing::AssertionResult gtest_ar = (expression)) \\
    ; \\
  else \\
    on_failure(gtest_ar.failure_message())
""" % DEFS)
def Arity(n):
  """Returns the English name of the given arity, or None if n is negative."""
  small_names = ['nullary', 'unary', 'binary', 'ternary']
  if n < 0:
    return None
  if n < len(small_names):
    return small_names[n]
  return '%s-ary' % n
def Title(word):
  """Returns *word* with only its first character upper-cased.

  Unlike str.title(), later characters are untouched: Title('4-ary') is
  '4-ary' while '4-ary'.title() is '4-Ary'.
  """
  head, tail = word[0], word[1:]
  return head.upper() + tail
def OneTo(n):
  """Returns the list [1, 2, 3, ..., n]."""
  # Stop is n + 1 so that n itself is included.
  return range(1, n + 1)
def Iter(n, format, sep=''):
  """Joins n instantiations of *format*, one per index from 1 to n.

  *format* may contain 0 or more '%s' specs; in copy i every spec is
  replaced by i. The copies are joined with *sep*.

  Example:

  Iter(3, 'v%s', sep=', ') returns 'v1, v2, v3'.
  """
  # How many '%s' specs are in format?
  spec_count = format.count('%s')
  rendered = [format % (spec_count * (i,)) for i in OneTo(n)]
  return sep.join(rendered)
def ImplementationForArity(n):
  """Returns the implementation of n-ary predicate assertions.

  Emits the AssertPredNHelper template plus the GTEST_PRED_FORMATn_/
  GTEST_PREDn_ internal macros and the public EXPECT_/ASSERT_ variants.
  """

  # A map the defines the values used in the implementation template.
  DEFS = {
    'n' : str(n),
    'vs' : Iter(n, 'v%s', sep=', '),
    'vts' : Iter(n, '#v%s', sep=', '),
    'arity' : Arity(n),
    'Arity' : Title(Arity(n))
    }

  impl = """

// Helper function for implementing {EXPECT|ASSERT}_PRED%(n)s.  Don't use
// this in your code.
template <typename Pred""" % DEFS

  impl += Iter(n, """,
          typename T%s""")

  impl += """>
AssertionResult AssertPred%(n)sHelper(const char* pred_text""" % DEFS

  impl += Iter(n, """,
                                  const char* e%s""")

  impl += """,
                                  Pred pred"""

  impl += Iter(n, """,
                                  const T%s& v%s""")

  impl += """) {
  if (pred(%(vs)s)) return AssertionSuccess();

""" % DEFS

  impl += '  return AssertionFailure() << pred_text << "("'

  impl += Iter(n, """
                            << e%s""", sep=' << ", "')

  impl += ' << ") evaluates to false, where"'

  impl += Iter(n, """
                            << "\\n" << e%s << " evaluates to " << v%s""")

  impl += """;
}

// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
// Don't use this in your code.
#define GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, on_failure)\\
  GTEST_ASSERT_(pred_format(%(vts)s, %(vs)s), \\
                on_failure)

// Internal macro for implementing {EXPECT|ASSERT}_PRED%(n)s.  Don't use
// this in your code.
#define GTEST_PRED%(n)s_(pred, %(vs)s, on_failure)\\
  GTEST_ASSERT_(::testing::AssertPred%(n)sHelper(#pred""" % DEFS

  impl += Iter(n, """, \\
                                             #v%s""")

  impl += """, \\
                                             pred"""

  impl += Iter(n, """, \\
                                             v%s""")

  impl += """), on_failure)

// %(Arity)s predicate assertion macros.
#define EXPECT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
  GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_NONFATAL_FAILURE_)
#define EXPECT_PRED%(n)s(pred, %(vs)s) \\
  GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_NONFATAL_FAILURE_)
#define ASSERT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
  GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_FATAL_FAILURE_)
#define ASSERT_PRED%(n)s(pred, %(vs)s) \\
  GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_FATAL_FAILURE_)

""" % DEFS

  return impl
def HeaderPostamble():
  """Returns the postamble for the header file (closes the include guard)."""
  return """

#endif  // GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
"""
def GenerateFile(path, content):
  """Given a file path and a content string, overwrites it with the
  given content."""
  print 'Updating file %s . . .' % path
  f = file(path, 'w+')
  # Trailing comma suppresses the extra newline the print statement
  # would otherwise append after content.
  print >>f, content,
  f.close()
  print 'File %s has been updated.' % path
def GenerateHeader(n):
  """Given the maximum arity n, updates the header file that implements
  the predicate assertions."""
  # Preamble, then one implementation section per arity from 1 to n,
  # then the closing postamble.
  GenerateFile(HEADER,
               HeaderPreamble(n)
               + ''.join([ImplementationForArity(i) for i in OneTo(n)])
               + HeaderPostamble())
def UnitTestPreamble():
    """Returns the preamble for the unit test file."""
    # A map that defines the values used in the preamble template.
    # The generated file embeds the generation date and the exact command
    # line so readers know how (and how not) to regenerate it.
    DEFS = {
        'today' : time.strftime('%m/%d/%Y'),
        'year' : time.strftime('%Y'),
        'command' : '%s %s' % (os.path.basename(sys.argv[0]), sys.argv[1]),
    }
    # License header, includes, and the user-defined Bool type used by the
    # generated predicate tests.
    return (
        """// Copyright 2006, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file is AUTOMATICALLY GENERATED on %(today)s by command
// '%(command)s'. DO NOT EDIT BY HAND!
// Regression test for gtest_pred_impl.h
//
// This file is generated by a script and quite long. If you intend to
// learn how Google Test works by reading its unit tests, read
// gtest_unittest.cc instead.
//
// This is intended as a regression test for the Google Test predicate
// assertions. We compile it as part of the gtest_unittest target
// only to keep the implementation tidy and compact, as it is quite
// involved to set up the stage for testing Google Test using Google
// Test itself.
//
// Currently, gtest_unittest takes ~11 seconds to run in the testing
// daemon. In the future, if it grows too large and needs much more
// time to finish, we should consider separating this file into a
// stand-alone regression test.
#include <iostream>
#include "gtest/gtest.h"
#include "gtest/gtest-spi.h"
// A user-defined data type.
struct Bool {
explicit Bool(int val) : value(val != 0) {}
bool operator>(int n) const { return value > Bool(n).value; }
Bool operator+(const Bool& rhs) const { return Bool(value + rhs.value); }
bool operator==(const Bool& rhs) const { return value == rhs.value; }
bool value;
};
// Enables Bool to be used in assertions.
std::ostream& operator<<(std::ostream& os, const Bool& x) {
return os << (x.value ? "true" : "false");
}
""" % DEFS)
def TestsForArity(n):
    """Returns the tests for n-ary predicate assertions."""
    # A map that defines the values used in the template for the tests.
    DEFS = {
        'n' : n,
        'es' : Iter(n, 'e%s', sep=', '),
        'vs' : Iter(n, 'v%s', sep=', '),
        'vts' : Iter(n, '#v%s', sep=', '),
        'tvs' : Iter(n, 'T%s v%s', sep=', '),
        'int_vs' : Iter(n, 'int v%s', sep=', '),
        'Bool_vs' : Iter(n, 'Bool v%s', sep=', '),
        'types' : Iter(n, 'typename T%s', sep=', '),
        'v_sum' : Iter(n, 'v%s', sep=' + '),
        'arity' : Arity(n),
        'Arity' : Title(Arity(n)),
    }
    # Sample n-ary predicates (function and functor forms) that serve as
    # the subjects of the generated tests.
    tests = (
        """// Sample functions/functors for testing %(arity)s predicate assertions.
// A %(arity)s predicate function.
template <%(types)s>
bool PredFunction%(n)s(%(tvs)s) {
return %(v_sum)s > 0;
}
// The following two functions are needed to circumvent a bug in
// gcc 2.95.3, which sometimes has problem with the above template
// function.
bool PredFunction%(n)sInt(%(int_vs)s) {
return %(v_sum)s > 0;
}
bool PredFunction%(n)sBool(%(Bool_vs)s) {
return %(v_sum)s > 0;
}
""" % DEFS)
    tests += """
// A %(arity)s predicate functor.
struct PredFunctor%(n)s {
template <%(types)s>
bool operator()(""" % DEFS
    tests += Iter(n, 'const T%s& v%s', sep=""",
""")
    tests += """) {
return %(v_sum)s > 0;
}
};
""" % DEFS
    tests += """
// A %(arity)s predicate-formatter function.
template <%(types)s>
testing::AssertionResult PredFormatFunction%(n)s(""" % DEFS
    tests += Iter(n, 'const char* e%s', sep=""",
""")
    tests += Iter(n, """,
const T%s& v%s""")
    tests += """) {
if (PredFunction%(n)s(%(vs)s))
return testing::AssertionSuccess();
return testing::AssertionFailure()
<< """ % DEFS
    tests += Iter(n, 'e%s', sep=' << " + " << ')
    tests += """
<< " is expected to be positive, but evaluates to "
<< %(v_sum)s << ".";
}
""" % DEFS
    tests += """
// A %(arity)s predicate-formatter functor.
struct PredFormatFunctor%(n)s {
template <%(types)s>
testing::AssertionResult operator()(""" % DEFS
    tests += Iter(n, 'const char* e%s', sep=""",
""")
    tests += Iter(n, """,
const T%s& v%s""")
    tests += """) const {
return PredFormatFunction%(n)s(%(es)s, %(vs)s);
}
};
""" % DEFS
    # The shared test fixture: counts how often each argument was evaluated
    # and whether the test body ran to completion.
    tests += """
// Tests for {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
class Predicate%(n)sTest : public testing::Test {
protected:
virtual void SetUp() {
expected_to_finish_ = true;
finished_ = false;""" % DEFS
    tests += """
""" + Iter(n, 'n%s_ = ') + """0;
}
"""
    tests += """
virtual void TearDown() {
// Verifies that each of the predicate's arguments was evaluated
// exactly once."""
    tests += ''.join(["""
EXPECT_EQ(1, n%s_) <<
"The predicate assertion didn't evaluate argument %s "
"exactly once.";""" % (i, i + 1) for i in OneTo(n)])
    tests += """
// Verifies that the control flow in the test function is expected.
if (expected_to_finish_ && !finished_) {
FAIL() << "The predicate assertion unexpactedly aborted the test.";
} else if (!expected_to_finish_ && finished_) {
FAIL() << "The failed predicate assertion didn't abort the test "
"as expected.";
}
}
// true iff the test function is expected to run to finish.
static bool expected_to_finish_;
// true iff the test function did run to finish.
static bool finished_;
""" % DEFS
    tests += Iter(n, """
static int n%s_;""")
    tests += """
};
bool Predicate%(n)sTest::expected_to_finish_;
bool Predicate%(n)sTest::finished_;
""" % DEFS
    # %%(n)s survives the inner Iter() %-formatting so the outer `% DEFS`
    # can still substitute the arity.
    tests += Iter(n, """int Predicate%%(n)sTest::n%s_;
""") % DEFS
    tests += """
typedef Predicate%(n)sTest EXPECT_PRED_FORMAT%(n)sTest;
typedef Predicate%(n)sTest ASSERT_PRED_FORMAT%(n)sTest;
typedef Predicate%(n)sTest EXPECT_PRED%(n)sTest;
typedef Predicate%(n)sTest ASSERT_PRED%(n)sTest;
""" % DEFS

    def GenTest(use_format, use_assert, expect_failure,
                use_functor, use_user_type):
        """Returns the test for a predicate assertion macro.

        Args:
          use_format:     true iff the assertion is a *_PRED_FORMAT*.
          use_assert:     true iff the assertion is a ASSERT_*.
          expect_failure: true iff the assertion is expected to fail.
          use_functor:    true iff the first argument of the assertion is
                          a functor (as opposed to a function)
          use_user_type:  true iff the predicate functor/function takes
                          argument(s) of a user-defined type.

        Example:
          GenTest(1, 0, 0, 1, 0) returns a test that tests the behavior
          of a successful EXPECT_PRED_FORMATn() that takes a functor
          whose arguments have built-in types."""
        if use_assert:
            assrt = 'ASSERT'  # 'assert' is reserved, so we cannot use
                              # that identifier here.
        else:
            assrt = 'EXPECT'
        assertion = assrt + '_PRED'
        if use_format:
            pred_format = 'PredFormat'
            assertion += '_FORMAT'
        else:
            pred_format = 'Pred'
        assertion += '%(n)s' % DEFS
        if use_functor:
            pred_format_type = 'functor'
            pred_format += 'Functor%(n)s()'
        else:
            pred_format_type = 'function'
            pred_format += 'Function%(n)s'
            # Only plain (non-formatter) functions have the gcc-2.95
            # Int/Bool workaround variants generated above.
            if not use_format:
                if use_user_type:
                    pred_format += 'Bool'
                else:
                    pred_format += 'Int'
        test_name = pred_format_type.title()
        if use_user_type:
            arg_type = 'user-defined type (Bool)'
            test_name += 'OnUserType'
            if expect_failure:
                arg = 'Bool(n%s_++)'
            else:
                arg = 'Bool(++n%s_)'
        else:
            arg_type = 'built-in type (int)'
            test_name += 'OnBuiltInType'
            if expect_failure:
                arg = 'n%s_++'
            else:
                arg = '++n%s_'
        if expect_failure:
            successful_or_failed = 'failed'
            expected_or_not = 'expected.'
            test_name += 'Failure'
        else:
            successful_or_failed = 'successful'
            expected_or_not = 'UNEXPECTED!'
            test_name += 'Success'
        # A map that defines the values used in the test template.
        defs = DEFS.copy()
        defs.update({
            'assert' : assrt,
            'assertion' : assertion,
            'test_name' : test_name,
            'pf_type' : pred_format_type,
            'pf' : pred_format,
            'arg_type' : arg_type,
            'arg' : arg,
            'successful' : successful_or_failed,
            'expected' : expected_or_not,
        })
        test = """
// Tests a %(successful)s %(assertion)s where the
// predicate-formatter is a %(pf_type)s on a %(arg_type)s.
TEST_F(%(assertion)sTest, %(test_name)s) {""" % defs
        indent = (len(assertion) + 3)*' '
        extra_indent = ''
        if expect_failure:
            extra_indent = ' '
            if use_assert:
                test += """
expected_to_finish_ = false;
EXPECT_FATAL_FAILURE({ // NOLINT"""
            else:
                test += """
EXPECT_NONFATAL_FAILURE({ // NOLINT"""
        test += '\n' + extra_indent + """ %(assertion)s(%(pf)s""" % defs
        test = test % defs
        # 'arg' still contains an Iter() '%s' placeholder for the argument
        # index after the '%(arg)s' % defs substitution above.
        test += Iter(n, ',\n' + indent + extra_indent + '%(arg)s' % defs)
        test += ');\n' + extra_indent + ' finished_ = true;\n'
        if expect_failure:
            test += ' }, "");\n'
        test += '}\n'
        return test

    # Generates tests for all 2**5 = 32 combinations.
    tests += ''.join([GenTest(use_format, use_assert, expect_failure,
                              use_functor, use_user_type)
                      for use_format in [0, 1]
                      for use_assert in [0, 1]
                      for expect_failure in [0, 1]
                      for use_functor in [0, 1]
                      for use_user_type in [0, 1]
                      ])
    return tests
def UnitTestPostamble():
    """Returns the postamble for the tests (currently nothing)."""
    # The generated unit test needs no trailing boilerplate.
    return ''
def GenerateUnitTest(n):
    """Returns the tests for up-to n-ary predicate assertions."""
    # Build the whole test file in memory, then write it out in one call.
    body = [UnitTestPreamble()]
    body.extend(TestsForArity(i) for i in OneTo(n))
    body.append(UnitTestPostamble())
    GenerateFile(UNIT_TEST, ''.join(body))
def _Main():
    """The entry point of the script.  Generates the header file and its
    unit test."""
    # Exactly one argument (the maximum predicate arity) is required.
    if len(sys.argv) != 2:
        # Parenthesized single-argument print is valid in both Python 2
        # and Python 3, unlike the old print statement form.
        print(__doc__)
        print('Author: ' + __author__)
        sys.exit(1)

    n = int(sys.argv[1])
    GenerateHeader(n)
    GenerateUnitTest(n)


if __name__ == '__main__':
    _Main()
|
johson/shell | refs/heads/master | photo/photo/spiders/test_photo/photo.py | 1 | # coding=utf-8
import time
import json
from scrapy import Request, FormRequest, Selector
from scrapy.spiders import CrawlSpider
__author__ = 'yss'
class CuccresetSpider(CrawlSpider):
    """Fetches one StockSnap photo page, submits its download token, and
    saves the resulting image to photo/photo.jpg.

    NOTE(review): several __init__ attributes (userName, serNo, newPass,
    paging counters) are never read by any method here — presumably left
    over from another spider; confirm before relying on them.
    """
    name = 'stocksnap'
    # allowed_domains = ['10086.com']
    start_urls = ['https://stocksnap.io/photo/FY58O0P400']

    def __init__(self, *args, **kwargs):
        super(CuccresetSpider, self).__init__(*args, **kwargs)
        self.userName = 'lidandan5258'
        self.serNo = '11'
        self.newPass = 'zzz251'
        self.currentPage = ''
        self.rtn_list = []  # itemized-record data
        self.rtn_list_address = []  # shipping-address data
        self.count = 0
        self.page = 1
        self.current_bar_count = 0  # number of entries processed so far
        self.bar_count = 0  # total number of entries
        self.totalPage = 0

    def start_requests(self):
        """Request each start URL with its own cookie jar so sessions do not
        leak between URLs."""
        for i, url in enumerate(self.start_urls):
            yield Request(url,
                          meta={'cookiejar': i},
                          dont_filter=True,
                          headers={
                              'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                              'Accept-Encoding': 'gzip, deflate, br',
                              'Accept-Language': 'zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3',
                              # 'Cache-Control':'max-age=0',
                              'Connection': 'keep-alive',
                              'DNT': '1',
                              'Host': 'stocksnap.io',
                              'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:46.0) Gecko/20100101 Firefox/46.0',
                          },
                          callback=self.submit_username)

    def submit_username(self, response):
        """Extract the hidden dl_token field from the photo page and POST it
        to the download endpoint, keeping the same cookie jar."""
        self._log_page(response, 'photo/photo.html')
        dl_token = Selector(text=response.body)
        dl_token = dl_token.xpath('//input[@name="dl_token"]/@value').extract_first()
        return FormRequest(
            url='https://stocksnap.io/download-photo/FY58O0P400',
            headers={
                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                'Accept-Encoding': 'gzip, deflate, br',
                'Accept-Language': 'zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3',
                # 'Cache-Control':'max-age=0',
                'Connection': 'keep-alive',
                'DNT': '1',
                'Host': 'stocksnap.io',
                'Referer': 'https://stocksnap.io/photo/FY58O0P400',
                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:46.0) Gecko/20100101 Firefox/46.0',
            },
            formdata={
                'dl_token': dl_token
            },
            meta={'cookiejar': response.meta['cookiejar']},
            callback=self.photo
        )

    def photo(self, response):
        """Persist the downloaded image bytes to disk."""
        self._log_page(response, 'photo/photo2.html')
        # The with-statement closes the file; the explicit f.close() the
        # original called inside the block was redundant.
        with open('photo/photo.jpg', 'wb') as f:
            f.write(response.body)

    def _log_page(self, response, filename):
        """Dump URL, headers, and (when possible) body to a file for
        offline debugging."""
        with open(filename, 'w') as f:
            try:
                f.write("%s\n%s\n%s\n" % (response.url, response.headers, response.body))
            except Exception:
                # Body may not be representable in a text-mode file; fall
                # back to URL + headers only.  (Was a bare `except:`, which
                # would also swallow KeyboardInterrupt/SystemExit.)
                f.write("%s\n%s\n" % (response.url, response.headers))
|
TanguyPatte/phantomjs-packaging | refs/heads/master | src/qt/qtwebkit/Tools/Scripts/webkitpy/port/mac_unittest.py | 117 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.port.mac import MacPort
from webkitpy.port import port_testcase
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.tool.mocktool import MockOptions
from webkitpy.common.system.executive_mock import MockExecutive, MockExecutive2, MockProcess, ScriptError
from webkitpy.common.system.systemhost_mock import MockSystemHost
class MacTest(port_testcase.PortTestCase):
    """Unit tests for MacPort: port naming per OS version, skipped-file
    parsing, baseline search paths, server environment, child-process
    limits, helper lifecycle, and build architecture flags."""
    os_name = 'mac'
    os_version = 'lion'
    port_name = 'mac-lion'
    port_maker = MacPort

    def assert_skipped_file_search_paths(self, port_name, expected_paths, use_webkit2=False):
        # Helper: builds a port and checks its Skipped-file search paths.
        port = self.make_port(port_name=port_name, options=MockOptions(webkit_test_runner=use_webkit2))
        self.assertEqual(port._skipped_file_search_paths(), expected_paths)

    def test_default_timeout_ms(self):
        super(MacTest, self).test_default_timeout_ms()
        # Guard Malloc slows the driver down dramatically, so the port uses
        # a much larger timeout.
        self.assertEqual(self.make_port(options=MockOptions(guard_malloc=True)).default_timeout_ms(), 350000)

    # A representative Skipped file: comment lines are dropped and a
    # trailing slash on directory entries is removed (see expected list).
    example_skipped_file = u"""
# <rdar://problem/5647952> fast/events/mouseout-on-window.html needs mac DRT to issue mouse out events
fast/events/mouseout-on-window.html
# <rdar://problem/5643675> window.scrollTo scrolls a window with no scrollbars
fast/events/attempt-scroll-with-no-scrollbars.html
# see bug <rdar://problem/5646437> REGRESSION (r28015): svg/batik/text/smallFonts fails
svg/batik/text/smallFonts.svg
# Java tests don't work on WK2
java/
"""
    example_skipped_tests = [
        "fast/events/mouseout-on-window.html",
        "fast/events/attempt-scroll-with-no-scrollbars.html",
        "svg/batik/text/smallFonts.svg",
        "java",
    ]

    def test_tests_from_skipped_file_contents(self):
        port = self.make_port()
        self.assertEqual(port._tests_from_skipped_file_contents(self.example_skipped_file), self.example_skipped_tests)

    def assert_name(self, port_name, os_version_string, expected):
        # Helper: builds a port on a mocked OS version and checks its name.
        host = MockSystemHost(os_name='mac', os_version=os_version_string)
        port = self.make_port(host=host, port_name=port_name)
        self.assertEqual(expected, port.name())

    def test_tests_for_other_platforms(self):
        platforms = ['mac', 'chromium-linux', 'mac-snowleopard']
        port = self.make_port(port_name='mac-snowleopard')
        platform_dir_paths = map(port._webkit_baseline_path, platforms)
        # Replace our empty mock file system with one which has our expected platform directories.
        port._filesystem = MockFileSystem(dirs=platform_dir_paths)
        dirs_to_skip = port._tests_for_other_platforms()
        self.assertIn('platform/chromium-linux', dirs_to_skip)
        self.assertNotIn('platform/mac', dirs_to_skip)
        self.assertNotIn('platform/mac-snowleopard', dirs_to_skip)

    def test_version(self):
        port = self.make_port()
        self.assertTrue(port.version())

    def test_versions(self):
        # Note: these tests don't need to be exhaustive as long as we get path coverage.
        self.assert_name('mac', 'snowleopard', 'mac-snowleopard')
        self.assert_name('mac-snowleopard', 'leopard', 'mac-snowleopard')
        self.assert_name('mac-snowleopard', 'lion', 'mac-snowleopard')
        self.assert_name('mac', 'lion', 'mac-lion')
        self.assert_name('mac-lion', 'lion', 'mac-lion')
        self.assert_name('mac', 'mountainlion', 'mac-mountainlion')
        self.assert_name('mac-mountainlion', 'lion', 'mac-mountainlion')
        self.assert_name('mac', 'future', 'mac-future')
        self.assert_name('mac-future', 'future', 'mac-future')
        self.assertRaises(AssertionError, self.assert_name, 'mac-tiger', 'leopard', 'mac-leopard')

    def test_setup_environ_for_server(self):
        # leaks/guard_malloc options must inject the malloc-debug env vars.
        port = self.make_port(options=MockOptions(leaks=True, guard_malloc=True))
        env = port.setup_environ_for_server(port.driver_name())
        self.assertEqual(env['MallocStackLogging'], '1')
        self.assertEqual(env['DYLD_INSERT_LIBRARIES'], '/usr/lib/libgmalloc.dylib:/mock-build/libWebCoreTestShim.dylib')

    def _assert_search_path(self, port_name, baseline_path, search_paths, use_webkit2=False):
        # Helper: checks both the primary baseline dir and the fallback list.
        port = self.make_port(port_name=port_name, options=MockOptions(webkit_test_runner=use_webkit2))
        absolute_search_paths = map(port._webkit_baseline_path, search_paths)
        self.assertEqual(port.baseline_path(), port._webkit_baseline_path(baseline_path))
        self.assertEqual(port.baseline_search_path(), absolute_search_paths)

    def test_baseline_search_path(self):
        # Note that we don't need total coverage here, just path coverage, since this is all data driven.
        self._assert_search_path('mac-snowleopard', 'mac-snowleopard', ['mac-snowleopard', 'mac-lion', 'mac'])
        self._assert_search_path('mac-lion', 'mac-lion', ['mac-lion', 'mac'])
        self._assert_search_path('mac-mountainlion', 'mac', ['mac'])
        self._assert_search_path('mac-future', 'mac', ['mac'])
        self._assert_search_path('mac-snowleopard', 'mac-wk2', ['mac-wk2', 'wk2', 'mac-snowleopard', 'mac-lion', 'mac'], use_webkit2=True)
        self._assert_search_path('mac-lion', 'mac-wk2', ['mac-wk2', 'wk2', 'mac-lion', 'mac'], use_webkit2=True)
        self._assert_search_path('mac-mountainlion', 'mac-wk2', ['mac-wk2', 'wk2', 'mac'], use_webkit2=True)
        self._assert_search_path('mac-future', 'mac-wk2', ['mac-wk2', 'wk2', 'mac'], use_webkit2=True)

    def test_show_results_html_file(self):
        port = self.make_port()
        # Delay setting a should_log executive to avoid logging from MacPort.__init__.
        port._executive = MockExecutive(should_log=True)
        expected_logs = "MOCK popen: ['Tools/Scripts/run-safari', '--release', '--no-saved-state', '-NSOpen', 'test.html'], cwd=/mock-checkout\n"
        OutputCapture().assert_outputs(self, port.show_results_html_file, ["test.html"], expected_logs=expected_logs)

    def test_operating_system(self):
        self.assertEqual('mac', self.make_port().operating_system())

    def test_default_child_processes(self):
        port = self.make_port(port_name='mac-lion')
        # MockPlatformInfo only has 2 mock cores. The important part is that 2 > 1.
        self.assertEqual(port.default_child_processes(), 2)
        bytes_for_drt = 200 * 1024 * 1024
        port.host.platform.total_bytes_memory = lambda: bytes_for_drt
        expected_logs = "This machine could support 2 child processes, but only has enough memory for 1.\n"
        child_processes = OutputCapture().assert_outputs(self, port.default_child_processes, (), expected_logs=expected_logs)
        self.assertEqual(child_processes, 1)
        # Make sure that we always use one process, even if we don't have the memory for it.
        port.host.platform.total_bytes_memory = lambda: bytes_for_drt - 1
        expected_logs = "This machine could support 2 child processes, but only has enough memory for 1.\n"
        child_processes = OutputCapture().assert_outputs(self, port.default_child_processes, (), expected_logs=expected_logs)
        self.assertEqual(child_processes, 1)
        # SnowLeopard has a CFNetwork bug which causes crashes if we execute more than one copy of DRT at once.
        port = self.make_port(port_name='mac-snowleopard')
        expected_logs = "Cannot run tests in parallel on Snow Leopard due to rdar://problem/10621525.\n"
        child_processes = OutputCapture().assert_outputs(self, port.default_child_processes, (), expected_logs=expected_logs)
        self.assertEqual(child_processes, 1)

    def test_get_crash_log(self):
        # Mac crash logs are tested elsewhere, so here we just make sure we don't crash.
        def fake_time_cb():
            # Returns a clock that advances 20 "seconds" per call.
            times = [0, 20, 40]
            return lambda: times.pop(0)
        port = self.make_port(port_name='mac-snowleopard')
        port._get_crash_log('DumpRenderTree', 1234, '', '', 0,
                            time_fn=fake_time_cb(), sleep_fn=lambda delay: None)

    def test_helper_starts(self):
        host = MockSystemHost(MockExecutive())
        port = self.make_port(host)
        oc = OutputCapture()
        oc.capture_output()
        # The helper signals readiness by printing 'ready' on stdout.
        host.executive._proc = MockProcess('ready\n')
        port.start_helper()
        port.stop_helper()
        oc.restore_output()
        # make sure trying to stop the helper twice is safe.
        port.stop_helper()

    def test_helper_fails_to_start(self):
        host = MockSystemHost(MockExecutive())
        port = self.make_port(host)
        oc = OutputCapture()
        oc.capture_output()
        port.start_helper()
        port.stop_helper()
        oc.restore_output()

    def test_helper_fails_to_stop(self):
        host = MockSystemHost(MockExecutive())
        host.executive._proc = MockProcess()
        # A wait() that raises must not propagate out of stop_helper().
        def bad_waiter():
            raise IOError('failed to wait')
        host.executive._proc.wait = bad_waiter
        port = self.make_port(host)
        oc = OutputCapture()
        oc.capture_output()
        port.start_helper()
        port.stop_helper()
        oc.restore_output()

    def test_sample_process(self):
        def logging_run_command(args):
            print args
        port = self.make_port()
        port._executive = MockExecutive2(run_command_fn=logging_run_command)
        expected_stdout = "['/usr/bin/sample', 42, 10, 10, '-file', '/mock-build/layout-test-results/test-42-sample.txt']\n"
        OutputCapture().assert_outputs(self, port.sample_process, args=['test', 42], expected_stdout=expected_stdout)

    def test_sample_process_throws_exception(self):
        # sample_process must swallow a failing /usr/bin/sample invocation.
        def throwing_run_command(args):
            raise ScriptError("MOCK script error")
        port = self.make_port()
        port._executive = MockExecutive2(run_command_fn=throwing_run_command)
        OutputCapture().assert_outputs(self, port.sample_process, args=['test', 42])

    def test_32bit(self):
        port = self.make_port(options=MockOptions(architecture='x86'))
        def run_script(script, args=None, env=None):
            self.args = args
        port._run_script = run_script
        self.assertEqual(port.architecture(), 'x86')
        port._build_driver()
        self.assertEqual(self.args, ['ARCHS=i386'])

    def test_64bit(self):
        # Apple Mac port is 64-bit by default
        port = self.make_port()
        self.assertEqual(port.architecture(), 'x86_64')
        def run_script(script, args=None, env=None):
            self.args = args
        port._run_script = run_script
        port._build_driver()
        self.assertEqual(self.args, [])
|
ravindrapanda/tensorflow | refs/heads/master | tensorflow/contrib/boosted_trees/python/utils/losses_test.py | 33 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for trainer hooks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.boosted_trees.python.utils import losses
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import googletest
class LossesTest(test_util.TensorFlowTestCase):
    """Numeric sanity checks for the per-example loss functions."""

    def test_per_example_exp_loss(self):

        def _logit(p):
            # Inverse sigmoid: log(p / (1 - p)).
            return np.log(p) - np.log(1 - p)

        labels_positive = array_ops.ones([10, 1], dtypes.float32)
        weights = array_ops.ones([10, 1], dtypes.float32)
        labels_negative = array_ops.zeros([10, 1], dtypes.float32)
        predictions_probs = np.array(
            [[0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9], [0.99]],
            dtype=np.float32)
        prediction_logits = _logit(predictions_probs)
        eps = 0.2
        with self.test_session():
            predictions_tensor = constant_op.constant(
                prediction_logits, dtype=dtypes.float32)
            loss_for_positives, _ = losses.per_example_exp_loss(
                labels_positive, weights, predictions_tensor, eps=eps)
            loss_for_negatives, _ = losses.per_example_exp_loss(
                labels_negative, weights, predictions_tensor, eps=eps)
            pos_loss = loss_for_positives.eval()
            neg_loss = loss_for_negatives.eval()
            # For positive labels, points <= 0.3 get max loss of e.
            # For negative labels, these points have minimum loss of 1/e.
            self.assertAllClose(np.exp(np.ones([2, 1])), pos_loss[:2], atol=1e-4)
            self.assertAllClose(np.exp(-np.ones([2, 1])), neg_loss[:2], atol=1e-4)
            # For positive labels, points with predictions 0.7 and larger get minimum
            # loss value of 1/e. For negative labels, these points are wrongly
            # classified and get loss e.
            self.assertAllClose(np.exp(-np.ones([4, 1])), pos_loss[6:10], atol=1e-4)
            self.assertAllClose(np.exp(np.ones([4, 1])), neg_loss[6:10], atol=1e-4)
            # Points in between 0.5-eps, 0.5+eps get loss exp(-label_m*y), where
            # y = 1/eps *x -1/(2eps), where x is the probability and label_m is either
            # 1 or -1 (for label of 0).
            self.assertAllClose(
                np.exp(-(predictions_probs[2:6] * 1.0 / eps - 0.5 / eps)),
                pos_loss[2:6], atol=1e-4)
            self.assertAllClose(
                np.exp(predictions_probs[2:6] * 1.0 / eps - 0.5 / eps),
                neg_loss[2:6], atol=1e-4)

    def test_per_example_squared_loss(self):
        labels = np.array([[0.123], [224.2], [-3], [2], [.3]], dtype=np.float32)
        weights = array_ops.ones([5, 1], dtypes.float32)
        predictions = np.array(
            [[0.123], [23.2], [233], [52], [3]], dtype=np.float32)
        with self.test_session():
            loss_tensor, _ = losses.per_example_squared_loss(labels, weights,
                                                             predictions)
            loss = loss_tensor.eval()
            # Squared loss is elementwise (label - prediction)^2.
            self.assertAllClose(
                np.square(labels[:5] - predictions[:5]), loss[:5], atol=1e-4)


if __name__ == "__main__":
    googletest.main()
|
mpalomer/micropython | refs/heads/master | tests/pyb/timer_callback.py | 96 | # check callback feature of the timer class
import pyb
from pyb import Timer
# callback function that disables the callback when called
def cb1(t):
    """Timer callback: fires once, then detaches itself via t.callback(None)."""
    print("cb1")
    t.callback(None)
# callback function that disables the timer when called
def cb2(t):
    """Timer callback: fires once, then stops the whole timer via t.deinit()."""
    print("cb2")
    t.deinit()
# callback factory where the inner cb4 closes over cb3's argument
def cb3(x):
    """Build a one-shot timer callback that prints the captured value."""
    def cb4(t):
        # Close over x directly instead of copying it to a local first.
        print("cb4", x)
        t.callback(None)
    return cb4
# create a timer with a callback, using callback(None) to stop
# (at freq=100 the period is ~10ms, so with delay(5) before the print and
# delay(15) after it, "before cb1" should appear before the callback fires
# and the callback should fire exactly once before being disabled)
tim = Timer(1, freq=100, callback=cb1)
pyb.delay(5)
print("before cb1")
pyb.delay(15)

# create a timer with a callback, using deinit to stop
tim = Timer(2, freq=100, callback=cb2)
pyb.delay(5)
print("before cb2")
pyb.delay(15)

# create a timer, then set the freq, then set the callback
tim = Timer(4)
tim.init(freq=100)
tim.callback(cb1)
pyb.delay(5)
print("before cb1")
pyb.delay(15)

# test callback with a closure
tim.init(freq=100)
tim.callback(cb3(3))
pyb.delay(5)
print("before cb4")
pyb.delay(15)
|
mattdennewitz/fangraphs-steamer-scraper | refs/heads/master | steamer-to-csv.py | 2 | import collections
import json
import click
import unicodecsv as csv
def parse_object(obj, path=''):
    """Recursively flatten a nested JSON value into {path: scalar} pairs.

    Borrowed from `csvkit`.  Keys along the way are joined with '/'; the
    accumulated path (minus surrounding slashes) names the flat column.
    """
    if isinstance(obj, dict):
        pairs = obj.items()
    elif isinstance(obj, (list, tuple)):
        pairs = enumerate(obj)
    else:
        # Leaf value: emit it under the accumulated path.
        return {path.strip('/'): obj}

    flat = {}
    for key, value in pairs:
        flat.update(parse_object(value, path + unicode(key) + '/'))
    return flat
@click.command()
@click.option('-t', 'player_type',
              type=click.Choice(('batting', 'pitching', )))
@click.option('-i', 'input_fp', type=click.File('rb'), required=True)
@click.argument('output_fp', type=click.File('w'), required=True)
def convert(player_type, input_fp, output_fp):
    """Flatten a Steamer projection JSON list into a CSV for one player type."""
    data = json.load(input_fp, object_pairs_hook=collections.OrderedDict)
    if not isinstance(data, list):
        raise click.BadParameter('JSON input object must be a list')

    wanted = 'b' if player_type == 'batting' else 'p'
    fieldnames = set()
    flattened_rows = []
    for record in data:
        # Skip projections for the other player type.
        if record['player_type'] != wanted:
            continue
        flat = parse_object(record)
        fieldnames.update(flat)
        flattened_rows.append(flat)

    # Sorted header keeps column order stable across runs.
    writer = csv.DictWriter(output_fp, sorted(fieldnames))
    writer.writeheader()
    writer.writerows(flattened_rows)


if __name__ == '__main__':
    convert()
|
BassantMorsi/finderApp | refs/heads/master | lib/python2.7/site-packages/django/utils/regex_helper.py | 45 | """
Functions for reversing a regular expression (used in reverse URL resolving).
Used internally by Django and not intended for external use.
This is not, and is not intended to be, a complete reg-exp decompiler. It
should be good enough for a large class of URLS, however.
"""
from __future__ import unicode_literals
import warnings
from django.utils import six
from django.utils.deprecation import RemovedInDjango21Warning
from django.utils.six.moves import zip
# Mapping of an escape character to a representative of that class. So, e.g.,
# "\w" is replaced by "x" in a reverse URL. A value of None means to ignore
# this sequence. Any missing key is mapped to itself.
ESCAPE_MAPPINGS = {
    "A": None,  # \A start-of-string anchor: zero-width, nothing to emit.
    "b": None,  # \b word boundary: zero-width.
    "B": None,  # \B non-word-boundary: zero-width.
    "d": "0",   # \d any digit -> representative "0".
    "D": "x",   # \D any non-digit -> representative "x".
    "s": " ",   # \s whitespace -> representative space.
    "S": "x",   # \S non-whitespace -> representative "x".
    "w": "x",   # \w word character -> representative "x".
    "W": "!",   # \W non-word character -> representative "!".
    "Z": None,  # \Z end-of-string anchor: zero-width.
}
class Choice(list):
    """
    Used to represent multiple possibilities at this point in a pattern string.
    We use a distinguished type, rather than a list, so that the usage in the
    code is clear.

    Behaves exactly like a list; only the type is meaningful.
    """
class Group(list):
    """
    Used to represent a capturing group in the pattern string.

    A plain list subclass used purely as a marker type.
    """
class NonCapture(list):
    """
    Used to represent a non-capturing group in the pattern string.

    A plain list subclass used purely as a marker type.
    """
def normalize(pattern):
    r"""
    Given a reg-exp pattern, normalizes it to an iterable of forms that
    suffice for reverse matching. This does the following:
    (1) For any repeating sections, keeps the minimum number of occurrences
        permitted (this means zero for optional groups).
    (2) If an optional group includes parameters, include one occurrence of
        that group (along with the zero occurrence case from step (1)).
    (3) Select the first (essentially an arbitrary) element from any character
        class. Select an arbitrary character for any unordered class (e.g. '.'
        or '\w') in the pattern.
    (4) Ignore look-ahead and look-behind assertions.
    (5) Raise an error on any disjunctive ('|') constructs.
    Django's URLs for forward resolving are either all positional arguments or
    all keyword arguments. That is assumed here, as well. Although reverse
    resolving can be done using positional args when keyword args are
    specified, the two cannot be mixed in the same reverse() call.

    Returns a list of 2-tuples: (format string containing %(name)s
    placeholders, list of parameter names used by that form).
    """
    # Do a linear scan to work out the special features of this pattern. The
    # idea is that we scan once here and collect all the information we need to
    # make future decisions.
    result = []
    non_capturing_groups = []
    consume_next = True
    pattern_iter = next_char(iter(pattern))
    num_args = 0
    # A "while" loop is used here because later on we need to be able to peek
    # at the next character and possibly go around without consuming another
    # one at the top of the loop.
    try:
        ch, escaped = next(pattern_iter)
    except StopIteration:
        return [('', [])]
    try:
        while True:
            if escaped:
                result.append(ch)
            elif ch == '.':
                # Replace "any character" with an arbitrary representative.
                result.append(".")
            elif ch == '|':
                # FIXME: One day we should do this, but not in 1.0.
                raise NotImplementedError('Awaiting Implementation')
            elif ch == "^":
                pass
            elif ch == '$':
                break
            elif ch == ')':
                # This can only be the end of a non-capturing group, since all
                # other unescaped parentheses are handled by the grouping
                # section later (and the full group is handled there).
                #
                # We regroup everything inside the capturing group so that it
                # can be quantified, if necessary.
                start = non_capturing_groups.pop()
                inner = NonCapture(result[start:])
                result = result[:start] + [inner]
            elif ch == '[':
                # Replace ranges with the first character in the range.
                ch, escaped = next(pattern_iter)
                result.append(ch)
                ch, escaped = next(pattern_iter)
                # Skip the rest of the character class up to the closing ']'.
                while escaped or ch != ']':
                    ch, escaped = next(pattern_iter)
            elif ch == '(':
                # Some kind of group.
                ch, escaped = next(pattern_iter)
                if ch != '?' or escaped:
                    # A positional group
                    name = "_%d" % num_args
                    num_args += 1
                    result.append(Group((("%%(%s)s" % name), name)))
                    walk_to_end(ch, pattern_iter)
                else:
                    ch, escaped = next(pattern_iter)
                    if ch in '!=<':
                        # All of these are ignorable. Walk to the end of the
                        # group.
                        walk_to_end(ch, pattern_iter)
                    elif ch in 'iLmsu#':
                        warnings.warn(
                            'Using (?%s) in url() patterns is deprecated.' % ch,
                            RemovedInDjango21Warning
                        )
                        walk_to_end(ch, pattern_iter)
                    elif ch == ':':
                        # Non-capturing group
                        non_capturing_groups.append(len(result))
                    elif ch != 'P':
                        # Anything else, other than a named group, is something
                        # we cannot reverse.
                        raise ValueError("Non-reversible reg-exp portion: '(?%s'" % ch)
                    else:
                        ch, escaped = next(pattern_iter)
                        if ch not in ('<', '='):
                            raise ValueError("Non-reversible reg-exp portion: '(?P%s'" % ch)
                        # We are in a named capturing group. Extract the name
                        # and then skip to the end.
                        if ch == '<':
                            terminal_char = '>'
                        # We are in a named backreference.
                        else:
                            terminal_char = ')'
                        name = []
                        ch, escaped = next(pattern_iter)
                        while ch != terminal_char:
                            name.append(ch)
                            ch, escaped = next(pattern_iter)
                        param = ''.join(name)
                        # Named backreferences have already consumed the
                        # parenthesis.
                        if terminal_char != ')':
                            result.append(Group((("%%(%s)s" % param), param)))
                            walk_to_end(ch, pattern_iter)
                        else:
                            result.append(Group((("%%(%s)s" % param), None)))
            elif ch in "*?+{":
                # Quantifiers affect the previous item in the result list.
                count, ch = get_quantifier(ch, pattern_iter)
                if ch:
                    # We had to look ahead, but it wasn't needed to compute the
                    # quantifier, so use this character next time around the
                    # main loop.
                    consume_next = False
                if count == 0:
                    if contains(result[-1], Group):
                        # If we are quantifying a capturing group (or
                        # something containing such a group) and the minimum is
                        # zero, we must also handle the case of one occurrence
                        # being present. All the quantifiers (except {0,0},
                        # which we conveniently ignore) that have a 0 minimum
                        # also allow a single occurrence.
                        result[-1] = Choice([None, result[-1]])
                    else:
                        result.pop()
                elif count > 1:
                    result.extend([result[-1]] * (count - 1))
            else:
                # Anything else is a literal.
                result.append(ch)
            if consume_next:
                ch, escaped = next(pattern_iter)
            else:
                consume_next = True
    except StopIteration:
        pass
    except NotImplementedError:
        # A case of using the disjunctive form. No results for you!
        return [('', [])]
    return list(zip(*flatten_result(result)))
def next_char(input_iter):
    r"""
    An iterator that yields the next character from "pattern_iter", respecting
    escape sequences. An escaped character is replaced by a representative of
    its class (e.g. \w -> "x"). If the escaped character is one that is
    skipped, it is not returned (the next character is returned instead).
    Yields (char, escaped) pairs: escaped is True when the character came from
    an escape sequence (already mapped through ESCAPE_MAPPINGS), and False
    when it is a raw literal character.
    """
    for ch in input_iter:
        if ch != '\\':
            yield ch, False
            continue
        ch = next(input_iter)
        # Map e.g. 'd' -> '0'; escapes absent from the table map to themselves.
        representative = ESCAPE_MAPPINGS.get(ch, ch)
        if representative is None:
            # Zero-width assertion (\b, \A, ...): contributes no character.
            continue
        yield representative, True
def walk_to_end(ch, input_iter):
"""
The iterator is currently inside a capturing group. We want to walk to the
close of this group, skipping over any nested groups and handling escaped
parentheses correctly.
"""
if ch == '(':
nesting = 1
else:
nesting = 0
for ch, escaped in input_iter:
if escaped:
continue
elif ch == '(':
nesting += 1
elif ch == ')':
if not nesting:
return
nesting -= 1
def get_quantifier(ch, input_iter):
    """
    Parse a regex quantifier whose first character is *ch*.

    Returns a pair (minimum, lookahead): the smallest repetition count the
    quantifier permits, plus the character read past the quantifier, or None
    when nothing extra was read or the extra character was a non-greedy '?'
    (which belongs to the quantifier itself).
    """
    if ch in '*?+':
        try:
            following, _ = next(input_iter)
        except StopIteration:
            following = None
        if following == '?':
            # Non-greedy marker: part of the quantifier, not a lookahead.
            following = None
        return (1 if ch == '+' else 0), following
    # Brace form "{m}" / "{m,n}": accumulate up to and including the '}'.
    collected = []
    while ch != '}':
        ch, _ = next(input_iter)
        collected.append(ch)
    bounds = ''.join(collected[:-1]).split(',')
    # Consume the trailing '?', if necessary.
    try:
        following, _ = next(input_iter)
    except StopIteration:
        following = None
    if following == '?':
        following = None
    return int(bounds[0]), following
def contains(source, inst):
    """
    Return True if *source* is, or (recursing through non-capturing groups)
    contains, an instance of *inst*; False otherwise.
    """
    if isinstance(source, inst):
        return True
    if isinstance(source, NonCapture):
        return any(contains(element, inst) for element in source)
    return False
def flatten_result(source):
    """
    Turns the given source sequence into a list of reg-exp possibilities and
    their arguments. Returns a list of strings and a list of argument lists.
    Each of the two lists will be of the same length.

    *source* is the normalized sequence produced by normalize(): a mix of
    plain strings, Group, NonCapture and Choice items (or None for an absent
    Choice alternative).
    """
    if source is None:
        return [''], [[]]
    if isinstance(source, Group):
        if source[1] is None:
            params = []
        else:
            params = [source[1]]
        return [source[0]], [params]
    # result/result_args are parallel lists: each partial pattern string and
    # the parameter names it consumes. Both multiply when a Choice is hit.
    result = ['']
    result_args = [[]]
    pos = last = 0
    for pos, elt in enumerate(source):
        if isinstance(elt, six.string_types):
            continue
        # Flush the literal run accumulated since the last structured item.
        piece = ''.join(source[last:pos])
        if isinstance(elt, Group):
            piece += elt[0]
            param = elt[1]
        else:
            param = None
        last = pos + 1
        for i in range(len(result)):
            result[i] += piece
            if param:
                result_args[i].append(param)
        if isinstance(elt, (Choice, NonCapture)):
            if isinstance(elt, NonCapture):
                # Treat a NonCapture as a one-alternative Choice.
                elt = [elt]
            inner_result, inner_args = [], []
            for item in elt:
                res, args = flatten_result(item)
                inner_result.extend(res)
                inner_args.extend(args)
            # Cartesian product: every existing prefix combined with every
            # flattened alternative from the nested structure.
            new_result = []
            new_args = []
            for item, args in zip(result, result_args):
                for i_item, i_args in zip(inner_result, inner_args):
                    new_result.append(item + i_item)
                    new_args.append(args[:] + i_args)
            result = new_result
            result_args = new_args
    if pos >= last:
        # Append any trailing literal run after the final structured item.
        piece = ''.join(source[last:])
        for i in range(len(result)):
            result[i] += piece
    return result, result_args
|
hynnet/openwrt-mt7620 | refs/heads/master | staging_dir/host/lib/python2.7/test/doctest_aliases.py | 137 | # Used by test_doctest.py.
class TwoNames:
    '''f() and g() are two names for the same method'''
    # NOTE: the docstring below is a doctest fixture consumed by
    # test_doctest.py; its exact text (including the Python 2 print syntax)
    # is part of the test data and must not be "fixed".
    def f(self):
        '''
        >>> print TwoNames().f()
        f
        '''
        return 'f'
    g = f # define an alias for f
|
nzavagli/UnrealPy | refs/heads/master | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/django-1.8.2/django/db/migrations/graph.py | 81 | from __future__ import unicode_literals
from collections import deque
from django.db.migrations.state import ProjectState
from django.utils.datastructures import OrderedSet
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import total_ordering
@python_2_unicode_compatible
@total_ordering
class Node(object):
    """
    A single node in the migration graph. Contains direct links to adjacent
    nodes in either direction.
    """
    def __init__(self, key):
        # key is an (app_label, migration_name) tuple; children/parents hold
        # adjacent Node objects.
        self.key = key
        self.children = set()
        self.parents = set()
    def __eq__(self, other):
        # Deliberately compares the raw key, so a Node compares equal both to
        # its own key tuple and to another Node (via that Node's __eq__).
        return self.key == other
    def __lt__(self, other):
        # total_ordering derives the remaining comparisons from this + __eq__.
        return self.key < other
    def __hash__(self):
        return hash(self.key)
    def __getitem__(self, item):
        # Index into the key, e.g. node[0] -> app label.
        return self.key[item]
    def __str__(self):
        return str(self.key)
    def __repr__(self):
        return '<Node: (%r, %r)>' % self.key
    def add_child(self, child):
        self.children.add(child)
    def add_parent(self, parent):
        self.parents.add(parent)
    # Use manual caching, @cached_property effectively doubles the
    # recursion depth for each recursion.
    def ancestors(self):
        # Returns this node's key preceded by all ancestor keys (dependencies
        # first, this node last). Memoized in self.__dict__['_ancestors'].
        # Use self.key instead of self to speed up the frequent hashing
        # when constructing an OrderedSet.
        if '_ancestors' not in self.__dict__:
            ancestors = deque([self.key])
            for parent in sorted(self.parents):
                ancestors.extendleft(reversed(parent.ancestors()))
            self.__dict__['_ancestors'] = list(OrderedSet(ancestors))
        return self.__dict__['_ancestors']
    # Use manual caching, @cached_property effectively doubles the
    # recursion depth for each recursion.
    def descendants(self):
        # Mirror of ancestors(): descendant keys first, this node last.
        # Use self.key instead of self to speed up the frequent hashing
        # when constructing an OrderedSet.
        if '_descendants' not in self.__dict__:
            descendants = deque([self.key])
            for child in sorted(self.children):
                descendants.extendleft(reversed(child.descendants()))
            self.__dict__['_descendants'] = list(OrderedSet(descendants))
        return self.__dict__['_descendants']
@python_2_unicode_compatible
class MigrationGraph(object):
    """
    Represents the digraph of all migrations in a project.
    Each migration is a node, and each dependency is an edge. There are
    no implicit dependencies between numbered migrations - the numbering is
    merely a convention to aid file listing. Every new numbered migration
    has a declared dependency to the previous number, meaning that VCS
    branch merges can be detected and resolved.
    Migrations files can be marked as replacing another set of migrations -
    this is to support the "squash" feature. The graph handler isn't responsible
    for these; instead, the code to load them in here should examine the
    migration files and if the replaced migrations are all either unapplied
    or not present, it should ignore the replaced ones, load in just the
    replacing migration, and repoint any dependencies that pointed to the
    replaced migrations to point to the replacing one.
    A node should be a tuple: (app_path, migration_name). The tree special-cases
    things within an app - namely, root nodes and leaf nodes ignore dependencies
    to other apps.
    """
    def __init__(self):
        # Maps node key -> Node instance (adjacency bookkeeping).
        self.node_map = {}
        # Maps node key -> migration implementation object.
        self.nodes = {}
        # True when any Node may hold cached ancestors/descendants lists.
        self.cached = False
    def add_node(self, key, implementation):
        # Register a migration under its key; edges are added separately.
        node = Node(key)
        self.node_map[key] = node
        self.nodes[key] = implementation
        self.clear_cache()
    def add_dependency(self, migration, child, parent):
        # Record a child -> parent dependency edge; both endpoints must
        # already have been registered via add_node().
        if child not in self.nodes:
            raise NodeNotFoundError(
                "Migration %s dependencies reference nonexistent child node %r" % (migration, child),
                child
            )
        if parent not in self.nodes:
            raise NodeNotFoundError(
                "Migration %s dependencies reference nonexistent parent node %r" % (migration, parent),
                parent
            )
        self.node_map[child].add_parent(self.node_map[parent])
        self.node_map[parent].add_child(self.node_map[child])
        self.clear_cache()
    def clear_cache(self):
        # Drop every node's memoized ancestors/descendants: the graph changed.
        if self.cached:
            for node in self.nodes:
                self.node_map[node].__dict__.pop('_ancestors', None)
                self.node_map[node].__dict__.pop('_descendants', None)
            self.cached = False
    def forwards_plan(self, node):
        """
        Given a node, returns a list of which previous nodes (dependencies)
        must be applied, ending with the node itself.
        This is the list you would follow if applying the migrations to
        a database.
        """
        if node not in self.nodes:
            raise NodeNotFoundError("Node %r not a valid node" % (node, ), node)
        # Use parent.key instead of parent to speed up the frequent hashing in ensure_not_cyclic
        self.ensure_not_cyclic(node, lambda x: (parent.key for parent in self.node_map[x].parents))
        self.cached = True
        return self.node_map[node].ancestors()
    def backwards_plan(self, node):
        """
        Given a node, returns a list of which dependent nodes (dependencies)
        must be unapplied, ending with the node itself.
        This is the list you would follow if removing the migrations from
        a database.
        """
        if node not in self.nodes:
            raise NodeNotFoundError("Node %r not a valid node" % (node, ), node)
        # Use child.key instead of child to speed up the frequent hashing in ensure_not_cyclic
        self.ensure_not_cyclic(node, lambda x: (child.key for child in self.node_map[x].children))
        self.cached = True
        return self.node_map[node].descendants()
    def root_nodes(self, app=None):
        """
        Returns all root nodes - that is, nodes with no dependencies inside
        their app. These are the starting point for an app.
        """
        roots = set()
        for node in self.nodes:
            # Only same-app parents count; cross-app dependencies do not stop
            # a node from being a root.
            if (not any(key[0] == node[0] for key in self.node_map[node].parents)
                    and (not app or app == node[0])):
                roots.add(node)
        return sorted(roots)
    def leaf_nodes(self, app=None):
        """
        Returns all leaf nodes - that is, nodes with no dependents in their app.
        These are the "most current" version of an app's schema.
        Having more than one per app is technically an error, but one that
        gets handled further up, in the interactive command - it's usually the
        result of a VCS merge and needs some user input.
        """
        leaves = set()
        for node in self.nodes:
            # Mirror of root_nodes(): only same-app children count.
            if (not any(key[0] == node[0] for key in self.node_map[node].children)
                    and (not app or app == node[0])):
                leaves.add(node)
        return sorted(leaves)
    def ensure_not_cyclic(self, start, get_children):
        # Raises CircularDependencyError if any cycle is reachable; iterative
        # DFS over every node (not just `start`).
        # Algo from GvR:
        # http://neopythonic.blogspot.co.uk/2009/01/detecting-cycles-in-directed-graph.html
        todo = set(self.nodes)
        while todo:
            node = todo.pop()
            stack = [node]
            while stack:
                top = stack[-1]
                for node in get_children(top):
                    if node in stack:
                        # A child already on the DFS stack closes a cycle.
                        cycle = stack[stack.index(node):]
                        raise CircularDependencyError(", ".join("%s.%s" % n for n in cycle))
                    if node in todo:
                        stack.append(node)
                        todo.remove(node)
                        break
                else:
                    node = stack.pop()
    def __str__(self):
        return "Graph: %s nodes, %s edges" % (
            len(self.nodes),
            sum(len(node.parents) for node in self.node_map.values()),
        )
    def make_state(self, nodes=None, at_end=True, real_apps=None):
        """
        Given a migration node or nodes, returns a complete ProjectState for it.
        If at_end is False, returns the state before the migration has run.
        If nodes is not provided, returns the overall most current project state.
        """
        if nodes is None:
            nodes = list(self.leaf_nodes())
        if len(nodes) == 0:
            return ProjectState()
        if not isinstance(nodes[0], tuple):
            # A single node was passed in; normalize to a list of nodes.
            nodes = [nodes]
        plan = []
        for node in nodes:
            for migration in self.forwards_plan(node):
                if migration not in plan:
                    if not at_end and migration in nodes:
                        # Exclude the target migrations themselves when the
                        # "before" state was requested.
                        continue
                    plan.append(migration)
        project_state = ProjectState(real_apps=real_apps)
        for node in plan:
            project_state = self.nodes[node].mutate_state(project_state, preserve=False)
        return project_state
    def __contains__(self, node):
        return node in self.nodes
class CircularDependencyError(Exception):
    """Raised when the migration graph contains an unresolvable cycle."""
@python_2_unicode_compatible
class NodeNotFoundError(LookupError):
    """
    Raised when an attempt on a node is made that is not available in the graph.
    """
    def __init__(self, message, node):
        # Keep both the human-readable message and the offending node key so
        # callers can inspect which node was missing.
        self.message = message
        self.node = node
    def __str__(self):
        return self.message
    def __repr__(self):
        return "NodeNotFoundError(%r)" % self.node
|
dmlc/tvm | refs/heads/main | tests/python/topi/python/common.py | 5 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Common utility for topi test"""
from tvm import autotvm
from tvm.autotvm.task.space import FallbackConfigEntity
class Int8Fallback(autotvm.FallbackContext):
    """AutoTVM dispatch context whose configs are not flagged as fallbacks."""

    def _query_inside(self, target, workload):
        """Return the memoized config for (target, workload), creating one on miss."""
        memo_key = (target, workload)
        if memo_key not in self.memory:
            entity = FallbackConfigEntity()
            # Mark the entity as a "real" config so schedules don't take the
            # fallback code path.
            entity.is_fallback = False
            self.memory[memo_key] = entity
        return self.memory[memo_key]
|
zaxliu/scipy | refs/heads/master | scipy/linalg/tests/test_decomp.py | 26 | """ Test functions for linalg.decomp module
"""
from __future__ import division, print_function, absolute_import
__usage__ = """
Build linalg:
python setup_linalg.py build
Run tests if scipy is installed:
python -c 'import scipy;scipy.linalg.test()'
Run tests if linalg is not installed:
python tests/test_decomp.py
"""
import numpy as np
from numpy.testing import (TestCase, assert_equal, assert_array_almost_equal,
assert_array_equal, assert_raises, assert_, assert_allclose,
run_module_suite, dec)
from scipy._lib.six import xrange
from scipy.linalg import (eig, eigvals, lu, svd, svdvals, cholesky, qr,
schur, rsf2csf, lu_solve, lu_factor, solve, diagsvd, hessenberg, rq,
eig_banded, eigvals_banded, eigh, eigvalsh, qr_multiply, qz, orth)
from scipy.linalg.lapack import dgbtrf, dgbtrs, zgbtrf, zgbtrs, \
dsbev, dsbevd, dsbevx, zhbevd, zhbevx
from scipy.linalg.misc import norm
from numpy import array, transpose, sometrue, diag, ones, linalg, \
argsort, zeros, arange, float32, complex64, dot, conj, identity, \
ravel, sqrt, iscomplex, shape, sort, conjugate, bmat, sign, \
asarray, matrix, isfinite, all, ndarray, outer, eye, dtype, empty,\
triu, tril
from numpy.random import rand, normal, seed
from scipy.linalg._testutils import assert_no_overwrite
# digit precision to use in asserts for different types
# (keys are numpy dtype characters: 'd'/'D' double precision, 'f'/'F' single)
DIGITS = {'d':11, 'D':11, 'f':4, 'F':4}
# XXX: This function should be available through numpy.testing
def assert_dtype_equal(act, des):
    """Assert that *act* and *des* describe the same dtype.

    Either argument may be an ndarray (in which case its dtype is used) or
    anything accepted by numpy.dtype().
    """
    def _as_dtype(obj):
        # Arrays carry their dtype; everything else is coerced via dtype().
        return obj.dtype if isinstance(obj, ndarray) else dtype(obj)

    act = _as_dtype(act)
    des = _as_dtype(des)
    assert_(act == des, 'dtype mismatch: "%s" (should be "%s") ' % (act, des))
# XXX: This function should not be defined here, but somewhere in
# scipy.linalg namespace
def symrand(dim_or_eigv):
    """Return a random symmetric (Hermitian) matrix.
    If 'dim_or_eigv' is an integer N, return a NxN matrix, with eigenvalues
    uniformly distributed on (-1,1).
    If 'dim_or_eigv' is 1-D real array 'a', return a matrix whose
    eigenvalues are 'a'.
    """
    if isinstance(dim_or_eigv, int):
        dim = dim_or_eigv
        # Draw eigenvalues uniformly from (-1, 1).
        d = (rand(dim)*2)-1
    elif (isinstance(dim_or_eigv, ndarray) and
          len(dim_or_eigv.shape) == 1):
        dim = dim_or_eigv.shape[0]
        d = dim_or_eigv
    else:
        raise TypeError("input type not supported.")
    # Similarity transform V* diag(d) V with a random rotation V keeps the
    # prescribed eigenvalues d while randomizing the eigenvectors.
    v = random_rot(dim)
    h = dot(dot(v.T.conj(), diag(d)), v)
    # to avoid roundoff errors, symmetrize the matrix (again)
    h = 0.5*(h.T+h)
    return h
# XXX: This function should not be defined here, but somewhere in
# scipy.linalg namespace
def random_rot(dim):
    """Return a random rotation matrix, drawn from the Haar distribution
    (the only uniform distribution on SO(n)), via a product of Householder
    reflections.

    The algorithm is described in the paper
    Stewart, G.W., 'The efficient generation of random orthogonal
    matrices with an application to condition estimators', SIAM Journal
    on Numerical Analysis, 17(3), pp. 403-409, 1980.
    For more information see
    http://en.wikipedia.org/wiki/Orthogonal_matrix#Randomization"""
    rot = eye(dim)
    signs = ones((dim,))
    for step in range(1, dim):
        vec = normal(size=(dim-step+1,))
        signs[step-1] = sign(vec[0])
        vec[0] -= signs[step-1]*sqrt((vec*vec).sum())
        # Householder transformation embedded in the lower-right corner of a
        # full-size identity matrix.
        reflector = eye(dim-step+1) - 2.*outer(vec, vec)/(vec*vec).sum()
        embedded = eye(dim)
        embedded[step-1:,step-1:] = reflector
        rot = dot(rot, embedded)
    # Fix the last sign such that the determinant is 1 (rotation, not
    # reflection).
    signs[-1] = -signs.prod()
    return (signs*rot.T).T
def random(size):
    """Return an array of uniform [0, 1) samples with the given shape tuple."""
    shape = tuple(size)
    return rand(*shape)
class TestEigVals(TestCase):
    """Tests for scipy.linalg.eigvals on small dense matrices with known
    closed-form eigenvalues."""
    def test_simple(self):
        a = [[1,2,3],[1,2,3],[2,5,6]]
        w = eigvals(a)
        exact_w = [(9+sqrt(93))/2,0,(9-sqrt(93))/2]
        assert_array_almost_equal(w,exact_w)
    def test_simple_tr(self):
        # Round-trip transpose produces a non-contiguous view; eigvals must
        # still give the same spectrum.
        a = array([[1,2,3],[1,2,3],[2,5,6]],'d')
        a = transpose(a).copy()
        a = transpose(a)
        w = eigvals(a)
        exact_w = [(9+sqrt(93))/2,0,(9-sqrt(93))/2]
        assert_array_almost_equal(w,exact_w)
    def test_simple_complex(self):
        a = [[1,2,3],[1,2,3],[2,5,6+1j]]
        w = eigvals(a)
        exact_w = [(9+1j+sqrt(92+6j))/2,
                   0,
                   (9+1j-sqrt(92+6j))/2]
        assert_array_almost_equal(w,exact_w)
    def test_check_finite(self):
        # check_finite=False skips the input-validation pass only; results
        # must be unchanged for finite input.
        a = [[1,2,3],[1,2,3],[2,5,6]]
        w = eigvals(a, check_finite=False)
        exact_w = [(9+sqrt(93))/2,0,(9-sqrt(93))/2]
        assert_array_almost_equal(w,exact_w)
class TestEig(object):
    """Tests for scipy.linalg.eig / eigvals, including the generalized
    eigenvalue problem (two-matrix form)."""
    def test_simple(self):
        a = [[1,2,3],[1,2,3],[2,5,6]]
        w,v = eig(a)
        exact_w = [(9+sqrt(93))/2,0,(9-sqrt(93))/2]
        v0 = array([1,1,(1+sqrt(93)/3)/2])
        v1 = array([3.,0,-1])
        v2 = array([1,1,(1-sqrt(93)/3)/2])
        v0 = v0 / sqrt(dot(v0,transpose(v0)))
        v1 = v1 / sqrt(dot(v1,transpose(v1)))
        v2 = v2 / sqrt(dot(v2,transpose(v2)))
        assert_array_almost_equal(w,exact_w)
        # sign(...) factors normalize the arbitrary sign of each eigenvector.
        assert_array_almost_equal(v0,v[:,0]*sign(v[0,0]))
        assert_array_almost_equal(v1,v[:,1]*sign(v[0,1]))
        assert_array_almost_equal(v2,v[:,2]*sign(v[0,2]))
        for i in range(3):
            assert_array_almost_equal(dot(a,v[:,i]),w[i]*v[:,i])
        # Left eigenvectors of a are right eigenvectors of a^T.
        w,v = eig(a,left=1,right=0)
        for i in range(3):
            assert_array_almost_equal(dot(transpose(a),v[:,i]),w[i]*v[:,i])
    def test_simple_complex_eig(self):
        a = [[1,2],[-2,1]]
        w,vl,vr = eig(a,left=1,right=1)
        assert_array_almost_equal(w, array([1+2j, 1-2j]))
        for i in range(2):
            assert_array_almost_equal(dot(a,vr[:,i]),w[i]*vr[:,i])
        for i in range(2):
            # Left eigenvector identity: a^H vl = conj(w) vl.
            assert_array_almost_equal(dot(conjugate(transpose(a)),vl[:,i]),
                                      conjugate(w[i])*vl[:,i])
    def test_simple_complex(self):
        a = [[1,2,3],[1,2,3],[2,5,6+1j]]
        w,vl,vr = eig(a,left=1,right=1)
        for i in range(3):
            assert_array_almost_equal(dot(a,vr[:,i]),w[i]*vr[:,i])
        for i in range(3):
            assert_array_almost_equal(dot(conjugate(transpose(a)),vl[:,i]),
                                      conjugate(w[i])*vl[:,i])
    def _check_gen_eig(self, A, B):
        # Shared checker for the generalized problem A v = w B v: verifies
        # the residual for finite eigenvalues, agreement between eig and
        # eigvals, and unit-norm eigenvectors.
        A, B = asarray(A), asarray(B)
        msg = "\n%r\n%r" % (A, B)
        w, vr = eig(A,B)
        wt = eigvals(A,B)
        val1 = dot(A, vr)
        val2 = dot(B, vr) * w
        res = val1 - val2
        for i in range(res.shape[1]):
            if all(isfinite(res[:, i])):
                assert_array_almost_equal(res[:, i], 0, err_msg=msg)
        assert_array_almost_equal(sort(w[isfinite(w)]), sort(wt[isfinite(wt)]),
                                  err_msg=msg)
        length = np.empty(len(vr))
        for i in xrange(len(vr)):
            length[i] = norm(vr[:, i])
        assert_array_almost_equal(length, np.ones(length.size), err_msg=msg)
    def test_singular(self):
        """Test singular pair"""
        # Example taken from
        # http://www.cs.umu.se/research/nla/singular_pairs/guptri/matlab.html
        A = array(([22,34,31,31,17], [45,45,42,19,29], [39,47,49,26,34],
                   [27,31,26,21,15], [38,44,44,24,30]))
        B = array(([13,26,25,17,24], [31,46,40,26,37], [26,40,19,25,25],
                   [16,25,27,14,23], [24,35,18,21,22]))
        olderr = np.seterr(all='ignore')
        try:
            self._check_gen_eig(A, B)
        finally:
            np.seterr(**olderr)
    def test_falker(self):
        """Test matrices giving some Nan generalized eigen values."""
        M = diag(array(([1,0,3])))
        K = array(([2,-1,-1],[-1,2,-1],[-1,-1,2]))
        D = array(([1,-1,0],[-1,1,0],[0,0,0]))
        Z = zeros((3,3))
        I = identity(3)
        A = bmat([[I,Z],[Z,-K]])
        B = bmat([[Z,I],[M,D]])
        olderr = np.seterr(all='ignore')
        try:
            self._check_gen_eig(A, B)
        finally:
            np.seterr(**olderr)
    def test_bad_geneig(self):
        # Ticket #709 (strange return values from DGGEV)
        def matrices(omega):
            c1 = -9 + omega**2
            c2 = 2*omega
            A = [[1, 0, 0, 0],
                 [0, 1, 0, 0],
                 [0, 0, c1, 0],
                 [0, 0, 0, c1]]
            B = [[0, 0, 1, 0],
                 [0, 0, 0, 1],
                 [1, 0, 0, -c2],
                 [0, 1, c2, 0]]
            return A, B
        # With a buggy LAPACK, this can fail for different omega on different
        # machines -- so we need to test several values
        olderr = np.seterr(all='ignore')
        try:
            for k in xrange(100):
                A, B = matrices(omega=k*5./100)
                self._check_gen_eig(A, B)
        finally:
            np.seterr(**olderr)
    def test_check_finite(self):
        # Same checks as test_simple, but with input validation disabled.
        a = [[1,2,3],[1,2,3],[2,5,6]]
        w,v = eig(a, check_finite=False)
        exact_w = [(9+sqrt(93))/2,0,(9-sqrt(93))/2]
        v0 = array([1,1,(1+sqrt(93)/3)/2])
        v1 = array([3.,0,-1])
        v2 = array([1,1,(1-sqrt(93)/3)/2])
        v0 = v0 / sqrt(dot(v0,transpose(v0)))
        v1 = v1 / sqrt(dot(v1,transpose(v1)))
        v2 = v2 / sqrt(dot(v2,transpose(v2)))
        assert_array_almost_equal(w,exact_w)
        assert_array_almost_equal(v0,v[:,0]*sign(v[0,0]))
        assert_array_almost_equal(v1,v[:,1]*sign(v[0,1]))
        assert_array_almost_equal(v2,v[:,2]*sign(v[0,2]))
        for i in range(3):
            assert_array_almost_equal(dot(a,v[:,i]),w[i]*v[:,i])
    def test_not_square_error(self):
        """Check that passing a non-square array raises a ValueError."""
        A = np.arange(6).reshape(3,2)
        assert_raises(ValueError, eig, A)
    def test_shape_mismatch(self):
        """Check that passing arrays with different shapes raises a ValueError."""
        A = identity(2)
        B = np.arange(9.0).reshape(3,3)
        assert_raises(ValueError, eig, A, B)
        assert_raises(ValueError, eig, B, A)
class TestEigBanded(TestCase):
def __init__(self, *args):
TestCase.__init__(self, *args)
self.create_bandmat()
def create_bandmat(self):
"""Create the full matrix `self.fullmat` and
the corresponding band matrix `self.bandmat`."""
N = 10
self.KL = 2 # number of subdiagonals (below the diagonal)
self.KU = 2 # number of superdiagonals (above the diagonal)
# symmetric band matrix
self.sym_mat = (diag(1.0*ones(N))
+ diag(-1.0*ones(N-1), -1) + diag(-1.0*ones(N-1), 1)
+ diag(-2.0*ones(N-2), -2) + diag(-2.0*ones(N-2), 2))
# hermitian band matrix
self.herm_mat = (diag(-1.0*ones(N))
+ 1j*diag(1.0*ones(N-1), -1) - 1j*diag(1.0*ones(N-1), 1)
+ diag(-2.0*ones(N-2), -2) + diag(-2.0*ones(N-2), 2))
# general real band matrix
self.real_mat = (diag(1.0*ones(N))
+ diag(-1.0*ones(N-1), -1) + diag(-3.0*ones(N-1), 1)
+ diag(2.0*ones(N-2), -2) + diag(-2.0*ones(N-2), 2))
# general complex band matrix
self.comp_mat = (1j*diag(1.0*ones(N))
+ diag(-1.0*ones(N-1), -1) + 1j*diag(-3.0*ones(N-1), 1)
+ diag(2.0*ones(N-2), -2) + diag(-2.0*ones(N-2), 2))
# Eigenvalues and -vectors from linalg.eig
ew, ev = linalg.eig(self.sym_mat)
ew = ew.real
args = argsort(ew)
self.w_sym_lin = ew[args]
self.evec_sym_lin = ev[:,args]
ew, ev = linalg.eig(self.herm_mat)
ew = ew.real
args = argsort(ew)
self.w_herm_lin = ew[args]
self.evec_herm_lin = ev[:,args]
# Extract upper bands from symmetric and hermitian band matrices
# (for use in dsbevd, dsbevx, zhbevd, zhbevx
# and their single precision versions)
LDAB = self.KU + 1
self.bandmat_sym = zeros((LDAB, N), dtype=float)
self.bandmat_herm = zeros((LDAB, N), dtype=complex)
for i in xrange(LDAB):
self.bandmat_sym[LDAB-i-1,i:N] = diag(self.sym_mat, i)
self.bandmat_herm[LDAB-i-1,i:N] = diag(self.herm_mat, i)
# Extract bands from general real and complex band matrix
# (for use in dgbtrf, dgbtrs and their single precision versions)
LDAB = 2*self.KL + self.KU + 1
self.bandmat_real = zeros((LDAB, N), dtype=float)
self.bandmat_real[2*self.KL,:] = diag(self.real_mat) # diagonal
for i in xrange(self.KL):
# superdiagonals
self.bandmat_real[2*self.KL-1-i,i+1:N] = diag(self.real_mat, i+1)
# subdiagonals
self.bandmat_real[2*self.KL+1+i,0:N-1-i] = diag(self.real_mat,-i-1)
self.bandmat_comp = zeros((LDAB, N), dtype=complex)
self.bandmat_comp[2*self.KL,:] = diag(self.comp_mat) # diagonal
for i in xrange(self.KL):
# superdiagonals
self.bandmat_comp[2*self.KL-1-i,i+1:N] = diag(self.comp_mat, i+1)
# subdiagonals
self.bandmat_comp[2*self.KL+1+i,0:N-1-i] = diag(self.comp_mat,-i-1)
# absolute value for linear equation system A*x = b
self.b = 1.0*arange(N)
self.bc = self.b * (1 + 1j)
#####################################################################
def test_dsbev(self):
"""Compare dsbev eigenvalues and eigenvectors with
the result of linalg.eig."""
w, evec, info = dsbev(self.bandmat_sym, compute_v=1)
evec_ = evec[:,argsort(w)]
assert_array_almost_equal(sort(w), self.w_sym_lin)
assert_array_almost_equal(abs(evec_), abs(self.evec_sym_lin))
def test_dsbevd(self):
"""Compare dsbevd eigenvalues and eigenvectors with
the result of linalg.eig."""
w, evec, info = dsbevd(self.bandmat_sym, compute_v=1)
evec_ = evec[:,argsort(w)]
assert_array_almost_equal(sort(w), self.w_sym_lin)
assert_array_almost_equal(abs(evec_), abs(self.evec_sym_lin))
def test_dsbevx(self):
"""Compare dsbevx eigenvalues and eigenvectors
with the result of linalg.eig."""
N,N = shape(self.sym_mat)
## Achtung: Argumente 0.0,0.0,range?
w, evec, num, ifail, info = dsbevx(self.bandmat_sym, 0.0, 0.0, 1, N,
compute_v=1, range=2)
evec_ = evec[:,argsort(w)]
assert_array_almost_equal(sort(w), self.w_sym_lin)
assert_array_almost_equal(abs(evec_), abs(self.evec_sym_lin))
def test_zhbevd(self):
"""Compare zhbevd eigenvalues and eigenvectors
with the result of linalg.eig."""
w, evec, info = zhbevd(self.bandmat_herm, compute_v=1)
evec_ = evec[:,argsort(w)]
assert_array_almost_equal(sort(w), self.w_herm_lin)
assert_array_almost_equal(abs(evec_), abs(self.evec_herm_lin))
def test_zhbevx(self):
"""Compare zhbevx eigenvalues and eigenvectors
with the result of linalg.eig."""
N,N = shape(self.herm_mat)
## Achtung: Argumente 0.0,0.0,range?
w, evec, num, ifail, info = zhbevx(self.bandmat_herm, 0.0, 0.0, 1, N,
compute_v=1, range=2)
evec_ = evec[:,argsort(w)]
assert_array_almost_equal(sort(w), self.w_herm_lin)
assert_array_almost_equal(abs(evec_), abs(self.evec_herm_lin))
def test_eigvals_banded(self):
"""Compare eigenvalues of eigvals_banded with those of linalg.eig."""
w_sym = eigvals_banded(self.bandmat_sym)
w_sym = w_sym.real
assert_array_almost_equal(sort(w_sym), self.w_sym_lin)
w_herm = eigvals_banded(self.bandmat_herm)
w_herm = w_herm.real
assert_array_almost_equal(sort(w_herm), self.w_herm_lin)
# extracting eigenvalues with respect to an index range
ind1 = 2
ind2 = 6
w_sym_ind = eigvals_banded(self.bandmat_sym,
select='i', select_range=(ind1, ind2))
assert_array_almost_equal(sort(w_sym_ind),
self.w_sym_lin[ind1:ind2+1])
w_herm_ind = eigvals_banded(self.bandmat_herm,
select='i', select_range=(ind1, ind2))
assert_array_almost_equal(sort(w_herm_ind),
self.w_herm_lin[ind1:ind2+1])
# extracting eigenvalues with respect to a value range
v_lower = self.w_sym_lin[ind1] - 1.0e-5
v_upper = self.w_sym_lin[ind2] + 1.0e-5
w_sym_val = eigvals_banded(self.bandmat_sym,
select='v', select_range=(v_lower, v_upper))
assert_array_almost_equal(sort(w_sym_val),
self.w_sym_lin[ind1:ind2+1])
v_lower = self.w_herm_lin[ind1] - 1.0e-5
v_upper = self.w_herm_lin[ind2] + 1.0e-5
w_herm_val = eigvals_banded(self.bandmat_herm,
select='v', select_range=(v_lower, v_upper))
assert_array_almost_equal(sort(w_herm_val),
self.w_herm_lin[ind1:ind2+1])
w_sym = eigvals_banded(self.bandmat_sym, check_finite=False)
w_sym = w_sym.real
assert_array_almost_equal(sort(w_sym), self.w_sym_lin)
    def test_eig_banded(self):
        """Compare eigenvalues and eigenvectors of eig_banded
        with those of linalg.eig.

        Eigenvector comparisons use absolute values because eigenvectors
        are only defined up to a sign/phase.
        """
        w_sym, evec_sym = eig_banded(self.bandmat_sym)
        evec_sym_ = evec_sym[:,argsort(w_sym.real)]
        assert_array_almost_equal(sort(w_sym), self.w_sym_lin)
        assert_array_almost_equal(abs(evec_sym_), abs(self.evec_sym_lin))
        w_herm, evec_herm = eig_banded(self.bandmat_herm)
        evec_herm_ = evec_herm[:,argsort(w_herm.real)]
        assert_array_almost_equal(sort(w_herm), self.w_herm_lin)
        assert_array_almost_equal(abs(evec_herm_), abs(self.evec_herm_lin))
        # extracting eigenvalues with respect to an index range
        ind1 = 2
        ind2 = 6
        w_sym_ind, evec_sym_ind = eig_banded(self.bandmat_sym,
                                    select='i', select_range=(ind1, ind2))
        assert_array_almost_equal(sort(w_sym_ind),
                                  self.w_sym_lin[ind1:ind2+1])
        assert_array_almost_equal(abs(evec_sym_ind),
                                  abs(self.evec_sym_lin[:,ind1:ind2+1]))
        w_herm_ind, evec_herm_ind = eig_banded(self.bandmat_herm,
                                    select='i', select_range=(ind1, ind2))
        assert_array_almost_equal(sort(w_herm_ind),
                                  self.w_herm_lin[ind1:ind2+1])
        assert_array_almost_equal(abs(evec_herm_ind),
                                  abs(self.evec_herm_lin[:,ind1:ind2+1]))
        # extracting eigenvalues with respect to a value range
        # (bounds padded by 1e-5 so the interval endpoints are included)
        v_lower = self.w_sym_lin[ind1] - 1.0e-5
        v_upper = self.w_sym_lin[ind2] + 1.0e-5
        w_sym_val, evec_sym_val = eig_banded(self.bandmat_sym,
                                select='v', select_range=(v_lower, v_upper))
        assert_array_almost_equal(sort(w_sym_val),
                                  self.w_sym_lin[ind1:ind2+1])
        assert_array_almost_equal(abs(evec_sym_val),
                                  abs(self.evec_sym_lin[:,ind1:ind2+1]))
        v_lower = self.w_herm_lin[ind1] - 1.0e-5
        v_upper = self.w_herm_lin[ind2] + 1.0e-5
        w_herm_val, evec_herm_val = eig_banded(self.bandmat_herm,
                                select='v', select_range=(v_lower, v_upper))
        assert_array_almost_equal(sort(w_herm_val),
                                  self.w_herm_lin[ind1:ind2+1])
        assert_array_almost_equal(abs(evec_herm_val),
                                  abs(self.evec_herm_lin[:,ind1:ind2+1]))
        # check_finite=False must give the same decomposition.
        w_sym, evec_sym = eig_banded(self.bandmat_sym, check_finite=False)
        evec_sym_ = evec_sym[:,argsort(w_sym.real)]
        assert_array_almost_equal(sort(w_sym), self.w_sym_lin)
        assert_array_almost_equal(abs(evec_sym_), abs(self.evec_sym_lin))
def test_dgbtrf(self):
"""Compare dgbtrf LU factorisation with the LU factorisation result
of linalg.lu."""
M,N = shape(self.real_mat)
lu_symm_band, ipiv, info = dgbtrf(self.bandmat_real, self.KL, self.KU)
# extract matrix u from lu_symm_band
u = diag(lu_symm_band[2*self.KL,:])
for i in xrange(self.KL + self.KU):
u += diag(lu_symm_band[2*self.KL-1-i,i+1:N], i+1)
p_lin, l_lin, u_lin = lu(self.real_mat, permute_l=0)
assert_array_almost_equal(u, u_lin)
def test_zgbtrf(self):
"""Compare zgbtrf LU factorisation with the LU factorisation result
of linalg.lu."""
M,N = shape(self.comp_mat)
lu_symm_band, ipiv, info = zgbtrf(self.bandmat_comp, self.KL, self.KU)
# extract matrix u from lu_symm_band
u = diag(lu_symm_band[2*self.KL,:])
for i in xrange(self.KL + self.KU):
u += diag(lu_symm_band[2*self.KL-1-i,i+1:N], i+1)
p_lin, l_lin, u_lin = lu(self.comp_mat, permute_l=0)
assert_array_almost_equal(u, u_lin)
def test_dgbtrs(self):
"""Compare dgbtrs solutions for linear equation system A*x = b
with solutions of linalg.solve."""
lu_symm_band, ipiv, info = dgbtrf(self.bandmat_real, self.KL, self.KU)
y, info = dgbtrs(lu_symm_band, self.KL, self.KU, self.b, ipiv)
y_lin = linalg.solve(self.real_mat, self.b)
assert_array_almost_equal(y, y_lin)
def test_zgbtrs(self):
"""Compare zgbtrs solutions for linear equation system A*x = b
with solutions of linalg.solve."""
lu_symm_band, ipiv, info = zgbtrf(self.bandmat_comp, self.KL, self.KU)
y, info = zgbtrs(lu_symm_band, self.KL, self.KU, self.bc, ipiv)
y_lin = linalg.solve(self.comp_mat, self.bc)
assert_array_almost_equal(y, y_lin)
def test_eigh():
    """Nose-style generator test: yield every combination of eigh
    parameters for both the standard and the generalized Hermitian
    eigenvalue problem.

    Each yielded tuple is (test_function, description, dim, dtype,
    overwrite, lower, turbo, eigvals).
    """
    DIM = 6
    # Parameter grid swept by the nested loops below.
    v = {'dim': (DIM,),
         'dtype': ('f','d','F','D'),
         'overwrite': (True, False),
         'lower': (True, False),
         'turbo': (True, False),
         'eigvals': (None, (2, DIM-2))}
    for dim in v['dim']:
        for typ in v['dtype']:
            for overwrite in v['overwrite']:
                for turbo in v['turbo']:
                    for eigenvalues in v['eigvals']:
                        for lower in v['lower']:
                            yield (eigenhproblem_standard,
                                   'ordinary',
                                   dim, typ, overwrite, lower,
                                   turbo, eigenvalues)
                            yield (eigenhproblem_general,
                                   'general ',
                                   dim, typ, overwrite, lower,
                                   turbo, eigenvalues)
def test_eigh_of_sparse():
    """eigh must reject sparse inputs (raise ValueError)."""
    # Imported locally so the module does not depend on scipy.sparse.
    import scipy.sparse
    sparse_eye = scipy.sparse.identity(2).tocsc()
    wrapped = np.atleast_2d(sparse_eye)
    for bad_input in (sparse_eye, wrapped):
        assert_raises(ValueError, eigh, bad_input)
def _complex_symrand(dim, dtype):
    """Return a random complex Hermitian matrix of size dim, cast to dtype."""
    re_part = symrand(dim)
    im_part = symrand(dim)
    # A symmetric real part plus an antisymmetric imaginary part
    # yields a Hermitian matrix.
    mat = re_part + 1j*(triu(im_part) - tril(im_part))
    return mat.astype(dtype)
def eigenhproblem_standard(desc, dim, dtype,
                           overwrite, lower, turbo,
                           eigenvalues):
    """Solve a standard eigenvalue problem.

    Builds a random (Hermitian for complex dtypes, symmetric otherwise)
    matrix and checks that z^H A z reproduces the eigenvalues returned
    by eigh.

    NOTE(review): ``turbo`` is accepted but not forwarded to eigh here —
    presumably irrelevant for the standard problem; confirm.
    """
    # Complex dtypes get a Hermitian test matrix, real ones a symmetric one.
    if iscomplex(empty(1, dtype=dtype)):
        a = _complex_symrand(dim, dtype)
    else:
        a = symrand(dim).astype(dtype)
    if overwrite:
        # Keep a pristine copy, since eigh may clobber `a` in place.
        a_c = a.copy()
    else:
        a_c = a
    w, z = eigh(a, overwrite_a=overwrite, lower=lower, eigvals=eigenvalues)
    assert_dtype_equal(z.dtype, dtype)
    w = w.astype(dtype)
    # Diagonalization check: diag(z^H A z) must equal the eigenvalues.
    diag_ = diag(dot(z.T.conj(), dot(a_c, z))).real
    assert_array_almost_equal(diag_, w, DIGITS[dtype])
def eigenhproblem_general(desc, dim, dtype,
                          overwrite, lower, turbo,
                          eigenvalues):
    """Solve a generalized eigenvalue problem A z = w B z.

    Checks both diag(z^H A z) == w and the B-orthonormality of the
    eigenvectors, diag(z^H B z) == 1.
    """
    # Complex dtypes get Hermitian matrices, real ones symmetric matrices;
    # adding 2.1*I makes B positive definite as required by eigh.
    if iscomplex(empty(1, dtype=dtype)):
        a = _complex_symrand(dim, dtype)
        b = _complex_symrand(dim, dtype)+diag([2.1]*dim).astype(dtype)
    else:
        a = symrand(dim).astype(dtype)
        b = symrand(dim).astype(dtype)+diag([2.1]*dim).astype(dtype)
    if overwrite:
        # Keep pristine copies, since eigh may clobber its inputs in place.
        a_c, b_c = a.copy(), b.copy()
    else:
        a_c, b_c = a, b
    w, z = eigh(a, b, overwrite_a=overwrite, lower=lower,
                overwrite_b=overwrite, turbo=turbo, eigvals=eigenvalues)
    assert_dtype_equal(z.dtype, dtype)
    w = w.astype(dtype)
    # Diagonalization check: diag(z^H A z) must equal the eigenvalues.
    diag1_ = diag(dot(z.T.conj(), dot(a_c, z))).real
    assert_array_almost_equal(diag1_, w, DIGITS[dtype])
    # B-orthonormality check: diag(z^H B z) must be all ones.
    diag2_ = diag(dot(z.T.conj(), dot(b_c, z))).real
    assert_array_almost_equal(diag2_, ones(diag2_.shape[0]), DIGITS[dtype])
def test_eigh_integer():
    """Smoke test: eigh must accept integer matrices, standard and general."""
    a = array([[1,2],[2,7]])
    b = array([[3,1],[1,5]])
    # Standard problem, then the generalized problem with the same inputs.
    for args in ((a,), (a, b)):
        w, z = eigh(*args)
class TestLU(TestCase):
    """Tests for scipy.linalg.lu / lu_factor on square, rectangular and
    medium-size random matrices, real and complex."""
    def __init__(self, *args, **kw):
        TestCase.__init__(self, *args, **kw)
        # Simple (intentionally rank-deficient) square matrices.
        self.a = array([[1,2,3],[1,2,3],[2,5,6]])
        self.ca = array([[1,2,3],[1,2,3],[2,5j,6]])
        # Those matrices are more robust to detect problems in permutation
        # matrices than the ones above
        self.b = array([[1,2,3],[4,5,6],[7,8,9]])
        self.cb = array([[1j,2j,3j],[4j,5j,6j],[7j,8j,9j]])
        # Rectangular matrices
        self.hrect = array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 12, 12]])
        self.chrect = 1.j * array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 12, 12]])
        self.vrect = array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 12, 12]])
        self.cvrect = 1.j * array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 12, 12]])
        # Medium sizes matrices
        self.med = rand(30, 40)
        self.cmed = rand(30, 40) + 1.j * rand(30, 40)
    def _test_common(self, data):
        # P L U must reconstruct the input, with and without permute_l.
        p,l,u = lu(data)
        assert_array_almost_equal(dot(dot(p,l),u),data)
        pl,u = lu(data,permute_l=1)
        assert_array_almost_equal(dot(pl,u),data)
    # Simple tests
    def test_simple(self):
        self._test_common(self.a)
    def test_simple_complex(self):
        self._test_common(self.ca)
    def test_simple2(self):
        self._test_common(self.b)
    def test_simple2_complex(self):
        self._test_common(self.cb)
    # rectangular matrices tests
    def test_hrectangular(self):
        self._test_common(self.hrect)
    def test_vrectangular(self):
        self._test_common(self.vrect)
    def test_hrectangular_complex(self):
        self._test_common(self.chrect)
    def test_vrectangular_complex(self):
        self._test_common(self.cvrect)
    # Bigger matrices
    def test_medium1(self):
        """Check lu decomposition on medium size, rectangular matrix."""
        self._test_common(self.med)
    def test_medium1_complex(self):
        """Check lu decomposition on medium size, rectangular matrix."""
        self._test_common(self.cmed)
    def test_check_finite(self):
        # check_finite=False must give the same decomposition.
        p, l, u = lu(self.a, check_finite=False)
        assert_array_almost_equal(dot(dot(p,l),u), self.a)
    def test_simple_known(self):
        # Ticket #1458
        for order in ['C', 'F']:
            A = np.array([[2, 1],[0, 1.]], order=order)
            LU, P = lu_factor(A)
            assert_array_almost_equal(LU, np.array([[2, 1], [0, 1]]))
            assert_array_equal(P, np.array([0, 1]))
class TestLUSingle(TestLU):
    """LU testers for single precision, real and complex.

    Re-runs every TestLU case after down-casting the fixture matrices
    to float32/complex64.
    """
    def __init__(self, *args, **kw):
        TestLU.__init__(self, *args, **kw)
        self.a = self.a.astype(float32)
        self.ca = self.ca.astype(complex64)
        self.b = self.b.astype(float32)
        self.cb = self.cb.astype(complex64)
        self.hrect = self.hrect.astype(float32)
        # BUG FIX: copy-paste slips in the original — ``chrect`` was cast
        # from ``hrect`` (discarding the complex test data) and ``med``/
        # ``cmed`` were cast from ``vrect`` (a different, tiny matrix).
        # Each attribute must be down-cast from itself.
        self.chrect = self.chrect.astype(complex64)
        self.vrect = self.vrect.astype(float32)
        self.cvrect = self.cvrect.astype(complex64)
        self.med = self.med.astype(float32)
        self.cmed = self.cmed.astype(complex64)
class TestLUSolve(TestCase):
    """Check lu_factor + lu_solve against the dense solver."""
    def setUp(self):
        # Deterministic fixtures.
        seed(1234)
    def test_lu(self):
        """lu_solve must match solve for both C- and F-ordered input."""
        base = random((10,10))
        rhs = random((10,))
        for layout in ['C', 'F']:
            mat = np.array(base, order=layout)
            direct = solve(mat, rhs)
            factorization = lu_factor(mat)
            via_lu = lu_solve(factorization, rhs)
            assert_array_almost_equal(direct, via_lu)
    def test_check_finite(self):
        """check_finite=False must not change the solution."""
        mat = random((10,10))
        rhs = random((10,))
        direct = solve(mat, rhs)
        factorization = lu_factor(mat, check_finite=False)
        via_lu = lu_solve(factorization, rhs, check_finite=False)
        assert_array_almost_equal(direct, via_lu)
class TestSVD(TestCase):
    """Tests for scipy.linalg.svd: orthogonality of U and Vh and exact
    reconstruction U * Sigma * Vh == A for square, under-/over-determined,
    random and complex matrices, plus regression smoke tests."""
    def setUp(self):
        # Deterministic fixtures.
        seed(1234)
    def test_simple(self):
        a = [[1,2,3],[1,20,3],[2,5,6]]
        for full_matrices in (True, False):
            u,s,vh = svd(a, full_matrices=full_matrices)
            assert_array_almost_equal(dot(transpose(u),u),identity(3))
            assert_array_almost_equal(dot(transpose(vh),vh),identity(3))
            # Rebuild the dense Sigma and check the reconstruction.
            sigma = zeros((u.shape[0],vh.shape[0]),s.dtype.char)
            for i in range(len(s)):
                sigma[i,i] = s[i]
            assert_array_almost_equal(dot(dot(u,sigma),vh),a)
    def test_simple_singular(self):
        # Rank-deficient input (two equal rows).
        a = [[1,2,3],[1,2,3],[2,5,6]]
        for full_matrices in (True, False):
            u,s,vh = svd(a, full_matrices=full_matrices)
            assert_array_almost_equal(dot(transpose(u),u),identity(3))
            assert_array_almost_equal(dot(transpose(vh),vh),identity(3))
            sigma = zeros((u.shape[0],vh.shape[0]),s.dtype.char)
            for i in range(len(s)):
                sigma[i,i] = s[i]
            assert_array_almost_equal(dot(dot(u,sigma),vh),a)
    def test_simple_underdet(self):
        # More columns than rows.
        a = [[1,2,3],[4,5,6]]
        for full_matrices in (True, False):
            u,s,vh = svd(a, full_matrices=full_matrices)
            assert_array_almost_equal(dot(transpose(u),u),identity(u.shape[0]))
            sigma = zeros((u.shape[0],vh.shape[0]),s.dtype.char)
            for i in range(len(s)):
                sigma[i,i] = s[i]
            assert_array_almost_equal(dot(dot(u,sigma),vh),a)
    def test_simple_overdet(self):
        # More rows than columns.
        a = [[1,2],[4,5],[3,4]]
        for full_matrices in (True, False):
            u,s,vh = svd(a, full_matrices=full_matrices)
            assert_array_almost_equal(dot(transpose(u),u), identity(u.shape[1]))
            assert_array_almost_equal(dot(transpose(vh),vh),identity(2))
            sigma = zeros((u.shape[1],vh.shape[0]),s.dtype.char)
            for i in range(len(s)):
                sigma[i,i] = s[i]
            assert_array_almost_equal(dot(dot(u,sigma),vh),a)
    def test_random(self):
        n = 20
        m = 15
        for i in range(3):
            for a in [random([n,m]),random([m,n])]:
                for full_matrices in (True, False):
                    u,s,vh = svd(a, full_matrices=full_matrices)
                    assert_array_almost_equal(dot(transpose(u),u),identity(u.shape[1]))
                    assert_array_almost_equal(dot(vh, transpose(vh)),identity(vh.shape[0]))
                    sigma = zeros((u.shape[1],vh.shape[0]),s.dtype.char)
                    for i in range(len(s)):
                        sigma[i,i] = s[i]
                    assert_array_almost_equal(dot(dot(u,sigma),vh),a)
    def test_simple_complex(self):
        a = [[1,2,3],[1,2j,3],[2,5,6]]
        for full_matrices in (True, False):
            u,s,vh = svd(a, full_matrices=full_matrices)
            # Complex case: orthogonality becomes unitarity (conjugate
            # transpose).
            assert_array_almost_equal(dot(conj(transpose(u)),u),identity(u.shape[1]))
            assert_array_almost_equal(dot(conj(transpose(vh)),vh),identity(vh.shape[0]))
            sigma = zeros((u.shape[0],vh.shape[0]),s.dtype.char)
            for i in range(len(s)):
                sigma[i,i] = s[i]
            assert_array_almost_equal(dot(dot(u,sigma),vh),a)
    def test_random_complex(self):
        n = 20
        m = 15
        for i in range(3):
            for full_matrices in (True, False):
                for a in [random([n,m]),random([m,n])]:
                    a = a + 1j*random(list(a.shape))
                    u,s,vh = svd(a, full_matrices=full_matrices)
                    assert_array_almost_equal(dot(conj(transpose(u)),u),identity(u.shape[1]))
                    # This fails when [m,n]
                    # assert_array_almost_equal(dot(conj(transpose(vh)),vh),identity(len(vh),dtype=vh.dtype.char))
                    sigma = zeros((u.shape[1],vh.shape[0]),s.dtype.char)
                    for i in range(len(s)):
                        sigma[i,i] = s[i]
                    assert_array_almost_equal(dot(dot(u,sigma),vh),a)
    def test_crash_1580(self):
        # Regression test: certain sizes/dtypes used to crash LAPACK.
        sizes = [(13, 23), (30, 50), (60, 100)]
        np.random.seed(1234)
        for sz in sizes:
            for dt in [np.float32, np.float64, np.complex64, np.complex128]:
                a = np.random.rand(*sz).astype(dt)
                # should not crash
                svd(a)
    def test_check_finite(self):
        # check_finite=False must give the same decomposition.
        a = [[1,2,3],[1,20,3],[2,5,6]]
        u,s,vh = svd(a, check_finite=False)
        assert_array_almost_equal(dot(transpose(u),u),identity(3))
        assert_array_almost_equal(dot(transpose(vh),vh),identity(3))
        sigma = zeros((u.shape[0],vh.shape[0]),s.dtype.char)
        for i in range(len(s)):
            sigma[i,i] = s[i]
        assert_array_almost_equal(dot(dot(u,sigma),vh),a)
    def test_gh_5039(self):
        # This is a smoke test for https://github.com/scipy/scipy/issues/5039
        #
        # The following is reported to raise "ValueError: On entry to DGESDD
        # parameter number 12 had an illegal value".
        # `interp1d([1,2,3,4], [1,2,3,4], kind='cubic')`
        # This is reported to only show up on LAPACK 3.0.3.
        #
        # The matrix below is taken from the call to
        # `B = _fitpack._bsplmat(order, xk)` in interpolate._find_smoothest
        b = np.array(
            [[0.16666667, 0.66666667, 0.16666667, 0., 0., 0.],
             [0., 0.16666667, 0.66666667, 0.16666667, 0., 0.],
             [0., 0., 0.16666667, 0.66666667, 0.16666667, 0.],
             [0., 0., 0., 0.16666667, 0.66666667, 0.16666667]])
        svd(b)
class TestSVDVals(TestCase):
    """Tests for scipy.linalg.svdvals: count and ordering of singular
    values for empty, square, rectangular and complex inputs."""
    def test_empty(self):
        # Degenerate shapes must yield an empty singular-value array.
        for a in [[]], np.empty((2, 0)), np.ones((0, 3)):
            s = svdvals(a)
            assert_equal(s, np.empty(0))
    def test_simple(self):
        a = [[1,2,3],[1,2,3],[2,5,6]]
        s = svdvals(a)
        assert_(len(s) == 3)
        # Singular values come back sorted in descending order.
        assert_(s[0] >= s[1] >= s[2])
    def test_simple_underdet(self):
        a = [[1,2,3],[4,5,6]]
        s = svdvals(a)
        assert_(len(s) == 2)
        assert_(s[0] >= s[1])
    def test_simple_overdet(self):
        a = [[1,2],[4,5],[3,4]]
        s = svdvals(a)
        assert_(len(s) == 2)
        assert_(s[0] >= s[1])
    def test_simple_complex(self):
        a = [[1,2,3],[1,20,3j],[2,5,6]]
        s = svdvals(a)
        assert_(len(s) == 3)
        assert_(s[0] >= s[1] >= s[2])
    def test_simple_underdet_complex(self):
        a = [[1,2,3],[4,5j,6]]
        s = svdvals(a)
        assert_(len(s) == 2)
        assert_(s[0] >= s[1])
    def test_simple_overdet_complex(self):
        a = [[1,2],[4,5],[3j,4]]
        s = svdvals(a)
        assert_(len(s) == 2)
        assert_(s[0] >= s[1])
    def test_check_finite(self):
        a = [[1,2,3],[1,2,3],[2,5,6]]
        s = svdvals(a, check_finite=False)
        assert_(len(s) == 3)
        assert_(s[0] >= s[1] >= s[2])
    # NOTE(review): ``dec.slow`` is the legacy numpy.testing decorator —
    # removed in modern numpy; presumably fine for the scipy version this
    # file targets.
    @dec.slow
    def test_crash_2609(self):
        np.random.seed(1234)
        a = np.random.rand(1500, 2800)
        # Shouldn't crash:
        svdvals(a)
class TestDiagSVD(TestCase):
    """Test for scipy.linalg.diagsvd."""
    def test_simple(self):
        """diagsvd must place the singular values on the diagonal of an
        M-by-N zero matrix."""
        expected = [[1,0,0],[0,0,0],[0,0,0]]
        assert_array_almost_equal(diagsvd([1,0,0],3,3), expected)
class TestQR(TestCase):
    """Tests for scipy.linalg.qr and qr_multiply: full/economic modes,
    pivoting, left/right multiplication, complex inputs and lwork
    handling.

    Only change from the original: the two ``assert_raises`` calls in
    ``test_lwork`` now pass ``a``/``lwork`` as real call arguments (the
    originals passed a tuple and a dict positionally, so ``qr`` raised on
    the bogus input and the lwork validation was never exercised).
    """
    def setUp(self):
        seed(1234)
    def test_simple(self):
        a = [[8,2,3],[2,9,3],[5,3,6]]
        q,r = qr(a)
        assert_array_almost_equal(dot(transpose(q),q),identity(3))
        assert_array_almost_equal(dot(q,r),a)
    def test_simple_left(self):
        a = [[8,2,3],[2,9,3],[5,3,6]]
        q,r = qr(a)
        c = [1, 2, 3]
        qc,r2 = qr_multiply(a, c, "left")
        assert_array_almost_equal(dot(q, c), qc)
        assert_array_almost_equal(r, r2)
        qc,r2 = qr_multiply(a, identity(3), "left")
        assert_array_almost_equal(q, qc)
    def test_simple_right(self):
        a = [[8,2,3],[2,9,3],[5,3,6]]
        q,r = qr(a)
        c = [1, 2, 3]
        qc,r2 = qr_multiply(a, c)
        assert_array_almost_equal(dot(c, q), qc)
        assert_array_almost_equal(r, r2)
        qc,r = qr_multiply(a, identity(3))
        assert_array_almost_equal(q, qc)
    def test_simple_pivoting(self):
        a = np.asarray([[8,2,3],[2,9,3],[5,3,6]])
        q,r,p = qr(a, pivoting=True)
        d = abs(diag(r))
        # Pivoting must produce a non-increasing |diag(R)|.
        assert_(all(d[1:] <= d[:-1]))
        assert_array_almost_equal(dot(transpose(q),q),identity(3))
        assert_array_almost_equal(dot(q,r),a[:,p])
        q2,r2 = qr(a[:,p])
        assert_array_almost_equal(q,q2)
        assert_array_almost_equal(r,r2)
    def test_simple_left_pivoting(self):
        a = [[8,2,3],[2,9,3],[5,3,6]]
        q,r,jpvt = qr(a, pivoting=True)
        c = [1, 2, 3]
        qc,r,jpvt = qr_multiply(a, c, "left", True)
        assert_array_almost_equal(dot(q, c), qc)
    def test_simple_right_pivoting(self):
        a = [[8,2,3],[2,9,3],[5,3,6]]
        q,r,jpvt = qr(a, pivoting=True)
        c = [1, 2, 3]
        qc,r,jpvt = qr_multiply(a, c, pivoting=True)
        assert_array_almost_equal(dot(c, q), qc)
    def test_simple_trap(self):
        a = [[8,2,3],[2,9,3]]
        q,r = qr(a)
        assert_array_almost_equal(dot(transpose(q),q),identity(2))
        assert_array_almost_equal(dot(q,r),a)
    def test_simple_trap_pivoting(self):
        a = np.asarray([[8,2,3],[2,9,3]])
        q,r,p = qr(a, pivoting=True)
        d = abs(diag(r))
        assert_(all(d[1:] <= d[:-1]))
        assert_array_almost_equal(dot(transpose(q),q),identity(2))
        assert_array_almost_equal(dot(q,r),a[:,p])
        q2,r2 = qr(a[:,p])
        assert_array_almost_equal(q,q2)
        assert_array_almost_equal(r,r2)
    def test_simple_tall(self):
        # full version
        a = [[8,2],[2,9],[5,3]]
        q,r = qr(a)
        assert_array_almost_equal(dot(transpose(q),q),identity(3))
        assert_array_almost_equal(dot(q,r),a)
    def test_simple_tall_pivoting(self):
        # full version pivoting
        a = np.asarray([[8,2],[2,9],[5,3]])
        q,r,p = qr(a, pivoting=True)
        d = abs(diag(r))
        assert_(all(d[1:] <= d[:-1]))
        assert_array_almost_equal(dot(transpose(q),q),identity(3))
        assert_array_almost_equal(dot(q,r),a[:,p])
        q2,r2 = qr(a[:,p])
        assert_array_almost_equal(q,q2)
        assert_array_almost_equal(r,r2)
    def test_simple_tall_e(self):
        # economy version
        a = [[8,2],[2,9],[5,3]]
        q,r = qr(a, mode='economic')
        assert_array_almost_equal(dot(transpose(q),q),identity(2))
        assert_array_almost_equal(dot(q,r),a)
        assert_equal(q.shape, (3,2))
        assert_equal(r.shape, (2,2))
    def test_simple_tall_e_pivoting(self):
        # economy version pivoting
        a = np.asarray([[8,2],[2,9],[5,3]])
        q,r,p = qr(a, pivoting=True, mode='economic')
        d = abs(diag(r))
        assert_(all(d[1:] <= d[:-1]))
        assert_array_almost_equal(dot(transpose(q),q),identity(2))
        assert_array_almost_equal(dot(q,r),a[:,p])
        q2,r2 = qr(a[:,p], mode='economic')
        assert_array_almost_equal(q,q2)
        assert_array_almost_equal(r,r2)
    def test_simple_tall_left(self):
        a = [[8,2],[2,9],[5,3]]
        q,r = qr(a, mode="economic")
        c = [1, 2]
        qc,r2 = qr_multiply(a, c, "left")
        assert_array_almost_equal(dot(q, c), qc)
        assert_array_almost_equal(r, r2)
        c = array([1,2,0])
        qc,r2 = qr_multiply(a, c, "left", overwrite_c=True)
        assert_array_almost_equal(dot(q, c[:2]), qc)
        qc,r = qr_multiply(a, identity(2), "left")
        assert_array_almost_equal(qc, q)
    def test_simple_tall_left_pivoting(self):
        a = [[8,2],[2,9],[5,3]]
        q,r,jpvt = qr(a, mode="economic", pivoting=True)
        c = [1, 2]
        qc,r,kpvt = qr_multiply(a, c, "left", True)
        assert_array_equal(jpvt, kpvt)
        assert_array_almost_equal(dot(q, c), qc)
        qc,r,jpvt = qr_multiply(a, identity(2), "left", True)
        assert_array_almost_equal(qc, q)
    def test_simple_tall_right(self):
        a = [[8,2],[2,9],[5,3]]
        q,r = qr(a, mode="economic")
        c = [1, 2, 3]
        cq,r2 = qr_multiply(a, c)
        assert_array_almost_equal(dot(c, q), cq)
        assert_array_almost_equal(r, r2)
        cq,r = qr_multiply(a, identity(3))
        assert_array_almost_equal(cq, q)
    def test_simple_tall_right_pivoting(self):
        a = [[8,2],[2,9],[5,3]]
        q,r,jpvt = qr(a, pivoting=True, mode="economic")
        c = [1, 2, 3]
        cq,r,jpvt = qr_multiply(a, c, pivoting=True)
        assert_array_almost_equal(dot(c, q), cq)
        cq,r,jpvt = qr_multiply(a, identity(3), pivoting=True)
        assert_array_almost_equal(cq, q)
    def test_simple_fat(self):
        # full version
        a = [[8,2,5],[2,9,3]]
        q,r = qr(a)
        assert_array_almost_equal(dot(transpose(q),q),identity(2))
        assert_array_almost_equal(dot(q,r),a)
        assert_equal(q.shape, (2,2))
        assert_equal(r.shape, (2,3))
    def test_simple_fat_pivoting(self):
        # full version pivoting
        a = np.asarray([[8,2,5],[2,9,3]])
        q,r,p = qr(a, pivoting=True)
        d = abs(diag(r))
        assert_(all(d[1:] <= d[:-1]))
        assert_array_almost_equal(dot(transpose(q),q),identity(2))
        assert_array_almost_equal(dot(q,r),a[:,p])
        assert_equal(q.shape, (2,2))
        assert_equal(r.shape, (2,3))
        q2,r2 = qr(a[:,p])
        assert_array_almost_equal(q,q2)
        assert_array_almost_equal(r,r2)
    def test_simple_fat_e(self):
        # economy version
        a = [[8,2,3],[2,9,5]]
        q,r = qr(a, mode='economic')
        assert_array_almost_equal(dot(transpose(q),q),identity(2))
        assert_array_almost_equal(dot(q,r),a)
        assert_equal(q.shape, (2,2))
        assert_equal(r.shape, (2,3))
    def test_simple_fat_e_pivoting(self):
        # economy version pivoting
        a = np.asarray([[8,2,3],[2,9,5]])
        q,r,p = qr(a, pivoting=True, mode='economic')
        d = abs(diag(r))
        assert_(all(d[1:] <= d[:-1]))
        assert_array_almost_equal(dot(transpose(q),q),identity(2))
        assert_array_almost_equal(dot(q,r),a[:,p])
        assert_equal(q.shape, (2,2))
        assert_equal(r.shape, (2,3))
        q2,r2 = qr(a[:,p], mode='economic')
        assert_array_almost_equal(q,q2)
        assert_array_almost_equal(r,r2)
    def test_simple_fat_left(self):
        a = [[8,2,3],[2,9,5]]
        q,r = qr(a, mode="economic")
        c = [1, 2]
        qc,r2 = qr_multiply(a, c, "left")
        assert_array_almost_equal(dot(q, c), qc)
        assert_array_almost_equal(r, r2)
        qc,r = qr_multiply(a, identity(2), "left")
        assert_array_almost_equal(qc, q)
    def test_simple_fat_left_pivoting(self):
        a = [[8,2,3],[2,9,5]]
        q,r,jpvt = qr(a, mode="economic", pivoting=True)
        c = [1, 2]
        qc,r,jpvt = qr_multiply(a, c, "left", True)
        assert_array_almost_equal(dot(q, c), qc)
        qc,r,jpvt = qr_multiply(a, identity(2), "left", True)
        assert_array_almost_equal(qc, q)
    def test_simple_fat_right(self):
        a = [[8,2,3],[2,9,5]]
        q,r = qr(a, mode="economic")
        c = [1, 2]
        cq,r2 = qr_multiply(a, c)
        assert_array_almost_equal(dot(c, q), cq)
        assert_array_almost_equal(r, r2)
        cq,r = qr_multiply(a, identity(2))
        assert_array_almost_equal(cq, q)
    def test_simple_fat_right_pivoting(self):
        a = [[8,2,3],[2,9,5]]
        q,r,jpvt = qr(a, pivoting=True, mode="economic")
        c = [1, 2]
        cq,r,jpvt = qr_multiply(a, c, pivoting=True)
        assert_array_almost_equal(dot(c, q), cq)
        cq,r,jpvt = qr_multiply(a, identity(2), pivoting=True)
        assert_array_almost_equal(cq, q)
    def test_simple_complex(self):
        a = [[3,3+4j,5],[5,2,2+7j],[3,2,7]]
        q,r = qr(a)
        assert_array_almost_equal(dot(conj(transpose(q)),q),identity(3))
        assert_array_almost_equal(dot(q,r),a)
    def test_simple_complex_left(self):
        a = [[3,3+4j,5],[5,2,2+7j],[3,2,7]]
        q,r = qr(a)
        c = [1, 2, 3+4j]
        qc,r = qr_multiply(a, c, "left")
        assert_array_almost_equal(dot(q, c), qc)
        qc,r = qr_multiply(a, identity(3), "left")
        assert_array_almost_equal(q, qc)
    def test_simple_complex_right(self):
        a = [[3,3+4j,5],[5,2,2+7j],[3,2,7]]
        q,r = qr(a)
        c = [1, 2, 3+4j]
        qc,r = qr_multiply(a, c)
        assert_array_almost_equal(dot(c, q), qc)
        qc,r = qr_multiply(a, identity(3))
        assert_array_almost_equal(q, qc)
    def test_simple_tall_complex_left(self):
        a = [[8,2+3j],[2,9],[5+7j,3]]
        q,r = qr(a, mode="economic")
        c = [1, 2+2j]
        qc,r2 = qr_multiply(a, c, "left")
        assert_array_almost_equal(dot(q, c), qc)
        assert_array_almost_equal(r, r2)
        c = array([1,2,0])
        qc,r2 = qr_multiply(a, c, "left", overwrite_c=True)
        assert_array_almost_equal(dot(q, c[:2]), qc)
        qc,r = qr_multiply(a, identity(2), "left")
        assert_array_almost_equal(qc, q)
    def test_simple_complex_left_conjugate(self):
        a = [[3,3+4j,5],[5,2,2+7j],[3,2,7]]
        q,r = qr(a)
        c = [1, 2, 3+4j]
        qc,r = qr_multiply(a, c, "left", conjugate=True)
        assert_array_almost_equal(dot(q.conjugate(), c), qc)
    def test_simple_complex_tall_left_conjugate(self):
        a = [[3,3+4j],[5,2+2j],[3,2]]
        q,r = qr(a, mode='economic')
        c = [1, 3+4j]
        qc,r = qr_multiply(a, c, "left", conjugate=True)
        assert_array_almost_equal(dot(q.conjugate(), c), qc)
    def test_simple_complex_right_conjugate(self):
        a = [[3,3+4j,5],[5,2,2+7j],[3,2,7]]
        q,r = qr(a)
        c = [1, 2, 3+4j]
        qc,r = qr_multiply(a, c, conjugate=True)
        assert_array_almost_equal(dot(c, q.conjugate()), qc)
    def test_simple_complex_pivoting(self):
        a = np.asarray([[3,3+4j,5],[5,2,2+7j],[3,2,7]])
        q,r,p = qr(a, pivoting=True)
        d = abs(diag(r))
        assert_(all(d[1:] <= d[:-1]))
        assert_array_almost_equal(dot(conj(transpose(q)),q),identity(3))
        assert_array_almost_equal(dot(q,r),a[:,p])
        q2,r2 = qr(a[:,p])
        assert_array_almost_equal(q,q2)
        assert_array_almost_equal(r,r2)
    def test_simple_complex_left_pivoting(self):
        a = np.asarray([[3,3+4j,5],[5,2,2+7j],[3,2,7]])
        q,r,jpvt = qr(a, pivoting=True)
        c = [1, 2, 3+4j]
        qc,r,jpvt = qr_multiply(a, c, "left", True)
        assert_array_almost_equal(dot(q, c), qc)
    def test_simple_complex_right_pivoting(self):
        a = np.asarray([[3,3+4j,5],[5,2,2+7j],[3,2,7]])
        q,r,jpvt = qr(a, pivoting=True)
        c = [1, 2, 3+4j]
        qc,r,jpvt = qr_multiply(a, c, pivoting=True)
        assert_array_almost_equal(dot(c, q), qc)
    def test_random(self):
        n = 20
        for k in range(2):
            a = random([n,n])
            q,r = qr(a)
            assert_array_almost_equal(dot(transpose(q),q),identity(n))
            assert_array_almost_equal(dot(q,r),a)
    def test_random_left(self):
        n = 20
        for k in range(2):
            a = random([n,n])
            q,r = qr(a)
            c = random([n])
            qc,r = qr_multiply(a, c, "left")
            assert_array_almost_equal(dot(q, c), qc)
            qc,r = qr_multiply(a, identity(n), "left")
            assert_array_almost_equal(q, qc)
    def test_random_right(self):
        n = 20
        for k in range(2):
            a = random([n,n])
            q,r = qr(a)
            c = random([n])
            cq,r = qr_multiply(a, c)
            assert_array_almost_equal(dot(c, q), cq)
            cq,r = qr_multiply(a, identity(n))
            assert_array_almost_equal(q, cq)
    def test_random_pivoting(self):
        n = 20
        for k in range(2):
            a = random([n,n])
            q,r,p = qr(a, pivoting=True)
            d = abs(diag(r))
            assert_(all(d[1:] <= d[:-1]))
            assert_array_almost_equal(dot(transpose(q),q),identity(n))
            assert_array_almost_equal(dot(q,r),a[:,p])
            q2,r2 = qr(a[:,p])
            assert_array_almost_equal(q,q2)
            assert_array_almost_equal(r,r2)
    def test_random_tall(self):
        # full version
        m = 200
        n = 100
        for k in range(2):
            a = random([m,n])
            q,r = qr(a)
            assert_array_almost_equal(dot(transpose(q),q),identity(m))
            assert_array_almost_equal(dot(q,r),a)
    def test_random_tall_left(self):
        # full version
        m = 200
        n = 100
        for k in range(2):
            a = random([m,n])
            q,r = qr(a, mode="economic")
            c = random([n])
            qc,r = qr_multiply(a, c, "left")
            assert_array_almost_equal(dot(q, c), qc)
            qc,r = qr_multiply(a, identity(n), "left")
            assert_array_almost_equal(qc, q)
    def test_random_tall_right(self):
        # full version
        m = 200
        n = 100
        for k in range(2):
            a = random([m,n])
            q,r = qr(a, mode="economic")
            c = random([m])
            cq,r = qr_multiply(a, c)
            assert_array_almost_equal(dot(c, q), cq)
            cq,r = qr_multiply(a, identity(m))
            assert_array_almost_equal(cq, q)
    def test_random_tall_pivoting(self):
        # full version pivoting
        m = 200
        n = 100
        for k in range(2):
            a = random([m,n])
            q,r,p = qr(a, pivoting=True)
            d = abs(diag(r))
            assert_(all(d[1:] <= d[:-1]))
            assert_array_almost_equal(dot(transpose(q),q),identity(m))
            assert_array_almost_equal(dot(q,r),a[:,p])
            q2,r2 = qr(a[:,p])
            assert_array_almost_equal(q,q2)
            assert_array_almost_equal(r,r2)
    def test_random_tall_e(self):
        # economy version
        m = 200
        n = 100
        for k in range(2):
            a = random([m,n])
            q,r = qr(a, mode='economic')
            assert_array_almost_equal(dot(transpose(q),q),identity(n))
            assert_array_almost_equal(dot(q,r),a)
            assert_equal(q.shape, (m,n))
            assert_equal(r.shape, (n,n))
    def test_random_tall_e_pivoting(self):
        # economy version pivoting
        m = 200
        n = 100
        for k in range(2):
            a = random([m,n])
            q,r,p = qr(a, pivoting=True, mode='economic')
            d = abs(diag(r))
            assert_(all(d[1:] <= d[:-1]))
            assert_array_almost_equal(dot(transpose(q),q),identity(n))
            assert_array_almost_equal(dot(q,r),a[:,p])
            assert_equal(q.shape, (m,n))
            assert_equal(r.shape, (n,n))
            q2,r2 = qr(a[:,p], mode='economic')
            assert_array_almost_equal(q,q2)
            assert_array_almost_equal(r,r2)
    def test_random_trap(self):
        m = 100
        n = 200
        for k in range(2):
            a = random([m,n])
            q,r = qr(a)
            assert_array_almost_equal(dot(transpose(q),q),identity(m))
            assert_array_almost_equal(dot(q,r),a)
    def test_random_trap_pivoting(self):
        m = 100
        n = 200
        for k in range(2):
            a = random([m,n])
            q,r,p = qr(a, pivoting=True)
            d = abs(diag(r))
            assert_(all(d[1:] <= d[:-1]))
            assert_array_almost_equal(dot(transpose(q),q),identity(m))
            assert_array_almost_equal(dot(q,r),a[:,p])
            q2,r2 = qr(a[:,p])
            assert_array_almost_equal(q,q2)
            assert_array_almost_equal(r,r2)
    def test_random_complex(self):
        n = 20
        for k in range(2):
            a = random([n,n])+1j*random([n,n])
            q,r = qr(a)
            assert_array_almost_equal(dot(conj(transpose(q)),q),identity(n))
            assert_array_almost_equal(dot(q,r),a)
    def test_random_complex_left(self):
        n = 20
        for k in range(2):
            a = random([n,n])+1j*random([n,n])
            q,r = qr(a)
            c = random([n])+1j*random([n])
            qc,r = qr_multiply(a, c, "left")
            assert_array_almost_equal(dot(q, c), qc)
            qc,r = qr_multiply(a, identity(n), "left")
            assert_array_almost_equal(q, qc)
    def test_random_complex_right(self):
        n = 20
        for k in range(2):
            a = random([n,n])+1j*random([n,n])
            q,r = qr(a)
            c = random([n])+1j*random([n])
            cq,r = qr_multiply(a, c)
            assert_array_almost_equal(dot(c, q), cq)
            cq,r = qr_multiply(a, identity(n))
            assert_array_almost_equal(q, cq)
    def test_random_complex_pivoting(self):
        n = 20
        for k in range(2):
            a = random([n,n])+1j*random([n,n])
            q,r,p = qr(a, pivoting=True)
            d = abs(diag(r))
            assert_(all(d[1:] <= d[:-1]))
            assert_array_almost_equal(dot(conj(transpose(q)),q),identity(n))
            assert_array_almost_equal(dot(q,r),a[:,p])
            q2,r2 = qr(a[:,p])
            assert_array_almost_equal(q,q2)
            assert_array_almost_equal(r,r2)
    def test_check_finite(self):
        a = [[8,2,3],[2,9,3],[5,3,6]]
        q,r = qr(a, check_finite=False)
        assert_array_almost_equal(dot(transpose(q),q),identity(3))
        assert_array_almost_equal(dot(q,r),a)
    def test_lwork(self):
        a = [[8,2,3],[2,9,3],[5,3,6]]
        # Get comparison values
        q,r = qr(a, lwork=None)
        # Test against minimum valid lwork
        q2,r2 = qr(a, lwork=3)
        assert_array_almost_equal(q2,q)
        assert_array_almost_equal(r2,r)
        # Test against larger lwork
        q3,r3 = qr(a, lwork=10)
        assert_array_almost_equal(q3,q)
        assert_array_almost_equal(r3,r)
        # Test against explicit lwork=-1
        q4,r4 = qr(a, lwork=-1)
        assert_array_almost_equal(q4,q)
        assert_array_almost_equal(r4,r)
        # Test against invalid lwork
        # BUG FIX: the original passed ``(a,)`` and ``{'lwork': 0}`` as
        # positional arguments, i.e. called ``qr((a,), {'lwork': 0})`` —
        # qr raised on the bogus 3-D input, so the lwork validation was
        # never actually tested.
        assert_raises(Exception, qr, a, lwork=0)
        assert_raises(Exception, qr, a, lwork=2)
class TestRQ(TestCase):
    """Tests for scipy.linalg.rq: orthogonality of Q and reconstruction
    R*Q == A for square, rectangular, random and complex matrices, in
    full and economic modes."""
    def setUp(self):
        # Deterministic fixtures.
        seed(1234)
    def test_simple(self):
        a = [[8,2,3],[2,9,3],[5,3,6]]
        r,q = rq(a)
        assert_array_almost_equal(dot(q, transpose(q)),identity(3))
        assert_array_almost_equal(dot(r,q),a)
    def test_r(self):
        # mode='r' must return the same R without computing Q.
        a = [[8,2,3],[2,9,3],[5,3,6]]
        r,q = rq(a)
        r2 = rq(a, mode='r')
        assert_array_almost_equal(r, r2)
    def test_random(self):
        n = 20
        for k in range(2):
            a = random([n,n])
            r,q = rq(a)
            assert_array_almost_equal(dot(q, transpose(q)),identity(n))
            assert_array_almost_equal(dot(r,q),a)
    def test_simple_trap(self):
        a = [[8,2,3],[2,9,3]]
        r,q = rq(a)
        assert_array_almost_equal(dot(transpose(q),q),identity(3))
        assert_array_almost_equal(dot(r,q),a)
    def test_simple_tall(self):
        a = [[8,2],[2,9],[5,3]]
        r,q = rq(a)
        assert_array_almost_equal(dot(transpose(q),q),identity(2))
        assert_array_almost_equal(dot(r,q),a)
    def test_simple_fat(self):
        a = [[8,2,5],[2,9,3]]
        r,q = rq(a)
        assert_array_almost_equal(dot(transpose(q),q),identity(3))
        assert_array_almost_equal(dot(r,q),a)
    def test_simple_complex(self):
        # Complex case: orthogonality becomes unitarity (conjugate
        # transpose).
        a = [[3,3+4j,5],[5,2,2+7j],[3,2,7]]
        r,q = rq(a)
        assert_array_almost_equal(dot(q, conj(transpose(q))),identity(3))
        assert_array_almost_equal(dot(r,q),a)
    def test_random_tall(self):
        m = 200
        n = 100
        for k in range(2):
            a = random([m,n])
            r,q = rq(a)
            assert_array_almost_equal(dot(q, transpose(q)),identity(n))
            assert_array_almost_equal(dot(r,q),a)
    def test_random_trap(self):
        m = 100
        n = 200
        for k in range(2):
            a = random([m,n])
            r,q = rq(a)
            assert_array_almost_equal(dot(q, transpose(q)),identity(n))
            assert_array_almost_equal(dot(r,q),a)
    def test_random_trap_economic(self):
        m = 100
        n = 200
        for k in range(2):
            a = random([m,n])
            r,q = rq(a, mode='economic')
            assert_array_almost_equal(dot(q,transpose(q)),identity(m))
            assert_array_almost_equal(dot(r,q),a)
            assert_equal(q.shape, (m, n))
            assert_equal(r.shape, (m, m))
    def test_random_complex(self):
        n = 20
        for k in range(2):
            a = random([n,n])+1j*random([n,n])
            r,q = rq(a)
            assert_array_almost_equal(dot(q, conj(transpose(q))),identity(n))
            assert_array_almost_equal(dot(r,q),a)
    def test_random_complex_economic(self):
        m = 100
        n = 200
        for k in range(2):
            a = random([m,n])+1j*random([m,n])
            r,q = rq(a, mode='economic')
            assert_array_almost_equal(dot(q,conj(transpose(q))),identity(m))
            assert_array_almost_equal(dot(r,q),a)
            assert_equal(q.shape, (m, n))
            assert_equal(r.shape, (m, m))
    def test_check_finite(self):
        # check_finite=False must give the same decomposition.
        a = [[8,2,3],[2,9,3],[5,3,6]]
        r,q = rq(a, check_finite=False)
        assert_array_almost_equal(dot(q, transpose(q)),identity(3))
        assert_array_almost_equal(dot(r,q),a)
# Module-level aliases used by the Schur tests below.
transp = transpose
# NOTE(review): deliberately rebinds the builtin ``any`` to numpy's
# ``sometrue`` (elementwise any over arrays) — this shadows the builtin
# for the remainder of the module.
any = sometrue
class TestSchur(TestCase):
    """Tests for the Schur decomposition (schur) and rsf2csf."""
    def test_simple(self):
        """Real and complex Schur forms both reconstruct the input."""
        a = [[8,12,3],[2,9,3],[10,3,6]]
        t,z = schur(a)
        assert_array_almost_equal(dot(dot(z,t),transp(conj(z))),a)
        tc,zc = schur(a,'complex')
        # Complex form must actually contain complex entries here.
        assert_(any(ravel(iscomplex(zc))) and any(ravel(iscomplex(tc))))
        assert_array_almost_equal(dot(dot(zc,tc),transp(conj(zc))),a)
        tc2,zc2 = rsf2csf(tc,zc)
        assert_array_almost_equal(dot(dot(zc2,tc2),transp(conj(zc2))),a)
    def test_sort(self):
        """Eigenvalue sorting: 'lhp', 'rhp', 'iuc', 'ouc' and a callable."""
        a = [[4.,3.,1.,-1.],[-4.5,-3.5,-1.,1.],[9.,6.,-4.,4.5],[6.,4.,-3.,3.5]]
        s,u,sdim = schur(a,sort='lhp')
        # Expected factors were computed externally; compared to 3 decimals.
        assert_array_almost_equal([[0.1134,0.5436,0.8316,0.],
                                   [-0.1134,-0.8245,0.5544,0.],
                                   [-0.8213,0.1308,0.0265,-0.5547],
                                   [-0.5475,0.0872,0.0177,0.8321]],
                                  u,3)
        assert_array_almost_equal([[-1.4142,0.1456,-11.5816,-7.7174],
                                   [0.,-0.5000,9.4472,-0.7184],
                                   [0.,0.,1.4142,-0.1456],
                                   [0.,0.,0.,0.5]],
                                  s,3)
        assert_equal(2,sdim)
        s,u,sdim = schur(a,sort='rhp')
        assert_array_almost_equal([[0.4862,-0.4930,0.1434,-0.7071],
                                   [-0.4862,0.4930,-0.1434,-0.7071],
                                   [0.6042,0.3944,-0.6924,0.],
                                   [0.4028,0.5986,0.6924,0.]],
                                  u,3)
        assert_array_almost_equal([[1.4142,-0.9270,4.5368,-14.4130],
                                   [0.,0.5,6.5809,-3.1870],
                                   [0.,0.,-1.4142,0.9270],
                                   [0.,0.,0.,-0.5]],
                                  s,3)
        assert_equal(2,sdim)
        s,u,sdim = schur(a,sort='iuc')
        assert_array_almost_equal([[0.5547,0.,-0.5721,-0.6042],
                                   [-0.8321,0.,-0.3814,-0.4028],
                                   [0.,0.7071,-0.5134,0.4862],
                                   [0.,0.7071,0.5134,-0.4862]],
                                  u,3)
        assert_array_almost_equal([[-0.5000,0.0000,-6.5809,-4.0974],
                                   [0.,0.5000,-3.3191,-14.4130],
                                   [0.,0.,1.4142,2.1573],
                                   [0.,0.,0.,-1.4142]],
                                  s,3)
        assert_equal(2,sdim)
        s,u,sdim = schur(a,sort='ouc')
        assert_array_almost_equal([[0.4862,-0.5134,0.7071,0.],
                                   [-0.4862,0.5134,0.7071,0.],
                                   [0.6042,0.5721,0.,-0.5547],
                                   [0.4028,0.3814,0.,0.8321]],
                                  u,3)
        assert_array_almost_equal([[1.4142,-2.1573,14.4130,4.0974],
                                   [0.,-1.4142,3.3191,6.5809],
                                   [0.,0.,-0.5000,0.],
                                   [0.,0.,0.,0.5000]],
                                  s,3)
        assert_equal(2,sdim)
        # A callable predicate selecting the right half-plane must behave
        # like sort='rhp'.
        rhp_function = lambda x: x >= 0.0
        s,u,sdim = schur(a,sort=rhp_function)
        assert_array_almost_equal([[0.4862,-0.4930,0.1434,-0.7071],
                                   [-0.4862,0.4930,-0.1434,-0.7071],
                                   [0.6042,0.3944,-0.6924,0.],
                                   [0.4028,0.5986,0.6924,0.]],
                                  u,3)
        assert_array_almost_equal([[1.4142,-0.9270,4.5368,-14.4130],
                                   [0.,0.5,6.5809,-3.1870],
                                   [0.,0.,-1.4142,0.9270],
                                   [0.,0.,0.,-0.5]],
                                  s,3)
        assert_equal(2,sdim)
    def test_sort_errors(self):
        """Invalid sort arguments raise ValueError."""
        a = [[4.,3.,1.,-1.],[-4.5,-3.5,-1.,1.],[9.,6.,-4.,4.5],[6.,4.,-3.,3.5]]
        assert_raises(ValueError, schur, a, sort='unsupported')
        assert_raises(ValueError, schur, a, sort=1)
    def test_check_finite(self):
        """check_finite=False still yields a valid decomposition."""
        a = [[8,12,3],[2,9,3],[10,3,6]]
        t,z = schur(a, check_finite=False)
        assert_array_almost_equal(dot(dot(z,t),transp(conj(z))),a)
class TestHessenberg(TestCase):
    """Tests for Hessenberg reduction (hessenberg)."""
    def test_simple(self):
        """3x3 real matrix: Q'AQ == H and H matches a reference value."""
        a = [[-149, -50,-154],
             [537, 180, 546],
             [-27, -9, -25]]
        h1 = [[-149.0000,42.2037,-156.3165],
              [-537.6783,152.5511,-554.9272],
              [0,0.0728, 2.4489]]
        h,q = hessenberg(a,calc_q=1)
        assert_array_almost_equal(dot(transp(q),dot(a,q)),h)
        assert_array_almost_equal(h,h1,decimal=4)
    def test_simple_complex(self):
        """Complex input: conjugate-transpose similarity recovers H."""
        a = [[-149, -50,-154],
             [537, 180j, 546],
             [-27j, -9, -25]]
        h,q = hessenberg(a,calc_q=1)
        h1 = dot(transp(conj(q)),dot(a,q))
        assert_array_almost_equal(h1,h)
    def test_simple2(self):
        """Larger (7x7) structured real matrix."""
        a = [[1,2,3,4,5,6,7],
             [0,2,3,4,6,7,2],
             [0,2,2,3,0,3,2],
             [0,0,2,8,0,0,2],
             [0,3,1,2,0,1,2],
             [0,1,2,3,0,1,0],
             [0,0,0,0,0,1,2]]
        h,q = hessenberg(a,calc_q=1)
        assert_array_almost_equal(dot(transp(q),dot(a,q)),h)
    def test_simple3(self):
        """Identity with one lower-corner entry perturbed."""
        a = np.eye(3)
        a[-1, 0] = 2
        h, q = hessenberg(a, calc_q=1)
        assert_array_almost_equal(dot(transp(q), dot(a, q)), h)
    def test_random(self):
        """Random real matrices."""
        n = 20
        for k in range(2):
            a = random([n,n])
            h,q = hessenberg(a,calc_q=1)
            assert_array_almost_equal(dot(transp(q),dot(a,q)),h)
    def test_random_complex(self):
        """Random complex matrices."""
        n = 20
        for k in range(2):
            a = random([n,n])+1j*random([n,n])
            h,q = hessenberg(a,calc_q=1)
            h1 = dot(transp(conj(q)),dot(a,q))
            assert_array_almost_equal(h1,h)
    def test_check_finite(self):
        """check_finite=False still yields a valid reduction."""
        a = [[-149, -50,-154],
             [537, 180, 546],
             [-27, -9, -25]]
        h1 = [[-149.0000,42.2037,-156.3165],
              [-537.6783,152.5511,-554.9272],
              [0,0.0728, 2.4489]]
        h,q = hessenberg(a,calc_q=1, check_finite=False)
        assert_array_almost_equal(dot(transp(q),dot(a,q)),h)
        assert_array_almost_equal(h,h1,decimal=4)
    def test_2x2(self):
        """2x2 input is already Hessenberg: Q is identity, H equals A."""
        a = [[2, 1], [7, 12]]
        h, q = hessenberg(a, calc_q=1)
        assert_array_almost_equal(q, np.eye(2))
        assert_array_almost_equal(h, a)
        b = [[2-7j, 1+2j], [7+3j, 12-2j]]
        h2, q2 = hessenberg(b, calc_q=1)
        assert_array_almost_equal(q2, np.eye(2))
        assert_array_almost_equal(h2, b)
class TestQZ(TestCase):
    """Tests for the generalized Schur (QZ) decomposition."""
    def setUp(self):
        # Fixed seed so the random test matrices are reproducible.
        seed(12345)
    def test_qz_single(self):
        """Single-precision real pair: reconstruction and orthogonality."""
        n = 5
        A = random([n,n]).astype(float32)
        B = random([n,n]).astype(float32)
        AA,BB,Q,Z = qz(A,B)
        assert_array_almost_equal(dot(dot(Q,AA),Z.T), A)
        assert_array_almost_equal(dot(dot(Q,BB),Z.T), B)
        assert_array_almost_equal(dot(Q,Q.T), eye(n))
        assert_array_almost_equal(dot(Z,Z.T), eye(n))
        assert_(all(diag(BB) >= 0))
    def test_qz_double(self):
        """Double-precision real pair."""
        n = 5
        A = random([n,n])
        B = random([n,n])
        AA,BB,Q,Z = qz(A,B)
        assert_array_almost_equal(dot(dot(Q,AA),Z.T), A)
        assert_array_almost_equal(dot(dot(Q,BB),Z.T), B)
        assert_array_almost_equal(dot(Q,Q.T), eye(n))
        assert_array_almost_equal(dot(Z,Z.T), eye(n))
        assert_(all(diag(BB) >= 0))
    def test_qz_complex(self):
        """Complex pair: unitary Q/Z and nonnegative real diag(BB)."""
        n = 5
        A = random([n,n]) + 1j*random([n,n])
        B = random([n,n]) + 1j*random([n,n])
        AA,BB,Q,Z = qz(A,B)
        assert_array_almost_equal(dot(dot(Q,AA),Z.conjugate().T), A)
        assert_array_almost_equal(dot(dot(Q,BB),Z.conjugate().T), B)
        assert_array_almost_equal(dot(Q,Q.conjugate().T), eye(n))
        assert_array_almost_equal(dot(Z,Z.conjugate().T), eye(n))
        assert_(all(diag(BB) >= 0))
        assert_(all(diag(BB).imag == 0))
    def test_qz_complex64(self):
        """Single-precision complex pair (looser 5-decimal tolerance)."""
        n = 5
        A = (random([n,n]) + 1j*random([n,n])).astype(complex64)
        B = (random([n,n]) + 1j*random([n,n])).astype(complex64)
        AA,BB,Q,Z = qz(A,B)
        assert_array_almost_equal(dot(dot(Q,AA),Z.conjugate().T), A, decimal=5)
        assert_array_almost_equal(dot(dot(Q,BB),Z.conjugate().T), B, decimal=5)
        assert_array_almost_equal(dot(Q,Q.conjugate().T), eye(n), decimal=5)
        assert_array_almost_equal(dot(Z,Z.conjugate().T), eye(n), decimal=5)
        assert_(all(diag(BB) >= 0))
        assert_(all(diag(BB).imag == 0))
    def test_qz_double_complex(self):
        """Real input with output='complex' reconstructs the real pair."""
        n = 5
        A = random([n,n])
        B = random([n,n])
        AA,BB,Q,Z = qz(A,B, output='complex')
        aa = dot(dot(Q,AA),Z.conjugate().T)
        assert_array_almost_equal(aa.real, A)
        assert_array_almost_equal(aa.imag, 0)
        bb = dot(dot(Q,BB),Z.conjugate().T)
        assert_array_almost_equal(bb.real, B)
        assert_array_almost_equal(bb.imag, 0)
        assert_array_almost_equal(dot(Q,Q.conjugate().T), eye(n))
        assert_array_almost_equal(dot(Z,Z.conjugate().T), eye(n))
        assert_(all(diag(BB) >= 0))
    def test_qz_double_sort(self):
        """sort= is currently unsupported and must raise ValueError."""
        # from http://www.nag.com/lapack-ex/node119.html
        # NOTE: These matrices may be ill-conditioned and lead to a
        # seg fault on certain python versions when compiled with
        # sse2 or sse3 older ATLAS/LAPACK binaries for windows
        # A = np.array([[3.9, 12.5, -34.5, -0.5],
        #               [ 4.3, 21.5, -47.5, 7.5],
        #               [ 4.3, 21.5, -43.5, 3.5],
        #               [ 4.4, 26.0, -46.0, 6.0 ]])
        # B = np.array([[ 1.0, 2.0, -3.0, 1.0],
        #               [1.0, 3.0, -5.0, 4.0],
        #               [1.0, 3.0, -4.0, 3.0],
        #               [1.0, 3.0, -4.0, 4.0]])
        A = np.array([[3.9, 12.5, -34.5, 2.5],
                      [4.3, 21.5, -47.5, 7.5],
                      [4.3, 1.5, -43.5, 3.5],
                      [4.4, 6.0, -46.0, 6.0]])
        B = np.array([[1.0, 1.0, -3.0, 1.0],
                      [1.0, 3.0, -5.0, 4.4],
                      [1.0, 2.0, -4.0, 1.0],
                      [1.2, 3.0, -4.0, 4.0]])
        sort = lambda ar,ai,beta: ai == 0
        assert_raises(ValueError, qz, A, B, sort=sort)
        # The block below is the original sorted-QZ test, disabled until
        # sorting support is restored.
        if False:
            AA,BB,Q,Z,sdim = qz(A,B,sort=sort)
            # assert_(sdim == 2)
            assert_(sdim == 4)
            assert_array_almost_equal(dot(dot(Q,AA),Z.T), A)
            assert_array_almost_equal(dot(dot(Q,BB),Z.T), B)
            # test absolute values bc the sign is ambiguous and might be platform
            # dependent
            assert_array_almost_equal(np.abs(AA), np.abs(np.array(
                [[35.7864, -80.9061, -12.0629, -9.498],
                 [0., 2.7638, -2.3505, 7.3256],
                 [0., 0., 0.6258, -0.0398],
                 [0., 0., 0., -12.8217]])), 4)
            assert_array_almost_equal(np.abs(BB), np.abs(np.array(
                [[4.5324, -8.7878, 3.2357, -3.5526],
                 [0., 1.4314, -2.1894, 0.9709],
                 [0., 0., 1.3126, -0.3468],
                 [0., 0., 0., 0.559]])), 4)
            assert_array_almost_equal(np.abs(Q), np.abs(np.array(
                [[-0.4193, -0.605, -0.1894, -0.6498],
                 [-0.5495, 0.6987, 0.2654, -0.3734],
                 [-0.4973, -0.3682, 0.6194, 0.4832],
                 [-0.5243, 0.1008, -0.7142, 0.4526]])), 4)
            assert_array_almost_equal(np.abs(Z), np.abs(np.array(
                [[-0.9471, -0.2971, -0.1217, 0.0055],
                 [-0.0367, 0.1209, 0.0358, 0.9913],
                 [0.3171, -0.9041, -0.2547, 0.1312],
                 [0.0346, 0.2824, -0.9587, 0.0014]])), 4)
        # test absolute values bc the sign is ambiguous and might be platform
        # dependent
        # assert_array_almost_equal(abs(AA), abs(np.array([
        #                 [3.8009, -69.4505, 50.3135, -43.2884],
        #                 [0.0000, 9.2033, -0.2001, 5.9881],
        #                 [0.0000, 0.0000, 1.4279, 4.4453],
        #                 [0.0000, 0.0000, 0.9019, -1.1962]])), 4)
        # assert_array_almost_equal(abs(BB), abs(np.array([
        #                 [1.9005, -10.2285, 0.8658, -5.2134],
        #                 [0.0000, 2.3008, 0.7915, 0.4262],
        #                 [0.0000, 0.0000, 0.8101, 0.0000],
        #                 [0.0000, 0.0000, 0.0000, -0.2823]])), 4)
        # assert_array_almost_equal(abs(Q), abs(np.array([
        #                 [0.4642, 0.7886, 0.2915, -0.2786],
        #                 [0.5002, -0.5986, 0.5638, -0.2713],
        #                 [0.5002, 0.0154, -0.0107, 0.8657],
        #                 [0.5331, -0.1395, -0.7727, -0.3151]])), 4)
        # assert_array_almost_equal(dot(Q,Q.T), eye(4))
        # assert_array_almost_equal(abs(Z), abs(np.array([
        #                 [0.9961, -0.0014, 0.0887, -0.0026],
        #                 [0.0057, -0.0404, -0.0938, -0.9948],
        #                 [0.0626, 0.7194, -0.6908, 0.0363],
        #                 [0.0626, -0.6934, -0.7114, 0.0956]])), 4)
        # assert_array_almost_equal(dot(Z,Z.T), eye(4))
    # def test_qz_complex_sort(self):
    #     cA = np.array([
    #       [-21.10+22.50*1j, 53.50+-50.50*1j, -34.50+127.50*1j, 7.50+ 0.50*1j],
    #       [-0.46+ -7.78*1j, -3.50+-37.50*1j, -15.50+ 58.50*1j,-10.50+ -1.50*1j],
    #       [ 4.30+ -5.50*1j, 39.70+-17.10*1j, -68.50+ 12.50*1j, -7.50+ -3.50*1j],
    #       [ 5.50+ 4.40*1j, 14.40+ 43.30*1j, -32.50+-46.00*1j,-19.00+-32.50*1j]])
    #     cB = np.array([
    #       [1.00+ -5.00*1j, 1.60+ 1.20*1j,-3.00+ 0.00*1j, 0.00+ -1.00*1j],
    #       [0.80+ -0.60*1j, 3.00+ -5.00*1j,-4.00+ 3.00*1j,-2.40+ -3.20*1j],
    #       [1.00+ 0.00*1j, 2.40+ 1.80*1j,-4.00+ -5.00*1j, 0.00+ -3.00*1j],
    #       [0.00+ 1.00*1j,-1.80+ 2.40*1j, 0.00+ -4.00*1j, 4.00+ -5.00*1j]])
    #     AAS,BBS,QS,ZS,sdim = qz(cA,cB,sort='lhp')
    #     eigenvalues = diag(AAS)/diag(BBS)
    #     assert_(all(np.real(eigenvalues[:sdim] < 0)))
    #     assert_(all(np.real(eigenvalues[sdim:] > 0)))
    def test_check_finite(self):
        """check_finite=False still yields a valid decomposition."""
        n = 5
        A = random([n,n])
        B = random([n,n])
        AA,BB,Q,Z = qz(A,B,check_finite=False)
        assert_array_almost_equal(dot(dot(Q,AA),Z.T), A)
        assert_array_almost_equal(dot(dot(Q,BB),Z.T), B)
        assert_array_almost_equal(dot(Q,Q.T), eye(n))
        assert_array_almost_equal(dot(Z,Z.T), eye(n))
        assert_(all(diag(BB) >= 0))
class TestDatacopied(TestCase):
    """Tests for the private helper scipy.linalg.decomp._datacopied."""
    def test_datacopied(self):
        """_datacopied(arr, original) is True only when asarray() copied."""
        from scipy.linalg.decomp import _datacopied
        M = matrix([[0,1],[2,3]])
        A = asarray(M)
        L = M.tolist()
        M2 = M.copy()
        # Object exposing __array__ — asarray() reuses the returned array.
        class Fake1:
            def __array__(self):
                return A
        # Object exposing __array_interface__ — shares A's buffer.
        class Fake2:
            __array_interface__ = A.__array_interface__
        F1 = Fake1()
        F2 = Fake2()
        # Only the plain Python list forces asarray() to allocate a copy.
        for item, status in [(M, False), (A, False), (L, True),
                             (M2, False), (F1, False), (F2, False)]:
            arr = asarray(item)
            assert_equal(_datacopied(arr, item), status,
                         err_msg=repr(item))
def test_aligned_mem_float():
    """Check linalg works with non-aligned memory"""
    # Allocate 402 bytes of memory (allocated on boundary)
    a = arange(402, dtype=np.uint8)
    # Create a float32 view with boundary offset 2 (misaligned for a
    # 4-byte dtype).  NOTE(review): the original comment said "offset 4",
    # but the code uses offset=2.
    z = np.frombuffer(a.data, offset=2, count=100, dtype=float32)
    z.shape = 10, 10
    eig(z, overwrite_a=True)
    eig(z.T, overwrite_a=True)
def test_aligned_mem():
    """Check linalg works with non-aligned memory"""
    # 804 bytes: a 4-byte offset plus 100 8-byte float64 values.
    raw = arange(804, dtype=np.uint8)
    # Reinterpret at byte offset 4, so the doubles are misaligned by 4.
    view = np.frombuffer(raw.data, offset=4, count=100, dtype=float)
    view.shape = 10, 10
    eig(view, overwrite_a=True)
    eig(view.T, overwrite_a=True)
def test_aligned_mem_complex():
    """Check that complex objects don't need to be completely aligned"""
    # 1608 bytes: an 8-byte offset plus 100 16-byte complex128 values.
    raw = zeros(1608, dtype=np.uint8)
    # Reinterpret at byte offset 8, so the values are misaligned by 8.
    view = np.frombuffer(raw.data, offset=8, count=100, dtype=complex)
    view.shape = 10, 10
    eig(view, overwrite_a=True)
    # This does not need special handling
    eig(view.T, overwrite_a=True)
def check_lapack_misaligned(func, args, kwargs):
    """Call ``func`` once per ndarray argument, with that argument replaced
    by a copy deliberately misaligned by 4 bytes; for 2-D arguments the
    transposed misaligned copy is exercised as well."""
    args = list(args)
    for i in range(len(args)):
        a = args[:]
        if isinstance(a[i],np.ndarray):
            # Try misaligning a[i]
            aa = np.zeros(a[i].size*a[i].dtype.itemsize+8, dtype=np.uint8)
            aa = np.frombuffer(aa.data, offset=4, count=a[i].size, dtype=a[i].dtype)
            aa.shape = a[i].shape
            aa[...] = a[i]
            a[i] = aa
            # The call itself is the test: it must not crash.
            func(*a,**kwargs)
            if len(a[i].shape) > 1:
                a[i] = a[i].T
                func(*a,**kwargs)
@dec.knownfailureif(True, "Ticket #1152, triggers a segfault in rare cases.")
def test_lapack_misaligned():
    # Nose-style generator test: yields one misalignment check per routine.
    M = np.eye(10,dtype=float)
    R = np.arange(100)
    R.shape = 10,10
    S = np.arange(20000,dtype=np.uint8)
    # S is a deliberately misaligned float64 matrix (byte offset 4).
    S = np.frombuffer(S.data, offset=4, count=100, dtype=float)
    S.shape = 10, 10
    b = np.ones(10)
    LU, piv = lu_factor(S)
    for (func, args, kwargs) in [
            (eig,(S,),dict(overwrite_a=True)), # crash
            (eigvals,(S,),dict(overwrite_a=True)), # no crash
            (lu,(S,),dict(overwrite_a=True)), # no crash
            (lu_factor,(S,),dict(overwrite_a=True)), # no crash
            (lu_solve,((LU,piv),b),dict(overwrite_b=True)),
            (solve,(S,b),dict(overwrite_a=True,overwrite_b=True)),
            (svd,(M,),dict(overwrite_a=True)), # no crash
            (svd,(R,),dict(overwrite_a=True)), # no crash
            (svd,(S,),dict(overwrite_a=True)), # crash
            (svdvals,(S,),dict()), # no crash
            (svdvals,(S,),dict(overwrite_a=True)), # crash
            (cholesky,(M,),dict(overwrite_a=True)), # no crash
            (qr,(S,),dict(overwrite_a=True)), # crash
            (rq,(S,),dict(overwrite_a=True)), # crash
            (hessenberg,(S,),dict(overwrite_a=True)), # crash
            (schur,(S,),dict(overwrite_a=True)), # crash
            ]:
        yield check_lapack_misaligned, func, args, kwargs
# not properly tested
# cholesky, rsf2csf, lu_solve, solve, eig_banded, eigvals_banded, eigh, diagsvd
class TestOverwrite(object):
    """Verify decomposition routines do not clobber their inputs unless
    overwriting is explicitly requested (via assert_no_overwrite)."""
    def test_eig(self):
        assert_no_overwrite(eig, [(3, 3)])
        assert_no_overwrite(eig, [(3, 3), (3, 3)])
    def test_eigh(self):
        assert_no_overwrite(eigh, [(3, 3)])
        assert_no_overwrite(eigh, [(3, 3), (3, 3)])
    def test_eig_banded(self):
        assert_no_overwrite(eig_banded, [(3, 2)])
    def test_eigvals(self):
        assert_no_overwrite(eigvals, [(3, 3)])
    def test_eigvalsh(self):
        assert_no_overwrite(eigvalsh, [(3, 3)])
    def test_eigvals_banded(self):
        assert_no_overwrite(eigvals_banded, [(3, 2)])
    def test_hessenberg(self):
        assert_no_overwrite(hessenberg, [(3, 3)])
    def test_lu_factor(self):
        assert_no_overwrite(lu_factor, [(3, 3)])
    def test_lu_solve(self):
        coeff = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 8]])
        factorization = lu_factor(coeff)
        def solve_with_factorization(rhs):
            return lu_solve(factorization, rhs)
        assert_no_overwrite(solve_with_factorization, [(3,)])
    def test_lu(self):
        assert_no_overwrite(lu, [(3, 3)])
    def test_qr(self):
        assert_no_overwrite(qr, [(3, 3)])
    def test_rq(self):
        assert_no_overwrite(rq, [(3, 3)])
    def test_schur(self):
        assert_no_overwrite(schur, [(3, 3)])
    def test_schur_complex(self):
        def complex_schur(mat):
            return schur(mat, 'complex')
        assert_no_overwrite(complex_schur, [(3, 3)],
                            dtypes=[np.float32, np.float64])
    def test_svd(self):
        assert_no_overwrite(svd, [(3, 3)])
    def test_svdvals(self):
        assert_no_overwrite(svdvals, [(3, 3)])
def _check_orth(n):
X = np.ones((n, 2), dtype=float)
Y = orth(X)
assert_equal(Y.shape, (n, 1))
assert_allclose(Y, Y.mean(), atol=1e-10)
Y = orth(X.T)
assert_equal(Y.shape, (2, 1))
assert_allclose(Y, Y.mean())
@dec.slow
def test_orth_memory_efficiency():
    # Pick n so that 16*n bytes is reasonable but 8*n*n bytes is unreasonable.
    # Keep in mind that @dec.slow tests are likely to be running
    # under configurations that support 4Gb+ memory for tests related to
    # 32 bit overflow.
    n = 10*1000*1000
    try:
        _check_orth(n)
    except MemoryError:
        # A MemoryError here means orth() materialized an O(n^2) array.
        raise AssertionError('memory error perhaps caused by orth regression')
def test_orth():
    """Exercise orth() across a range of matrix heights."""
    for rows in (1, 2, 3, 10, 100):
        _check_orth(rows)
# Allow running this test module directly as a script.
if __name__ == "__main__":
    run_module_suite()
|
classmember/proof_of_concept | refs/heads/master | python/events/lib/python3.4/site-packages/pip/_vendor/ipaddress.py | 48 | # Copyright 2007 Google Inc.
# Licensed to PSF under a Contributor Agreement.
"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
This library is used to create/poke/manipulate IPv4 and IPv6 addresses
and networks.
"""
from __future__ import unicode_literals
import itertools
import struct
__version__ = '1.0.22'
# Compatibility functions
# Integer types: include Python 2's ``long`` when it exists.
_compat_int_types = (int,)
try:
    _compat_int_types = (int, long)
except NameError:
    pass
# Text type: ``unicode`` on Python 2, ``str`` on Python 3.
try:
    _compat_str = unicode
except NameError:
    _compat_str = str
# NOTE(review): asserts Python 3 bytes/str semantics — verify against the
# upstream vendored copy if Python 2 support is expected here.
assert bytes != str
if b'\0'[0] == 0:  # Python 3 semantics
    # Indexing bytes already yields ints on Python 3.
    def _compat_bytes_to_byte_vals(byt):
        return byt
else:
    # On Python 2, indexing bytes yields 1-char strings; unpack each to int.
    def _compat_bytes_to_byte_vals(byt):
        return [struct.unpack(b'!B', b)[0] for b in byt]
# ``int.from_bytes`` exists on Python 3; otherwise accumulate big-endian
# byte values manually.
try:
    _compat_int_from_byte_vals = int.from_bytes
except AttributeError:
    def _compat_int_from_byte_vals(bytvals, endianess):
        # Only big-endian order is ever requested by this module.
        assert endianess == 'big'
        res = 0
        for bv in bytvals:
            assert isinstance(bv, _compat_int_types)
            res = (res << 8) + bv
        return res
def _compat_to_bytes(intval, length, endianess):
    """Serialize *intval* as *length* big-endian bytes (length 4 or 16).

    Raises struct.error when the value does not fit the requested width.
    """
    assert isinstance(intval, _compat_int_types)
    assert endianess == 'big'
    if length == 4:
        if not 0 <= intval < 2 ** 32:
            raise struct.error("integer out of range for 'I' format code")
        return struct.pack(b'!I', intval)
    if length == 16:
        if not 0 <= intval < 2 ** 128:
            raise struct.error("integer out of range for 'QQ' format code")
        # Split into two 64-bit halves for the '!QQ' format.
        return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff)
    raise NotImplementedError()
if hasattr(int, 'bit_length'):
    # Not int.bit_length , since that won't work in 2.7 where long exists
    def _compat_bit_length(i):
        return i.bit_length()
else:
    def _compat_bit_length(i):
        # Smallest shift that reduces the value to zero == bit length.
        for res in itertools.count():
            if i >> res == 0:
                return res
def _compat_range(start, end, step=1):
assert step > 0
i = start
while i < end:
yield i
i += step
class _TotalOrderingMixin(object):
    __slots__ = ()
    # Helper that derives the other comparison operations from
    # __lt__ and __eq__
    # We avoid functools.total_ordering because it doesn't handle
    # NotImplemented correctly yet (http://bugs.python.org/issue10042)
    def __eq__(self, other):
        # Subclasses must provide equality.
        raise NotImplementedError
    def __ne__(self, other):
        equal = self.__eq__(other)
        if equal is NotImplemented:
            return NotImplemented
        return not equal
    def __lt__(self, other):
        # Subclasses must provide strict ordering.
        raise NotImplementedError
    def __le__(self, other):
        less = self.__lt__(other)
        if less is NotImplemented or not less:
            # Not strictly less: <= holds iff the operands are equal.
            return self.__eq__(other)
        return less
    def __gt__(self, other):
        less = self.__lt__(other)
        if less is NotImplemented:
            return NotImplemented
        equal = self.__eq__(other)
        if equal is NotImplemented:
            return NotImplemented
        return not (less or equal)
    def __ge__(self, other):
        less = self.__lt__(other)
        if less is NotImplemented:
            return NotImplemented
        return not less
# Bit widths of the two address families.
IPV4LENGTH = 32
IPV6LENGTH = 128
class AddressValueError(ValueError):
    """A Value Error related to the address."""
class NetmaskValueError(ValueError):
    """A Value Error related to the netmask."""
def ip_address(address):
    """Build an IPv4Address or IPv6Address from *address*.

    Args:
        address: A string or integer IP address.  Integers less than
            2**32 are considered IPv4 by default.

    Returns:
        An IPv4Address or IPv6Address object.

    Raises:
        ValueError: if *address* is neither a valid v4 nor v6 address.
    """
    # Try each family in turn; the first constructor that accepts the
    # input wins.
    for version_class in (IPv4Address, IPv6Address):
        try:
            return version_class(address)
        except (AddressValueError, NetmaskValueError):
            pass
    if isinstance(address, bytes):
        raise AddressValueError(
            '%r does not appear to be an IPv4 or IPv6 address. '
            'Did you pass in a bytes (str in Python 2) instead of'
            ' a unicode object?' % address)
    raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
                     address)
def ip_network(address, strict=True):
    """Build an IPv4Network or IPv6Network from *address*.

    Args:
        address: A string or integer IP network.  Integers less than
            2**32 are considered IPv4 by default.
        strict: If True, reject networks with host bits set.

    Returns:
        An IPv4Network or IPv6Network object.

    Raises:
        ValueError: if *address* is neither a valid v4 nor v6 network,
            or the network has host bits set.
    """
    # Try each family in turn; the first constructor that accepts the
    # input wins.
    for version_class in (IPv4Network, IPv6Network):
        try:
            return version_class(address, strict)
        except (AddressValueError, NetmaskValueError):
            pass
    if isinstance(address, bytes):
        raise AddressValueError(
            '%r does not appear to be an IPv4 or IPv6 network. '
            'Did you pass in a bytes (str in Python 2) instead of'
            ' a unicode object?' % address)
    raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
                     address)
def ip_interface(address):
    """Build an IPv4Interface or IPv6Interface from *address*.

    An interface couples an address with the network it lives on, so it
    combines the Address and Network behaviors.

    Args:
        address: A string or integer IP address.  Integers less than
            2**32 are considered IPv4 by default.

    Returns:
        An IPv4Interface or IPv6Interface object.

    Raises:
        ValueError: if *address* is neither a valid v4 nor v6 address.
    """
    for version_class in (IPv4Interface, IPv6Interface):
        try:
            return version_class(address)
        except (AddressValueError, NetmaskValueError):
            pass
    raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
                     address)
def v4_int_to_packed(address):
    """Pack an integer IPv4 address into 4 network-order (big-endian) bytes.

    Args:
        address: An integer representation of an IPv4 IP address.

    Returns:
        The address packed as 4 bytes in network (big-endian) order.

    Raises:
        ValueError: If the integer is negative or exceeds 32 bits.
    """
    try:
        packed = _compat_to_bytes(address, 4, 'big')
    except (struct.error, OverflowError):
        raise ValueError("Address negative or too large for IPv4")
    return packed
def v6_int_to_packed(address):
    """Pack an integer IPv6 address into 16 network-order (big-endian) bytes.

    Args:
        address: An integer representation of an IPv6 IP address.

    Returns:
        The address packed as 16 bytes in network (big-endian) order.

    Raises:
        ValueError: If the integer is negative or exceeds 128 bits.
    """
    try:
        packed = _compat_to_bytes(address, 16, 'big')
    except (struct.error, OverflowError):
        raise ValueError("Address negative or too large for IPv6")
    return packed
def _split_optional_netmask(address):
    """Split "addr[/mask]" on '/', rejecting more than one separator."""
    pieces = _compat_str(address).split('/')
    if len(pieces) > 2:
        raise AddressValueError("Only one '/' permitted in %r" % address)
    return pieces
def _find_address_range(addresses):
"""Find a sequence of sorted deduplicated IPv#Address.
Args:
addresses: a list of IPv#Address objects.
Yields:
A tuple containing the first and last IP addresses in the sequence.
"""
it = iter(addresses)
first = last = next(it)
for ip in it:
if ip._ip != last._ip + 1:
yield first, last
first = ip
last = ip
yield first, last
def _count_righthand_zero_bits(number, bits):
    """Return the number of trailing zero bits of *number*, capped at *bits*.

    Args:
        number: an integer.
        bits: maximum number of bits to count.

    Returns:
        The count of zero bits on the right hand side of the number.
    """
    if number == 0:
        # By convention every bit of zero counts as a trailing zero.
        return bits
    trailing = _compat_bit_length(~number & (number - 1))
    return min(bits, trailing)
def summarize_address_range(first, last):
    """Summarize a network range given the first and last IP addresses.
    Example:
        >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
        ... IPv4Address('192.0.2.130')))
        ... #doctest: +NORMALIZE_WHITESPACE
        [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
        IPv4Network('192.0.2.130/32')]
    Args:
        first: the first IPv4Address or IPv6Address in the range.
        last: the last IPv4Address or IPv6Address in the range.
    Returns:
        An iterator of the summarized IPv(4|6) network objects.
    Raise:
        TypeError:
            If the first and last objects are not IP addresses.
            If the first and last objects are not the same version.
        ValueError:
            If the last object is not greater than the first.
            If the version of the first address is not 4 or 6.
    """
    if (not (isinstance(first, _BaseAddress) and
             isinstance(last, _BaseAddress))):
        raise TypeError('first and last must be IP addresses, not networks')
    if first.version != last.version:
        raise TypeError("%s and %s are not of the same version" % (
            first, last))
    if first > last:
        raise ValueError('last IP address must be greater than first')
    if first.version == 4:
        ip = IPv4Network
    elif first.version == 6:
        ip = IPv6Network
    else:
        raise ValueError('unknown IP version')
    ip_bits = first._max_prefixlen
    first_int = first._ip
    last_int = last._ip
    while first_int <= last_int:
        # Largest block allowed here: limited both by the alignment of
        # first_int and by the size of the remaining span.
        nbits = min(_count_righthand_zero_bits(first_int, ip_bits),
                    _compat_bit_length(last_int - first_int + 1) - 1)
        net = ip((first_int, ip_bits - nbits))
        yield net
        first_int += 1 << nbits
        # Stop before overflowing past the all-ones address.
        if first_int - 1 == ip._ALL_ONES:
            break
def _collapse_addresses_internal(addresses):
    """Loops through the addresses, collapsing concurrent netblocks.
    Example:
        ip1 = IPv4Network('192.0.2.0/26')
        ip2 = IPv4Network('192.0.2.64/26')
        ip3 = IPv4Network('192.0.2.128/26')
        ip4 = IPv4Network('192.0.2.192/26')
        _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
        [IPv4Network('192.0.2.0/24')]
    This shouldn't be called directly; it is called via
    collapse_addresses([]).
    Args:
        addresses: A list of IPv4Network's or IPv6Network's
    Returns:
        A list of IPv4Network's or IPv6Network's depending on what we were
        passed.
    """
    # First merge
    to_merge = list(addresses)
    subnets = {}
    while to_merge:
        net = to_merge.pop()
        supernet = net.supernet()
        existing = subnets.get(supernet)
        if existing is None:
            # First sibling seen under this supernet.
            subnets[supernet] = net
        elif existing != net:
            # Merge consecutive subnets
            del subnets[supernet]
            to_merge.append(supernet)
    # Then iterate over resulting networks, skipping subsumed subnets
    last = None
    for net in sorted(subnets.values()):
        if last is not None:
            # Since they are sorted,
            # last.network_address <= net.network_address is a given.
            if last.broadcast_address >= net.broadcast_address:
                continue
        yield net
        last = net
def collapse_addresses(addresses):
    """Collapse a list of IP objects.
    Example:
        collapse_addresses([IPv4Network('192.0.2.0/25'),
                            IPv4Network('192.0.2.128/25')]) ->
        [IPv4Network('192.0.2.0/24')]
    Args:
        addresses: An iterator of IPv4Network or IPv6Network objects.
    Returns:
        An iterator of the collapsed IPv(4|6)Network objects.
    Raises:
        TypeError: If passed a list of mixed version objects.
    """
    addrs = []
    ips = []
    nets = []
    # split IP addresses and networks
    for ip in addresses:
        if isinstance(ip, _BaseAddress):
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, ips[-1]))
            ips.append(ip)
        elif ip._prefixlen == ip._max_prefixlen:
            # A /32 (or /128) network collapses to its single address.
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, ips[-1]))
            try:
                ips.append(ip.ip)
            except AttributeError:
                ips.append(ip.network_address)
        else:
            if nets and nets[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, nets[-1]))
            nets.append(ip)
    # sort and dedup
    ips = sorted(set(ips))
    # find consecutive address ranges in the sorted sequence and summarize them
    if ips:
        for first, last in _find_address_range(ips):
            addrs.extend(summarize_address_range(first, last))
    return _collapse_addresses_internal(addrs + nets)
def get_mixed_type_key(obj):
    """Return a sort key that orders networks and addresses together.

    Address and Network objects are not directly comparable with each
    other; this key function lets sorted() interleave them anyway.

    Args:
        obj: either a Network or Address object.

    Returns:
        An appropriate sort key, or NotImplemented for other types.
    """
    if isinstance(obj, _BaseNetwork):
        return obj._get_networks_key()
    if isinstance(obj, _BaseAddress):
        return obj._get_address_key()
    return NotImplemented
class _IPAddressBase(_TotalOrderingMixin):
    """The mother class."""
    __slots__ = ()
    @property
    def exploded(self):
        """Return the longhand version of the IP address as a string."""
        return self._explode_shorthand_ip_string()
    @property
    def compressed(self):
        """Return the shorthand version of the IP address as a string."""
        return _compat_str(self)
    @property
    def reverse_pointer(self):
        """The name of the reverse DNS pointer for the IP address, e.g.:
            >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
            '1.0.0.127.in-addr.arpa'
            >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
            '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'
        """
        return self._reverse_pointer()
    @property
    def version(self):
        # Abstract: concrete subclasses set a real version number.
        msg = '%200s has no version specified' % (type(self),)
        raise NotImplementedError(msg)
    def _check_int_address(self, address):
        # Validate that an integer fits the family's address width.
        if address < 0:
            msg = "%d (< 0) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._version))
        if address > self._ALL_ONES:
            msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._max_prefixlen,
                                           self._version))
    def _check_packed_address(self, address, expected_len):
        # Validate a packed (bytes) representation's length.
        address_len = len(address)
        if address_len != expected_len:
            msg = (
                '%r (len %d != %d) is not permitted as an IPv%d address. '
                'Did you pass in a bytes (str in Python 2) instead of'
                ' a unicode object?')
            raise AddressValueError(msg % (address, address_len,
                                           expected_len, self._version))
    @classmethod
    def _ip_int_from_prefix(cls, prefixlen):
        """Turn the prefix length into a bitwise netmask
        Args:
            prefixlen: An integer, the prefix length.
        Returns:
            An integer.
        """
        return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)
    @classmethod
    def _prefix_from_ip_int(cls, ip_int):
        """Return prefix length from the bitwise netmask.
        Args:
            ip_int: An integer, the netmask in expanded bitwise format
        Returns:
            An integer, the prefix length.
        Raises:
            ValueError: If the input intermingles zeroes & ones
        """
        trailing_zeroes = _count_righthand_zero_bits(ip_int,
                                                     cls._max_prefixlen)
        prefixlen = cls._max_prefixlen - trailing_zeroes
        # A valid netmask is all-ones followed by all-zeroes.
        leading_ones = ip_int >> trailing_zeroes
        all_ones = (1 << prefixlen) - 1
        if leading_ones != all_ones:
            byteslen = cls._max_prefixlen // 8
            details = _compat_to_bytes(ip_int, byteslen, 'big')
            msg = 'Netmask pattern %r mixes zeroes & ones'
            raise ValueError(msg % details)
        return prefixlen
    @classmethod
    def _report_invalid_netmask(cls, netmask_str):
        msg = '%r is not a valid netmask' % netmask_str
        raise NetmaskValueError(msg)
    @classmethod
    def _prefix_from_prefix_string(cls, prefixlen_str):
        """Return prefix length from a numeric string
        Args:
            prefixlen_str: The string to be converted
        Returns:
            An integer, the prefix length.
        Raises:
            NetmaskValueError: If the input is not a valid netmask
        """
        # int allows a leading +/- as well as surrounding whitespace,
        # so we ensure that isn't the case
        if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
            cls._report_invalid_netmask(prefixlen_str)
        try:
            prefixlen = int(prefixlen_str)
        except ValueError:
            cls._report_invalid_netmask(prefixlen_str)
        if not (0 <= prefixlen <= cls._max_prefixlen):
            cls._report_invalid_netmask(prefixlen_str)
        return prefixlen
    @classmethod
    def _prefix_from_ip_string(cls, ip_str):
        """Turn a netmask/hostmask string into a prefix length
        Args:
            ip_str: The netmask/hostmask to be converted
        Returns:
            An integer, the prefix length.
        Raises:
            NetmaskValueError: If the input is not a valid netmask/hostmask
        """
        # Parse the netmask/hostmask like an IP address.
        try:
            ip_int = cls._ip_int_from_string(ip_str)
        except AddressValueError:
            cls._report_invalid_netmask(ip_str)
        # Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
        # Note that the two ambiguous cases (all-ones and all-zeroes) are
        # treated as netmasks.
        try:
            return cls._prefix_from_ip_int(ip_int)
        except ValueError:
            pass
        # Invert the bits, and try matching a /0+1+/ hostmask instead.
        ip_int ^= cls._ALL_ONES
        try:
            return cls._prefix_from_ip_int(ip_int)
        except ValueError:
            cls._report_invalid_netmask(ip_str)
    def __reduce__(self):
        # Pickle via the string form.
        return self.__class__, (_compat_str(self),)
class _BaseAddress(_IPAddressBase):
    """A generic IP object.
    This IP class contains the version independent methods which are
    used by single IP addresses.
    """
    __slots__ = ()
    def __int__(self):
        return self._ip
    def __eq__(self, other):
        try:
            return (self._ip == other._ip and
                    self._version == other._version)
        except AttributeError:
            # Not an address-like object.
            return NotImplemented
    def __lt__(self, other):
        if not isinstance(other, _IPAddressBase):
            return NotImplemented
        if not isinstance(other, _BaseAddress):
            # Addresses never compare against networks.
            raise TypeError('%s and %s are not of the same type' % (
                self, other))
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                self, other))
        if self._ip != other._ip:
            return self._ip < other._ip
        return False
    # Shorthand for Integer addition and subtraction. This is not
    # meant to ever support addition/subtraction of addresses.
    def __add__(self, other):
        if not isinstance(other, _compat_int_types):
            return NotImplemented
        return self.__class__(int(self) + other)
    def __sub__(self, other):
        if not isinstance(other, _compat_int_types):
            return NotImplemented
        return self.__class__(int(self) - other)
    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
    def __str__(self):
        return _compat_str(self._string_from_ip_int(self._ip))
    def __hash__(self):
        return hash(hex(int(self._ip)))
    def _get_address_key(self):
        # Sort addresses by version first, then by value.
        return (self._version, self)
    def __reduce__(self):
        # Pickle via the integer form.
        return self.__class__, (self._ip,)
class _BaseNetwork(_IPAddressBase):

    """A generic IP network object.

    This IP class contains the version independent methods which are
    used by networks.
    """

    def __init__(self, address):
        # Per-instance cache for derived values (broadcast_address, hostmask).
        self._cache = {}

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, _compat_str(self))

    def __str__(self):
        return '%s/%d' % (self.network_address, self.prefixlen)

    def hosts(self):
        """Generate Iterator over usable hosts in a network.

        This is like __iter__ except it doesn't return the network
        or broadcast addresses.
        """
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        for x in _compat_range(network + 1, broadcast):
            yield self._address_class(x)

    def __iter__(self):
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        for x in _compat_range(network, broadcast + 1):
            yield self._address_class(x)

    def __getitem__(self, n):
        """Return the n-th address in the network (negative n from the end)."""
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        if n >= 0:
            if network + n > broadcast:
                raise IndexError('address out of range')
            return self._address_class(network + n)
        else:
            n += 1
            if broadcast + n < network:
                raise IndexError('address out of range')
            return self._address_class(broadcast + n)

    def __lt__(self, other):
        if not isinstance(other, _IPAddressBase):
            return NotImplemented
        if not isinstance(other, _BaseNetwork):
            raise TypeError('%s and %s are not of the same type' % (
                self, other))
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                self, other))
        if self.network_address != other.network_address:
            return self.network_address < other.network_address
        if self.netmask != other.netmask:
            return self.netmask < other.netmask
        return False

    def __eq__(self, other):
        try:
            return (self._version == other._version and
                    self.network_address == other.network_address and
                    int(self.netmask) == int(other.netmask))
        except AttributeError:
            return NotImplemented

    def __hash__(self):
        return hash(int(self.network_address) ^ int(self.netmask))

    def __contains__(self, other):
        # always false if one is v4 and the other is v6.
        if self._version != other._version:
            return False
        # dealing with another network.
        if isinstance(other, _BaseNetwork):
            return False
        # dealing with another address
        else:
            # address
            return (int(self.network_address) <= int(other._ip) <=
                    int(self.broadcast_address))

    def overlaps(self, other):
        """Tell if self is partly contained in other."""
        return self.network_address in other or (
            self.broadcast_address in other or (
                other.network_address in self or (
                    other.broadcast_address in self)))

    @property
    def broadcast_address(self):
        # Cached: network address with all host bits set.
        x = self._cache.get('broadcast_address')
        if x is None:
            x = self._address_class(int(self.network_address) |
                                    int(self.hostmask))
            self._cache['broadcast_address'] = x
        return x

    @property
    def hostmask(self):
        # Cached: bitwise complement of the netmask.
        x = self._cache.get('hostmask')
        if x is None:
            x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
            self._cache['hostmask'] = x
        return x

    @property
    def with_prefixlen(self):
        return '%s/%d' % (self.network_address, self._prefixlen)

    @property
    def with_netmask(self):
        return '%s/%s' % (self.network_address, self.netmask)

    @property
    def with_hostmask(self):
        return '%s/%s' % (self.network_address, self.hostmask)

    @property
    def num_addresses(self):
        """Number of hosts in the current subnet."""
        return int(self.broadcast_address) - int(self.network_address) + 1

    @property
    def _address_class(self):
        # Returning bare address objects (rather than interfaces) allows for
        # more consistent behaviour across the network address, broadcast
        # address and individual host addresses.
        msg = '%200s has no associated address class' % (type(self),)
        raise NotImplementedError(msg)

    @property
    def prefixlen(self):
        return self._prefixlen

    def address_exclude(self, other):
        """Remove an address from a larger block.

        For example:

            addr1 = ip_network('192.0.2.0/28')
            addr2 = ip_network('192.0.2.1/32')
            list(addr1.address_exclude(addr2)) =
                [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
                 IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]

        or IPv6:

            addr1 = ip_network('2001:db8::1/32')
            addr2 = ip_network('2001:db8::1/128')
            list(addr1.address_exclude(addr2)) =
                [ip_network('2001:db8::1/128'),
                 ip_network('2001:db8::2/127'),
                 ip_network('2001:db8::4/126'),
                 ip_network('2001:db8::8/125'),
                 ...
                 ip_network('2001:db8:8000::/33')]

        Args:
            other: An IPv4Network or IPv6Network object of the same type.

        Returns:
            An iterator of the IPv(4|6)Network objects which is self
            minus other.

        Raises:
            TypeError: If self and other are of differing address
              versions, or if other is not a network object.
            ValueError: If other is not completely contained by self.
        """
        if not self._version == other._version:
            raise TypeError("%s and %s are not of the same version" % (
                self, other))

        if not isinstance(other, _BaseNetwork):
            raise TypeError("%s is not a network object" % other)

        if not other.subnet_of(self):
            raise ValueError('%s not contained in %s' % (other, self))
        if other == self:
            return

        # Make sure we're comparing the network of other.
        other = other.__class__('%s/%s' % (other.network_address,
                                           other.prefixlen))

        # Repeatedly split self in half; yield the half that does not
        # contain other and keep splitting the half that does, until one
        # of the halves *is* other.
        s1, s2 = self.subnets()
        while s1 != other and s2 != other:
            if other.subnet_of(s1):
                yield s2
                s1, s2 = s1.subnets()
            elif other.subnet_of(s2):
                yield s1
                s1, s2 = s2.subnets()
            else:
                # If we got here, there's a bug somewhere.
                raise AssertionError('Error performing exclusion: '
                                     's1: %s s2: %s other: %s' %
                                     (s1, s2, other))
        if s1 == other:
            yield s2
        elif s2 == other:
            yield s1
        else:
            # If we got here, there's a bug somewhere.
            raise AssertionError('Error performing exclusion: '
                                 's1: %s s2: %s other: %s' %
                                 (s1, s2, other))

    def compare_networks(self, other):
        """Compare two IP objects.

        This is only concerned about the comparison of the integer
        representation of the network addresses.  This means that the
        host bits aren't considered at all in this method.  If you want
        to compare host bits, you can easily enough do a
        'HostA._ip < HostB._ip'

        Args:
            other: An IP object.

        Returns:
            If the IP versions of self and other are the same, returns:

            -1 if self < other:
              eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
              IPv6Network('2001:db8::1000/124') <
              IPv6Network('2001:db8::2000/124')
            0 if self == other
              eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
              IPv6Network('2001:db8::1000/124') ==
              IPv6Network('2001:db8::1000/124')
            1 if self > other
              eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
              IPv6Network('2001:db8::2000/124') >
              IPv6Network('2001:db8::1000/124')

        Raises:
            TypeError if the IP versions are different.
        """
        # does this need to raise a ValueError?
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same type' % (
                self, other))
        # self._version == other._version below here:
        if self.network_address < other.network_address:
            return -1
        if self.network_address > other.network_address:
            return 1
        # self.network_address == other.network_address below here:
        if self.netmask < other.netmask:
            return -1
        if self.netmask > other.netmask:
            return 1
        return 0

    def _get_networks_key(self):
        """Network-only key function.

        Returns an object that identifies this address' network and
        netmask. This function is a suitable "key" argument for sorted()
        and list.sort().
        """
        return (self._version, self.network_address, self.netmask)

    def subnets(self, prefixlen_diff=1, new_prefix=None):
        """The subnets which join to make the current subnet.

        In the case that self contains only one IP
        (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
        for IPv6), yield an iterator with just ourself.

        Args:
            prefixlen_diff: An integer, the amount the prefix length
              should be increased by. This should not be set if
              new_prefix is also set.
            new_prefix: The desired new prefix length. This must be a
              larger number (smaller prefix) than the existing prefix.
              This should not be set if prefixlen_diff is also set.

        Returns:
            An iterator of IPv(4|6) objects.

        Raises:
            ValueError: The prefixlen_diff is too small or too large.
                OR
            prefixlen_diff and new_prefix are both set or new_prefix
              is a smaller number than the current prefix (smaller
              number means a larger network)
        """
        if self._prefixlen == self._max_prefixlen:
            yield self
            return

        if new_prefix is not None:
            if new_prefix < self._prefixlen:
                raise ValueError('new prefix must be longer')
            if prefixlen_diff != 1:
                raise ValueError('cannot set prefixlen_diff and new_prefix')
            prefixlen_diff = new_prefix - self._prefixlen

        if prefixlen_diff < 0:
            raise ValueError('prefix length diff must be > 0')
        new_prefixlen = self._prefixlen + prefixlen_diff

        if new_prefixlen > self._max_prefixlen:
            raise ValueError(
                'prefix length diff %d is invalid for netblock %s' % (
                    new_prefixlen, self))

        # Walk the address range in steps of the new subnet size.
        start = int(self.network_address)
        end = int(self.broadcast_address) + 1
        step = (int(self.hostmask) + 1) >> prefixlen_diff
        for new_addr in _compat_range(start, end, step):
            current = self.__class__((new_addr, new_prefixlen))
            yield current

    def supernet(self, prefixlen_diff=1, new_prefix=None):
        """The supernet containing the current network.

        Args:
            prefixlen_diff: An integer, the amount the prefix length of
              the network should be decreased by.  For example, given a
              /24 network and a prefixlen_diff of 3, a supernet with a
              /21 netmask is returned.

        Returns:
            An IPv4 network object.

        Raises:
            ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
              a negative prefix length.
                OR
            If prefixlen_diff and new_prefix are both set or new_prefix is a
              larger number than the current prefix (larger number means a
              smaller network)
        """
        if self._prefixlen == 0:
            return self

        if new_prefix is not None:
            if new_prefix > self._prefixlen:
                raise ValueError('new prefix must be shorter')
            if prefixlen_diff != 1:
                raise ValueError('cannot set prefixlen_diff and new_prefix')
            prefixlen_diff = self._prefixlen - new_prefix

        new_prefixlen = self.prefixlen - prefixlen_diff
        if new_prefixlen < 0:
            raise ValueError(
                'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
                (self.prefixlen, prefixlen_diff))
        return self.__class__((
            int(self.network_address) & (int(self.netmask) << prefixlen_diff),
            new_prefixlen))

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.
        """
        return (self.network_address.is_multicast and
                self.broadcast_address.is_multicast)

    @staticmethod
    def _is_subnet_of(a, b):
        try:
            # Always false if one is v4 and the other is v6.
            if a._version != b._version:
                # BUG FIX: the original omitted the '%' operator here, which
                # attempted to *call* the format string and raised
                # "TypeError: 'str' object is not callable" instead of the
                # intended message.
                raise TypeError("%s and %s are not of the same version" %
                                (a, b))
            return (b.network_address <= a.network_address and
                    b.broadcast_address >= a.broadcast_address)
        except AttributeError:
            raise TypeError("Unable to test subnet containment "
                            "between %s and %s" % (a, b))

    def subnet_of(self, other):
        """Return True if this network is a subnet of other."""
        return self._is_subnet_of(self, other)

    def supernet_of(self, other):
        """Return True if this network is a supernet of other."""
        return self._is_subnet_of(other, self)

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.
        """
        return (self.network_address.is_reserved and
                self.broadcast_address.is_reserved)

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.
        """
        return (self.network_address.is_link_local and
                self.broadcast_address.is_link_local)

    @property
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.
        """
        return (self.network_address.is_private and
                self.broadcast_address.is_private)

    @property
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.
        """
        return not self.is_private

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.
        """
        return (self.network_address.is_unspecified and
                self.broadcast_address.is_unspecified)

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.
        """
        return (self.network_address.is_loopback and
                self.broadcast_address.is_loopback)
class _BaseV4(object):

    """Base IPv4 object.

    The following methods are used by IPv4 objects in both single IP
    addresses and networks.
    """

    __slots__ = ()
    _version = 4
    # Equivalent to 255.255.255.255 or 32 bits of 1's.
    _ALL_ONES = (2 ** IPV4LENGTH) - 1
    _DECIMAL_DIGITS = frozenset('0123456789')

    # the valid octets for host and netmasks. only useful for IPv4.
    _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0])

    _max_prefixlen = IPV4LENGTH
    # There are only a handful of valid v4 netmasks, so we cache them all
    # when constructed (see _make_netmask()).  Shared class-level mapping:
    # arg -> (netmask, prefixlen).
    _netmask_cache = {}

    def _explode_shorthand_ip_string(self):
        # IPv4 has no shorthand form, so the "exploded" string is just
        # the normal string representation.
        return _compat_str(self)

    @classmethod
    def _make_netmask(cls, arg):
        """Make a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        """
        if arg not in cls._netmask_cache:
            if isinstance(arg, _compat_int_types):
                prefixlen = arg
            else:
                try:
                    # Check for a netmask in prefix length form
                    prefixlen = cls._prefix_from_prefix_string(arg)
                except NetmaskValueError:
                    # Check for a netmask or hostmask in dotted-quad form.
                    # This may raise NetmaskValueError.
                    prefixlen = cls._prefix_from_ip_string(arg)
            netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
            cls._netmask_cache[arg] = netmask, prefixlen
        return cls._netmask_cache[arg]

    @classmethod
    def _ip_int_from_string(cls, ip_str):
        """Turn the given IP string into an integer for comparison.

        Args:
            ip_str: A string, the IP ip_str.

        Returns:
            The IP ip_str as an integer.

        Raises:
            AddressValueError: if ip_str isn't a valid IPv4 Address.
        """
        if not ip_str:
            raise AddressValueError('Address cannot be empty')

        octets = ip_str.split('.')
        if len(octets) != 4:
            raise AddressValueError("Expected 4 octets in %r" % ip_str)

        try:
            # Big-endian: first octet is the most significant byte.
            return _compat_int_from_byte_vals(
                map(cls._parse_octet, octets), 'big')
        except ValueError as exc:
            # Re-raise octet-level errors with the full address for context.
            raise AddressValueError("%s in %r" % (exc, ip_str))

    @classmethod
    def _parse_octet(cls, octet_str):
        """Convert a decimal octet into an integer.

        Args:
            octet_str: A string, the number to parse.

        Returns:
            The octet as an integer.

        Raises:
            ValueError: if the octet isn't strictly a decimal from [0..255].
        """
        if not octet_str:
            raise ValueError("Empty octet not permitted")
        # Whitelist the characters, since int() allows a lot of bizarre stuff.
        if not cls._DECIMAL_DIGITS.issuperset(octet_str):
            msg = "Only decimal digits permitted in %r"
            raise ValueError(msg % octet_str)
        # We do the length check second, since the invalid character error
        # is likely to be more informative for the user
        if len(octet_str) > 3:
            msg = "At most 3 characters permitted in %r"
            raise ValueError(msg % octet_str)
        # Convert to integer (we know digits are legal)
        octet_int = int(octet_str, 10)
        # Any octets that look like they *might* be written in octal,
        # and which don't look exactly the same in both octal and
        # decimal are rejected as ambiguous
        if octet_int > 7 and octet_str[0] == '0':
            msg = "Ambiguous (octal/decimal) value in %r not permitted"
            raise ValueError(msg % octet_str)
        if octet_int > 255:
            raise ValueError("Octet %d (> 255) not permitted" % octet_int)
        return octet_int

    @classmethod
    def _string_from_ip_int(cls, ip_int):
        """Turns a 32-bit integer into dotted decimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            The IP address as a string in dotted decimal notation.
        """
        # _compat_to_bytes may yield ints or single bytes depending on the
        # Python version; unpack only when a bytes object is produced.
        return '.'.join(_compat_str(struct.unpack(b'!B', b)[0]
                                    if isinstance(b, bytes)
                                    else b)
                        for b in _compat_to_bytes(ip_int, 4, 'big'))

    def _is_hostmask(self, ip_str):
        """Test if the IP string is a hostmask (rather than a netmask).

        Args:
            ip_str: A string, the potential hostmask.

        Returns:
            A boolean, True if the IP string is a hostmask.
        """
        bits = ip_str.split('.')
        try:
            # Only octets that are valid mask values survive the filter;
            # a length mismatch below means some octet was invalid.
            parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
        except ValueError:
            return False
        if len(parts) != len(bits):
            return False
        # A hostmask ascends (e.g. 0.0.0.255); a netmask descends.
        if parts[0] < parts[-1]:
            return True
        return False

    def _reverse_pointer(self):
        """Return the reverse DNS pointer name for the IPv4 address.

        This implements the method described in RFC1035 3.5.
        """
        reverse_octets = _compat_str(self).split('.')[::-1]
        return '.'.join(reverse_octets) + '.in-addr.arpa'

    @property
    def max_prefixlen(self):
        return self._max_prefixlen

    @property
    def version(self):
        return self._version
class IPv4Address(_BaseV4, _BaseAddress):

    """Represent and manipulate single IPv4 Addresses."""

    __slots__ = ('_ip', '__weakref__')

    def __init__(self, address):
        """
        Args:
            address: A string or integer representing the IP

              Additionally, an integer can be passed, so
              IPv4Address('192.0.2.1') == IPv4Address(3221225985).
              or, more generally
              IPv4Address(int(IPv4Address('192.0.2.1'))) ==
                IPv4Address('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.
        """
        if isinstance(address, _compat_int_types):
            # Efficient constructor from integer.
            self._check_int_address(address)
            self._ip = address
        elif isinstance(address, bytes):
            # Constructing from a packed (4-byte, big-endian) address.
            self._check_packed_address(address, 4)
            self._ip = _compat_int_from_byte_vals(
                _compat_bytes_to_byte_vals(address), 'big')
        else:
            # Assume input argument to be string or any object representation
            # which converts into a formatted IP string.
            addr_str = _compat_str(address)
            if '/' in addr_str:
                raise AddressValueError("Unexpected '/' in %r" % address)
            self._ip = self._ip_int_from_string(addr_str)

    @property
    def packed(self):
        """The binary representation of this address."""
        return v4_int_to_packed(self._ip)

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within the
            reserved IPv4 Network range.
        """
        return self in self._constants._reserved_network

    @property
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv4-special-registry.
        """
        for net in self._constants._private_networks:
            if self in net:
                return True
        return False

    @property
    def is_global(self):
        """Test if this address is allocated for public networks."""
        if self.is_private:
            return False
        return self not in self._constants._public_network

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is multicast.
            See RFC 3171 for details.
        """
        return self in self._constants._multicast_network

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 5735 3.
        """
        return self == self._constants._unspecified_address

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback per RFC 3330.
        """
        return self in self._constants._loopback_network

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is link-local per RFC 3927.
        """
        return self in self._constants._linklocal_network
class IPv4Interface(IPv4Address):

    """An IPv4 address together with its associated network."""

    def __init__(self, address):
        # An integer or packed-bytes address carries no mask information:
        # associate it with the single-host network (max prefix length).
        if isinstance(address, (bytes, _compat_int_types)):
            IPv4Address.__init__(self, address)
            self.network = IPv4Network(self._ip)
            self._prefixlen = self._max_prefixlen
            # NOTE(review): unlike the tuple and string branches below, this
            # branch never sets self.netmask / self.hostmask — confirm whether
            # callers of with_netmask/with_hostmask can reach this state.
            return

        # Tuple form: (address, prefixlen) or just (address,).
        if isinstance(address, tuple):
            IPv4Address.__init__(self, address[0])
            if len(address) > 1:
                self._prefixlen = int(address[1])
            else:
                self._prefixlen = self._max_prefixlen
            self.network = IPv4Network(address, strict=False)
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return

        # String form, e.g. '192.0.2.1/24': the address keeps its host bits;
        # the associated network is derived non-strictly from the same string.
        addr = _split_optional_netmask(address)
        IPv4Address.__init__(self, addr[0])

        self.network = IPv4Network(address, strict=False)
        self._prefixlen = self.network._prefixlen

        self.netmask = self.network.netmask
        self.hostmask = self.network.hostmask

    def __str__(self):
        return '%s/%d' % (self._string_from_ip_int(self._ip),
                          self.network.prefixlen)

    def __eq__(self, other):
        # Equal only if both the address part and the associated network
        # compare equal.
        address_equal = IPv4Address.__eq__(self, other)
        if not address_equal or address_equal is NotImplemented:
            return address_equal
        try:
            return self.network == other.network
        except AttributeError:
            # An interface with an associated network is NOT the
            # same as an unassociated address. That's why the hash
            # takes the extra info into account.
            return False

    def __lt__(self, other):
        # Order by network first, then by address within equal networks.
        address_less = IPv4Address.__lt__(self, other)
        if address_less is NotImplemented:
            return NotImplemented
        try:
            return (self.network < other.network or
                    self.network == other.network and address_less)
        except AttributeError:
            # We *do* allow addresses and interfaces to be sorted. The
            # unassociated address is considered less than all interfaces.
            return False

    def __hash__(self):
        # Mix the network into the hash so an interface hashes differently
        # from the equal-valued bare address (see __eq__).
        return self._ip ^ self._prefixlen ^ int(self.network.network_address)

    __reduce__ = _IPAddressBase.__reduce__

    @property
    def ip(self):
        # The bare address, without the associated network.
        return IPv4Address(self._ip)

    @property
    def with_prefixlen(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self._prefixlen)

    @property
    def with_netmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.netmask)

    @property
    def with_hostmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.hostmask)
class IPv4Network(_BaseV4, _BaseNetwork):

    """This class represents and manipulates 32-bit IPv4 network + addresses..

    Attributes: [examples for IPv4Network('192.0.2.0/27')]
        .network_address: IPv4Address('192.0.2.0')
        .hostmask: IPv4Address('0.0.0.31')
        .broadcast_address: IPv4Address('192.0.2.32')
        .netmask: IPv4Address('255.255.255.224')
        .prefixlen: 27
    """

    # Class to use when creating address objects
    _address_class = IPv4Address

    def __init__(self, address, strict=True):
        """Instantiate a new IPv4 network object.

        Args:
            address: A string or integer representing the IP [& network].
              '192.0.2.0/24'
              '192.0.2.0/255.255.255.0'
              '192.0.0.2/0.0.0.255'
              are all functionally the same in IPv4. Similarly,
              '192.0.2.1'
              '192.0.2.1/255.255.255.255'
              '192.0.2.1/32'
              are also functionally equivalent. That is to say, failing to
              provide a subnetmask will create an object with a mask of /32.

              If the mask (portion after the / in the argument) is given in
              dotted quad form, it is treated as a netmask if it starts with a
              non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
              starts with a zero field (e.g. 0.255.255.255 == /8), with the
              single exception of an all-zero mask which is treated as a
              netmask == /0. If no mask is given, a default of /32 is used.

              Additionally, an integer can be passed, so
              IPv4Network('192.0.2.1') == IPv4Network(3221225985)
              or, more generally
              IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
                IPv4Interface('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv4 address.
            ValueError: If strict is True and a network address is not
              supplied.
        """
        _BaseNetwork.__init__(self, address)

        # Constructing from a packed address or integer
        if isinstance(address, (_compat_int_types, bytes)):
            self.network_address = IPv4Address(address)
            self.netmask, self._prefixlen = self._make_netmask(
                self._max_prefixlen)
            # fixme: address/network test here.
            return

        # Tuple form: (address, mask-or-prefixlen) or just (address,).
        if isinstance(address, tuple):
            if len(address) > 1:
                arg = address[1]
            else:
                # We weren't given an address[1]
                arg = self._max_prefixlen
            self.network_address = IPv4Address(address[0])
            self.netmask, self._prefixlen = self._make_netmask(arg)
            packed = int(self.network_address)
            # Host bits set: error under strict, masked off otherwise.
            if packed & int(self.netmask) != packed:
                if strict:
                    raise ValueError('%s has host bits set' % self)
                else:
                    self.network_address = IPv4Address(packed &
                                                       int(self.netmask))
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP prefix string.
        addr = _split_optional_netmask(address)
        self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))

        if len(addr) == 2:
            arg = addr[1]
        else:
            arg = self._max_prefixlen
        self.netmask, self._prefixlen = self._make_netmask(arg)

        if strict:
            if (IPv4Address(int(self.network_address) & int(self.netmask)) !=
                    self.network_address):
                raise ValueError('%s has host bits set' % self)
        # Normalize: clear any host bits (a no-op under strict).
        self.network_address = IPv4Address(int(self.network_address) &
                                           int(self.netmask))

        # For a two-address network (prefixlen == max - 1) there are no
        # distinct usable hosts between network and broadcast, so hosts()
        # is aliased to __iter__, which yields both endpoint addresses.
        if self._prefixlen == (self._max_prefixlen - 1):
            self.hosts = self.__iter__

    @property
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry.
        """
        return (not (self.network_address in IPv4Network('100.64.0.0/10') and
                     self.broadcast_address in IPv4Network('100.64.0.0/10')) and
                not self.is_private)
class _IPv4Constants(object):

    # Constant networks backing IPv4Address's classification properties
    # (is_link_local, is_loopback, is_multicast, is_private, is_global,
    # is_reserved, is_unspecified).

    _linklocal_network = IPv4Network('169.254.0.0/16')

    _loopback_network = IPv4Network('127.0.0.0/8')

    _multicast_network = IPv4Network('224.0.0.0/4')

    # Excluded from IPv4Address.is_global in addition to the private list.
    _public_network = IPv4Network('100.64.0.0/10')

    # Networks treated as private per iana-ipv4-special-registry
    # (see IPv4Address.is_private).
    _private_networks = [
        IPv4Network('0.0.0.0/8'),
        IPv4Network('10.0.0.0/8'),
        IPv4Network('127.0.0.0/8'),
        IPv4Network('169.254.0.0/16'),
        IPv4Network('172.16.0.0/12'),
        IPv4Network('192.0.0.0/29'),
        IPv4Network('192.0.0.170/31'),
        IPv4Network('192.0.2.0/24'),
        IPv4Network('192.168.0.0/16'),
        IPv4Network('198.18.0.0/15'),
        IPv4Network('198.51.100.0/24'),
        IPv4Network('203.0.113.0/24'),
        IPv4Network('240.0.0.0/4'),
        IPv4Network('255.255.255.255/32'),
    ]

    _reserved_network = IPv4Network('240.0.0.0/4')

    _unspecified_address = IPv4Address('0.0.0.0')
# Attach the constant networks to IPv4Address so its classification
# properties (is_private, is_multicast, ...) can reference them.
IPv4Address._constants = _IPv4Constants
class _BaseV6(object):
"""Base IPv6 object.
The following methods are used by IPv6 objects in both single IP
addresses and networks.
"""
__slots__ = ()
_version = 6
_ALL_ONES = (2 ** IPV6LENGTH) - 1
_HEXTET_COUNT = 8
_HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
_max_prefixlen = IPV6LENGTH
# There are only a bunch of valid v6 netmasks, so we cache them all
# when constructed (see _make_netmask()).
_netmask_cache = {}
@classmethod
def _make_netmask(cls, arg):
"""Make a (netmask, prefix_len) tuple from the given argument.
Argument can be:
- an integer (the prefix length)
- a string representing the prefix length (e.g. "24")
- a string representing the prefix netmask (e.g. "255.255.255.0")
"""
if arg not in cls._netmask_cache:
if isinstance(arg, _compat_int_types):
prefixlen = arg
else:
prefixlen = cls._prefix_from_prefix_string(arg)
netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
cls._netmask_cache[arg] = netmask, prefixlen
return cls._netmask_cache[arg]
@classmethod
def _ip_int_from_string(cls, ip_str):
"""Turn an IPv6 ip_str into an integer.
Args:
ip_str: A string, the IPv6 ip_str.
Returns:
An int, the IPv6 address
Raises:
AddressValueError: if ip_str isn't a valid IPv6 Address.
"""
if not ip_str:
raise AddressValueError('Address cannot be empty')
parts = ip_str.split(':')
# An IPv6 address needs at least 2 colons (3 parts).
_min_parts = 3
if len(parts) < _min_parts:
msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
raise AddressValueError(msg)
# If the address has an IPv4-style suffix, convert it to hexadecimal.
if '.' in parts[-1]:
try:
ipv4_int = IPv4Address(parts.pop())._ip
except AddressValueError as exc:
raise AddressValueError("%s in %r" % (exc, ip_str))
parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
parts.append('%x' % (ipv4_int & 0xFFFF))
# An IPv6 address can't have more than 8 colons (9 parts).
# The extra colon comes from using the "::" notation for a single
# leading or trailing zero part.
_max_parts = cls._HEXTET_COUNT + 1
if len(parts) > _max_parts:
msg = "At most %d colons permitted in %r" % (
_max_parts - 1, ip_str)
raise AddressValueError(msg)
# Disregarding the endpoints, find '::' with nothing in between.
# This indicates that a run of zeroes has been skipped.
skip_index = None
for i in _compat_range(1, len(parts) - 1):
if not parts[i]:
if skip_index is not None:
# Can't have more than one '::'
msg = "At most one '::' permitted in %r" % ip_str
raise AddressValueError(msg)
skip_index = i
# parts_hi is the number of parts to copy from above/before the '::'
# parts_lo is the number of parts to copy from below/after the '::'
if skip_index is not None:
# If we found a '::', then check if it also covers the endpoints.
parts_hi = skip_index
parts_lo = len(parts) - skip_index - 1
if not parts[0]:
parts_hi -= 1
if parts_hi:
msg = "Leading ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # ^: requires ^::
if not parts[-1]:
parts_lo -= 1
if parts_lo:
msg = "Trailing ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # :$ requires ::$
parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)
if parts_skipped < 1:
msg = "Expected at most %d other parts with '::' in %r"
raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str))
else:
# Otherwise, allocate the entire address to parts_hi. The
# endpoints could still be empty, but _parse_hextet() will check
# for that.
if len(parts) != cls._HEXTET_COUNT:
msg = "Exactly %d parts expected without '::' in %r"
raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))
if not parts[0]:
msg = "Leading ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # ^: requires ^::
if not parts[-1]:
msg = "Trailing ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # :$ requires ::$
parts_hi = len(parts)
parts_lo = 0
parts_skipped = 0
try:
# Now, parse the hextets into a 128-bit integer.
ip_int = 0
for i in range(parts_hi):
ip_int <<= 16
ip_int |= cls._parse_hextet(parts[i])
ip_int <<= 16 * parts_skipped
for i in range(-parts_lo, 0):
ip_int <<= 16
ip_int |= cls._parse_hextet(parts[i])
return ip_int
except ValueError as exc:
raise AddressValueError("%s in %r" % (exc, ip_str))
@classmethod
def _parse_hextet(cls, hextet_str):
"""Convert an IPv6 hextet string into an integer.
Args:
hextet_str: A string, the number to parse.
Returns:
The hextet as an integer.
Raises:
ValueError: if the input isn't strictly a hex number from
[0..FFFF].
"""
# Whitelist the characters, since int() allows a lot of bizarre stuff.
if not cls._HEX_DIGITS.issuperset(hextet_str):
raise ValueError("Only hex digits permitted in %r" % hextet_str)
# We do the length check second, since the invalid character error
# is likely to be more informative for the user
if len(hextet_str) > 4:
msg = "At most 4 characters permitted in %r"
raise ValueError(msg % hextet_str)
# Length check means we can skip checking the integer value
return int(hextet_str, 16)
    @classmethod
    def _compress_hextets(cls, hextets):
        """Compresses a list of hextets.

        Compresses a list of strings, replacing the longest continuous
        sequence of "0" in the list with "" and adding empty strings at
        the beginning or at the end of the string such that subsequently
        calling ":".join(hextets) will produce the compressed version of
        the IPv6 address.

        Args:
            hextets: A list of strings, the hextets to compress.

        Returns:
            A list of strings.

        """
        # Single pass: track the run of '0' hextets currently being scanned
        # and the best (longest) run seen so far.
        best_doublecolon_start = -1
        best_doublecolon_len = 0
        doublecolon_start = -1
        doublecolon_len = 0
        for index, hextet in enumerate(hextets):
            if hextet == '0':
                doublecolon_len += 1
                if doublecolon_start == -1:
                    # Start of a sequence of zeros.
                    doublecolon_start = index
                if doublecolon_len > best_doublecolon_len:
                    # This is the longest sequence of zeros so far.
                    best_doublecolon_len = doublecolon_len
                    best_doublecolon_start = doublecolon_start
            else:
                doublecolon_len = 0
                doublecolon_start = -1
        # Only runs of two or more zero hextets are replaced with '::'.
        if best_doublecolon_len > 1:
            best_doublecolon_end = (best_doublecolon_start +
                                    best_doublecolon_len)
            # For zeros at the end of the address.
            if best_doublecolon_end == len(hextets):
                hextets += ['']
            hextets[best_doublecolon_start:best_doublecolon_end] = ['']
            # For zeros at the beginning of the address.
            if best_doublecolon_start == 0:
                hextets = [''] + hextets
        return hextets
    @classmethod
    def _string_from_ip_int(cls, ip_int=None):
        """Turns a 128-bit integer into hexadecimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            A string, the hexadecimal representation of the address.

        Raises:
            ValueError: The address is bigger than 128 bits of all ones.

        """
        if ip_int is None:
            # NOTE(review): reads `_ip` from `cls` although this is a
            # classmethod; callers in this module always pass `ip_int`
            # explicitly, so this branch matches upstream as-is.
            ip_int = int(cls._ip)
        if ip_int > cls._ALL_ONES:
            raise ValueError('IPv6 address is too large')
        # Render as 32 fixed-width hex digits, then split into 8 hextets of
        # 4 digits each; the %x re-format drops leading zeros per hextet.
        hex_str = '%032x' % ip_int
        hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]
        # Collapse the longest run of zero hextets into '::'.
        hextets = cls._compress_hextets(hextets)
        return ':'.join(hextets)
    def _explode_shorthand_ip_string(self):
        """Expand a shortened IPv6 address to its full 8-hextet form.

        Returns:
            A string, the expanded IPv6 address (with a '/prefixlen'
            suffix for networks and interfaces).

        """
        # Pick which address to expand depending on the kind of object this
        # mixin method is bound to (network, interface, or plain address).
        if isinstance(self, IPv6Network):
            ip_str = _compat_str(self.network_address)
        elif isinstance(self, IPv6Interface):
            ip_str = _compat_str(self.ip)
        else:
            ip_str = _compat_str(self)
        ip_int = self._ip_int_from_string(ip_str)
        # Re-render as 8 zero-padded 4-digit hextets.
        hex_str = '%032x' % ip_int
        parts = [hex_str[x:x + 4] for x in range(0, 32, 4)]
        if isinstance(self, (_BaseNetwork, IPv6Interface)):
            # Networks and interfaces keep their prefix-length suffix.
            return '%s/%d' % (':'.join(parts), self._prefixlen)
        return ':'.join(parts)
    def _reverse_pointer(self):
        """Return the reverse DNS pointer name for the IPv6 address.

        This implements the method described in RFC3596 2.5.

        """
        # Every hex digit of the fully exploded address becomes one label,
        # in reverse order, followed by the 'ip6.arpa' suffix.
        reverse_chars = self.exploded[::-1].replace(':', '')
        return '.'.join(reverse_chars) + '.ip6.arpa'
    @property
    def max_prefixlen(self):
        # Widest possible prefix length for this IP version.
        return self._max_prefixlen
    @property
    def version(self):
        # The IP protocol version this class implements.
        return self._version
class IPv6Address(_BaseV6, _BaseAddress):
    """Represent and manipulate single IPv6 Addresses."""
    # No per-instance __dict__; '__weakref__' keeps weak references working.
    __slots__ = ('_ip', '__weakref__')
    def __init__(self, address):
        """Instantiate a new IPv6 address object.

        Args:
            address: A string or integer representing the IP

              Additionally, an integer can be passed, so
              IPv6Address('2001:db8::') ==
                IPv6Address(42540766411282592856903984951653826560)
              or, more generally
              IPv6Address(int(IPv6Address('2001:db8::'))) ==
                IPv6Address('2001:db8::')

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.

        """
        # Efficient constructor from integer.
        if isinstance(address, _compat_int_types):
            self._check_int_address(address)
            self._ip = address
            return
        # Constructing from a packed address
        if isinstance(address, bytes):
            self._check_packed_address(address, 16)
            bvs = _compat_bytes_to_byte_vals(address)
            self._ip = _compat_int_from_byte_vals(bvs, 'big')
            return
        # Assume input argument to be string or any object representation
        # which converts into a formatted IP string.
        addr_str = _compat_str(address)
        if '/' in addr_str:
            # A prefix means the caller wanted IPv6Interface/IPv6Network.
            raise AddressValueError("Unexpected '/' in %r" % address)
        self._ip = self._ip_int_from_string(addr_str)
    @property
    def packed(self):
        """The binary representation of this address."""
        return v6_int_to_packed(self._ip)
    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.

        """
        return self in self._constants._multicast_network
    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.

        """
        return any(self in x for x in self._constants._reserved_networks)
    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.

        """
        return self in self._constants._linklocal_network
    @property
    def is_site_local(self):
        """Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.

        """
        return self in self._constants._sitelocal_network
    @property
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv6-special-registry.

        """
        return any(self in net for net in self._constants._private_networks)
    @property
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, true if the address is not reserved per
            iana-ipv6-special-registry.

        """
        # Defined as the complement of is_private.
        return not self.is_private
    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.

        """
        # The all-zeros address '::'.
        return self._ip == 0
    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.

        """
        # The address '::1'.
        return self._ip == 1
    @property
    def ipv4_mapped(self):
        """Return the IPv4 mapped address.

        Returns:
            If the IPv6 address is a v4 mapped address, return the
            IPv4 mapped address. Return None otherwise.

        """
        # Mapped addresses have the form ::ffff:a.b.c.d, i.e. everything
        # above the low 32 bits must be exactly 0xFFFF.
        if (self._ip >> 32) != 0xFFFF:
            return None
        return IPv4Address(self._ip & 0xFFFFFFFF)
    @property
    def teredo(self):
        """Tuple of embedded teredo IPs.

        Returns:
            Tuple of the (server, client) IPs or None if the address
            doesn't appear to be a teredo address (doesn't start with
            2001::/32)

        """
        if (self._ip >> 96) != 0x20010000:
            return None
        # Server IP sits in bits 64..95; client IP is the bitwise complement
        # of the low 32 bits.
        return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
                IPv4Address(~self._ip & 0xFFFFFFFF))
    @property
    def sixtofour(self):
        """Return the IPv4 6to4 embedded address.

        Returns:
            The IPv4 6to4-embedded address if present or None if the
            address doesn't appear to contain a 6to4 embedded address.

        """
        # 6to4 addresses start with the 2002::/16 prefix; the embedded IPv4
        # address occupies bits 80..111.
        if (self._ip >> 112) != 0x2002:
            return None
        return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
class IPv6Interface(IPv6Address):
    """An IPv6 address paired with the network it belongs to.

    Behaves like IPv6Address but also carries ``network``, ``netmask``
    and ``hostmask`` attributes, and takes the associated network into
    account for equality, ordering and hashing.
    """
    def __init__(self, address):
        # Integer / packed-bytes input: a bare address with an implied /128.
        if isinstance(address, (bytes, _compat_int_types)):
            IPv6Address.__init__(self, address)
            self.network = IPv6Network(self._ip)
            self._prefixlen = self._max_prefixlen
            return
        # (address, prefixlen) tuple input.
        if isinstance(address, tuple):
            IPv6Address.__init__(self, address[0])
            if len(address) > 1:
                self._prefixlen = int(address[1])
            else:
                self._prefixlen = self._max_prefixlen
            self.network = IPv6Network(address, strict=False)
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return
        # String input, possibly with a '/prefix' suffix.
        addr = _split_optional_netmask(address)
        IPv6Address.__init__(self, addr[0])
        # strict=False: host bits are expected to be set on an interface.
        self.network = IPv6Network(address, strict=False)
        self.netmask = self.network.netmask
        self._prefixlen = self.network._prefixlen
        self.hostmask = self.network.hostmask
    def __str__(self):
        # 'address/prefixlen', e.g. '2001:db8::1/64'.
        return '%s/%d' % (self._string_from_ip_int(self._ip),
                          self.network.prefixlen)
    def __eq__(self, other):
        address_equal = IPv6Address.__eq__(self, other)
        if not address_equal or address_equal is NotImplemented:
            return address_equal
        try:
            return self.network == other.network
        except AttributeError:
            # An interface with an associated network is NOT the
            # same as an unassociated address. That's why the hash
            # takes the extra info into account.
            return False
    def __lt__(self, other):
        address_less = IPv6Address.__lt__(self, other)
        if address_less is NotImplemented:
            return NotImplemented
        try:
            # Order by network first, then by address within the network.
            return (self.network < other.network or
                    self.network == other.network and address_less)
        except AttributeError:
            # We *do* allow addresses and interfaces to be sorted. The
            # unassociated address is considered less than all interfaces.
            return False
    def __hash__(self):
        # Mix in the network so an interface hashes differently from the
        # bare address (consistent with __eq__ above).
        return self._ip ^ self._prefixlen ^ int(self.network.network_address)
    # Reuse the base class pickle support.
    __reduce__ = _IPAddressBase.__reduce__
    @property
    def ip(self):
        # The bare address, without the network association.
        return IPv6Address(self._ip)
    @property
    def with_prefixlen(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self._prefixlen)
    @property
    def with_netmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.netmask)
    @property
    def with_hostmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.hostmask)
    @property
    def is_unspecified(self):
        return self._ip == 0 and self.network.is_unspecified
    @property
    def is_loopback(self):
        return self._ip == 1 and self.network.is_loopback
class IPv6Network(_BaseV6, _BaseNetwork):
    """This class represents and manipulates 128-bit IPv6 networks.

    Attributes: [examples for IPv6('2001:db8::1000/124')]
        .network_address: IPv6Address('2001:db8::1000')
        .hostmask: IPv6Address('::f')
        .broadcast_address: IPv6Address('2001:db8::100f')
        .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
        .prefixlen: 124

    """
    # Class to use when creating address objects
    _address_class = IPv6Address
    def __init__(self, address, strict=True):
        """Instantiate a new IPv6 Network object.

        Args:
            address: A string or integer representing the IPv6 network or the
              IP and prefix/netmask.
              '2001:db8::/128'
              '2001:db8:0000:0000:0000:0000:0000:0000/128'
              '2001:db8::'
              are all functionally the same in IPv6.  That is to say,
              failing to provide a subnetmask will create an object with
              a mask of /128.

              Additionally, an integer can be passed, so
              IPv6Network('2001:db8::') ==
                IPv6Network(42540766411282592856903984951653826560)
              or, more generally
              IPv6Network(int(IPv6Network('2001:db8::'))) ==
                IPv6Network('2001:db8::')

            strict: A boolean. If true, ensure that we have been passed
              A true network address, eg, 2001:db8::1000/124 and not an
              IP address on a network, eg, 2001:db8::1/124.

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv6 address.
            ValueError: If strict was True and a network address was not
              supplied.

        """
        _BaseNetwork.__init__(self, address)
        # Efficient constructor from integer or packed address
        if isinstance(address, (bytes, _compat_int_types)):
            self.network_address = IPv6Address(address)
            self.netmask, self._prefixlen = self._make_netmask(
                self._max_prefixlen)
            return
        # (address, prefixlen-or-netmask) tuple input.
        if isinstance(address, tuple):
            if len(address) > 1:
                arg = address[1]
            else:
                arg = self._max_prefixlen
            self.netmask, self._prefixlen = self._make_netmask(arg)
            self.network_address = IPv6Address(address[0])
            packed = int(self.network_address)
            if packed & int(self.netmask) != packed:
                # Host bits are set: reject under strict, else mask them off.
                if strict:
                    raise ValueError('%s has host bits set' % self)
                else:
                    self.network_address = IPv6Address(packed &
                                                       int(self.netmask))
            return
        # Assume input argument to be string or any object representation
        # which converts into a formatted IP prefix string.
        addr = _split_optional_netmask(address)
        self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))
        if len(addr) == 2:
            arg = addr[1]
        else:
            arg = self._max_prefixlen
        self.netmask, self._prefixlen = self._make_netmask(arg)
        if strict:
            if (IPv6Address(int(self.network_address) & int(self.netmask)) !=
                    self.network_address):
                raise ValueError('%s has host bits set' % self)
        # Always store the masked network address.
        self.network_address = IPv6Address(int(self.network_address) &
                                           int(self.netmask))
        if self._prefixlen == (self._max_prefixlen - 1):
            # A /127 has no Subnet-Router anycast address to skip, so
            # hosts yields both addresses (plain iteration).
            self.hosts = self.__iter__
    def hosts(self):
        """Generate Iterator over usable hosts in a network.

        This is like __iter__ except it doesn't return the
        Subnet-Router anycast address.

        """
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        # Start at network + 1 to skip the anycast (all-zeros-host) address.
        for x in _compat_range(network + 1, broadcast + 1):
            yield self._address_class(x)
    @property
    def is_site_local(self):
        """Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.

        """
        # Both endpoints must be site-local for the whole network to be.
        return (self.network_address.is_site_local and
                self.broadcast_address.is_site_local)
class _IPv6Constants(object):
    # Well-known IPv6 network constants, grouped here so the classes above
    # can reference them lazily via IPv6Address._constants (assigned below,
    # after IPv6Network is defined).
    _linklocal_network = IPv6Network('fe80::/10')
    _multicast_network = IPv6Network('ff00::/8')
    # Networks reserved per iana-ipv6-special-registry (used by is_private).
    _private_networks = [
        IPv6Network('::1/128'),
        IPv6Network('::/128'),
        IPv6Network('::ffff:0:0/96'),
        IPv6Network('100::/64'),
        IPv6Network('2001::/23'),
        IPv6Network('2001:2::/48'),
        IPv6Network('2001:db8::/32'),
        IPv6Network('2001:10::/28'),
        IPv6Network('fc00::/7'),
        IPv6Network('fe80::/10'),
    ]
    # IETF reserved ranges (used by is_reserved).
    _reserved_networks = [
        IPv6Network('::/8'), IPv6Network('100::/8'),
        IPv6Network('200::/7'), IPv6Network('400::/6'),
        IPv6Network('800::/5'), IPv6Network('1000::/4'),
        IPv6Network('4000::/3'), IPv6Network('6000::/3'),
        IPv6Network('8000::/3'), IPv6Network('A000::/3'),
        IPv6Network('C000::/3'), IPv6Network('E000::/4'),
        IPv6Network('F000::/5'), IPv6Network('F800::/6'),
        IPv6Network('FE00::/9'),
    ]
    _sitelocal_network = IPv6Network('fec0::/10')
# Wire the constants into the address class (deferred until both exist).
IPv6Address._constants = _IPv6Constants
|
yavalvas/yav_com | refs/heads/master | build/matplotlib/doc/mpl_examples/api/mathtext_asarray.py | 12 | """
Load a mathtext image as numpy array
"""
import numpy as np
import matplotlib.mathtext as mathtext
import matplotlib.pyplot as plt
import matplotlib
# Put the image origin at the upper-left so rendered text is not flipped.
matplotlib.rc('image', origin='upper')
# The "Bitmap" parser rasterizes mathtext to pixel data.
parser = mathtext.MathTextParser("Bitmap")
# Render a mathtext expression straight to a PNG file on disk.
parser.to_png('test2.png', r'$\left[\left\lfloor\frac{5}{\frac{\left(3\right)}{4}} y\right)\right]$', color='green', fontsize=14, dpi=100)
# to_rgba returns an (RGBA array, baseline depth) pair.
rgba1, depth1 = parser.to_rgba(r'IQ: $\sigma_i=15$', color='blue', fontsize=20, dpi=200)
rgba2, depth2 = parser.to_rgba(r'some other string', color='red', fontsize=20, dpi=200)
fig = plt.figure()
# figimage expects floats in [0, 1]; place each image at pixel offsets.
fig.figimage(rgba1.astype(float)/255., 100, 100)
fig.figimage(rgba2.astype(float)/255., 100, 300)
plt.show()
|
nburn42/tensorflow | refs/heads/master | tensorflow/python/estimator/keras.py | 3 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=protected-access
"""Home of estimator related functions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import re
from tensorflow.python.client import session
from tensorflow.python.estimator import estimator as estimator_lib
from tensorflow.python.estimator import export as export_lib
from tensorflow.python.estimator import model_fn as model_fn_lib
from tensorflow.python.estimator import run_config as run_config_lib
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import sparse_tensor as sparse_tensor_lib
from tensorflow.python.framework import tensor_util
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import models
from tensorflow.python.keras import optimizers
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.engine.network import Network
from tensorflow.python.keras.utils.generic_utils import CustomObjectScope
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import metrics as metrics_module
from tensorflow.python.ops import variables as variables_module
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.training import distribute as distribute_lib
from tensorflow.python.training import saver as saver_lib
from tensorflow.python.training import training_util
from tensorflow.python.util.tf_export import tf_export
_DEFAULT_SERVING_KEY = signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
def _cast_tensor_to_floatx(x):
  """Cast tensor to keras's floatx dtype if it is not already the same dtype."""
  # Only insert a cast op when the dtype actually differs.
  if x.dtype != K.floatx():
    return math_ops.cast(x, K.floatx())
  return x
def _convert_tensor(x):
  """Create or cast tensor if needed.

  Non-tensor inputs (e.g. numpy arrays) are first converted into a dense or
  sparse tensor; numeric tensors are then cast to the Keras float dtype.
  """
  tensor = x
  if not tensor_util.is_tensor(tensor):
    # `tensor` is a numpy array: convert it to a (Sparse)Tensor first.
    tensor = sparse_tensor_lib.convert_to_tensor_or_sparse_tensor(tensor)
  if check_ops.is_numeric_tensor(tensor):
    # is_numeric_tensor returns False if provided with a numpy array, so
    # this check only succeeds after the conversion above.
    tensor = _cast_tensor_to_floatx(tensor)
  return tensor
def _any_variable_initialized():
  """Check if any variable has been initialized in the Keras model.

  Returns:
    boolean, True if at least one variable has been initialized, else False.
  """
  # `_keras_initialized` is set on variables the Keras backend has
  # initialized; a missing attribute counts as "not initialized".
  return any(
      getattr(v, '_keras_initialized', False)
      for v in variables_module.global_variables())
def _create_ordered_io(keras_model, estimator_io, is_input=True):
  """Create a list of tensors from IO dictionary based on Keras IO order.

  Args:
    keras_model: An instance of compiled keras model.
    estimator_io: The features or labels (dict or plain array) from model_fn.
    is_input: True if dictionary is for inputs.

  Returns:
    A list of tensors based on Keras IO order.

  Raises:
    ValueError: if dictionary keys cannot be found in Keras model input_names
      or output_names.
  """
  if isinstance(estimator_io, (list, tuple)):
    # Case currently not supported by most built-in input_fn,
    # but it's good to have for sanity
    return [_convert_tensor(x) for x in estimator_io]
  elif isinstance(estimator_io, dict):
    # Determine the canonical key order: graph networks expose real IO
    # names; subclassed models fall back to positional 'input_N'/'output_N'.
    if is_input:
      if keras_model._is_graph_network:
        keras_io_names = keras_model.input_names
      else:
        keras_io_names = [
            'input_%d' % i for i in range(1, len(estimator_io) + 1)]
    else:
      if keras_model._is_graph_network:
        keras_io_names = keras_model.output_names
      else:
        keras_io_names = [
            'output_%d' % i for i in range(1, len(estimator_io) + 1)]
    # Reject keys the Keras model doesn't know about.
    for key in estimator_io:
      if key not in keras_io_names:
        raise ValueError(
            'Cannot find %s with name "%s" in Keras Model. '
            'It needs to match one '
            'of the following: %s' % ('input' if is_input else 'output', key,
                                      ', '.join(keras_io_names)))
    # Emit tensors in Keras' own IO order, not the dict's order.
    tensors = [_convert_tensor(estimator_io[io_name])
               for io_name in keras_io_names]
    return tensors
  else:
    # Plain array.
    return _convert_tensor(estimator_io)
def _in_place_subclassed_model_reset(model):
  """Substitute for model cloning that works for subclassed models.

  Subclassed models cannot be cloned because their topology is not serializable.
  To "instantiate" an identical model in a new TF graph, we reuse the original
  model object, but we clear its state.

  After calling this function on a model instance, you can use the model
  instance as if it were a model clone (in particular you can use it in a new
  graph).

  This method clears the state of the input model. It is thus destructive.
  However the original state can be restored fully by calling
  `_in_place_subclassed_model_state_restoration`.

  Args:
    model: Instance of a Keras model created via subclassing.

  Raises:
    ValueError: In case the model uses a subclassed model as inner layer.
  """
  assert not model._is_graph_network  # Only makes sense for subclassed networks
  # Retrieve all layers tracked by the model as well as their attribute names
  attributes_cache = {}
  for name in dir(model):
    try:
      value = getattr(model, name)
    except (AttributeError, ValueError, TypeError):
      # Some attributes (e.g. properties) may raise on access; skip them.
      continue
    if isinstance(value, Layer):
      attributes_cache[name] = value
      assert value in model._layers
    elif isinstance(value, (list, tuple)) and name not in ('layers', '_layers'):
      # Handle case: list/tuple of layers (also tracked by the Network API).
      if value and all(isinstance(val, Layer) for val in value):
        raise ValueError('We do not support the use of list-of-layers '
                         'attributes in subclassed models used with '
                         '`model_to_estimator` at this time. Found list '
                         'model: %s' % name)
  # Replace layers on the model with fresh layers
  layers_to_names = {value: key for key, value in attributes_cache.items()}
  original_layers = model._layers[:]
  model._layers = []
  for layer in original_layers:  # We preserve layer order.
    config = layer.get_config()
    # This will not work for nested subclassed models used as layers.
    # This would be theoretically possible to support, but would add complexity.
    # Only do it if users complain.
    if isinstance(layer, Network) and not layer._is_graph_network:
      raise ValueError('We do not support the use of nested subclassed models '
                       'in `model_to_estimator` at this time. Found nested '
                       'model: %s' % layer)
    fresh_layer = layer.__class__.from_config(config)
    name = layers_to_names[layer]
    setattr(model, name, fresh_layer)
  # Cache original model build attributes (in addition to layers)
  if (not hasattr(model, '_original_attributes_cache') or
      model._original_attributes_cache is None):
    if model.built:
      # Snapshot the full set of build-time attributes so
      # `_in_place_subclassed_model_state_restoration` can undo this reset.
      attributes_to_cache = [
          'inputs',
          'outputs',
          '_feed_outputs',
          '_feed_output_names',
          '_feed_output_shapes',
          '_feed_loss_fns',
          'loss_weights_list',
          'targets',
          '_feed_targets',
          'sample_weight_modes',
          'weighted_metrics',
          'metrics_names',
          'metrics_tensors',
          'metrics_updates',
          'stateful_metric_names',
          'total_loss',
          'sample_weights',
          '_feed_sample_weights',
          'train_function',
          'test_function',
          'predict_function',
          '_collected_trainable_weights',
          '_feed_inputs',
          '_feed_input_names',
          '_feed_input_shapes',
          'optimizer',
      ]
      for name in attributes_to_cache:
        attributes_cache[name] = getattr(model, name)
  model._original_attributes_cache = attributes_cache
  # Reset built state
  model.built = False
  model.inputs = None
  model.outputs = None
def _in_place_subclassed_model_state_restoration(model):
"""Restores the original state of a model after it was "reset".
This undoes this action of `_in_place_subclassed_model_reset`.
Args:
model: Instance of a Keras model created via subclassing, on which
`_in_place_subclassed_model_reset` was previously called.
"""
assert not model._is_graph_network
# Restore layers and build attributes
if (hasattr(model, '_original_attributes_cache') and
model._original_attributes_cache is not None):
model._layers = []
for name, value in model._original_attributes_cache.items():
setattr(model, name, value)
model._original_attributes_cache = None
else:
# Restore to the state of a never-called model.
model.built = False
model.inputs = None
model.outputs = None
def _clone_and_build_model(mode,
                           keras_model,
                           custom_objects,
                           features=None,
                           labels=None):
  """Clone and build the given keras_model.

  Args:
    mode: training mode.
    keras_model: an instance of compiled keras model.
    custom_objects: Dictionary for custom objects.
    features: Dict of tensors.
    labels: Dict of tensors, or single tensor instance.

  Returns:
    The newly built model.
  """
  # Set to True during training, False for inference.
  K.set_learning_phase(mode == model_fn_lib.ModeKeys.TRAIN)
  # Get list of inputs.
  if features is None:
    input_tensors = None
  else:
    input_tensors = _create_ordered_io(keras_model,
                                       estimator_io=features,
                                       is_input=True)
  # Get list of outputs.
  if labels is None:
    target_tensors = None
  elif isinstance(labels, dict):
    target_tensors = _create_ordered_io(keras_model,
                                        estimator_io=labels,
                                        is_input=False)
  else:
    target_tensors = [
        _convert_tensor(labels)
    ]
  if keras_model._is_graph_network:
    # Graph networks can be cloned properly; custom objects must be in
    # scope during cloning so layer deserialization can find them.
    if custom_objects:
      with CustomObjectScope(custom_objects):
        model = models.clone_model(keras_model, input_tensors=input_tensors)
    else:
      model = models.clone_model(keras_model, input_tensors=input_tensors)
  else:
    # Subclassed models cannot be cloned; reuse the instance after a
    # destructive in-place reset (restored later by the caller).
    model = keras_model
    _in_place_subclassed_model_reset(model)
    if input_tensors is not None:
      model._set_inputs(input_tensors)
  # Compile/Build model
  if mode is model_fn_lib.ModeKeys.PREDICT:
    if isinstance(model, models.Sequential):
      model.build()
  else:
    # Recreate the optimizer from config so each clone gets its own copy,
    # except TFOptimizer wrappers which are shared as-is.
    if isinstance(keras_model.optimizer, optimizers.TFOptimizer):
      optimizer = keras_model.optimizer
    else:
      optimizer_config = keras_model.optimizer.get_config()
      optimizer = keras_model.optimizer.__class__.from_config(optimizer_config)
    # Tie the optimizer's step counter to the Estimator global step.
    optimizer.iterations = training_util.get_or_create_global_step()
    model.compile(
        optimizer,
        keras_model.loss,
        metrics=keras_model.metrics,
        loss_weights=keras_model.loss_weights,
        sample_weight_mode=keras_model.sample_weight_mode,
        weighted_metrics=keras_model.weighted_metrics,
        target_tensors=target_tensors)
  return model
def _create_keras_model_fn(keras_model, custom_objects=None):
  """Creates model_fn for keras Estimator.

  Args:
    keras_model: an instance of compiled keras model.
    custom_objects: Dictionary for custom objects.

  Returns:
    The model_fn for a keras Estimator.
  """
  def model_fn(features, labels, mode):
    """model_fn for keras Estimator."""
    model = _clone_and_build_model(mode, keras_model, custom_objects, features,
                                   labels)
    model_output_names = []
    # We need to make sure that the output names of the last layer in the model
    # is the same for each of the cloned models. This is required for mirrored
    # strategy when we call regroup.
    if distribute_lib.has_distribution_strategy():
      for name in model.output_names:
        # Strip the trailing '_<tower index>' suffix added by cloning.
        name = re.compile(r'_\d$').sub('', name)
        model_output_names.append(name)
    else:
      model_output_names = model.output_names
    # Get inputs to EstimatorSpec
    predictions = dict(zip(model_output_names, model.outputs))
    loss = None
    train_op = None
    eval_metric_ops = None
    # Set loss and metric only during train and evaluate.
    if mode is not model_fn_lib.ModeKeys.PREDICT:
      if mode is model_fn_lib.ModeKeys.TRAIN:
        model._make_train_function()  # pylint: disable=protected-access
      else:
        model._make_test_function()  # pylint: disable=protected-access
      loss = model.total_loss
      if model.metrics:
        # TODO(fchollet): support stateful metrics
        eval_metric_ops = {}
        # When each metric maps to an output
        if isinstance(model.metrics, dict):
          for i, output_name in enumerate(model.metrics.keys()):
            metric_name = model.metrics[output_name]
            if callable(metric_name):
              metric_name = metric_name.__name__
            # When some outputs use the same metric
            if list(model.metrics.values()).count(metric_name) > 1:
              # Disambiguate by suffixing the output name.
              metric_name += '_' + output_name
            eval_metric_ops[metric_name] = metrics_module.mean(
                model.metrics_tensors[i - len(model.metrics)])
        else:
          for i, metric_name in enumerate(model.metrics):
            if callable(metric_name):
              metric_name = metric_name.__name__
            eval_metric_ops[metric_name] = metrics_module.mean(
                model.metrics_tensors[i])
    # Set train_op only during train.
    if mode is model_fn_lib.ModeKeys.TRAIN:
      train_op = model.train_function.updates_op
    if not model._is_graph_network:
      # Reset model state to original state,
      # to avoid `model_fn` being destructive for the initial model argument.
      _in_place_subclassed_model_state_restoration(keras_model)
    return model_fn_lib.EstimatorSpec(
        mode=mode,
        predictions=predictions,
        loss=loss,
        train_op=train_op,
        eval_metric_ops=eval_metric_ops,
        export_outputs={
            _DEFAULT_SERVING_KEY:
                export_lib.export_output.PredictOutput(predictions)
        })
  return model_fn
def _save_first_checkpoint(keras_model, estimator, custom_objects,
                           keras_weights):
  """Save first checkpoint for the keras Estimator.

  Builds the model in a fresh graph and writes an initial checkpoint to
  `estimator.model_dir`, but only when no checkpoint exists there yet.

  Args:
    keras_model: an instance of compiled keras model.
    estimator: keras estimator.
    custom_objects: Dictionary for custom objects.
    keras_weights: A flat list of Numpy arrays for weights of given keras_model.
  """
  # Load weights and save to checkpoint if there is no checkpoint
  latest_path = saver_lib.latest_checkpoint(estimator.model_dir)
  if not latest_path:
    with ops.Graph().as_default():
      random_seed.set_random_seed(estimator.config.tf_random_seed)
      training_util.create_global_step()
      model = _clone_and_build_model(model_fn_lib.ModeKeys.TRAIN, keras_model,
                                     custom_objects)
      # save to checkpoint
      with session.Session(config=estimator._session_config) as sess:
        if keras_weights:
          model.set_weights(keras_weights)
        # Make update ops and initialize all variables.
        if not model.train_function:
          # pylint: disable=protected-access
          model._make_train_function()
          K._initialize_variables(sess)
          # pylint: enable=protected-access
        saver = saver_lib.Saver()
        saver.save(sess, os.path.join(estimator.model_dir, 'keras_model.ckpt'))
@tf_export('keras.estimator.model_to_estimator')
def model_to_estimator(keras_model=None,
                       keras_model_path=None,
                       custom_objects=None,
                       model_dir=None,
                       config=None):
  """Constructs an `Estimator` instance from given keras model.

  For usage example, please see
  @{$programmers_guide/estimators$creating_estimators_from_keras_models}.

  Args:
    keras_model: A compiled Keras model object. This argument is mutually
      exclusive with `keras_model_path`.
    keras_model_path: Path to a compiled Keras model saved on disk, in HDF5
      format, which can be generated with the `save()` method of a Keras model.
      This argument is mutually exclusive with `keras_model`.
    custom_objects: Dictionary for custom objects.
    model_dir: Directory to save Estimator model parameters, graph, summary
      files for TensorBoard, etc.
    config: Configuration object.

  Returns:
    An Estimator from given keras model.

  Raises:
    ValueError: if neither keras_model nor keras_model_path was given.
    ValueError: if both keras_model and keras_model_path was given.
    ValueError: if the keras_model_path is a GCS URI.
    ValueError: if keras_model has not been compiled.
  """
  # Exactly one of `keras_model` / `keras_model_path` must be provided.
  if not (keras_model or keras_model_path):
    raise ValueError(
        'Either `keras_model` or `keras_model_path` needs to be provided.')
  if keras_model and keras_model_path:
    # Fixed typo in user-facing message: 'specity' -> 'specify'.
    raise ValueError(
        'Please specify either `keras_model` or `keras_model_path`, '
        'but not both.')
  if not keras_model:
    # Loading from disk only supports local paths.
    if keras_model_path.startswith(
        'gs://') or 'storage.googleapis.com' in keras_model_path:
      raise ValueError(
          '%s is not a local path. Please copy the model locally first.' %
          keras_model_path)
    logging.info('Loading models from %s', keras_model_path)
    keras_model = models.load_model(keras_model_path)
  else:
    logging.info('Using the Keras model provided.')
  if not hasattr(keras_model, 'optimizer') or not keras_model.optimizer:
    raise ValueError(
        'The given keras model has not been compiled yet. '
        'Please compile the model with `model.compile()` '
        'before calling `model_to_estimator()`.')
  if isinstance(config, dict):
    config = run_config_lib.RunConfig(**config)
  keras_model_fn = _create_keras_model_fn(keras_model, custom_objects)
  estimator = estimator_lib.Estimator(
      keras_model_fn, model_dir=model_dir, config=config)
  # Check if we need to call get_weights:
  if _any_variable_initialized():
    keras_weights = keras_model.get_weights()
    # Warn if config passed to estimator tries to update GPUOptions. If a
    # session has already been created, the GPUOptions passed to the first
    # session sticks.
    if estimator._session_config.HasField('gpu_options'):
      logging.warning(
          'The Keras backend session has already been set. '
          'The _session_config passed to model_to_estimator will not be used.')
  else:
    # Pass the config into keras backend's default session.
    sess = session.Session(config=estimator._session_config)
    K.set_session(sess)
    keras_weights = None
  if keras_model._is_graph_network:
    # TODO(yifeif): move checkpoint initialization to scaffold.init_fn
    _save_first_checkpoint(keras_model,
                           estimator,
                           custom_objects,
                           keras_weights)
  elif keras_model.built:
    logging.warning('You are creating an Estimator from a Keras model '
                    'manually subclassed from `Model`, that was '
                    'already called on some inputs (and thus already had '
                    'weights). We are currently unable to preserve '
                    'the model\'s state (its weights) '
                    'as part of the estimator '
                    'in this case. Be warned that the estimator '
                    'has been created using '
                    'a freshly initialized version of your model.\n'
                    'Note that this doesn\'t affect the state of the '
                    'model instance you passed as `keras_model` argument.')
  return estimator
|
dcroc16/skunk_works | refs/heads/master | google_appengine/google/appengine/api/search/__init__.py | 8 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Search API module."""
from search import AtomField
from search import Cursor
from search import DateField
from search import DeleteError
from search import DeleteResult
from search import Document
from search import DOCUMENT_ID_FIELD_NAME
from search import Error
from search import ExpressionError
from search import Field
from search import FieldExpression
from search import GeoField
from search import GeoPoint
from search import get_indexes
from search import GetResponse
from search import HtmlField
from search import Index
from search import InternalError
from search import InvalidRequest
from search import LANGUAGE_FIELD_NAME
from search import MatchScorer
from search import MAXIMUM_DOCUMENT_ID_LENGTH
from search import MAXIMUM_DOCUMENTS_PER_PUT_REQUEST
from search import MAXIMUM_DOCUMENTS_RETURNED_PER_SEARCH
from search import MAXIMUM_EXPRESSION_LENGTH
from search import MAXIMUM_FIELD_ATOM_LENGTH
from search import MAXIMUM_FIELD_NAME_LENGTH
from search import MAXIMUM_FIELD_VALUE_LENGTH
from search import MAXIMUM_FIELDS_RETURNED_PER_SEARCH
from search import MAXIMUM_GET_INDEXES_OFFSET
from search import MAXIMUM_INDEX_NAME_LENGTH
from search import MAXIMUM_INDEXES_RETURNED_PER_GET_REQUEST
from search import MAXIMUM_NUMBER_FOUND_ACCURACY
from search import MAXIMUM_QUERY_LENGTH
from search import MAXIMUM_SEARCH_OFFSET
from search import MAXIMUM_SORTED_DOCUMENTS
from search import NumberField
from search import OperationResult
from search import PutError
from search import PutResult
from search import Query
from search import QueryError
from search import QueryOptions
from search import RANK_FIELD_NAME
from search import RescoringMatchScorer
from search import SCORE_FIELD_NAME
from search import ScoredDocument
from search import SearchResults
from search import SortExpression
from search import SortOptions
from search import TextField
from search import TIMESTAMP_FIELD_NAME
from search import TransientError
|
smallyear/linuxLearn | refs/heads/master | salt/salt/states/modjk_worker.py | 3 | # -*- coding: utf-8 -*-
'''
Manage modjk workers
====================
Send commands to a :strong:`modjk` load balancer via the peer system.
This module can be used with the :doc:`prereq </ref/states/requisites>`
requisite to remove/add the worker from the load balancer before
deploying/restarting a service.
Mandatory Settings:
- The minion needs to have permission to publish the :strong:`modjk.*`
functions (see :doc:`here </ref/peer>` for information on configuring
peer publishing permissions)
- The modjk load balancer must be configured as stated in the :strong:`modjk`
execution module :mod:`documentation <salt.modules.modjk>`
'''
def __virtual__():
    '''
    Determine whether this state module should be loaded.

    Always returns True: peer-publish permissions cannot be checked from
    here, so availability is assumed and any misconfiguration surfaces
    when the publish calls run.
    '''
    return True
def _send_command(cmd,
                  worker,
                  lbn,
                  target,
                  profile='default',
                  expr_form='glob'):
    '''
    Send a command to the modjk loadbalancer via peer publishing.

    The minion needs to be able to publish the commands to the load balancer.

    cmd:
        worker_stop - won't get any traffic from the lbn
        worker_activate - activate the worker
        worker_disable - will get traffic only for current sessions

    Returns a dict with:
        code    - True only when every responding minion succeeded
        msg     - human-readable status message
        minions - responding minions (or, on failure, the failing ones)
    '''
    ret = {
        'code': False,
        'msg': 'OK',
        'minions': [],
    }

    # Send the command to target
    func = 'modjk.{0}'.format(cmd)
    args = [worker, lbn, profile]
    response = __salt__['publish.publish'](target, func, args, expr_form)

    # Get errors and list of affected minions
    errors = []
    minions = []
    for minion in response:
        minions.append(minion)
        if not response[minion]:
            errors.append(minion)

    # parse response
    if not response:
        # Nobody matched the target (or nobody answered at all)
        ret['msg'] = 'no servers answered the published command {0}'.format(
            cmd
        )
        return ret
    elif len(errors) > 0:
        ret['msg'] = 'the following minions return False'
        ret['minions'] = errors
        return ret
    else:
        ret['code'] = True
        # Fixed typo in the user-facing message ('commad' -> 'command')
        ret['msg'] = 'the command was published successfully'
        ret['minions'] = minions
        return ret
def _worker_status(target,
                   worker,
                   activation,
                   profile='default',
                   expr_form='glob'):
    '''
    Check whether the worker is in the ``activation`` state on the
    targeted load balancers.

    Returns a dictionary with:
        result - False when no server answered the published command
        errors - balancers that could not find the worker
        wrong_state - balancers where the worker is not in ``activation``
    '''
    ret = {
        'result': True,
        'errors': [],
        'wrong_state': [],
    }

    status = __salt__['publish.publish'](
        target, 'modjk.worker_status', [worker, profile], expr_form
    )

    # No balancer responded to the publish call at all.
    if not status:
        ret['result'] = False
        return ret

    # Sort every responding balancer into the error / wrong-state buckets.
    for balancer, state in status.items():
        if not state:
            ret['errors'].append(balancer)
        elif state['activation'] != activation:
            ret['wrong_state'].append(balancer)

    return ret
def _talk2modjk(name, lbn, target, action, profile='default', expr_form='glob'):
    '''
    Wrapper function for the stop/disable/activate functions

    Publishes ``modjk.worker_status`` first to decide whether ``action``
    is needed at all, honours test-mode runs via ``__opts__['test']``,
    and returns a standard state return dictionary
    (name/result/changes/comment).
    '''
    ret = {'name': name,
           'result': True,
           'changes': {},
           'comment': ''}
    # Map the modjk execution-module action to its status code as
    # reported by modjk.worker_status.
    action_map = {
        'worker_stop': 'STP',
        'worker_disable': 'DIS',
        'worker_activate': 'ACT',
    }
    # Check what needs to be done
    status = _worker_status(
        target, name, action_map[action], profile, expr_form
    )
    if not status['result']:
        ret['result'] = False
        ret['comment'] = ('no servers answered the published command '
                          'modjk.worker_status')
        return ret
    if status['errors']:
        ret['result'] = False
        ret['comment'] = ('the following balancers could not find the '
                          'worker {0}: {1}'.format(name, status['errors']))
        return ret
    if not status['wrong_state']:
        # Nothing to do: every balancer already has the worker in the
        # desired state.
        ret['comment'] = ('the worker is in the desired activation state on '
                          'all the balancers')
        return ret
    else:
        ret['comment'] = ('the action {0} will be sent to the balancers '
                          '{1}'.format(action, status['wrong_state']))
        ret['changes'] = {action: status['wrong_state']}
    # In test mode report what would change without publishing anything.
    if __opts__['test']:
        ret['result'] = None
        return ret
    # Send the action command to target
    response = _send_command(action, name, lbn, target, profile, expr_form)
    ret['comment'] = response['msg']
    ret['result'] = response['code']
    return ret
def stop(name, lbn, target, profile='default', expr_form='glob'):
    '''
    Stop the named worker from the lbn load balancers at the targeted minions
    The worker won't get any traffic from the lbn

    name
        the worker name (usually the minion id)
    lbn
        the load balancer name the worker belongs to
    target
        the minions hosting the load balancer, matched using ``expr_form``
    profile
        the modjk profile to use (default: ``default``)

    Example:

    .. code-block:: yaml

        disable-before-deploy:
          modjk_worker.stop:
            - name: {{ grains['id'] }}
            - lbn: application
            - target: 'roles:balancer'
            - expr_form: grain
    '''
    return _talk2modjk(name, lbn, target, 'worker_stop', profile, expr_form)
def activate(name, lbn, target, profile='default', expr_form='glob'):
    '''
    Activate the named worker from the lbn load balancers at the targeted
    minions

    name
        the worker name (usually the minion id)
    lbn
        the load balancer name the worker belongs to
    target
        the minions hosting the load balancer, matched using ``expr_form``
    profile
        the modjk profile to use (default: ``default``)

    Example:

    .. code-block:: yaml

        disable-before-deploy:
          modjk_worker.activate:
            - name: {{ grains['id'] }}
            - lbn: application
            - target: 'roles:balancer'
            - expr_form: grain
    '''
    return _talk2modjk(name, lbn, target, 'worker_activate', profile, expr_form)
def disable(name, lbn, target, profile='default', expr_form='glob'):
    '''
    Disable the named worker from the lbn load balancers at the targeted
    minions.
    The worker will get traffic only for current sessions and won't get new
    ones.

    name
        the worker name (usually the minion id)
    lbn
        the load balancer name the worker belongs to
    target
        the minions hosting the load balancer, matched using ``expr_form``
    profile
        the modjk profile to use (default: ``default``)

    Example:

    .. code-block:: yaml

        disable-before-deploy:
          modjk_worker.disable:
            - name: {{ grains['id'] }}
            - lbn: application
            - target: 'roles:balancer'
            - expr_form: grain
    '''
    return _talk2modjk(name, lbn, target, 'worker_disable', profile, expr_form)
|
bucknerns/concordance | refs/heads/master | concordance/__init__.py | 1 | """
Created by Nathan Buckner
"""
from concordance.simple_concordance_module import (
ConcordanceGenerator as Generator)
|
TheWrenchh/lpthw | refs/heads/master | exp13.py | 1 | from sys import argv
# Unpack exactly three command-line arguments after the script name
# (Python 2 script; raises ValueError if the count differs).
script, first, second, third = argv
print "The script is called:", script
print "Your first variable is:", first
print "Your second variable is :", second
print "Your third variable is:", third
|
mariianna/kodi | refs/heads/master | lib/gdata/tlslite/Checker.py | 359 | """Class for post-handshake certificate checking."""
from utils.cryptomath import hashAndBase64
from X509 import X509
from X509CertChain import X509CertChain
from errors import *
class Checker:
    """This class is passed to a handshake function to check the other
    party's certificate chain.

    If a handshake function completes successfully, but the Checker
    judges the other party's certificate chain to be missing or
    inadequate, a subclass of
    L{tlslite.errors.TLSAuthenticationError} will be raised.

    Currently, the Checker can check either an X.509 or a cryptoID
    chain (for the latter, cryptoIDlib must be installed).
    """

    def __init__(self, cryptoID=None, protocol=None,
                 x509Fingerprint=None,
                 x509TrustList=None, x509CommonName=None,
                 checkResumedSession=False):
        """Create a new Checker instance.

        You must pass in one of these argument combinations:
         - cryptoID[, protocol] (requires cryptoIDlib)
         - x509Fingerprint
         - x509TrustList[, x509CommonName] (requires cryptlib_py)

        @type cryptoID: str
        @param cryptoID: A cryptoID which the other party's certificate
        chain must match.  The cryptoIDlib module must be installed.
        Mutually exclusive with all of the 'x509...' arguments.

        @type protocol: str
        @param protocol: A cryptoID protocol URI which the other
        party's certificate chain must match.  Requires the 'cryptoID'
        argument.

        @type x509Fingerprint: str
        @param x509Fingerprint: A hex-encoded X.509 end-entity
        fingerprint which the other party's end-entity certificate must
        match.  Mutually exclusive with the 'cryptoID' and
        'x509TrustList' arguments.

        @type x509TrustList: list of L{tlslite.X509.X509}
        @param x509TrustList: A list of trusted root certificates.  The
        other party must present a certificate chain which extends to
        one of these root certificates.  The cryptlib_py module must be
        installed.  Mutually exclusive with the 'cryptoID' and
        'x509Fingerprint' arguments.

        @type x509CommonName: str
        @param x509CommonName: The end-entity certificate's 'CN' field
        must match this value.  For a web server, this is typically a
        server name such as 'www.amazon.com'.  Mutually exclusive with
        the 'cryptoID' and 'x509Fingerprint' arguments.  Requires the
        'x509TrustList' argument.

        @type checkResumedSession: bool
        @param checkResumedSession: If resumed sessions should be
        checked.  This defaults to False, on the theory that if the
        session was checked once, we don't need to bother
        re-checking it.

        @raise ValueError: If an invalid combination of arguments is
        passed (previously these were raised without any message).
        """
        # Validate the argument combinations up front so that a
        # misconfigured Checker fails at construction time, not during
        # a handshake.  Each ValueError now carries an explanation.
        if cryptoID and (x509Fingerprint or x509TrustList):
            raise ValueError("cryptoID is mutually exclusive with the "
                             "x509Fingerprint/x509TrustList arguments")
        if x509Fingerprint and x509TrustList:
            raise ValueError("x509Fingerprint is mutually exclusive with "
                             "x509TrustList")
        if x509CommonName and not x509TrustList:
            raise ValueError("x509CommonName requires x509TrustList")
        if protocol and not cryptoID:
            raise ValueError("protocol requires cryptoID")
        if cryptoID:
            import cryptoIDlib #So we raise an error here
        if x509TrustList:
            import cryptlib_py #So we raise an error here
        self.cryptoID = cryptoID
        self.protocol = protocol
        self.x509Fingerprint = x509Fingerprint
        self.x509TrustList = x509TrustList
        self.x509CommonName = x509CommonName
        self.checkResumedSession = checkResumedSession

    def __call__(self, connection):
        """Check a TLSConnection.

        When a Checker is passed to a handshake function, this will
        be called at the end of the function.

        @type connection: L{tlslite.TLSConnection.TLSConnection}
        @param connection: The TLSConnection to examine.

        @raise tlslite.errors.TLSAuthenticationError: If the other
        party's certificate chain is missing or bad.
        """
        # Resumed sessions were vetted when first established; skip them
        # unless the caller explicitly asked otherwise.
        if not self.checkResumedSession and connection.resumed:
            return

        if self.cryptoID or self.x509Fingerprint or self.x509TrustList:
            # Pick the chain presented by the *other* side.
            if connection._client:
                chain = connection.session.serverCertChain
            else:
                chain = connection.session.clientCertChain
            if self.x509Fingerprint or self.x509TrustList:
                if isinstance(chain, X509CertChain):
                    if self.x509Fingerprint:
                        if chain.getFingerprint() != self.x509Fingerprint:
                            raise TLSFingerprintError(\
                                "X.509 fingerprint mismatch: %s, %s" % \
                                (chain.getFingerprint(), self.x509Fingerprint))
                    else: #self.x509TrustList
                        if not chain.validate(self.x509TrustList):
                            raise TLSValidationError("X.509 validation failure")
                        if self.x509CommonName and \
                               (chain.getCommonName() != self.x509CommonName):
                            raise TLSAuthorizationError(\
                                "X.509 Common Name mismatch: %s, %s" % \
                                (chain.getCommonName(), self.x509CommonName))
                elif chain:
                    # A chain was presented, but of the wrong flavor.
                    raise TLSAuthenticationTypeError()
                else:
                    raise TLSNoAuthenticationError()
            elif self.cryptoID:
                import cryptoIDlib.CertChain
                if isinstance(chain, cryptoIDlib.CertChain.CertChain):
                    if chain.cryptoID != self.cryptoID:
                        raise TLSFingerprintError(\
                            "cryptoID mismatch: %s, %s" % \
                            (chain.cryptoID, self.cryptoID))
                    if self.protocol:
                        if not chain.checkProtocol(self.protocol):
                            raise TLSAuthorizationError(\
                                "cryptoID protocol mismatch")
                    if not chain.validate():
                        raise TLSValidationError("cryptoID validation failure")
                elif chain:
                    raise TLSAuthenticationTypeError()
                else:
                    raise TLSNoAuthenticationError()
|
advatar/caffe | refs/heads/master | tools/extra/extract_seconds.py | 58 | #!/usr/bin/env python
import datetime
import os
import sys
def extract_datetime_from_line(line, year):
    """Parse the glog-style prefix of a Caffe log line into a datetime.

    Expected format:
    I0210 13:39:22.381027 25210 solver.cpp:204] Iteration 100, lr = 0.00992565

    The log line carries no year, so the caller must supply it.
    """
    fields = line.strip().split()
    # fields[0] is e.g. 'I0210': severity letter followed by MMDD.
    month = int(fields[0][1:3])
    day = int(fields[0][3:])
    # fields[1] is 'HH:MM:SS.microseconds'.
    clock, _, micros = fields[1].partition('.')
    hour, minute, second = (int(part) for part in clock.split(':'))
    return datetime.datetime(year, month, day, hour, minute, second,
                             int(micros))


def extract_seconds(input_file, output_file):
    """Write elapsed seconds for every 'Iteration' line of a Caffe log.

    The start time is taken from the first line containing 'Solving'.
    The log file's creation year is used because glog lines carry no
    year.  Fixes vs. the original: 'Iteration' lines that appear before
    the 'Solving' marker are now skipped (previously this raised
    NameError on start_datetime), and the output file is closed even if
    a malformed line raises mid-way.
    """
    with open(input_file, 'r') as f:
        lines = f.readlines()
    log_created_time = os.path.getctime(input_file)
    log_created_year = datetime.datetime.fromtimestamp(log_created_time).year
    start_time_found = False
    with open(output_file, 'w') as out:
        for line in lines:
            line = line.strip()
            if not start_time_found and line.find('Solving') != -1:
                start_time_found = True
                start_datetime = extract_datetime_from_line(line,
                                                            log_created_year)
            # Guard on start_time_found so pre-'Solving' iteration lines
            # no longer crash with an undefined start_datetime.
            if start_time_found and line.find('Iteration') != -1:
                dt = extract_datetime_from_line(line, log_created_year)
                elapsed_seconds = (dt - start_datetime).total_seconds()
                out.write('%f\n' % elapsed_seconds)
if __name__ == '__main__':
    # Command-line entry point: requires the input log path and the
    # output path.  Use sys.exit instead of the site-provided exit(),
    # which is not guaranteed to exist in non-interactive runs.
    if len(sys.argv) < 3:
        print('Usage: ./extract_seconds input_file output_file')
        sys.exit(1)
    extract_seconds(sys.argv[1], sys.argv[2])
|
Collisio-Adolebitque/pfne-2017 | refs/heads/master | py-junos-eznc/tests/functional/test_core.py | 4 | __author__ = "rsherman, vnitinv"
import unittest2 as unittest
from nose.plugins.attrib import attr
from jnpr.junos.exception import RpcTimeoutError
@attr('functional')
class TestCore(unittest.TestCase):
    """Functional tests for jnpr.junos Device.

    NOTE(review): these run against a live lab device
    (highlife.englab.juniper.net) and will fail without network access
    to it; they are selected via the nose 'functional' attribute.
    """

    @classmethod
    def setUpClass(self):
        from jnpr.junos import Device
        # One shared NETCONF session for the whole test class.
        self.dev = Device(host='highlife.englab.juniper.net',
                          user='jenkins', password='password123')
        self.dev.open()

    @classmethod
    def tearDownClass(self):
        self.dev.close()

    def test_device_open(self):
        self.assertEqual(self.dev.connected, True)

    def test_device_facts(self):
        assert self.dev.facts['hostname'] == 'highlife'

    def test_device_get_timeout(self):
        # 30 seconds is the library default RPC timeout.
        assert self.dev.timeout == 30

    def test_device_set_timeout(self):
        self.dev.timeout = 35
        assert self.dev.timeout == 35

    def test_device_cli(self):
        self.assertTrue('qfx5100' in self.dev.cli('show version'))

    def test_device_rpc(self):
        res = self.dev.rpc.get_route_information(destination='10.48.21.71')
        self.assertEqual(res.tag, 'route-information')

    def test_device_rpc_format_text(self):
        # Text-format RPC replies come back wrapped in an <output> element.
        res = self.dev.rpc.get_interface_information({'format': 'text'})
        self.assertEqual(res.tag, 'output')

    def test_device_rpc_timeout(self):
        # An absurdly small per-call timeout must raise RpcTimeoutError.
        with self.assertRaises(RpcTimeoutError):
            self.dev.rpc.get_route_information(dev_timeout=0.01)

    def test_device_rpc_normalize_true(self):
        rsp = self.dev.rpc.get_interface_information(
            interface_name='ge-0/0/1', normalize=True)
        self.assertEqual(rsp.xpath('physical-interface/name')[0].text,
                         'ge-0/0/1')

    def test_load_config(self):
        from jnpr.junos.utils.config import Config
        cu = Config(self.dev)
        data = """interfaces {
    ge-1/0/0 {
        description "MPLS interface";
        unit 0 {
            family mpls;
        }
    }
}
"""
        cu.load(data, format='text')
        self.assertTrue(cu.commit_check())
        # Roll back so the lab device is left unchanged.
        if cu.commit_check():
            cu.rollback()
|
jthatch12/STi | refs/heads/master | linaro-12-android-toolchain/share/gdb/python/gdb/command/pretty_printers.py | 106 | # Pretty-printer commands.
# Copyright (C) 2010-2012 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""GDB commands for working with pretty-printers."""
import copy
import gdb
import re
def parse_printer_regexps(arg):
    """Internal utility to parse a pretty-printer command argv.

    Arguments:
        arg: The arguments to the command.  The format is:
             [object-regexp [name-regexp]].
             Individual printers in a collection are named as
             printer-name;subprinter-name.

    Returns:
        A 3-tuple of compiled regular expressions; the subprinter slot
        is None when no subprinter pattern was supplied.

    Raises:
        SyntaxError: an error processing ARG
    """
    argv = gdb.string_to_argv(arg)
    if len(argv) > 3:
        raise SyntaxError("too many arguments")

    # Empty patterns match everything.
    object_regexp = argv[0] if len(argv) >= 1 else ""
    name_regexp = ""
    subname_regexp = None
    if len(argv) >= 2:
        # The second word may be "printer" or "printer;subprinter".
        pieces = argv[1].split(";", 1)
        name_regexp = pieces[0]
        if len(pieces) == 2:
            subname_regexp = pieces[1]

    # That re.compile raises SyntaxError was determined empirically.
    # We catch it and reraise it to provide a slightly more useful
    # error message for the user.
    try:
        object_re = re.compile(object_regexp)
    except SyntaxError:
        raise SyntaxError("invalid object regexp: %s" % object_regexp)
    try:
        name_re = re.compile(name_regexp)
    except SyntaxError:
        raise SyntaxError("invalid name regexp: %s" % name_regexp)
    subname_re = None
    if subname_regexp is not None:
        try:
            subname_re = re.compile(subname_regexp)
        except SyntaxError:
            raise SyntaxError("invalid subname regexp: %s" % subname_regexp)
    return (object_re, name_re, subname_re)
def printer_enabled_p(printer):
    """Internal utility to see if printer (or subprinter) is enabled."""
    # Printers that expose no 'enabled' attribute are always active.
    return getattr(printer, "enabled", True)
class InfoPrettyPrinter(gdb.Command):
    """GDB command to list all registered pretty-printers.

    Usage: info pretty-printer [object-regexp [name-regexp]]

    OBJECT-REGEXP is a regular expression matching the objects to list.
    Objects are "global", the program space's file, and the objfiles within
    that program space.

    NAME-REGEXP matches the name of the pretty-printer.
    Individual printers in a collection are named as
    printer-name;subprinter-name.
    """

    def __init__ (self):
        super(InfoPrettyPrinter, self).__init__("info pretty-printer",
                                                 gdb.COMMAND_DATA)

    @staticmethod
    def enabled_string(printer):
        """Return "" if PRINTER is enabled, otherwise " [disabled]"."""
        if printer_enabled_p(printer):
            return ""
        else:
            return " [disabled]"

    @staticmethod
    def printer_name(printer):
        """Return the printer's name."""
        # A printer may be named via a 'name' attribute (collections)
        # or __name__ (plain lookup functions).
        if hasattr(printer, "name"):
            return printer.name
        if hasattr(printer, "__name__"):
            return printer.__name__
        # This "shouldn't happen", but the public API allows for
        # direct additions to the pretty-printer list, and we shouldn't
        # crash because someone added a bogus printer.
        # Plus we want to give the user a way to list unknown printers.
        return "unknown"

    def list_pretty_printers(self, pretty_printers, name_re, subname_re):
        """Print a list of pretty-printers."""
        # A potential enhancement is to provide an option to list printers in
        # "lookup order" (i.e. unsorted).
        # Sort a copy so we never mutate the registered list itself.
        sorted_pretty_printers = copy.copy(pretty_printers)
        sorted_pretty_printers.sort(lambda x, y:
                                        cmp(self.printer_name(x),
                                            self.printer_name(y)))
        for printer in sorted_pretty_printers:
            name = self.printer_name(printer)
            enabled = self.enabled_string(printer)
            if name_re.match(name):
                print " %s%s" % (name, enabled)
                if (hasattr(printer, "subprinters") and
                    printer.subprinters is not None):
                    sorted_subprinters = copy.copy(printer.subprinters)
                    sorted_subprinters.sort(lambda x, y:
                                                cmp(self.printer_name(x),
                                                    self.printer_name(y)))
                    for subprinter in sorted_subprinters:
                        if (not subname_re or
                            subname_re.match(subprinter.name)):
                            print ("  %s%s" %
                                   (subprinter.name,
                                    self.enabled_string(subprinter)))

    def invoke1(self, title, printer_list,
                obj_name_to_match, object_re, name_re, subname_re):
        """Subroutine of invoke to simplify it."""
        # Only print the section header when its object name is selected.
        if printer_list and object_re.match(obj_name_to_match):
            print title
            self.list_pretty_printers(printer_list, name_re, subname_re)

    def invoke(self, arg, from_tty):
        """GDB calls this to perform the command."""
        (object_re, name_re, subname_re) = parse_printer_regexps(arg)
        # Walk the three printer scopes: global, progspace, per-objfile.
        self.invoke1("global pretty-printers:", gdb.pretty_printers,
                     "global", object_re, name_re, subname_re)
        cp = gdb.current_progspace()
        self.invoke1("progspace %s pretty-printers:" % cp.filename,
                     cp.pretty_printers, "progspace",
                     object_re, name_re, subname_re)
        for objfile in gdb.objfiles():
            self.invoke1("  objfile %s pretty-printers:" % objfile.filename,
                         objfile.pretty_printers, objfile.filename,
                         object_re, name_re, subname_re)
def count_enabled_printers(pretty_printers):
    """Return a 2-tuple of number of enabled and total printers."""
    enabled = 0
    total = 0
    for printer in pretty_printers:
        subprinters = getattr(printer, "subprinters", None)
        if subprinters is not None:
            # A collection: count subprinters individually; a subprinter
            # only counts as enabled when its parent is enabled too.
            if printer_enabled_p(printer):
                enabled += sum(1 for sub in subprinters
                               if printer_enabled_p(sub))
            total += len(subprinters)
        else:
            # A standalone printer counts as one.
            if printer_enabled_p(printer):
                enabled += 1
            total += 1
    return (enabled, total)
def count_all_enabled_printers():
    """Return a 2-tuple of the enabled state and total number of all printers.

    This includes subprinters.
    """
    # Tally the global list, the current progspace's list, and every
    # objfile's list, in that order.
    printer_lists = [gdb.pretty_printers,
                     gdb.current_progspace().pretty_printers]
    printer_lists.extend(objfile.pretty_printers
                         for objfile in gdb.objfiles())
    enabled_count = 0
    total_count = 0
    for printer_list in printer_lists:
        (t_enabled, t_total) = count_enabled_printers(printer_list)
        enabled_count += t_enabled
        total_count += t_total
    return (enabled_count, total_count)
def pluralize(text, n, suffix="s"):
    """Return TEXT pluralized if N != 1."""
    # Exactly one keeps the singular form; everything else gets SUFFIX.
    return text if n == 1 else "%s%s" % (text, suffix)
def show_pretty_printer_enabled_summary():
    """Print the number of printers enabled/disabled.

    We count subprinters individually.
    """
    # Totals span all scopes: global, progspace and per-objfile lists.
    (enabled_count, total_count) = count_all_enabled_printers()
    print "%d of %d printers enabled" % (enabled_count, total_count)
def do_enable_pretty_printer_1 (pretty_printers, name_re, subname_re, flag):
    """Worker for enabling/disabling pretty-printers.

    Arguments:
        pretty_printers: list of pretty-printers
        name_re: regular-expression object to select printers
        subname_re: regular expression object to select subprinters or None
                    if all are affected
        flag: True for Enable, False for Disable

    Returns:
        The number of printers affected.
        This is just for informational purposes for the user.
    """
    total = 0
    for printer in pretty_printers:
        # A printer is named via 'name' (collections) or __name__
        # (plain lookup functions).
        if (hasattr(printer, "name") and name_re.match(printer.name) or
            hasattr(printer, "__name__") and name_re.match(printer.__name__)):
            if (hasattr(printer, "subprinters") and
                printer.subprinters is not None):
                if not subname_re:
                    # Only record printers that change state.
                    if printer_enabled_p(printer) != flag:
                        for subprinter in printer.subprinters:
                            if printer_enabled_p(subprinter):
                                total += 1
                    # NOTE: We preserve individual subprinter settings.
                    printer.enabled = flag
                else:
                    # NOTE: Whether this actually disables the subprinter
                    # depends on whether the printer's lookup function supports
                    # the "enable" API. We can only assume it does.
                    for subprinter in printer.subprinters:
                        if subname_re.match(subprinter.name):
                            # Only record printers that change state.
                            if (printer_enabled_p(printer) and
                                printer_enabled_p(subprinter) != flag):
                                total += 1
                            subprinter.enabled = flag
            else:
                # This printer has no subprinters.
                # If the user does "disable pretty-printer .* .* foo"
                # should we disable printers that don't have subprinters?
                # How do we apply "foo" in this context? Since there is no
                # "foo" subprinter it feels like we should skip this printer.
                # There's still the issue of how to handle
                # "disable pretty-printer .* .* .*", and every other variation
                # that can match everything. For now punt and only support
                # "disable pretty-printer .* .*" (i.e. subname is elided)
                # to disable everything.
                if not subname_re:
                    # Only record printers that change state.
                    if printer_enabled_p(printer) != flag:
                        total += 1
                    printer.enabled = flag
    return total
def do_enable_pretty_printer (arg, flag):
    """Internal worker for enabling/disabling pretty-printers."""
    (object_re, name_re, subname_re) = parse_printer_regexps(arg)
    total = 0
    # Apply the change to every printer scope whose object name matches:
    # the global list, the current progspace, then each objfile.
    if object_re.match("global"):
        total += do_enable_pretty_printer_1(gdb.pretty_printers,
                                            name_re, subname_re, flag)
    cp = gdb.current_progspace()
    if object_re.match("progspace"):
        total += do_enable_pretty_printer_1(cp.pretty_printers,
                                            name_re, subname_re, flag)
    for objfile in gdb.objfiles():
        if object_re.match(objfile.filename):
            total += do_enable_pretty_printer_1(objfile.pretty_printers,
                                                name_re, subname_re, flag)
    if flag:
        state = "enabled"
    else:
        state = "disabled"
    print "%d %s %s" % (total, pluralize("printer", total), state)
    # Print the total list of printers currently enabled/disabled.
    # This is to further assist the user in determining whether the result
    # is expected. Since we use regexps to select it's useful.
    show_pretty_printer_enabled_summary()
# Enable/Disable one or more pretty-printers.
#
# This is intended for use when a broken pretty-printer is shipped/installed
# and the user wants to disable that printer without disabling all the other
# printers.
#
# A useful addition would be -v (verbose) to show each printer affected.
class EnablePrettyPrinter (gdb.Command):
    """GDB command to enable the specified pretty-printer.

    Usage: enable pretty-printer [object-regexp [name-regexp]]

    OBJECT-REGEXP is a regular expression matching the objects to examine.
    Objects are "global", the program space's file, and the objfiles within
    that program space.

    NAME-REGEXP matches the name of the pretty-printer.
    Individual printers in a collection are named as
    printer-name;subprinter-name.
    """
    # NOTE: the class docstring above doubles as gdb's runtime help text
    # for this command; keep it user-facing.

    def __init__(self):
        # Instantiating a gdb.Command subclass registers it with gdb.
        super(EnablePrettyPrinter, self).__init__("enable pretty-printer",
                                                  gdb.COMMAND_DATA)

    def invoke(self, arg, from_tty):
        """GDB calls this to perform the command."""
        do_enable_pretty_printer(arg, True)
class DisablePrettyPrinter (gdb.Command):
    """GDB command to disable the specified pretty-printer.

    Usage: disable pretty-printer [object-regexp [name-regexp]]

    OBJECT-REGEXP is a regular expression matching the objects to examine.
    Objects are "global", the program space's file, and the objfiles within
    that program space.

    NAME-REGEXP matches the name of the pretty-printer.
    Individual printers in a collection are named as
    printer-name;subprinter-name.
    """
    # NOTE: the class docstring above doubles as gdb's runtime help text
    # for this command; keep it user-facing.

    def __init__(self):
        # Instantiating a gdb.Command subclass registers it with gdb.
        super(DisablePrettyPrinter, self).__init__("disable pretty-printer",
                                                   gdb.COMMAND_DATA)

    def invoke(self, arg, from_tty):
        """GDB calls this to perform the command."""
        do_enable_pretty_printer(arg, False)
def register_pretty_printer_commands():
    """Call from a top level script to install the pretty-printer commands."""
    # Instantiating each gdb.Command subclass registers it with gdb.
    for command_class in (InfoPrettyPrinter,
                          EnablePrettyPrinter,
                          DisablePrettyPrinter):
        command_class()
# Register the commands as a side effect of sourcing this module.
register_pretty_printer_commands()
|
deepinsight/Deformable-ConvNets | refs/heads/master | Cdiscount/common/data.py | 1 | import mxnet as mx
import random
from mxnet.io import DataBatch, DataIter
import numpy as np
def add_data_args(parser):
    """Register the input-data command line options on *parser*.

    Returns the argparse argument group so callers can extend it.
    """
    data = parser.add_argument_group('Data', 'the input images')
    # (flag, add_argument keyword arguments) in declaration order.
    option_specs = [
        ('--data-train', dict(type=str, help='the training data')),
        ('--data-val', dict(type=str, help='the validation data')),
        ('--rgb-mean', dict(type=str, default='123.68,116.779,103.939',
                            help='a tuple of size 3 for the mean rgb')),
        ('--pad-size', dict(type=int, default=0,
                            help='padding the input image')),
        ('--image-shape', dict(type=str,
                               help='the image shape feed into the network, e.g. (3,224,224)')),
        ('--num-classes', dict(type=int, help='the number of classes')),
        ('--num-examples', dict(type=int,
                                help='the number of training examples')),
        ('--data-nthreads', dict(type=int, default=4,
                                 help='number of threads for data decoding')),
        ('--benchmark', dict(type=int, default=0,
                             help='if 1, then feed the network with synthetic data')),
        ('--dtype', dict(type=str, default='float32',
                         help='data type: float32 or float16')),
    ]
    for flag, kwargs in option_specs:
        data.add_argument(flag, **kwargs)
    return data
def add_data_aug_args(parser):
    """Register the image-augmentation command line options on *parser*.

    Returns the argparse argument group so callers can tune defaults.
    """
    aug = parser.add_argument_group(
        'Image augmentations', 'implemented in src/io/image_aug_default.cc')
    # (flag, add_argument keyword arguments) in declaration order.
    option_specs = [
        ('--random-crop', dict(type=int, default=1,
                               help='if or not randomly crop the image')),
        ('--random-mirror', dict(type=int, default=1,
                                 help='if or not randomly flip horizontally')),
        ('--max-random-h', dict(type=int, default=0,
                                help='max change of hue, whose range is [0, 180]')),
        ('--max-random-s', dict(type=int, default=0,
                                help='max change of saturation, whose range is [0, 255]')),
        ('--max-random-l', dict(type=int, default=0,
                                help='max change of intensity, whose range is [0, 255]')),
        ('--max-random-aspect-ratio', dict(type=float, default=0,
                                           help='max change of aspect ratio, whose range is [0, 1]')),
        ('--max-random-rotate-angle', dict(type=int, default=0,
                                           help='max angle to rotate, whose range is [0, 360]')),
        ('--max-random-shear-ratio', dict(type=float, default=0,
                                          help='max ratio to shear, whose range is [0, 1]')),
        ('--max-random-scale', dict(type=float, default=1,
                                    help='max ratio to scale')),
        ('--min-random-scale', dict(type=float, default=1,
                                    help='min ratio to scale, should >= img_size/input_shape. otherwise use --pad-size')),
    ]
    for flag, kwargs in option_specs:
        aug.add_argument(flag, **kwargs)
    return aug
def set_data_aug_level(aug, level):
    """Apply preset augmentation defaults for increasing levels.

    Levels are cumulative: level 3 also applies the defaults of
    levels 1 and 2; level 0 applies nothing.
    """
    presets = (
        (1, dict(random_crop=1, random_mirror=1)),
        (2, dict(max_random_h=36, max_random_s=50, max_random_l=50)),
        (3, dict(max_random_rotate_angle=10, max_random_shear_ratio=0.1,
                 max_random_aspect_ratio=0.25)),
    )
    for threshold, defaults in presets:
        if level >= threshold:
            aug.set_defaults(**defaults)
class SyntheticDataIter(DataIter):
    """Data iterator that serves one fixed random batch repeatedly.

    Used for benchmarking: it removes real I/O and decoding from the
    pipeline while keeping the DataIter interface intact.
    """

    def __init__(self, num_classes, data_shape, max_iter, dtype):
        self.batch_size = data_shape[0]
        self.cur_iter = 0
        self.max_iter = max_iter
        self.dtype = dtype
        # Generate one random batch up front and reuse it every step.
        labels = np.random.randint(0, num_classes, [self.batch_size,])
        samples = np.random.uniform(-1, 1, data_shape)
        self.data = mx.nd.array(samples, dtype=self.dtype)
        self.label = mx.nd.array(labels, dtype=self.dtype)

    def __iter__(self):
        return self

    @property
    def provide_data(self):
        return [mx.io.DataDesc('data', self.data.shape, self.dtype)]

    @property
    def provide_label(self):
        return [mx.io.DataDesc('softmax_label', (self.batch_size,), self.dtype)]

    def next(self):
        self.cur_iter += 1
        if self.cur_iter > self.max_iter:
            raise StopIteration
        # Same pre-built batch every iteration.
        return DataBatch(data=(self.data,),
                         label=(self.label,),
                         pad=0,
                         index=None,
                         provide_data=self.provide_data,
                         provide_label=self.provide_label)

    def __next__(self):
        # Python 3 iterator protocol delegates to the Python 2 style next().
        return self.next()

    def reset(self):
        self.cur_iter = 0
def get_rec_iter(args, kv=None):
    # Build the (train, val) ImageIter pair from parsed CLI arguments;
    # val is None when --data-val was not supplied.
    image_shape = tuple([int(l) for l in args.image_shape.split(',')])
    dtype = np.float32;
    if 'dtype' in args:
        if args.dtype == 'float16':
            dtype = np.float16
    if 'benchmark' in args and args.benchmark:
        # Benchmark mode: replace real I/O with a fixed synthetic batch.
        data_shape = (args.batch_size,) + image_shape
        train = SyntheticDataIter(args.num_classes, data_shape, 50, dtype)
        return (train, None)
    if kv:
        # Shard the dataset across workers when running under a kvstore.
        (rank, nworker) = (kv.rank, kv.num_workers)
    else:
        (rank, nworker) = (0, 1)
    # NOTE(review): rgb_mean is computed but never passed to the
    # iterators below — looks like mean subtraction is dropped; confirm.
    rgb_mean = [float(i) for i in args.rgb_mean.split(',')]
    train = mx.img.ImageIter(
        label_width         = 1,
        path_root	        = './',
        path_imglist        = args.data_train,
        data_shape          = image_shape,
        batch_size          = args.batch_size,
        rand_crop           = True,
        rand_resize         = True,
        rand_mirror         = True,
        shuffle             = True,
        brightness          = 0.4,
        contrast            = 0.4,
        saturation          = 0.4,
        pca_noise           = 0.1,
        num_parts           = nworker,
        part_index          = rank)
    if args.data_val is None:
        return (train, None)
    # NOTE(review): args.resize_size is not declared by add_data_args —
    # presumably added by another argument group; verify before use.
    val = mx.img.ImageIter(
        label_width         = 1,
        path_root	        = './',
        path_imglist        = args.data_val,
        batch_size          = args.batch_size,
        data_shape          = image_shape,
        resize		        = args.resize_size,
        rand_crop           = False,
        rand_resize         = False,
        rand_mirror         = False,
        num_parts           = nworker,
        part_index          = rank)
    return (train, val)
|
BlendOSVR/OSVR-Python | refs/heads/dev | examples/TrackerCallbackPy.py | 1 | import osvr.ClientKit
def myTrackerCallback(userdata, timestamp, report):
    """Print the full pose (position + orientation quaternion) of a report."""
    pose = report.contents.pose
    position = pose.translation.data
    orientation = pose.rotation.data
    print("Got POSE report: Position = (%f, %f, %f), orientation = (%f, %f, %f, %f)\n"
          % (position[0], position[1], position[2],
             orientation[0], orientation[1], orientation[2], orientation[3]))
def myOrientationCallback(userdata, timestamp, report):
    """Print just the orientation quaternion carried by a report."""
    quat = report.contents.pose.rotation.data
    print("Got ORIENTATION report: Orientation = (%f, %f, %f, %f)\n"
          % (quat[0], quat[1], quat[2], quat[3]))
def myPositionCallback(userdata, timestamp, report):
    """Print just the position vector carried by a report."""
    xyz = report.contents.xyz.data
    print("Got POSITION report: Position = (%f, %f, %f)\n" % (xyz[0], xyz[1], xyz[2]))
# Demo entry: connect, register all three callback flavors on one interface,
# then pump the message loop for a while and shut down.
ctx = osvr.ClientKit.ClientContext("com.osvr.exampleclients.TrackerCallback")
# NOTE(review): variable is named "lefthand" but the path is /me/head --
# confirm which resource this example is meant to track.
lefthand = ctx.getInterface("/me/head")
# Keep references to the wrapper objects alive while callbacks are registered.
poseCallback = osvr.ClientKit.PoseCallback(myTrackerCallback)
orientationCallback = osvr.ClientKit.OrientationCallback(myOrientationCallback)
positionCallback = osvr.ClientKit.PositionCallback(myPositionCallback)
lefthand.registerCallback(poseCallback, None)
lefthand.registerCallback(orientationCallback, None)
lefthand.registerCallback(positionCallback, None)
# Pump the OSVR message loop; the callbacks fire from within update().
for i in range(0, 1000):
    ctx.update()
ctx.shutdown()
print("Library shut down, exiting.\n") |
haad/ansible | refs/heads/devel | lib/ansible/modules/cloud/amazon/ec2_vpc_net.py | 9 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: ec2_vpc_net
short_description: Configure AWS virtual private clouds
description:
- Create, modify, and terminate AWS virtual private clouds.
version_added: "2.0"
author:
- Jonathan Davila (@defionscode)
- Sloane Hertel (@s-hertel)
options:
name:
description:
- The name to give your VPC. This is used in combination with C(cidr_block) to determine if a VPC already exists.
required: yes
cidr_block:
description:
- The primary CIDR of the VPC. After 2.5 a list of CIDRs can be provided. The first in the list will be used as the primary CIDR
and is used in conjunction with the C(name) to ensure idempotence.
required: yes
purge_cidrs:
description:
- Remove CIDRs that are associated with the VPC and are not specified in C(cidr_block).
default: no
choices: [ 'yes', 'no' ]
version_added: '2.5'
tenancy:
description:
- Whether to be default or dedicated tenancy. This cannot be changed after the VPC has been created.
default: default
choices: [ 'default', 'dedicated' ]
dns_support:
description:
- Whether to enable AWS DNS support.
default: yes
choices: [ 'yes', 'no' ]
dns_hostnames:
description:
- Whether to enable AWS hostname support.
default: yes
choices: [ 'yes', 'no' ]
dhcp_opts_id:
description:
- the id of the DHCP options to use for this vpc
tags:
description:
- The tags you want attached to the VPC. This is independent of the name value, note if you pass a 'Name' key it would override the Name of
the VPC if it's different.
aliases: [ 'resource_tags' ]
state:
description:
- The state of the VPC. Either absent or present.
default: present
choices: [ 'present', 'absent' ]
multi_ok:
description:
- By default the module will not create another VPC if there is another VPC with the same name and CIDR block. Specify this as true if you want
duplicate VPCs created.
default: false
requirements:
- boto3
- botocore
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
- name: create a VPC with dedicated tenancy and a couple of tags
ec2_vpc_net:
name: Module_dev2
cidr_block: 10.10.0.0/16
region: us-east-1
tags:
module: ec2_vpc_net
this: works
tenancy: dedicated
'''
RETURN = '''
vpc:
description: info about the VPC that was created or deleted
returned: always
type: complex
contains:
cidr_block:
description: The CIDR of the VPC
returned: always
type: string
sample: 10.0.0.0/16
cidr_block_association_set:
description: IPv4 CIDR blocks associated with the VPC
returned: success
type: list
sample:
"cidr_block_association_set": [
{
"association_id": "vpc-cidr-assoc-97aeeefd",
"cidr_block": "20.0.0.0/24",
"cidr_block_state": {
"state": "associated"
}
}
]
classic_link_enabled:
description: indicates whether ClassicLink is enabled
returned: always
type: NoneType
sample: null
dhcp_options_id:
      description: the id of the DHCP options associated with this VPC
returned: always
type: string
sample: dopt-0fb8bd6b
id:
description: VPC resource id
returned: always
type: string
sample: vpc-c2e00da5
instance_tenancy:
description: indicates whether VPC uses default or dedicated tenancy
returned: always
type: string
sample: default
is_default:
description: indicates whether this is the default VPC
returned: always
type: bool
sample: false
state:
description: state of the VPC
returned: always
type: string
sample: available
tags:
description: tags attached to the VPC, includes name
returned: always
type: complex
contains:
Name:
description: name tag for the VPC
returned: always
type: string
sample: pk_vpc4
'''
try:
import botocore
except ImportError:
pass # Handled by AnsibleAWSModule
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import (boto3_conn, get_aws_connection_info, ec2_argument_spec, camel_dict_to_snake_dict,
ansible_dict_to_boto3_tag_list, boto3_tag_list_to_ansible_dict)
from ansible.module_utils.six import string_types
def vpc_exists(module, vpc, name, cidr_block, multi):
    """Return the VpcId of the VPC matching *name* and *cidr_block*, or None.

    Matching is done on the Name tag plus the CIDR filter.  With multi=True
    the function always returns None so the caller creates a new VPC.
    Fails the module when more than one VPC matches and multi is False.
    """
    try:
        matching_vpcs = vpc.describe_vpcs(Filters=[{'Name': 'tag:Name', 'Values': [name]}, {'Name': 'cidr-block', 'Values': cidr_block}])['Vpcs']
        # If an exact matching using a list of CIDRs isn't found, check for a match with the first CIDR as is documented for C(cidr_block)
        if not matching_vpcs:
            matching_vpcs = vpc.describe_vpcs(Filters=[{'Name': 'tag:Name', 'Values': [name]}, {'Name': 'cidr-block', 'Values': [cidr_block[0]]}])['Vpcs']
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Failed to describe VPCs")

    # multi_ok means the caller wants duplicates allowed: report "not found".
    if multi:
        return None
    elif len(matching_vpcs) == 1:
        return matching_vpcs[0]['VpcId']
    elif len(matching_vpcs) > 1:
        module.fail_json(msg='Currently there are %d VPCs that have the same name and '
                             'CIDR block you specified. If you would like to create '
                             'the VPC anyway please pass True to the multi_ok param.' % len(matching_vpcs))
    return None
def get_vpc(module, connection, vpc_id):
    """Describe one VPC and annotate it with its ClassicLink status.

    Returns the boto3 VPC dict with an extra 'ClassicLinkEnabled' key.
    """
    try:
        vpc_obj = connection.describe_vpcs(VpcIds=[vpc_id])['Vpcs'][0]
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Failed to describe VPCs")
    try:
        classic_link = connection.describe_vpc_classic_link(VpcIds=[vpc_id])['Vpcs'][0].get('ClassicLinkEnabled')
        vpc_obj['ClassicLinkEnabled'] = classic_link
    except botocore.exceptions.ClientError as e:
        # ClassicLink is not offered in every region; treat that as disabled
        # rather than failing the whole module.
        if e.response["Error"]["Message"] == "The functionality you requested is not available in this region.":
            vpc_obj['ClassicLinkEnabled'] = False
        else:
            module.fail_json_aws(e, msg="Failed to describe VPCs")
    except botocore.exceptions.BotoCoreError as e:
        module.fail_json_aws(e, msg="Failed to describe VPCs")

    return vpc_obj
def update_vpc_tags(connection, module, vpc_id, tags, name):
    """Ensure the VPC carries *tags* plus a Name tag set to *name*.

    Returns True when the tags were (or, in check mode, would be) changed,
    False when they already match.  Fails the module on AWS errors.
    """
    # Work on a copy: the original code mutated the caller's dict (this is
    # module.params['tags']) by injecting the Name key into it.
    tags = dict(tags) if tags else {}
    tags['Name'] = name
    try:
        current_tags = dict((t['Key'], t['Value']) for t in connection.describe_tags(Filters=[{'Name': 'resource-id', 'Values': [vpc_id]}])['Tags'])
        if tags != current_tags:
            if not module.check_mode:
                connection.create_tags(Resources=[vpc_id], Tags=ansible_dict_to_boto3_tag_list(tags))
            return True
        return False
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Failed to update tags")
def update_dhcp_opts(connection, module, vpc_obj, dhcp_id):
    """Associate *dhcp_id* with the VPC when it differs; return True on change."""
    if vpc_obj['DhcpOptionsId'] == dhcp_id:
        # Already using the requested DHCP option set -- nothing to do.
        return False
    if not module.check_mode:
        try:
            connection.associate_dhcp_options(DhcpOptionsId=dhcp_id, VpcId=vpc_obj['VpcId'])
        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
            module.fail_json_aws(e, msg="Failed to associate DhcpOptionsId {0}".format(dhcp_id))
    return True
def create_vpc(connection, module, cidr_block, tenancy):
    """Create a VPC with the given primary CIDR and tenancy; return its id.

    In check mode exit_json() raises SystemExit, so the function never falls
    through to the final return with vpc_obj unbound.
    """
    try:
        if not module.check_mode:
            vpc_obj = connection.create_vpc(CidrBlock=cidr_block, InstanceTenancy=tenancy)
        else:
            module.exit_json(changed=True)
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, "Failed to create the VPC")

    return vpc_obj['Vpc']['VpcId']
def main():
    """Module entry point: create/update or delete a VPC per the parameters."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        name=dict(required=True),
        cidr_block=dict(type='list', required=True),
        tenancy=dict(choices=['default', 'dedicated'], default='default'),
        dns_support=dict(type='bool', default=True),
        dns_hostnames=dict(type='bool', default=True),
        dhcp_opts_id=dict(),
        tags=dict(type='dict', aliases=['resource_tags']),
        state=dict(choices=['present', 'absent'], default='present'),
        multi_ok=dict(type='bool', default=False),
        purge_cidrs=dict(type='bool', default=False),
    )
    )
    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        supports_check_mode=True
    )

    name = module.params.get('name')
    cidr_block = module.params.get('cidr_block')
    purge_cidrs = module.params.get('purge_cidrs')
    tenancy = module.params.get('tenancy')
    dns_support = module.params.get('dns_support')
    dns_hostnames = module.params.get('dns_hostnames')
    dhcp_id = module.params.get('dhcp_opts_id')
    tags = module.params.get('tags')
    state = module.params.get('state')
    multi = module.params.get('multi_ok')

    changed = False

    region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
    connection = boto3_conn(module, conn_type='client', resource='ec2', region=region, endpoint=ec2_url, **aws_connect_params)

    # DNS hostnames require DNS support, so reject the inconsistent combo early.
    if dns_hostnames and not dns_support:
        module.fail_json(msg='In order to enable DNS Hostnames you must also enable DNS support')

    if state == 'present':

        # Check if VPC exists
        vpc_id = vpc_exists(module, connection, name, cidr_block, multi)

        if vpc_id is None:
            # The first CIDR in the list is the primary CIDR of the new VPC.
            vpc_id = create_vpc(connection, module, cidr_block[0], tenancy)
            changed = True

        vpc_obj = get_vpc(module, connection, vpc_id)

        # Map each currently-associated (non-disassociated) CIDR to its
        # association id so we can diff against the requested list.
        associated_cidrs = dict((cidr['CidrBlock'], cidr['AssociationId']) for cidr in vpc_obj.get('CidrBlockAssociationSet', [])
                                if cidr['CidrBlockState']['State'] != 'disassociated')
        to_add = [cidr for cidr in cidr_block if cidr not in associated_cidrs]
        to_remove = [associated_cidrs[cidr] for cidr in associated_cidrs if cidr not in cidr_block]

        # Secondary CIDR associations only apply when several were requested.
        if len(cidr_block) > 1:
            for cidr in to_add:
                changed = True
                connection.associate_vpc_cidr_block(CidrBlock=cidr, VpcId=vpc_id)

        if purge_cidrs:
            for association_id in to_remove:
                changed = True
                try:
                    connection.disassociate_vpc_cidr_block(AssociationId=association_id)
                except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
                    module.fail_json_aws(e, "Unable to disassociate {0}. You must detach or delete all gateways and resources that "
                                         "are associated with the CIDR block before you can disassociate it.".format(association_id))

        if dhcp_id is not None:
            try:
                if update_dhcp_opts(connection, module, vpc_obj, dhcp_id):
                    changed = True
            except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
                module.fail_json_aws(e, "Failed to update DHCP options")

        if tags is not None or name is not None:
            try:
                if update_vpc_tags(connection, module, vpc_id, tags, name):
                    changed = True
            except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
                module.fail_json_aws(e, msg="Failed to update tags")

        # Reconcile the two DNS attributes with the requested values.
        current_dns_enabled = connection.describe_vpc_attribute(Attribute='enableDnsSupport', VpcId=vpc_id)['EnableDnsSupport']['Value']
        current_dns_hostnames = connection.describe_vpc_attribute(Attribute='enableDnsHostnames', VpcId=vpc_id)['EnableDnsHostnames']['Value']
        if current_dns_enabled != dns_support:
            changed = True
            if not module.check_mode:
                try:
                    connection.modify_vpc_attribute(VpcId=vpc_id, EnableDnsSupport={'Value': dns_support})
                except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
                    module.fail_json_aws(e, "Failed to update enabled dns support attribute")
        if current_dns_hostnames != dns_hostnames:
            changed = True
            if not module.check_mode:
                try:
                    connection.modify_vpc_attribute(VpcId=vpc_id, EnableDnsHostnames={'Value': dns_hostnames})
                except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
                    module.fail_json_aws(e, "Failed to update enabled dns hostnames attribute")

        # Re-describe so the returned facts reflect all of the changes above.
        final_state = camel_dict_to_snake_dict(get_vpc(module, connection, vpc_id))
        final_state['tags'] = boto3_tag_list_to_ansible_dict(final_state.get('tags', []))
        final_state['id'] = final_state.pop('vpc_id')

        module.exit_json(changed=changed, vpc=final_state)

    elif state == 'absent':

        # Check if VPC exists
        vpc_id = vpc_exists(module, connection, name, cidr_block, multi)

        if vpc_id is not None:
            try:
                if not module.check_mode:
                    connection.delete_vpc(VpcId=vpc_id)
                changed = True
            except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
                module.fail_json_aws(e, msg="Failed to delete VPC {0} You may want to use the ec2_vpc_subnet, ec2_vpc_igw, "
                                     "and/or ec2_vpc_route_table modules to ensure the other components are absent.".format(vpc_id))

        module.exit_json(changed=changed, vpc={})
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
|
Johnzero/erp | refs/heads/fga | openerp/report/render/makohtml2html/__init__.py | 76 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from makohtml2html import parseNode
#.apidoc title: MAKO to HTML engine
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ladylovelace/acaidera | refs/heads/master | blog/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
jessepeterson/commandment | refs/heads/master | commandment/alembic/versions/a3ddaad5c358_add_dep_device_columns.py | 1 | """Add DEP device columns
Revision ID: a3ddaad5c358
Revises: 2808deb9fc62
Create Date: 2018-07-04 21:44:41.549806
"""
# From: http://alembic.zzzcomputing.com/en/latest/cookbook.html#conditional-migration-elements
from alembic import op
import sqlalchemy as sa
import commandment.dbtypes
from alembic import context
# revision identifiers, used by Alembic.
revision = 'a3ddaad5c358'
down_revision = '2808deb9fc62'
branch_labels = None
depends_on = None
def upgrade():
    """Apply this revision: add the DEP-related columns to the devices table."""
    schema_upgrades()
    # if context.get_x_argument(as_dictionary=True).get('data', None):
    #     data_upgrades()
def downgrade():
    """Revert this revision: drop the DEP-related columns again."""
    # if context.get_x_argument(as_dictionary=True).get('data', None):
    #     data_downgrades()
    schema_downgrades()
def schema_upgrades():
    """Add the nullable DEP/device-info columns to the devices table."""
    new_columns = [
        ('description', sa.String()),
        ('asset_tag', sa.String()),
        ('color', sa.String()),
        ('device_assigned_by', sa.String()),
        ('device_assigned_date', sa.DateTime()),
        ('device_family', sa.String()),
        ('is_dep', sa.Boolean()),
        ('os', sa.String()),
        ('profile_assign_time', sa.DateTime()),
        ('profile_push_time', sa.DateTime()),
        ('profile_status', sa.String()),
        ('profile_uuid', sa.String()),
    ]
    # Same columns and order as the original one-call-per-column version.
    for column_name, column_type in new_columns:
        op.add_column('devices', sa.Column(column_name, column_type, nullable=True))
def schema_downgrades():
    """Drop the DEP columns in the reverse order schema_upgrades added them."""
    for column_name in ('profile_uuid', 'profile_status', 'profile_push_time',
                        'profile_assign_time', 'os', 'is_dep', 'device_family',
                        'device_assigned_date', 'device_assigned_by', 'color',
                        'asset_tag', 'description'):
        op.drop_column('devices', column_name)
# def data_upgrades():
# """Add any optional data upgrade migrations here!"""
# pass
#
#
# def data_downgrades():
# """Add any optional data downgrade migrations here!"""
# pass
|
akatrevorjay/slask | refs/heads/master | plugins/gif.py | 1 | """!gif <search term> return a random result from the google gif search result for <search term>"""
from urllib import quote
import re
import requests
from random import randint, choice, shuffle
def gif(searchterm_raw):
    """Return a reply line with a random animated-gif URL for the search term.

    Inline options may follow the term (e.g. "cats safe=no random=5"):
    random/rand/r limits how many results are shuffled, safe toggles
    Google SafeSearch.  The reply embeds the term, the effective options,
    and the chosen URL.
    """
    # There's a chance of pandas today
    eggs = ['panda', 'dickbutt', 'nickleback']
    if randint(0, 100) < 10:
        searchterm_raw = '{} {}'.format(choice(eggs), searchterm_raw)

    # defaults
    opts = dict(random=10, safe=True)

    # Search for opts in string
    terms = re.split(r'\b(\w+=\w+)\b', searchterm_raw)
    searchterm_raw = terms[0]

    yes_values = ['yes', 'y', 'true', '1']
    no_values = ['no', 'n', 'false', '0']
    for term in terms[1:]:
        if '=' not in term:
            continue
        opt, value = term.split('=', 1)
        if opt in ['random', 'rand', 'r']:
            if value.isdigit():
                opts['random'] = int(value)
        elif opt in ['safe']:
            if value in yes_values:
                opts['safe'] = True
            elif value in no_values:
                opts['safe'] = False

    searchterm = quote(searchterm_raw)
    safe = "&safe="
    if opts['safe']:
        safe += 'active'
    searchurl = "https://www.google.com/search?tbs=itp:animated&tbm=isch&q={0}{1}".format(searchterm, safe)

    # this is an old iphone user agent. Seems to make google return good results.
    useragent = "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_0 like Mac OS X; en-us) AppleWebKit/532.9 (KHTML, like Gecko) Version/4.0.5 Mobile/8A293 Safari/6531.22.7"
    headers = {'User-agent': useragent}
    gresult = requests.get(searchurl, headers=headers).text
    gifs = re.findall(r'imgurl.*?(http.*?)\\', gresult)
    if not gifs:
        gifs = ['No images found? Quit wasting my time.']
    if opts['random']:
        gifs = gifs[:opts['random']]
    shuffle(gifs)

    # Probe up to 10 candidates and return the first URL that responds OK.
    # The loop variable used to be named "gif", shadowing this function;
    # renamed to keep the function callable and the code readable.
    opts['index'] = 0
    for candidate in gifs[:10]:
        try:
            r = requests.get(candidate, headers=headers)
            if r.ok:
                break
        except Exception:
            # Best effort: an unreachable candidate just advances the index.
            pass
        opts['index'] += 1
    # If nothing responded OK, the last probed candidate is returned as-is.
    return "{} {}\n{}".format(searchterm_raw, opts, candidate)
def on_message(msg, server):
    """Handle a chat message; reply only when it starts a "!gif <term>" command."""
    match = re.search(r"!gif (.*)", msg.get("text", ""))
    if match is None:
        return
    return gif(match.group(1))
|
sarahgrogan/scikit-learn | refs/heads/master | sklearn/utils/tests/test_stats.py | 304 | import numpy as np
from numpy.testing import TestCase
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.stats import rankdata
_cases = (
# values, method, expected
([100], 'max', [1.0]),
([100, 100, 100], 'max', [3.0, 3.0, 3.0]),
([100, 300, 200], 'max', [1.0, 3.0, 2.0]),
([100, 200, 300, 200], 'max', [1.0, 3.0, 4.0, 3.0]),
([100, 200, 300, 200, 100], 'max', [2.0, 4.0, 5.0, 4.0, 2.0]),
)
def test_cases():
    # NOTE(review): nose-style yield tests; pytest >= 4 no longer collects
    # these, so consider pytest.mark.parametrize when migrating runners.
    def check_case(values, method, expected):
        # Each case verifies rankdata against a hand-computed expectation.
        r = rankdata(values, method=method)
        assert_array_equal(r, expected)
    for values, method, expected in _cases:
        yield check_case, values, method, expected
|
ReachingOut/unisubs | refs/heads/staging | apps/videos/migrations/0037_auto__add_unique_subtitle_version_subtitle_id.py | 5 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Deduplicate (version, subtitle_id) pairs, then add a unique constraint."""
        if not db.dry_run:
            # remove subtitles that don't have a unique (version, subtitle_id) pair
            # (somewhat arbitrarily) keeping the sub with highest id
            for version in orm.SubtitleVersion.objects.all():
                # Bucket this version's subtitles by their subtitle_id.
                subtitle_dict = {}
                for subtitle in version.subtitle_set.all():
                    subtitle_dict.setdefault(subtitle.subtitle_id, []) \
                        .append(subtitle)
                for k, v in subtitle_dict.items():
                    if len(v) > 1:
                        # Delete every duplicate except the highest-id one so
                        # the unique constraint below can be created.
                        subs_to_remove = self._subs_to_remove(v)
                        for subtitle in subs_to_remove:
                            version.subtitle_set.remove(subtitle)
                            subtitle.delete()

        # Adding unique constraint on 'Subtitle', fields ['version', 'subtitle_id']
        db.create_unique('videos_subtitle', ['version_id', 'subtitle_id'])
def _subs_to_remove(self, subtitles):
max_id = max([s.id for s in subtitles])
return [s for s in subtitles if s.id != max_id]
    def backwards(self, orm):
        """Drop the unique (version, subtitle_id) constraint added by forwards()."""
        # Removing unique constraint on 'Subtitle', fields ['version', 'subtitle_id']
        db.delete_unique('videos_subtitle', ['version_id', 'subtitle_id'])
models = {
'auth.customuser': {
'Meta': {'object_name': 'CustomUser', '_ormbases': ['auth.User']},
'autoplay_preferences': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'award_points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'changes_notification': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'picture': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'preferred_language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'valid_email': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'comments.comment': {
'Meta': {'object_name': 'Comment'},
'content': ('django.db.models.fields.TextField', [], {'max_length': '3000'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'content_type_set_for_comment'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_pk': ('django.db.models.fields.TextField', [], {}),
'reply_to': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['comments.Comment']", 'null': 'True', 'blank': 'True'}),
'submit_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'videos.action': {
'Meta': {'object_name': 'Action'},
'action_type': ('django.db.models.fields.IntegerField', [], {}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['comments.Comment']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']", 'null': 'True', 'blank': 'True'}),
'new_video_title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
},
'videos.nullsubtitles': {
'Meta': {'unique_together': "(('video', 'user', 'language'),)", 'object_name': 'NullSubtitles'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_original': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
},
'videos.nulltranslations': {
'Meta': {'object_name': 'NullTranslations'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
},
'videos.nullvideocaptions': {
'Meta': {'object_name': 'NullVideoCaptions'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
},
'videos.stopnotification': {
'Meta': {'object_name': 'StopNotification'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
},
'videos.subtitle': {
'Meta': {'unique_together': "(('version', 'subtitle_id'),)", 'object_name': 'Subtitle'},
'end_time': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'null_subtitles': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.NullSubtitles']", 'null': 'True'}),
'start_time': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'subtitle_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'subtitle_order': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'subtitle_text': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleVersion']", 'null': 'True'})
},
'videos.subtitlelanguage': {
'Meta': {'unique_together': "(('video', 'language'),)", 'object_name': 'SubtitleLanguage'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_original': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"}),
'was_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True'}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'videos.subtitleversion': {
'Meta': {'unique_together': "(('language', 'version_no'),)", 'object_name': 'SubtitleVersion'},
'datetime_started': ('django.db.models.fields.DateTimeField', [], {}),
'finished': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']"}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'notification_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'text_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'time_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True'}),
'version_no': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'videos.translation': {
'Meta': {'object_name': 'Translation'},
'caption_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'null_translations': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.NullTranslations']", 'null': 'True'}),
'translation_text': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.TranslationVersion']", 'null': 'True'})
},
'videos.translationlanguage': {
'Meta': {'object_name': 'TranslationLanguage'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_translated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"}),
'was_translated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True'}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'videos.translationversion': {
'Meta': {'object_name': 'TranslationVersion'},
'datetime_started': ('django.db.models.fields.DateTimeField', [], {}),
'finished': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.TranslationLanguage']"}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'notification_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'text_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'time_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'version_no': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'videos.usertestresult': {
'Meta': {'object_name': 'UserTestResult'},
'browser': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'get_updates': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'task1': ('django.db.models.fields.TextField', [], {}),
'task2': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'task3': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'videos.video': {
'Meta': {'object_name': 'Video'},
'allow_community_edits': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'bliptv_fileid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'bliptv_flv_url': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'dailymotion_videoid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'duration': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True'}),
'subtitles_fetched_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'video_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'video_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'video_url': ('django.db.models.fields.URLField', [], {'max_length': '2048', 'blank': 'True'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'vimeo_videoid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'was_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'widget_views_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'writelock_owners'", 'null': 'True', 'to': "orm['auth.CustomUser']"}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'youtube_videoid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'})
},
'videos.videocaption': {
'Meta': {'object_name': 'VideoCaption'},
'caption_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'caption_text': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'end_time': ('django.db.models.fields.FloatField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'null_captions': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.NullVideoCaptions']", 'null': 'True'}),
'start_time': ('django.db.models.fields.FloatField', [], {}),
'sub_order': ('django.db.models.fields.FloatField', [], {}),
'version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.VideoCaptionVersion']", 'null': 'True'})
},
'videos.videocaptionversion': {
'Meta': {'object_name': 'VideoCaptionVersion'},
'datetime_started': ('django.db.models.fields.DateTimeField', [], {}),
'finished': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'notification_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'text_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'time_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'version_no': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
}
}
complete_apps = ['videos']
|
kemalakyol48/python-for-android | refs/heads/master | python-modules/twisted/twisted/conch/topfiles/setup.py | 52 | # Copyright (c) 2009 Twisted Matrix Laboratories.
# See LICENSE for details.
import sys
try:
from twisted.python import dist
except ImportError:
raise SystemExit("twisted.python.dist module not found. Make sure you "
"have installed the Twisted core package before "
"attempting to install any other Twisted projects.")
if __name__ == '__main__':
    # distutils only understands the ``classifiers`` keyword on Python >= 2.4;
    # on older interpreters we must not pass it at all, so the extra metadata
    # dict is built conditionally and splatted into dist.setup() below.
    extraMeta = {}
    if sys.version_info[:2] >= (2, 4):
        extraMeta["classifiers"] = [
            "Development Status :: 4 - Beta",
            "Environment :: Console",
            "Environment :: No Input/Output (Daemon)",
            "Intended Audience :: Developers",
            "Intended Audience :: End Users/Desktop",
            "Intended Audience :: System Administrators",
            "License :: OSI Approved :: MIT License",
            "Programming Language :: Python",
            "Topic :: Internet",
            "Topic :: Security",
            "Topic :: Software Development :: Libraries :: Python Modules",
            "Topic :: Terminals",
        ]

    dist.setup(
        twisted_subproject="conch",
        scripts=dist.getScripts("conch"),
        # metadata
        name="Twisted Conch",
        description="Twisted SSHv2 implementation.",
        author="Twisted Matrix Laboratories",
        author_email="twisted-python@twistedmatrix.com",
        maintainer="Paul Swartz",
        url="http://twistedmatrix.com/trac/wiki/TwistedConch",
        license="MIT",
        long_description="""\
Conch is an SSHv2 implementation using the Twisted framework. It
includes a server, client, a SFTP client, and a key generator.
""",
        **extraMeta)
|
lambday/shogun | refs/heads/develop | examples/undocumented/python/converter_localitypreservingprojections.py | 4 | #!/usr/bin/env python
# Path to the demo feature matrix used by this example (relative to the
# examples directory of the shogun source tree).
data = '../data/fm_train_real.dat'
# Two parameter sets for the example runner: (input file, neighbourhood size k).
parameter_list = [[data,20],[data,30]]
def converter_localitypreservingprojections (data_fname,k):
    """Apply shogun's Locality Preserving Projections to features from a CSV file.

    Parameters
    ----------
    data_fname : str
        Path to a CSV file holding the training feature matrix.
    k : int
        Number of nearest neighbours used to build the LPP neighbourhood graph.

    Returns
    -------
    The feature object created from the CSV file; the converter (target
    dimensionality 1, tau 2.0) is applied to it via ``transform``.
    """
    # FIX: ``sg`` was referenced below but never imported, so calling this
    # function raised NameError. Bind the shogun module under that alias.
    import shogun as sg
    from shogun import CSVFile
    from shogun import LocalityPreservingProjections

    features = sg.features(CSVFile(data_fname))

    converter = LocalityPreservingProjections()
    converter.set_target_dim(1)
    converter.set_k(k)
    converter.set_tau(2.0)
    converter.transform(features)

    return features
if __name__=='__main__':
    print('LocalityPreservingProjections')
    # NOTE(review): the actual example invocation is disabled below —
    # presumably to keep the example suite fast/green; confirm before re-enabling.
    #converter_localitypreservingprojections(*parameter_list[0])
|
wolfskaempf/ga_statistics | refs/heads/master | lib/python2.7/site-packages/crispy_forms/tests/test_layout.py | 10 | # -*- coding: utf-8 -*-
import django
from django import forms
from django.conf import settings
from django.core.urlresolvers import reverse
from django.forms.models import formset_factory, modelformset_factory
from django.middleware.csrf import _get_new_csrf_key
from django.shortcuts import render_to_response
from django.template import (
Context, RequestContext, loader
)
from django.test import RequestFactory
from django.utils.translation import ugettext_lazy as _
from .base import CrispyTestCase
from .forms import (
TestForm, TestForm2, TestForm3, CheckboxesTestForm,
TestForm4, CrispyTestModel, TestForm5
)
from .utils import override_settings
from crispy_forms.bootstrap import InlineCheckboxes
from crispy_forms.compatibility import PY2
from crispy_forms.helper import FormHelper
from crispy_forms.layout import (
Layout, Fieldset, MultiField, Row, Column, HTML, ButtonHolder,
Div, Submit
)
from crispy_forms.utils import render_crispy_form
class TestFormLayout(CrispyTestCase):
    """Rendering tests for FormHelper ``Layout`` objects.

    Covers field-resolution errors, unicode field names, the layout objects
    (Fieldset, Row, Column, MultiField, Div, HTML, ButtonHolder, Submit),
    dynamic layout mutation, formsets/modelformsets, i18n/l10n and the
    default layouts generated by the test forms.
    """
    # Django test-suite URLconf; reverse() calls below resolve against it.
    urls = 'crispy_forms.tests.urls'
    def test_invalid_unicode_characters(self):
        """A layout naming a nonexistent (unicode) field raises when CRISPY_FAIL_SILENTLY is off."""
        # Adds a BooleanField that uses non valid unicode characters "ñ"
        form_helper = FormHelper()
        form_helper.add_layout(
            Layout(
                'españa'
            )
        )
        template = loader.get_template_from_string(u"""
            {% load crispy_forms_tags %}
            {% crispy form form_helper %}
        """)
        c = Context({'form': TestForm(), 'form_helper': form_helper})
        settings.CRISPY_FAIL_SILENTLY = False
        self.assertRaises(Exception, lambda: template.render(c))
        del settings.CRISPY_FAIL_SILENTLY
    def test_unicode_form_field(self):
        """Unicode field names render on py3; on py2 rendering raises."""
        class UnicodeForm(forms.Form):
            def __init__(self, *args, **kwargs):
                super(UnicodeForm, self).__init__(*args, **kwargs)
                self.fields['contraseña'] = forms.CharField()
        helper = FormHelper()
        helper.layout = Layout(u'contraseña')
        if PY2:
            self.assertRaises(Exception, lambda: render_crispy_form(UnicodeForm()))
        else:
            html = render_crispy_form(UnicodeForm())
            self.assertTrue('id="id_contraseña"' in html)
    def test_meta_extra_fields_with_missing_fields(self):
        """Fields listed in Meta but deleted from the form instance are skipped."""
        class FormWithMeta(TestForm):
            class Meta:
                fields = ('email', 'first_name', 'last_name')
        form = FormWithMeta()
        # We remove email field on the go
        del form.fields['email']
        form_helper = FormHelper()
        form_helper.layout = Layout(
            'first_name',
        )
        template = loader.get_template_from_string(u"""
            {% load crispy_forms_tags %}
            {% crispy form form_helper %}
        """)
        c = Context({'form': form, 'form_helper': form_helper})
        html = template.render(c)
        self.assertFalse('email' in html)
    def test_layout_unresolved_field(self):
        """A layout referencing an unknown field name raises when not failing silently."""
        form_helper = FormHelper()
        form_helper.add_layout(
            Layout(
                'typo'
            )
        )
        template = loader.get_template_from_string(u"""
            {% load crispy_forms_tags %}
            {% crispy form form_helper %}
        """)
        c = Context({'form': TestForm(), 'form_helper': form_helper})
        settings.CRISPY_FAIL_SILENTLY = False
        self.assertRaises(Exception, lambda:template.render(c))
        del settings.CRISPY_FAIL_SILENTLY
    def test_double_rendered_field(self):
        """Listing the same field twice in one layout raises when not failing silently."""
        form_helper = FormHelper()
        form_helper.add_layout(
            Layout(
                'is_company',
                'is_company',
            )
        )
        template = loader.get_template_from_string(u"""
            {% load crispy_forms_tags %}
            {% crispy form form_helper %}
        """)
        c = Context({'form': TestForm(), 'form_helper': form_helper})
        settings.CRISPY_FAIL_SILENTLY = False
        self.assertRaises(Exception, lambda:template.render(c))
        del settings.CRISPY_FAIL_SILENTLY
    def test_context_pollution(self):
        """{% crispy %} must not leak context into later renders of another form."""
        class ExampleForm(forms.Form):
            comment = forms.CharField()
        form = ExampleForm()
        form2 = TestForm()
        template = loader.get_template_from_string(u"""
            {% load crispy_forms_tags %}
            {{ form.as_ul }}
            {% crispy form2 %}
            {{ form.as_ul }}
        """)
        c = Context({'form': form, 'form2': form2})
        html = template.render(c)
        self.assertEqual(html.count('name="comment"'), 2)
        self.assertEqual(html.count('name="is_company"'), 1)
    def test_layout_fieldset_row_html_with_unicode_fieldnames(self):
        """Fieldset/Row/HTML layout objects render ids, classes and custom attrs."""
        form_helper = FormHelper()
        form_helper.add_layout(
            Layout(
                Fieldset(
                    u'Company Data',
                    u'is_company',
                    css_id = "fieldset_company_data",
                    css_class = "fieldsets",
                    title = "fieldset_title",
                    test_fieldset = "123"
                ),
                Fieldset(
                    u'User Data',
                    u'email',
                    Row(
                        u'password1',
                        u'password2',
                        css_id = "row_passwords",
                        css_class = "rows",
                    ),
                    HTML('<a href="#" id="testLink">test link</a>'),
                    HTML(u"""
                        {% if flag %}{{ message }}{% endif %}
                    """),
                    u'first_name',
                    u'last_name',
                )
            )
        )
        template = loader.get_template_from_string(u"""
            {% load crispy_forms_tags %}
            {% crispy form form_helper %}
        """)
        c = Context({
            'form': TestForm(),
            'form_helper': form_helper,
            'flag': True,
            'message': "Hello!",
        })
        html = template.render(c)
        self.assertTrue('id="fieldset_company_data"' in html)
        self.assertTrue('class="fieldsets' in html)
        self.assertTrue('title="fieldset_title"' in html)
        # custom kwargs become dasherized HTML attributes
        self.assertTrue('test-fieldset="123"' in html)
        self.assertTrue('id="row_passwords"' in html)
        self.assertEqual(html.count('<label'), 6)
        if self.current_template_pack == 'uni_form':
            self.assertTrue('class="formRow rows"' in html)
        else:
            self.assertTrue('class="row rows"' in html)
        self.assertTrue('Hello!' in html)
        self.assertTrue('testLink' in html)
    def test_second_layout_multifield_column_buttonholder_submit_div(self):
        """MultiField/Column/ButtonHolder/Submit/Div render ids, classes and data attrs."""
        form_helper = FormHelper()
        form_helper.add_layout(
            Layout(
                MultiField("Some company data",
                    'is_company',
                    'email',
                    css_id = "multifield_info",
                    title = "multifield_title",
                    multifield_test = "123"
                ),
                Column(
                    'first_name',
                    'last_name',
                    css_id = "column_name",
                    css_class = "columns",
                ),
                ButtonHolder(
                    Submit('Save the world', '{{ value_var }}', css_class='button white', data_id='test', data_name='test'),
                    Submit('store', 'Store results')
                ),
                Div(
                    'password1',
                    'password2',
                    css_id="custom-div",
                    css_class="customdivs",
                    test_markup="123"
                )
            )
        )
        template = loader.get_template_from_string(u"""
            {% load crispy_forms_tags %}
            {% crispy form form_helper %}
        """)
        # value_var feeds the Submit button value through the template context
        c = Context({'form': TestForm(), 'form_helper': form_helper, 'value_var': "Save"})
        html = template.render(c)
        self.assertTrue('multiField' in html)
        self.assertTrue('formColumn' in html)
        self.assertTrue('id="multifield_info"' in html)
        self.assertTrue('title="multifield_title"' in html)
        self.assertTrue('multifield-test="123"' in html)
        self.assertTrue('id="column_name"' in html)
        self.assertTrue('class="formColumn columns"' in html)
        self.assertTrue('class="buttonHolder">' in html)
        self.assertTrue('input type="submit"' in html)
        self.assertTrue('button white' in html)
        self.assertTrue('data-id="test"' in html)
        self.assertTrue('data-name="test"' in html)
        # button names are slugified from the first Submit argument
        self.assertTrue('name="save-the-world"' in html)
        self.assertTrue('value="Save"' in html)
        self.assertTrue('name="store"' in html)
        self.assertTrue('value="Store results"' in html)
        self.assertTrue('id="custom-div"' in html)
        self.assertTrue('class="customdivs"' in html)
        self.assertTrue('test-markup="123"' in html)
    def test_layout_composition(self):
        """Nested Layout objects compose; fields left out are not rendered."""
        form_helper = FormHelper()
        form_helper.add_layout(
            Layout(
                Layout(
                    MultiField("Some company data",
                        'is_company',
                        'email',
                        css_id = "multifield_info",
                    ),
                ),
                Column(
                    'first_name',
                    # 'last_name', Missing a field on purpose
                    css_id = "column_name",
                    css_class = "columns",
                ),
                ButtonHolder(
                    Submit('Save', 'Save', css_class='button white'),
                ),
                Div(
                    'password1',
                    'password2',
                    css_id="custom-div",
                    css_class="customdivs",
                )
            )
        )
        template = loader.get_template_from_string(u"""
            {% load crispy_forms_tags %}
            {% crispy form form_helper %}
        """)
        c = Context({'form': TestForm(), 'form_helper': form_helper})
        html = template.render(c)
        self.assertTrue('multiField' in html)
        self.assertTrue('formColumn' in html)
        self.assertTrue('id="multifield_info"' in html)
        self.assertTrue('id="column_name"' in html)
        self.assertTrue('class="formColumn columns"' in html)
        self.assertTrue('class="buttonHolder">' in html)
        self.assertTrue('input type="submit"' in html)
        self.assertTrue('name="Save"' in html)
        self.assertTrue('id="custom-div"' in html)
        self.assertTrue('class="customdivs"' in html)
        self.assertFalse('last_name' in html)
    def test_change_layout_dynamically_delete_field(self):
        """Layouts can be mutated at runtime to follow fields deleted from the form."""
        template = loader.get_template_from_string(u"""
            {% load crispy_forms_tags %}
            {% crispy form form_helper %}
        """)
        form = TestForm()
        form_helper = FormHelper()
        form_helper.add_layout(
            Layout(
                Fieldset(
                    u'Company Data',
                    'is_company',
                    'email',
                    'password1',
                    'password2',
                    css_id = "multifield_info",
                ),
                Column(
                    'first_name',
                    'last_name',
                    css_id = "column_name",
                )
            )
        )
        # We remove email field on the go
        # Layout needs to be adapted for the new form fields
        del form.fields['email']
        del form_helper.layout.fields[0].fields[1]
        c = Context({'form': form, 'form_helper': form_helper})
        html = template.render(c)
        self.assertFalse('email' in html)
    def test_formset_layout(self):
        """The same helper layout is applied to every form in a formset."""
        TestFormSet = formset_factory(TestForm, extra=3)
        formset = TestFormSet()
        helper = FormHelper()
        helper.form_id = 'thisFormsetRocks'
        helper.form_class = 'formsets-that-rock'
        helper.form_method = 'POST'
        helper.form_action = 'simpleAction'
        helper.layout = Layout(
            Fieldset("Item {{ forloop.counter }}",
                'is_company',
                'email',
            ),
            HTML("{% if forloop.first %}Note for first form only{% endif %}"),
            Row('password1', 'password2'),
            Fieldset("",
                'first_name',
                'last_name'
            )
        )
        html = render_crispy_form(
            form=formset, helper=helper, context={'csrf_token': _get_new_csrf_key()}
        )
        # Check formset fields
        # management-form widget attribute order differs across Django versions
        django_version = django.get_version()
        if django_version < '1.5':
            self.assertEqual(html.count(
                'type="hidden" name="form-TOTAL_FORMS" value="3" id="id_form-TOTAL_FORMS"'
            ), 1)
            self.assertEqual(html.count(
                'type="hidden" name="form-INITIAL_FORMS" value="0" id="id_form-INITIAL_FORMS"'
            ), 1)
            if (django_version >= '1.4' and django_version < '1.4.4') or django_version < '1.3.6':
                self.assertEqual(html.count(
                    'type="hidden" name="form-MAX_NUM_FORMS" id="id_form-MAX_NUM_FORMS"'
                ), 1)
            else:
                self.assertEqual(html.count(
                    'type="hidden" name="form-MAX_NUM_FORMS" value="1000" id="id_form-MAX_NUM_FORMS"'
                ), 1)
        else:
            self.assertEqual(html.count(
                'id="id_form-TOTAL_FORMS" name="form-TOTAL_FORMS" type="hidden" value="3"'
            ), 1)
            self.assertEqual(html.count(
                'id="id_form-INITIAL_FORMS" name="form-INITIAL_FORMS" type="hidden" value="0"'
            ), 1)
            self.assertEqual(html.count(
                'id="id_form-MAX_NUM_FORMS" name="form-MAX_NUM_FORMS" type="hidden" value="1000"'
            ), 1)
        self.assertEqual(html.count("hidden"), 4)
        # Check form structure
        self.assertEqual(html.count('<form'), 1)
        self.assertEqual(html.count("<input type='hidden' name='csrfmiddlewaretoken'"), 1)
        self.assertTrue('formsets-that-rock' in html)
        self.assertTrue('method="post"' in html)
        self.assertTrue('id="thisFormsetRocks"' in html)
        self.assertTrue('action="%s"' % reverse('simpleAction') in html)
        # Check form layout
        self.assertTrue('Item 1' in html)
        self.assertTrue('Item 2' in html)
        self.assertTrue('Item 3' in html)
        self.assertEqual(html.count('Note for first form only'), 1)
        if self.current_template_pack == 'uni_form':
            self.assertEqual(html.count('formRow'), 3)
        else:
            self.assertEqual(html.count('row'), 3)
    def test_modelformset_layout(self):
        """A helper layout limited to one field hides all others in a modelformset."""
        CrispyModelFormSet = modelformset_factory(CrispyTestModel, form=TestForm4, extra=3)
        formset = CrispyModelFormSet(queryset=CrispyTestModel.objects.none())
        helper = FormHelper()
        helper.layout = Layout(
            'email'
        )
        html = render_crispy_form(form=formset, helper=helper)
        self.assertEqual(html.count("id_form-0-id"), 1)
        self.assertEqual(html.count("id_form-1-id"), 1)
        self.assertEqual(html.count("id_form-2-id"), 1)
        # management-form widget attribute order differs across Django versions
        django_version = django.get_version()
        if django_version < '1.5':
            self.assertEqual(html.count(
                'type="hidden" name="form-TOTAL_FORMS" value="3" id="id_form-TOTAL_FORMS"'
            ), 1)
            self.assertEqual(html.count(
                'type="hidden" name="form-INITIAL_FORMS" value="0" id="id_form-INITIAL_FORMS"'
            ), 1)
            if (django_version >= '1.4' and django_version < '1.4.4') or django_version < '1.3.6':
                self.assertEqual(html.count(
                    'type="hidden" name="form-MAX_NUM_FORMS" id="id_form-MAX_NUM_FORMS"'
                ), 1)
            else:
                self.assertEqual(html.count(
                    'type="hidden" name="form-MAX_NUM_FORMS" value="1000" id="id_form-MAX_NUM_FORMS"'
                ), 1)
        else:
            self.assertEqual(html.count(
                'id="id_form-TOTAL_FORMS" name="form-TOTAL_FORMS" type="hidden" value="3"'
            ), 1)
            self.assertEqual(html.count(
                'id="id_form-INITIAL_FORMS" name="form-INITIAL_FORMS" type="hidden" value="0"'
            ), 1)
            self.assertEqual(html.count(
                'id="id_form-MAX_NUM_FORMS" name="form-MAX_NUM_FORMS" type="hidden" value="1000"'
            ), 1)
        self.assertEqual(html.count('name="form-0-email"'), 1)
        self.assertEqual(html.count('name="form-1-email"'), 1)
        self.assertEqual(html.count('name="form-2-email"'), 1)
        self.assertEqual(html.count('name="form-3-email"'), 0)
        self.assertEqual(html.count('password'), 0)
    def test_i18n(self):
        """Lazy translated strings are accepted by HTML and Fieldset legends."""
        template = loader.get_template_from_string(u"""
            {% load crispy_forms_tags %}
            {% crispy form form.helper %}
        """)
        form = TestForm()
        form_helper = FormHelper()
        form_helper.layout = Layout(
            HTML(_("i18n text")),
            Fieldset(
                _("i18n legend"),
                'first_name',
                'last_name',
            )
        )
        form.helper = form_helper
        html = template.render(Context({'form': form}))
        self.assertEqual(html.count('i18n legend'), 1)
    @override_settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True)
    def test_l10n(self):
        """Field values render unlocalized even with thousand separators enabled."""
        form = TestForm5(data={'pk': 1000})
        html = render_crispy_form(form)
        # Make sure values are unlocalized
        self.assertTrue('value="1,000"' not in html)
        # Make sure label values are NOT localized
        # NOTE(review): assertTrue here treats 2 as the failure message, not an
        # expected count — presumably assertEqual was intended; confirm.
        self.assertTrue(html.count('1000'), 2)
    def test_default_layout(self):
        """TestForm2's auto-generated helper lists all its fields in order."""
        test_form = TestForm2()
        self.assertEqual(test_form.helper.layout.fields, [
            'is_company', 'email', 'password1', 'password2',
            'first_name', 'last_name', 'datetime_field',
        ])
    def test_default_layout_two(self):
        """TestForm3's auto-generated helper contains only 'email'."""
        test_form = TestForm3()
        self.assertEqual(test_form.helper.layout.fields, ['email'])
    def test_modelform_layout_without_meta(self):
        """A ModelForm without Meta still renders only the fields in the layout."""
        test_form = TestForm4()
        test_form.helper = FormHelper()
        test_form.helper.layout = Layout('email')
        html = render_crispy_form(test_form)
        self.assertTrue('email' in html)
        self.assertFalse('password' in html)
class TestBootstrapFormLayout(CrispyTestCase):
    """Regression tests for template-context handling in the bootstrap packs."""
    urls = 'crispy_forms.tests.urls'

    def test_keepcontext_context_manager(self):
        """Issue #180: InlineCheckboxes must not leak its context mutations.

        The bug only manifests when rendering through render_to_response in
        exactly this fashion, so the test goes through a real request cycle.
        """
        checkbox_form = CheckboxesTestForm()
        checkbox_form.helper = FormHelper()
        # InlineCheckboxes updates the template context in an unsafe way
        checkbox_form.helper.layout = Layout(
            'checkboxes',
            InlineCheckboxes('alphacheckboxes'),
            'numeric_multiple_checkboxes'
        )
        request = RequestFactory().get('/')
        response = render_to_response(
            'crispy_render_template.html',
            RequestContext(request, {'form': checkbox_form}),
        )
        if self.current_template_pack == 'bootstrap':
            self.assertEqual(response.content.count(b'checkbox inline'), 3)
        elif self.current_template_pack == 'bootstrap3':
            self.assertEqual(response.content.count(b'checkbox-inline'), 3)
class TestBootstrap3FormLayout(CrispyTestCase):
    """Tests specific to the bootstrap3 template pack."""
    urls = 'crispy_forms.tests.urls'

    def test_form_inline(self):
        """An inline form renders sr-only labels, form-group wrappers and placeholders."""
        form = TestForm()
        helper = FormHelper()
        helper.form_class = 'form-inline'
        helper.field_template = 'bootstrap3/layout/inline_field.html'
        helper.layout = Layout(
            'email',
            'password1',
            'last_name',
        )
        form.helper = helper
        html = render_crispy_form(form)
        # (substring, expected occurrence count) pairs for the rendered markup
        expectations = [
            ('class="form-inline"', 1),
            ('class="form-group"', 3),
            ('<label for="id_email" class="sr-only', 1),
            ('id="div_id_email" class="form-group"', 1),
            ('placeholder="email"', 1),
            ('</label> <input', 3),
        ]
        for needle, expected_count in expectations:
            self.assertEqual(html.count(needle), expected_count)
|
joke2k/faker | refs/heads/master | faker/providers/currency/ro_RO/__init__.py | 1 | from .. import Provider as CurrencyProvider
class Provider(CurrencyProvider):
    """Romanian (ro_RO) currency provider: localized price formats and price tags."""

    # numerify() patterns: '#' -> digit, '%' -> non-zero digit, ',' and '.' literal
    price_formats = ["#,##", "%#,##", "%##,##", "%.###,##", "%#.###,##"]

    def pricetag(self):
        """Return a random price string suffixed with the Romanian currency 'Lei'."""
        chosen_format = self.random_element(self.price_formats)
        return self.numerify(chosen_format) + "\N{no-break space}Lei"
|
jonasstein/cryspy | refs/heads/master | tests/blendertest/blenderscript2.py | 3 | import bpy
import bmesh
for ob in bpy.data.objects:
if ob.name.startswith('structure'):
ob.select = True
bpy.ops.object.delete()
for me in bpy.data.meshes:
if me.name.startswith('structure'):
bpy.data.meshes.remove(me)
for mat in bpy.data.materials:
if mat.name.startswith('structure'):
bpy.data.materials.remove(mat)
bpy.ops.object.select_all(action='DESELECT')
for object in bpy.data.objects:
if object.type == 'LAMP':
object.select = True
bpy.ops.object.delete()
bpy.data.worlds['World'].horizon_color = (1, 1, 1)
bpy.ops.object.lamp_add(type='POINT')
l = bpy.context.object
l.name = 'structure.Lamp1'
l.location = (5, -5, 10)
bpy.ops.object.lamp_add(type='HEMI')
l = bpy.context.object
l.name = 'structure.LampHemi'
l.location = (-10, -10, 10)
l.data.energy = 0.5000
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 6, diameter1 = 0.0500, diameter2 = 0.0500, depth = 4.7931)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 2.3966))
mesh = bpy.data.meshes.new('structure.meshXAxis_cylinder')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.XAxis_cylinder', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ -0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 0.0000, 0.0000, 0.0000)
bpy.context.scene.objects.link(ob1)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 24, diameter1 = 0.2000, diameter2 = 0.0100, depth = 0.5000)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 5.0431))
mesh = bpy.data.meshes.new('structure.meshXAxis_cone')
bm.to_mesh(mesh)
ob2 = bpy.data.objects.new('structure.XAxis_cone', mesh)
ob2.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob2.data.transform([[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ -0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob2.location = ( 0.0000, 0.0000, 0.0000)
bpy.context.scene.objects.link(ob2)
bpy.ops.object.select_all(action='DESELECT')
ob1.select = True
ob2.select = True
bpy.context.scene.objects.active = ob1
bpy.ops.object.join()
mat = bpy.data.materials.new('structure.material.XAxis')
mat.diffuse_color = (0, 0, 0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 6, diameter1 = 0.0500, diameter2 = 0.0500, depth = 5.3384)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 2.6692))
mesh = bpy.data.meshes.new('structure.meshYAxis_cylinder')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.YAxis_cylinder', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ -1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 0.0000, 0.0000, 0.0000)
bpy.context.scene.objects.link(ob1)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 24, diameter1 = 0.2000, diameter2 = 0.0100, depth = 0.5000)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 5.5884))
mesh = bpy.data.meshes.new('structure.meshYAxis_cone')
bm.to_mesh(mesh)
ob2 = bpy.data.objects.new('structure.YAxis_cone', mesh)
ob2.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob2.data.transform([[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ -1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob2.location = ( 0.0000, 0.0000, 0.0000)
bpy.context.scene.objects.link(ob2)
bpy.ops.object.select_all(action='DESELECT')
ob1.select = True
ob2.select = True
bpy.context.scene.objects.active = ob1
bpy.ops.object.join()
mat = bpy.data.materials.new('structure.material.YAxis')
mat.diffuse_color = (0, 0, 0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 6, diameter1 = 0.0500, diameter2 = 0.0500, depth = 6.9025)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 3.4512))
mesh = bpy.data.meshes.new('structure.meshZAxis_cylinder')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.ZAxis_cylinder', mesh)
ob1.data.transform([[ 1.0000, 0.0000, -0.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.7071, 0.7071, 0.0000, 0.0000], \
[ -0.7071, 0.7071, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 0.0000, 0.0000, 0.0000)
bpy.context.scene.objects.link(ob1)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 24, diameter1 = 0.2000, diameter2 = 0.0100, depth = 0.5000)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 7.1525))
mesh = bpy.data.meshes.new('structure.meshZAxis_cone')
bm.to_mesh(mesh)
ob2 = bpy.data.objects.new('structure.ZAxis_cone', mesh)
ob2.data.transform([[ 1.0000, 0.0000, -0.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob2.data.transform([[ 0.7071, 0.7071, 0.0000, 0.0000], \
[ -0.7071, 0.7071, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob2.location = ( 0.0000, 0.0000, 0.0000)
bpy.context.scene.objects.link(ob2)
bpy.ops.object.select_all(action='DESELECT')
ob1.select = True
ob2.select = True
bpy.context.scene.objects.active = ob1
bpy.ops.object.join()
mat = bpy.data.materials.new('structure.material.ZAxis')
mat.diffuse_color = (0, 0, 0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bpy.ops.mesh.primitive_cube_add(location=(0,0,0))
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.delete(type='VERT')
bpy.ops.object.mode_set(mode='OBJECT')
posobject = bpy.context.object
posobject.name = 'structure.Positions'
bpy.ops.mesh.primitive_ico_sphere_add(location=(0,0,0), size=0.292400, subdivisions=3)
ob = bpy.context.object
me = ob.data
me.name = 'structure.mesh.Mn'
bpy.ops.object.delete()
mat = bpy.data.materials.new('structure.material.Mn')
mat.diffuse_color = (0.6118, 0.4784, 0.7804)
me.materials.append(mat)
bpy.ops.mesh.primitive_ico_sphere_add(location=(0,0,0), size=0.200000, subdivisions=3)
ob = bpy.context.object
me = ob.data
me.name = 'structure.mesh.O'
bpy.ops.object.delete()
mat = bpy.data.materials.new('structure.material.O')
mat.diffuse_color = (1.0, 0.051, 0.051)
me.materials.append(mat)
bpy.ops.mesh.primitive_ico_sphere_add(location=(0,0,0), size=0.402100, subdivisions=3)
ob = bpy.context.object
me = ob.data
me.name = 'structure.mesh.Tb'
bpy.ops.object.delete()
mat = bpy.data.materials.new('structure.material.Tb')
mat.diffuse_color = (0.1882, 1.0, 0.7804)
me.materials.append(mat)
# Atom sites: (object name, element symbol, Cartesian position).
# The symbol selects the shared template mesh 'structure.mesh.<symbol>'
# created above.  Each position is recorded twice: as a vertex of the
# 'structure.Positions' point cloud and as the new object's location.
_atom_sites = [
    ('structure.Atom001(Mn1_3r_1l)', 'Mn', (-2.646570, 0.000000, 3.701250)),
    ('structure.Atom002(Mn1_3r_3rf)', 'Mn', (7.939710, 5.838400, 0.000000)),
    ('structure.Atom003(Mn1_3r_2r)', 'Mn', (5.293140, 2.919200, 0.000000)),
    ('structure.Atom004(O1_1l)', 'O', (-2.097142, 0.194419, 5.551875)),
    ('structure.Atom005(Tb1_3l)', 'Tb', (-2.557116, 3.400284, 1.850625)),
    ('structure.Atom006(Mn1_3r_1rf)', 'Mn', (7.939710, 5.838400, 3.701250)),
    ('structure.Atom007(O1_1f)', 'O', (3.195998, 6.032819, 5.551875)),
    ('structure.Atom008(O2_6l_7r)', 'O', (6.372411, 1.014714, 7.024972)),
    ('structure.Atom009(O1_2)', 'O', (4.743712, 3.113619, 5.551875)),
    ('structure.Atom010(O1_1)', 'O', (3.195998, 0.194419, 5.551875)),
    ('structure.Atom011(Tb1_5)', 'Tb', (2.557116, 2.438116, 5.551875)),
    ('structure.Atom012(O1r)', 'O', (5.842568, 2.724781, 1.850625)),
    ('structure.Atom013(O2_6l_4)', 'O', (1.567299, 3.933914, 4.078777)),
    ('structure.Atom014(O1_3rb)', 'O', (7.390282, -0.194419, 1.850625)),
    ('structure.Atom015(O2_6l_6l)', 'O', (-1.567299, 1.904486, 0.377527)),
    ('structure.Atom016(O1_2l)', 'O', (-0.549428, 3.113619, 5.551875)),
    ('structure.Atom017(O2_6l_6)', 'O', (3.725841, 1.904486, 0.377527)),
    ('structure.Atom018(Mn1_3r_3r)', 'Mn', (7.939710, 0.000000, 0.000000)),
    ('structure.Atom019(Mn1_3r_2u)', 'Mn', (0.000000, 2.919200, 7.402500)),
    ('structure.Atom020(O2_6l)', 'O', (3.725841, 1.904486, 3.323722)),
    ('structure.Atom021(O2_6l_3l)', 'O', (-1.079271, 4.823686, 0.377527)),
    ('structure.Atom022(Mn1_3r_3rfu)', 'Mn', (7.939710, 5.838400, 7.402500)),
    ('structure.Atom023(Mn1_3r_2ru)', 'Mn', (5.293140, 2.919200, 7.402500)),
    ('structure.Atom024(Tb1_3)', 'Tb', (2.736024, 3.400284, 1.850625)),
    ('structure.Atom025(O1_3b)', 'O', (2.097142, -0.194419, 1.850625)),
    ('structure.Atom026(Mn1_3r_3u)', 'Mn', (2.646570, 0.000000, 7.402500)),
    ('structure.Atom027(Mn1_3rr)', 'Mn', (5.293140, 2.919200, 3.701250)),
    ('structure.Atom028(Mn1_3r_3lfu)', 'Mn', (-2.646570, 5.838400, 7.402500)),
    ('structure.Atom029(Mn1_3r_3ru)', 'Mn', (7.939710, 0.000000, 7.402500)),
    ('structure.Atom030(Mn1_3r_3lf)', 'Mn', (-2.646570, 5.838400, 0.000000)),
    ('structure.Atom031(Tb1_4)', 'Tb', (5.203686, 0.481084, 1.850625)),
    ('structure.Atom032(O2_6l_5)', 'O', (4.213869, 4.823686, 3.323722)),
    ('structure.Atom033(Tb1_2)', 'Tb', (0.089454, 5.357316, 5.551875)),
    ('structure.Atom034(O2_6l_2)', 'O', (1.567299, 3.933914, 7.024972)),
    ('structure.Atom035(O2_6l_1r)', 'O', (6.372411, 1.014714, 4.078777)),
    ('structure.Atom036(Mn1_3r_3f)', 'Mn', (2.646570, 5.838400, 0.000000)),
    ('structure.Atom037(Mn1_3r_3l)', 'Mn', (-2.646570, 0.000000, 0.000000)),
    ('structure.Atom038(O2_6l_3)', 'O', (4.213869, 4.823686, 0.377527)),
    ('structure.Atom039(Mn1_3r_3)', 'Mn', (2.646570, 0.000000, 0.000000)),
    ('structure.Atom040(O1_3r)', 'O', (7.390282, 5.643981, 1.850625)),
    ('structure.Atom041(Mn1_3r_3fu)', 'Mn', (2.646570, 5.838400, 7.402500)),
    ('structure.Atom042(O1)', 'O', (0.549428, 2.724781, 1.850625)),
    ('structure.Atom043(Mn1_3r_1)', 'Mn', (2.646570, 0.000000, 3.701250)),
    ('structure.Atom044(O2_6l_5l)', 'O', (-1.079271, 4.823686, 3.323722)),
    ('structure.Atom045(Tb1_5r)', 'Tb', (7.850256, 2.438116, 5.551875)),
    ('structure.Atom046(Tb1_2r)', 'Tb', (5.382594, 5.357316, 5.551875)),
    ('structure.Atom047(Mn1_3r_1r)', 'Mn', (7.939710, 0.000000, 3.701250)),
    ('structure.Atom048(O1_3)', 'O', (2.097142, 5.643981, 1.850625)),
    ('structure.Atom049(Mn1_3r_2)', 'Mn', (0.000000, 2.919200, 0.000000)),
    ('structure.Atom050(Mn1_3r_1lf)', 'Mn', (-2.646570, 5.838400, 3.701250)),
    ('structure.Atom051(O2_6l_1)', 'O', (1.079271, 1.014714, 4.078777)),
    ('structure.Atom052(Mn1_3r_3lu)', 'Mn', (-2.646570, 0.000000, 7.402500)),
    ('structure.Atom053(O1_1lf)', 'O', (-2.097142, 6.032819, 5.551875)),
    ('structure.Atom054(O2_6l_2r)', 'O', (6.860439, 3.933914, 7.024972)),
    ('structure.Atom055(O2_6l_4r)', 'O', (6.860439, 3.933914, 4.078777)),
    ('structure.Atom056(O2_6ll)', 'O', (-1.567299, 1.904486, 3.323722)),
    ('structure.Atom057(O2_6l_7)', 'O', (1.079271, 1.014714, 7.024972)),
    ('structure.Atom058(Tb1_4l)', 'Tb', (-0.089454, 0.481084, 1.850625)),
    ('structure.Atom059(Mn1_3r)', 'Mn', (0.000000, 2.919200, 3.701250)),
    ('structure.Atom060(Mn1_3r_1f)', 'Mn', (2.646570, 5.838400, 3.701250)),
]
for _name, _sym, _pos in _atom_sites:
    # Record the site in the 'structure.Positions' point cloud.
    posobject.data.vertices.add(1)
    posobject.data.vertices[-1].co = _pos
    # Instance the element's shared template mesh at the site.
    ob = bpy.data.objects.new(_name, bpy.data.meshes['structure.mesh.' + _sym])
    ob.location = _pos
    bpy.ops.object.shade_smooth()
    bpy.context.scene.objects.link(ob)
# Bond cylinders Bond001..Bond017.  Each original unit followed the exact
# same template: a capped 32-segment cylinder of radius 0.05 and the given
# depth, shifted so its base sits at the origin, then rotated by a matrix
# about Y followed by one about Z, and finally placed at the stored location
# with its own black material.
#
# The two rotation matrices always have the shapes
#   [[cy, 0, sy, 0], [0, 1, 0, 0], [-sy, 0, cy, 0], [0, 0, 0, 1]]   (about Y)
#   [[cz, sz, 0, 0], [-sz, cz, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]   (about Z)
# so only (cy, sy) and (cz, sz) are tabulated below.
#
# The half-depth translate offset is stored explicitly rather than computed
# as depth/2 because the generator rounded some values (e.g. depth 2.8583
# pairs with offset 1.4291, not 1.42915); recomputing would change output.
#
# Tuples: (depth, half_offset, cy, sy, cz, sz, location).
_bond_specs = [
    (2.7318, 1.3659, -0.8156, -0.5786, -0.6440, -0.7650, (6.8604, 3.9339, 4.0788)),
    (2.8924, 1.4462, -0.2610, -0.9653, -0.9479, -0.3187, (3.7258, 7.7429, 7.7800)),
    (2.8583, 1.4291, 0.7795, -0.6263, 0.2960, 0.9552, (-2.0971, 0.1944, -1.8506)),
    (3.0378, 1.5189, 0.4849, -0.8746, -0.6131, -0.7900, (8.4891, 6.0328, 5.5519)),
    (2.8924, 1.4462, 0.2610, -0.9653, -0.9479, 0.3187, (6.3724, 1.0147, -0.3775)),
    (2.9597, 1.4799, 0.0000, -1.0000, 0.1649, 0.9863, (3.7258, 1.9045, 0.3775)),
    (3.0378, 1.5189, 0.4849, -0.8746, -0.6131, -0.7900, (-1.5673, 7.7429, 7.7800)),
    (2.9597, 1.4799, 0.0000, -1.0000, 0.1649, 0.9863, (-1.5673, 1.9045, 3.3237)),
    (2.8924, 1.4462, -0.2610, -0.9653, 0.9479, -0.3187, (4.2139, 4.8237, 0.3775)),
    (2.9597, 1.4799, 0.0000, -1.0000, -0.1649, 0.9863, (4.2139, 4.8237, 3.3237)),
    (2.8583, 1.4291, -0.7795, -0.6263, 0.2960, 0.9552, (3.1960, 0.1944, 5.5519)),
    (2.7318, 1.3659, -0.8156, -0.5786, 0.6440, 0.7650, (-1.5673, 1.9045, 0.3775)),
    (3.0378, 1.5189, 0.4849, -0.8746, -0.6131, -0.7900, (-2.0971, 0.1944, 5.5519)),
    (2.8924, 1.4462, -0.2610, -0.9653, -0.9479, -0.3187, (3.7258, 7.7429, 0.3775)),
    (3.0378, 1.5189, -0.4849, -0.8746, -0.6131, 0.7900, (0.5494, 2.7248, 1.8506)),
    (2.7318, 1.3659, 0.8156, -0.5786, 0.6440, -0.7650, (3.1960, 0.1944, 5.5519)),
    (3.0378, 1.5189, -0.4849, -0.8746, -0.6131, -0.7900, (8.4891, 0.1944, 5.5519)),
]
for _i, (_depth, _half, _cy, _sy, _cz, _sz, _loc) in enumerate(_bond_specs, start=1):
    _tag = 'Bond%03d' % _i
    # Build the cylinder mesh in a throwaway bmesh and bake it into a
    # named mesh datablock.
    bm = bmesh.new()
    bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = _depth)
    bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, _half))
    mesh = bpy.data.meshes.new('structure.mesh' + _tag)
    bm.to_mesh(mesh)
    ob1 = bpy.data.objects.new('structure.' + _tag, mesh)
    # Orient the cylinder: rotation about Y, then rotation about Z.
    ob1.data.transform([[_cy, 0.0000, _sy, 0.0000],
                        [0.0000, 1.0000, 0.0000, 0.0000],
                        [-_sy, 0.0000, _cy, 0.0000],
                        [0.0000, 0.0000, 0.0000, 1.0000]])
    ob1.data.transform([[_cz, _sz, 0.0000, 0.0000],
                        [-_sz, _cz, 0.0000, 0.0000],
                        [0.0000, 0.0000, 1.0000, 0.0000],
                        [0.0000, 0.0000, 0.0000, 1.0000]])
    ob1.location = _loc
    bpy.context.scene.objects.link(ob1)
    # Plain black material, one per bond.
    mat = bpy.data.materials.new('structure.material.' + _tag)
    mat.diffuse_color = (0.0, 0.0, 0.0)
    mat.specular_color = (0, 0, 0)
    ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond018')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond018', mesh)
ob1.data.transform([[ -0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, -0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, -0.3613, 0.0000, 0.0000], \
[ 0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 0.5494, 2.7248, 1.8506)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond018')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond019')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond019', mesh)
ob1.data.transform([[ 0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, 0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.3724, 1.0147, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond019')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond020')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond020', mesh)
ob1.data.transform([[ -0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, -0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, -0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -0.5494, 3.1136, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond020')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond021')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond021', mesh)
ob1.data.transform([[ 0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, 0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.9479, 0.3187, 0.0000, 0.0000], \
[ -0.3187, 0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.8604, -1.9045, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond021')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond022')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond022', mesh)
ob1.data.transform([[ 0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, 0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.5673, -1.9045, -0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond022')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond023')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond023', mesh)
ob1.data.transform([[ -0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, -0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, -0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 4.7437, 3.1136, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond023')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond024')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond024', mesh)
ob1.data.transform([[ -0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, -0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 9.0190, 7.7429, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond024')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond025')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond025', mesh)
ob1.data.transform([[ 0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, 0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 8.4891, 6.0328, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond025')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond026')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond026', mesh)
ob1.data.transform([[ 0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, 0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -2.0971, 0.1944, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond026')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond027')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond027', mesh)
ob1.data.transform([[ -0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, -0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 8.4891, 6.0328, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond027')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond028')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond028', mesh)
ob1.data.transform([[ 0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, 0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.9479, -0.3187, 0.0000, 0.0000], \
[ 0.3187, 0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 4.2139, 4.8237, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond028')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond029')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond029', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.1649, -0.9863, 0.0000, 0.0000], \
[ 0.9863, 0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.0793, 1.0147, -0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond029')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond030')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond030', mesh)
ob1.data.transform([[ 0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, 0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.1960, 0.1944, -1.8506)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond030')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond031')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond031', mesh)
ob1.data.transform([[ 0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, 0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 4.2139, 4.8237, 7.7800)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond031')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond032')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond032', mesh)
ob1.data.transform([[ 0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, 0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -4.2139, 6.8531, -0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond032')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond033')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond033', mesh)
ob1.data.transform([[ 0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, 0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -1.0793, -1.0147, 7.7800)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond033')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond034')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond034', mesh)
ob1.data.transform([[ 0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, 0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 8.4891, 6.0328, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond034')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond035')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond035', mesh)
ob1.data.transform([[ -0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, -0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9479, 0.3187, 0.0000, 0.0000], \
[ -0.3187, -0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.3724, 1.0147, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond035')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond036')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond036', mesh)
ob1.data.transform([[ 0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, 0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.9479, 0.3187, 0.0000, 0.0000], \
[ -0.3187, 0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.5673, 3.9339, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond036')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond037')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond037', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.1649, -0.9863, 0.0000, 0.0000], \
[ 0.9863, 0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.3724, 1.0147, -0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond037')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond038')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond038', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.1649, -0.9863, 0.0000, 0.0000], \
[ 0.9863, -0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.5673, 3.9339, -0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond038')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond039')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond039', mesh)
ob1.data.transform([[ 0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, 0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.1960, 6.0328, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond039')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond040')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond040', mesh)
ob1.data.transform([[ 0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, 0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9479, 0.3187, 0.0000, 0.0000], \
[ -0.3187, -0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.0793, 1.0147, -0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond040')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond041')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond041', mesh)
ob1.data.transform([[ -0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, -0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9479, -0.3187, 0.0000, 0.0000], \
[ 0.3187, -0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 9.0190, 1.9045, 7.7800)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond041')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond042')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond042', mesh)
ob1.data.transform([[ 0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, 0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.5673, 3.9339, -0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond042')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond043')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond043', mesh)
ob1.data.transform([[ -0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, -0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -2.0971, 0.1944, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond043')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond044')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond044', mesh)
ob1.data.transform([[ 0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, 0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -3.7258, 3.9339, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond044')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond045')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond045', mesh)
ob1.data.transform([[ -0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, -0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -4.2139, 6.8531, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond045')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond046')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond046', mesh)
ob1.data.transform([[ 0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, 0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -2.0971, 6.0328, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond046')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond047')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond047', mesh)
ob1.data.transform([[ -0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, -0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, 0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 4.7437, 3.1136, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond047')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond048')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond048', mesh)
ob1.data.transform([[ -0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, -0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9479, -0.3187, 0.0000, 0.0000], \
[ 0.3187, -0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 9.0190, 1.9045, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond048')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond049')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond049', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.1649, -0.9863, 0.0000, 0.0000], \
[ 0.9863, 0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.3724, 1.0147, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond049')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
# --- Bonds 050-088 -----------------------------------------------------------
# Data-driven replacement for 39 copy-pasted stanzas.  Each original stanza
# built one thin cylinder ("bond"): a capped cone with equal end radii along
# +Z, shifted so its base sits at the object origin, then rotated about Y,
# then about Z (both baked into the mesh data), placed, linked to the scene,
# and given a flat black material.
#
# Tuple layout: (index, depth, half_depth, cos_y, sin_y, cos_z, sin_z, (x, y, z))
#   index      -> zero-padded into the Bond%03d / meshBond%03d / material names
#   depth      -> cylinder length passed to bmesh.ops.create_cone
#   half_depth -> +Z shift of the mesh; kept as explicit data rather than
#                 computed as depth / 2 because the generator rounded the two
#                 literals independently to 4 decimals (e.g. 2.8583 -> 1.4291,
#                 2.9597 -> 1.4799), and we reproduce the original bytes.
#   cos_y/sin_y -> rotation about the Y axis: [[c,0,-s],[0,1,0],[s,0,c]]
#   cos_z/sin_z -> rotation about the Z axis: [[c,s,0],[-s,c,0],[0,0,1]]
_BOND_DATA = [
    (50, 2.7318, 1.3659, -0.8156, 0.5786,  0.6440,  0.7650, ( 0.5494,  2.7248,  1.8506)),
    (51, 2.7318, 1.3659, -0.8156, 0.5786,  0.6440, -0.7650, ( 6.3724,  1.0147,  4.0788)),
    (52, 2.8924, 1.4462,  0.2610, 0.9653,  0.9479,  0.3187, ( 6.8604,  3.9339, -0.3775)),
    (53, 2.7062, 1.3531,  0.5443, 0.8389, -0.9324,  0.3613, ( 3.1960,  0.1944, -1.8506)),
    (54, 2.8583, 1.4291, -0.7795, 0.6263,  0.2960, -0.9552, ( 0.5494,  2.7248,  1.8506)),
    (55, 2.9597, 1.4799,  0.0000, 1.0000,  0.1649,  0.9863, ( 3.7258,  1.9045,  3.3237)),
    (56, 2.7062, 1.3531,  0.5443, 0.8389, -0.9324,  0.3613, (-2.0971,  6.0328,  5.5519)),
    (57, 2.8924, 1.4462,  0.2610, 0.9653,  0.9479,  0.3187, (-3.7258, -1.9045, -0.3775)),
    (58, 2.9597, 1.4799,  0.0000, 1.0000, -0.1649,  0.9863, (-1.0793,  4.8237,  3.3237)),
    (59, 2.7318, 1.3659, -0.8156, 0.5786, -0.6440, -0.7650, ( 4.7437,  3.1136,  5.5519)),
    (60, 2.8583, 1.4291, -0.7795, 0.6263,  0.2960,  0.9552, ( 6.8604, -1.9045,  4.0788)),
    (61, 2.7062, 1.3531, -0.5443, 0.8389, -0.9324,  0.3613, ( 3.1960,  6.0328,  5.5519)),
    (62, 3.0378, 1.5189,  0.4849, 0.8746, -0.6131, -0.7900, ( 8.4891,  0.1944, -1.8506)),
    (63, 2.9597, 1.4799,  0.0000, 1.0000, -0.1649,  0.9863, (-1.0793,  4.8237,  7.7800)),
    (64, 2.8924, 1.4462, -0.2610, 0.9653,  0.9479, -0.3187, ( 4.2139,  4.8237,  7.7800)),
    (65, 2.7062, 1.3531, -0.5443, 0.8389, -0.9324,  0.3613, ( 4.2139, -1.0147,  3.3237)),
    (66, 2.8924, 1.4462, -0.2610, 0.9653,  0.9479,  0.3187, ( 6.8604, -1.9045,  4.0788)),
    (67, 2.8924, 1.4462,  0.2610, 0.9653, -0.9479,  0.3187, ( 1.0793,  1.0147,  7.0250)),
    (68, 2.8583, 1.4291, -0.7795, 0.6263,  0.2960,  0.9552, ( 1.5673,  3.9339,  4.0788)),
    (69, 2.9597, 1.4799,  0.0000, 1.0000,  0.1649, -0.9863, ( 1.0793,  6.8531,  7.0250)),
    (70, 3.0378, 1.5189, -0.4849, 0.8746, -0.6131,  0.7900, ( 6.3724,  1.0147, -0.3775)),
    (71, 2.8924, 1.4462, -0.2610, 0.9653,  0.9479,  0.3187, ( 1.5673,  3.9339,  4.0788)),
    (72, 2.8583, 1.4291, -0.7795, 0.6263,  0.2960, -0.9552, (-1.0793,  4.8237,  0.3775)),
    (73, 2.8583, 1.4291,  0.7795, 0.6263,  0.2960,  0.9552, ( 3.1960,  6.0328,  5.5519)),
    (74, 2.7318, 1.3659, -0.8156, 0.5786, -0.6440, -0.7650, ( 1.5673,  3.9339,  4.0788)),
    (75, 2.8583, 1.4291,  0.7795, 0.6263,  0.2960,  0.9552, ( 6.8604,  3.9339, -0.3775)),
    (76, 2.8924, 1.4462, -0.2610, 0.9653, -0.9479, -0.3187, ( 9.0190,  7.7429,  7.7800)),
    (77, 2.8583, 1.4291, -0.7795, 0.6263,  0.2960,  0.9552, ( 6.8604,  3.9339,  4.0788)),
    (78, 2.8583, 1.4291,  0.7795, 0.6263,  0.2960,  0.9552, ( 3.1960,  0.1944, -1.8506)),
    (79, 2.7062, 1.3531, -0.5443, 0.8389, -0.9324,  0.3613, ( 8.4891,  0.1944,  5.5519)),
    (80, 2.7062, 1.3531,  0.5443, 0.8389, -0.9324,  0.3613, (-1.0793, -1.0147,  0.3775)),
    (81, 3.0378, 1.5189, -0.4849, 0.8746, -0.6131,  0.7900, ( 1.0793,  1.0147,  7.0250)),
    (82, 2.9597, 1.4799,  0.0000, 1.0000,  0.1649,  0.9863, (-1.5673,  1.9045,  0.3775)),
    (83, 3.0378, 1.5189, -0.4849, 0.8746, -0.6131, -0.7900, (-1.5673,  1.9045,  3.3237)),
    (84, 2.8924, 1.4462,  0.2610, 0.9653, -0.9479, -0.3187, ( 3.7258,  1.9045,  3.3237)),
    (85, 2.7062, 1.3531,  0.5443, 0.8389, -0.9324,  0.3613, ( 4.2139, -1.0147,  7.7800)),
    (86, 2.7318, 1.3659,  0.8156, 0.5786,  0.6440, -0.7650, (-4.2139,  6.8531,  7.0250)),
    (87, 2.8583, 1.4291, -0.7795, 0.6263, -0.2960,  0.9552, ( 1.0793,  1.0147,  4.0788)),
    (88, 2.9597, 1.4799,  0.0000, 1.0000, -0.1649,  0.9863, ( 9.5070, -1.0147,  3.3237)),
]

# Top-level loop (not a helper function) so that bm/mesh/ob1/mat remain
# module-level names holding the last bond's objects, exactly as the original
# straight-line code left them for any following generated stanzas.
for _i, _depth, _half, _cy, _sy, _cz, _sz, _loc in _BOND_DATA:
    bm = bmesh.new()
    # Equal diameter1/diameter2 makes the "cone" a cylinder of length _depth.
    bmesh.ops.create_cone(bm, cap_ends=True, cap_tris=True, segments=32,
                          diameter1=0.0500, diameter2=0.0500, depth=_depth)
    # Shift so the cylinder extends from z=0 to z=_depth in local space.
    bmesh.ops.translate(bm, verts=bm.verts, vec=(0, 0, _half))
    mesh = bpy.data.meshes.new('structure.meshBond%03d' % _i)
    bm.to_mesh(mesh)
    bm.free()  # release the BMesh; the original leaked it each stanza
    ob1 = bpy.data.objects.new('structure.Bond%03d' % _i, mesh)
    # Bake the orientation into the mesh data: first tilt about Y...
    ob1.data.transform([[_cy, 0.0000, -_sy, 0.0000],
                        [0.0000, 1.0000, 0.0000, 0.0000],
                        [_sy, 0.0000, _cy, 0.0000],
                        [0.0000, 0.0000, 0.0000, 1.0000]])
    # ...then spin about Z (same two chained transforms as the original).
    ob1.data.transform([[_cz, _sz, 0.0000, 0.0000],
                        [-_sz, _cz, 0.0000, 0.0000],
                        [0.0000, 0.0000, 1.0000, 0.0000],
                        [0.0000, 0.0000, 0.0000, 1.0000]])
    ob1.location = _loc
    # NOTE(review): scene.objects.link is the Blender <= 2.7x API; 2.8+
    # would need collection.objects.link — kept as generated for this target.
    bpy.context.scene.objects.link(ob1)
    mat = bpy.data.materials.new('structure.material.Bond%03d' % _i)
    mat.diffuse_color = (0.0, 0.0, 0.0)
    mat.specular_color = (0, 0, 0)
    ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond089')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond089', mesh)
ob1.data.transform([[ -0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, -0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9479, 0.3187, 0.0000, 0.0000], \
[ -0.3187, -0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.0793, 1.0147, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond089')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond090')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond090', mesh)
ob1.data.transform([[ 0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, 0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.9479, 0.3187, 0.0000, 0.0000], \
[ -0.3187, 0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.8604, -1.9045, -0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond090')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond091')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond091', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.1649, 0.9863, 0.0000, 0.0000], \
[ -0.9863, -0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 4.2139, -1.0147, 7.7800)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond091')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond092')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond092', mesh)
ob1.data.transform([[ -0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, -0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, -0.9552, 0.0000, 0.0000], \
[ 0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -1.0793, 4.8237, 7.7800)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond092')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond093')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond093', mesh)
ob1.data.transform([[ 0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, 0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 8.4891, 6.0328, -1.8506)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond093')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond094')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond094', mesh)
ob1.data.transform([[ 0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, 0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 4.2139, -1.0147, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond094')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond095')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond095', mesh)
ob1.data.transform([[ 0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, 0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 9.0190, 1.9045, 7.7800)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond095')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond096')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond096', mesh)
ob1.data.transform([[ -0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, -0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9479, -0.3187, 0.0000, 0.0000], \
[ 0.3187, -0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.7258, 1.9045, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond096')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond097')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond097', mesh)
ob1.data.transform([[ 0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, 0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 8.4891, 0.1944, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond097')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond098')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond098', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.1649, 0.9863, 0.0000, 0.0000], \
[ -0.9863, -0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 9.5070, -1.0147, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond098')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond099')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond099', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.1649, 0.9863, 0.0000, 0.0000], \
[ -0.9863, -0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -1.0793, -1.0147, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond099')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond100')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond100', mesh)
ob1.data.transform([[ -0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, -0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.0793, 6.8531, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond100')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond101')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond101', mesh)
ob1.data.transform([[ -0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, -0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, -0.3613, 0.0000, 0.0000], \
[ 0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.8604, 3.9339, -0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond101')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond102')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond102', mesh)
ob1.data.transform([[ -0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, -0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 8.4891, 0.1944, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond102')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond103')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond103', mesh)
ob1.data.transform([[ 0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, 0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.9479, 0.3187, 0.0000, 0.0000], \
[ -0.3187, 0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.5673, -1.9045, -0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond103')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond104')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond104', mesh)
ob1.data.transform([[ 0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, 0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.1960, 6.0328, -1.8506)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond104')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond105')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond105', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.1649, -0.9863, 0.0000, 0.0000], \
[ 0.9863, 0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.0793, 1.0147, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond105')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond106')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond106', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.1649, -0.9863, 0.0000, 0.0000], \
[ 0.9863, 0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.0793, 1.0147, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond106')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond107')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond107', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.1649, -0.9863, 0.0000, 0.0000], \
[ 0.9863, 0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.3724, 6.8531, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond107')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond108')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond108', mesh)
ob1.data.transform([[ -0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, -0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.7258, 1.9045, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond108')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond109')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond109', mesh)
ob1.data.transform([[ 0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, 0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -2.0971, 0.1944, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond109')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond110')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond110', mesh)
ob1.data.transform([[ -0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, -0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, 0.7650, 0.0000, 0.0000], \
[ -0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.7258, 1.9045, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond110')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond111')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond111', mesh)
ob1.data.transform([[ -0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, -0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, -0.9552, 0.0000, 0.0000], \
[ 0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 4.2139, 4.8237, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond111')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond112')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond112', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.1649, 0.9863, 0.0000, 0.0000], \
[ -0.9863, -0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 4.2139, -1.0147, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond112')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond113')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond113', mesh)
ob1.data.transform([[ 0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, 0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9479, -0.3187, 0.0000, 0.0000], \
[ 0.3187, -0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -1.5673, 1.9045, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond113')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond114')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond114', mesh)
ob1.data.transform([[ 0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, 0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 8.4891, 0.1944, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond114')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond115')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond115', mesh)
ob1.data.transform([[ -0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, -0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 8.4891, 6.0328, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond115')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond116')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond116', mesh)
ob1.data.transform([[ 0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, 0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.7258, 7.7429, 7.7800)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond116')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond117')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond117', mesh)
ob1.data.transform([[ -0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, -0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.9479, 0.3187, 0.0000, 0.0000], \
[ -0.3187, 0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.8604, 3.9339, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond117')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond118')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond118', mesh)
ob1.data.transform([[ -0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, -0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, 0.7650, 0.0000, 0.0000], \
[ -0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -1.5673, 1.9045, 7.7800)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond118')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond119')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond119', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.1649, 0.9863, 0.0000, 0.0000], \
[ -0.9863, -0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 4.2139, 4.8237, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond119')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond120')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond120', mesh)
ob1.data.transform([[ -0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, -0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -2.0971, 6.0328, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond120')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
# --- Bonds 121-159 --------------------------------------------------------
# The exporter originally emitted one ~20-line copy-pasted stanza per bond.
# Every stanza is identical except for five geometric parameters, so the
# stanzas are collapsed into a data table driven by one loop below.
#
# Per-bond tuple layout:
#   (index, cy, sy, cz, sz, depth, half_depth, location)
# where
#   cy, sy      cos/sin terms of the first transform, a rotation about the
#               Y axis: [[cy, 0, -sy], [0, 1, 0], [sy, 0, cy]]
#   cz, sz      terms of the second transform, a rotation about the Z axis:
#               [[cz, sz, 0], [-sz, cz, 0], [0, 0, 1]]
#   depth       cylinder length passed to bmesh.ops.create_cone
#   half_depth  Z offset recentering the cylinder; kept as the literal the
#               generator wrote (it rounded depth/2 to 4 decimals, e.g.
#               2.8583 -> 1.4291, so recomputing depth/2 would not match
#               byte-for-byte)
#   location    final world-space position assigned to the object
_BOND_SPECS = [
    (121,  0.7795, 0.6263,  0.2960,  0.9552, 2.8583, 1.4291, (-3.7258, -1.9045, -0.3775)),
    (122, -0.2610, 0.9653,  0.9479, -0.3187, 2.8924, 1.4462, (-1.0793,  4.8237,  0.3775)),
    (123, -0.4849, 0.8746,  0.6131, -0.7900, 3.0378, 1.5189, (-1.0793,  4.8237,  3.3237)),
    (124,  0.0000, 1.0000,  0.1649, -0.9863, 2.9597, 1.4799, (-4.2139,  6.8531, -0.3775)),
    (125, -0.4849, 0.8746, -0.6131,  0.7900, 3.0378, 1.5189, ( 0.5494,  2.7248,  9.2531)),
    (126, -0.8156, 0.5786,  0.6440, -0.7650, 2.7318, 1.3659, (-4.2139,  1.0147,  4.0788)),
    (127, -0.4849, 0.8746, -0.6131,  0.7900, 3.0378, 1.5189, ( 5.8426,  2.7248,  1.8506)),
    (128, -0.2610, 0.9653, -0.9479, -0.3187, 2.8924, 1.4462, (-1.5673,  1.9045,  0.3775)),
    (129,  0.0000, 1.0000,  0.1649, -0.9863, 2.9597, 1.4799, (-4.2139,  1.0147,  4.0788)),
    (130,  0.0000, 1.0000, -0.1649,  0.9863, 2.9597, 1.4799, (-1.0793,  4.8237,  0.3775)),
    (131, -0.4849, 0.8746, -0.6131,  0.7900, 3.0378, 1.5189, ( 5.8426,  2.7248,  9.2531)),
    (132,  0.0000, 1.0000, -0.1649, -0.9863, 2.9597, 1.4799, ( 6.8604,  3.9339,  7.0250)),
    (133,  0.8156, 0.5786,  0.6440, -0.7650, 2.7318, 1.3659, ( 6.3724,  6.8531, -0.3775)),
    (134, -0.4849, 0.8746, -0.6131, -0.7900, 3.0378, 1.5189, (-2.0971,  6.0328,  5.5519)),
    (135,  0.8156, 0.5786,  0.6440, -0.7650, 2.7318, 1.3659, ( 8.4891,  6.0328, -1.8506)),
    (136, -0.7795, 0.6263,  0.2960,  0.9552, 2.8583, 1.4291, (-2.0971,  0.1944,  5.5519)),
    (137,  0.7795, 0.6263,  0.2960,  0.9552, 2.8583, 1.4291, ( 6.8604,  3.9339,  7.0250)),
    (138,  0.8156, 0.5786,  0.6440, -0.7650, 2.7318, 1.3659, ( 1.0793,  6.8531, -0.3775)),
    (139,  0.4849, 0.8746, -0.6131, -0.7900, 3.0378, 1.5189, (-2.0971,  6.0328,  5.5519)),
    (140,  0.0000, 1.0000, -0.1649, -0.9863, 2.9597, 1.4799, ( 6.8604,  3.9339, -0.3775)),
    (141, -0.4849, 0.8746, -0.6131,  0.7900, 3.0378, 1.5189, ( 1.0793,  1.0147, -0.3775)),
    (142, -0.4849, 0.8746, -0.6131, -0.7900, 3.0378, 1.5189, (-2.0971,  0.1944,  5.5519)),
    (143, -0.5443, 0.8389, -0.9324,  0.3613, 2.7062, 1.3531, ( 3.1960,  0.1944,  5.5519)),
    (144, -0.4849, 0.8746, -0.6131, -0.7900, 3.0378, 1.5189, ( 8.4891,  6.0328,  5.5519)),
    (145, -0.7795, 0.6263,  0.2960, -0.9552, 2.8583, 1.4291, ( 0.5494,  2.7248,  9.2531)),
    (146,  0.4849, 0.8746, -0.6131, -0.7900, 3.0378, 1.5189, (-2.0971,  6.0328, -1.8506)),
    (147,  0.2610, 0.9653,  0.9479, -0.3187, 2.8924, 1.4462, (-1.0793,  4.8237,  3.3237)),
    (148,  0.8156, 0.5786,  0.6440, -0.7650, 2.7318, 1.3659, (-2.0971,  0.1944, -1.8506)),
    (149,  0.5443, 0.8389, -0.9324,  0.3613, 2.7062, 1.3531, ( 8.4891,  0.1944, -1.8506)),
    (150,  0.8156, 0.5786,  0.6440, -0.7650, 2.7318, 1.3659, ( 3.1960,  6.0328, -1.8506)),
    (151,  0.8156, 0.5786,  0.6440, -0.7650, 2.7318, 1.3659, (-4.2139,  1.0147,  7.0250)),
    (152,  0.0000, 1.0000, -0.1649,  0.9863, 2.9597, 1.4799, (-1.0793, -1.0147,  0.3775)),
    (153,  0.7795, 0.6263,  0.2960,  0.9552, 2.8583, 1.4291, (-3.7258,  3.9339, -0.3775)),
    (154, -0.7795, 0.6263,  0.2960, -0.9552, 2.8583, 1.4291, ( 5.8426,  2.7248,  1.8506)),
    (155, -0.5443, 0.8389, -0.9324, -0.3613, 2.7062, 1.3531, ( 5.8426,  2.7248,  9.2531)),
    (156,  0.2610, 0.9653, -0.9479, -0.3187, 2.8924, 1.4462, (-1.5673,  7.7429,  3.3237)),
    (157, -0.8156, 0.5786, -0.6440, -0.7650, 2.7318, 1.3659, (-0.5494,  3.1136,  5.5519)),
    (158, -0.2610, 0.9653,  0.9479, -0.3187, 2.8924, 1.4462, (-1.0793,  4.8237,  7.7800)),
    (159,  0.0000, 1.0000, -0.1649,  0.9863, 2.9597, 1.4799, ( 9.5070,  4.8237,  7.7800)),
]

# Module-level loop (not a function) so that bm/mesh/ob1/mat stay bound at
# module scope exactly as the original straight-line stanzas left them.
for _idx, _cy, _sy, _cz, _sz, _depth, _half, _loc in _BOND_SPECS:
    # Build the bond cylinder: a thin capped cone with equal end diameters.
    bm = bmesh.new()
    bmesh.ops.create_cone(bm, cap_ends=True, cap_tris=True, segments=32,
                          diameter1=0.0500, diameter2=0.0500, depth=_depth)
    # Shift so the cylinder extends from the origin along +Z rather than
    # being centered on it (uses the generator's pre-rounded half depth).
    bmesh.ops.translate(bm, verts=bm.verts, vec=(0, 0, _half))
    mesh = bpy.data.meshes.new('structure.meshBond%d' % _idx)
    bm.to_mesh(mesh)
    ob1 = bpy.data.objects.new('structure.Bond%d' % _idx, mesh)
    # Orient the mesh: first rotate about Y, then about Z — the same
    # two-step transform sequence the generator emitted per stanza.
    ob1.data.transform([[_cy, 0.0, -_sy, 0.0],
                        [0.0, 1.0, 0.0, 0.0],
                        [_sy, 0.0, _cy, 0.0],
                        [0.0, 0.0, 0.0, 1.0]])
    ob1.data.transform([[_cz, _sz, 0.0, 0.0],
                        [-_sz, _cz, 0.0, 0.0],
                        [0.0, 0.0, 1.0, 0.0],
                        [0.0, 0.0, 0.0, 1.0]])
    ob1.location = _loc
    bpy.context.scene.objects.link(ob1)
    # One flat-black material per bond, mirroring the generated output.
    mat = bpy.data.materials.new('structure.material.Bond%d' % _idx)
    mat.diffuse_color = (0.0, 0.0, 0.0)
    mat.specular_color = (0, 0, 0)
    ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond160')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond160', mesh)
ob1.data.transform([[ -0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, -0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.0793, 1.0147, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond160')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond161')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond161', mesh)
ob1.data.transform([[ 0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, 0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.7258, 7.7429, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond161')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond162')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond162', mesh)
ob1.data.transform([[ -0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, -0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 9.5070, 4.8237, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond162')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond163')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond163', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.1649, 0.9863, 0.0000, 0.0000], \
[ -0.9863, 0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -1.5673, 1.9045, 7.7800)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond163')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond164')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond164', mesh)
ob1.data.transform([[ 0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, 0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9479, -0.3187, 0.0000, 0.0000], \
[ 0.3187, -0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 9.0190, 1.9045, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond164')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond165')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond165', mesh)
ob1.data.transform([[ -0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, -0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.7258, 7.7429, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond165')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond166')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond166', mesh)
ob1.data.transform([[ 0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, 0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.3724, 6.8531, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond166')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond167')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond167', mesh)
ob1.data.transform([[ -0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, -0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, -0.3613, 0.0000, 0.0000], \
[ 0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.5673, 3.9339, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond167')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond168')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond168', mesh)
ob1.data.transform([[ -0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, -0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.5673, -1.9045, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond168')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond169')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond169', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.1649, 0.9863, 0.0000, 0.0000], \
[ -0.9863, -0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 9.5070, -1.0147, 7.7800)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond169')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond170')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond170', mesh)
ob1.data.transform([[ 0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, 0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -2.0971, 6.0328, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond170')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond171')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond171', mesh)
ob1.data.transform([[ 0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, 0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -2.0971, 6.0328, -1.8506)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond171')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond172')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond172', mesh)
ob1.data.transform([[ 0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, 0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 4.2139, 4.8237, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond172')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond173')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond173', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.1649, -0.9863, 0.0000, 0.0000], \
[ 0.9863, 0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.0793, 6.8531, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond173')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond174')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond174', mesh)
ob1.data.transform([[ 0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, 0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 9.0190, 7.7429, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond174')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond175')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond175', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.1649, -0.9863, 0.0000, 0.0000], \
[ 0.9863, 0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -4.2139, 6.8531, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond175')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond176')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond176', mesh)
ob1.data.transform([[ 0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, 0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 9.0190, 1.9045, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond176')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond177')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond177', mesh)
ob1.data.transform([[ -0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, -0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, 0.7650, 0.0000, 0.0000], \
[ -0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 5.8426, 2.7248, 1.8506)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond177')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond178')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond178', mesh)
ob1.data.transform([[ 0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, 0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9479, 0.3187, 0.0000, 0.0000], \
[ -0.3187, -0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.3724, 1.0147, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond178')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond179')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond179', mesh)
ob1.data.transform([[ 0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, 0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9479, -0.3187, 0.0000, 0.0000], \
[ 0.3187, -0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.7258, 7.7429, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond179')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond180')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond180', mesh)
ob1.data.transform([[ 0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, 0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -2.0971, 6.0328, -1.8506)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond180')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond181')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond181', mesh)
ob1.data.transform([[ -0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, -0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 8.4891, 6.0328, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond181')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond182')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond182', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.1649, -0.9863, 0.0000, 0.0000], \
[ 0.9863, -0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.8604, 3.9339, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond182')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond183')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond183', mesh)
ob1.data.transform([[ -0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, -0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.9479, 0.3187, 0.0000, 0.0000], \
[ -0.3187, 0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -3.7258, 3.9339, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond183')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond184')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond184', mesh)
ob1.data.transform([[ 0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, 0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.1960, 6.0328, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond184')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond185')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond185', mesh)
ob1.data.transform([[ -0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, -0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -2.0971, 0.1944, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond185')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond186')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond186', mesh)
ob1.data.transform([[ 0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, 0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -2.0971, 0.1944, -1.8506)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond186')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond187')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond187', mesh)
ob1.data.transform([[ -0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, -0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, -0.3613, 0.0000, 0.0000], \
[ 0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.8604, 3.9339, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond187')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond188')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond188', mesh)
ob1.data.transform([[ 0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, 0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 9.5070, 4.8237, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond188')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond189')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond189', mesh)
ob1.data.transform([[ 0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, 0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 8.4891, 6.0328, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond189')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond190')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond190', mesh)
ob1.data.transform([[ 0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, 0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.1960, 0.1944, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond190')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond191')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond191', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.1649, -0.9863, 0.0000, 0.0000], \
[ 0.9863, 0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.3724, 6.8531, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond191')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
# Bonds 192-230 were originally emitted as 39 identical unrolled stanzas.
# Every stanza differs only in five values, so the data is tabulated and a
# single loop builds each bond cylinder:
#   depth      - cylinder length passed to bmesh.ops.create_cone
#   half       - pre-rounded depth/2 used to shift the cylinder along +Z
#                (kept explicit: the generator rounded it independently of
#                depth, e.g. 2.8583 -> 1.4291, 2.9597 -> 1.4799)
#   (ya, yb)   - cos/sin pair of the rotation about the Y axis,
#                applied as [[ya, 0, -yb], [0, 1, 0], [yb, 0, ya]]
#   (zc, zs)   - first-row pair of the rotation about the Z axis,
#                applied as [[zc, zs], [-zs, zc]] in the XY plane
#   (x, y, z)  - final object location in world space
# Entry format: (index, depth, half, (ya, yb), (zc, zs), (x, y, z))
_bond_specs = [
    (192, 2.8924, 1.4462, (-0.2610, 0.9653), (-0.9479, -0.3187), (3.7258, 1.9045, 7.7800)),
    (193, 2.8583, 1.4291, (0.7795, 0.6263), (0.2960, 0.9552), (6.8604, -1.9045, -0.3775)),
    (194, 2.7062, 1.3531, (0.5443, 0.8389), (-0.9324, 0.3613), (9.5070, 4.8237, 7.7800)),
    (195, 2.7062, 1.3531, (0.5443, 0.8389), (-0.9324, 0.3613), (9.5070, -1.0147, 7.7800)),
    (196, 3.0378, 1.5189, (0.4849, 0.8746), (-0.6131, -0.7900), (-1.5673, 1.9045, 7.7800)),
    (197, 2.8924, 1.4462, (-0.2610, 0.9653), (-0.9479, -0.3187), (-1.5673, 7.7429, 7.7800)),
    (198, 2.7318, 1.3659, (-0.8156, 0.5786), (0.6440, -0.7650), (8.4891, 0.1944, 5.5519)),
    (199, 2.9597, 1.4799, (0.0000, 1.0000), (-0.1649, -0.9863), (1.5673, 3.9339, 7.0250)),
    (200, 2.7062, 1.3531, (0.5443, 0.8389), (-0.9324, 0.3613), (-1.0793, 4.8237, 7.7800)),
    (201, 2.7062, 1.3531, (-0.5443, 0.8389), (0.9324, 0.3613), (-0.5494, 3.1136, 5.5519)),
    (202, 2.7318, 1.3659, (-0.8156, 0.5786), (0.6440, -0.7650), (3.1960, 0.1944, 5.5519)),
    (203, 2.7062, 1.3531, (-0.5443, 0.8389), (-0.9324, 0.3613), (4.2139, 4.8237, 3.3237)),
    (204, 2.9597, 1.4799, (0.0000, 1.0000), (-0.1649, 0.9863), (9.5070, 4.8237, 0.3775)),
    (205, 2.7318, 1.3659, (0.8156, 0.5786), (0.6440, -0.7650), (-4.2139, 1.0147, -0.3775)),
    (206, 2.7318, 1.3659, (0.8156, 0.5786), (0.6440, -0.7650), (3.1960, 0.1944, -1.8506)),
    (207, 2.8583, 1.4291, (0.7795, 0.6263), (0.2960, 0.9552), (-3.7258, -1.9045, 7.0250)),
    (208, 2.7318, 1.3659, (-0.8156, 0.5786), (0.6440, 0.7650), (3.7258, 1.9045, 7.7800)),
    (209, 2.9597, 1.4799, (0.0000, 1.0000), (0.1649, -0.9863), (-4.2139, 6.8531, 4.0788)),
    (210, 3.0378, 1.5189, (-0.4849, 0.8746), (-0.6131, -0.7900), (9.0190, 1.9045, 3.3237)),
    (211, 2.8924, 1.4462, (0.2610, 0.9653), (0.9479, 0.3187), (6.8604, 3.9339, 7.0250)),
    (212, 2.8583, 1.4291, (0.7795, 0.6263), (0.2960, 0.9552), (1.5673, 3.9339, 7.0250)),
    (213, 3.0378, 1.5189, (0.4849, 0.8746), (-0.6131, -0.7900), (-1.5673, 7.7429, 0.3775)),
    (214, 2.7318, 1.3659, (0.8156, 0.5786), (0.6440, -0.7650), (1.0793, 1.0147, -0.3775)),
    (215, 2.8583, 1.4291, (0.7795, 0.6263), (0.2960, 0.9552), (8.4891, 0.1944, 5.5519)),
    (216, 2.8924, 1.4462, (-0.2610, 0.9653), (-0.9479, -0.3187), (-1.5673, 7.7429, 0.3775)),
    (217, 2.7062, 1.3531, (-0.5443, 0.8389), (-0.9324, -0.3613), (5.8426, 2.7248, 1.8506)),
    (218, 2.8583, 1.4291, (-0.7795, 0.6263), (0.2960, 0.9552), (-2.0971, 6.0328, 5.5519)),
    (219, 2.7062, 1.3531, (-0.5443, 0.8389), (0.9324, 0.3613), (3.7258, 1.9045, 3.3237)),
    (220, 2.7318, 1.3659, (-0.8156, 0.5786), (0.6440, 0.7650), (5.8426, 2.7248, 9.2531)),
    (221, 2.7318, 1.3659, (-0.8156, 0.5786), (0.6440, 0.7650), (0.5494, 2.7248, 9.2531)),
    (222, 2.8583, 1.4291, (-0.7795, 0.6263), (0.2960, 0.9552), (-3.7258, -1.9045, 4.0788)),
    (223, 2.7062, 1.3531, (0.5443, 0.8389), (-0.9324, 0.3613), (-2.0971, 6.0328, -1.8506)),
    (224, 2.8924, 1.4462, (0.2610, 0.9653), (-0.9479, -0.3187), (9.0190, 7.7429, 3.3237)),
    (225, 2.9597, 1.4799, (0.0000, 1.0000), (0.1649, -0.9863), (6.3724, 1.0147, 7.0250)),
    (226, 2.8924, 1.4462, (0.2610, 0.9653), (0.9479, 0.3187), (1.5673, -1.9045, 7.0250)),
    (227, 2.7062, 1.3531, (0.5443, 0.8389), (-0.9324, 0.3613), (3.1960, 6.0328, 5.5519)),
    (228, 2.8583, 1.4291, (-0.7795, 0.6263), (0.2960, 0.9552), (3.1960, 6.0328, 5.5519)),
    (229, 2.8924, 1.4462, (-0.2610, 0.9653), (-0.9479, -0.3187), (9.0190, 7.7429, 0.3775)),
    (230, 2.9597, 1.4799, (0.0000, 1.0000), (0.1649, -0.9863), (-4.2139, 1.0147, 7.0250)),
]

# Loop runs at module level so bm/mesh/ob1/mat stay module-scope names, as in
# the original unrolled code (later generated stanzas reassign them anyway).
for _idx, _depth, _half, (_ya, _yb), (_zc, _zs), _loc in _bond_specs:
    # Build the bond cylinder along +Z, then lift it so its base sits at the
    # mesh origin (the original data puts the bond start at the object origin).
    bm = bmesh.new()
    bmesh.ops.create_cone(bm, cap_ends=True, cap_tris=True, segments=32,
                          diameter1=0.0500, diameter2=0.0500, depth=_depth)
    bmesh.ops.translate(bm, verts=bm.verts, vec=(0, 0, _half))
    mesh = bpy.data.meshes.new('structure.meshBond%d' % _idx)
    bm.to_mesh(mesh)
    bm.free()  # release the bmesh; the original leaked it until GC
    ob1 = bpy.data.objects.new('structure.Bond%d' % _idx, mesh)
    # Rotation about the Y axis: [[c, 0, -s], [0, 1, 0], [s, 0, c]].
    ob1.data.transform([[_ya, 0.0, -_yb, 0.0],
                        [0.0, 1.0, 0.0, 0.0],
                        [_yb, 0.0, _ya, 0.0],
                        [0.0, 0.0, 0.0, 1.0]])
    # Rotation about the Z axis: [[c, s], [-s, c]] acting in the XY plane.
    ob1.data.transform([[_zc, _zs, 0.0, 0.0],
                        [-_zs, _zc, 0.0, 0.0],
                        [0.0, 0.0, 1.0, 0.0],
                        [0.0, 0.0, 0.0, 1.0]])
    ob1.location = _loc
    bpy.context.scene.objects.link(ob1)  # Blender <= 2.79 scene-link API
    # One flat-black material per bond, matching the original naming scheme.
    mat = bpy.data.materials.new('structure.material.Bond%d' % _idx)
    mat.diffuse_color = (0.0, 0.0, 0.0)
    mat.specular_color = (0, 0, 0)
    ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond231')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond231', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.1649, -0.9863, 0.0000, 0.0000], \
[ 0.9863, 0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -4.2139, 1.0147, -0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond231')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond232')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond232', mesh)
ob1.data.transform([[ -0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, -0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.1960, 6.0328, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond232')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond233')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond233', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.1649, 0.9863, 0.0000, 0.0000], \
[ -0.9863, -0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 9.5070, 4.8237, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond233')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond234')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond234', mesh)
ob1.data.transform([[ -0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, -0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, 0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 4.2139, 4.8237, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond234')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond235')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond235', mesh)
ob1.data.transform([[ -0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, -0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, -0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.3724, 1.0147, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond235')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond236')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond236', mesh)
ob1.data.transform([[ -0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, -0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -1.0793, -1.0147, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond236')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond237')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond237', mesh)
ob1.data.transform([[ 0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, 0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.0793, 6.8531, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond237')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond238')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond238', mesh)
ob1.data.transform([[ 0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, 0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.1960, 0.1944, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond238')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond239')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond239', mesh)
ob1.data.transform([[ 0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, 0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.9479, 0.3187, 0.0000, 0.0000], \
[ -0.3187, 0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -3.7258, 3.9339, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond239')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond240')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond240', mesh)
ob1.data.transform([[ 0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, 0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -1.5673, 1.9045, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond240')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond241')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond241', mesh)
ob1.data.transform([[ -0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, -0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, 0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -1.5673, 1.9045, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond241')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond242')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond242', mesh)
ob1.data.transform([[ -0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, -0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, 0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -0.5494, 3.1136, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond242')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond243')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond243', mesh)
ob1.data.transform([[ 0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, 0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.5673, -1.9045, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond243')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond244')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond244', mesh)
ob1.data.transform([[ 0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, 0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.1960, 6.0328, -1.8506)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond244')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond245')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond245', mesh)
ob1.data.transform([[ 0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, 0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.7258, 1.9045, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond245')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond246')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond246', mesh)
ob1.data.transform([[ -0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, -0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.1960, 6.0328, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond246')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond247')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond247', mesh)
ob1.data.transform([[ 0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, 0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.1960, 6.0328, -1.8506)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond247')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond248')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond248', mesh)
ob1.data.transform([[ 0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, 0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -2.0971, 0.1944, -1.8506)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond248')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond249')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond249', mesh)
ob1.data.transform([[ -0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, -0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -1.0793, 4.8237, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond249')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond250')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond250', mesh)
ob1.data.transform([[ 0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, 0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.8604, -1.9045, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond250')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond251')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond251', mesh)
ob1.data.transform([[ 0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, 0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 8.4891, 0.1944, -1.8506)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond251')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond252')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond252', mesh)
ob1.data.transform([[ -0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, -0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -1.5673, 7.7429, 3.3237)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond252')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond253')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond253', mesh)
ob1.data.transform([[ 0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, 0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.9479, 0.3187, 0.0000, 0.0000], \
[ -0.3187, 0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -3.7258, -1.9045, 7.0250)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond253')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8924)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4462))
mesh = bpy.data.meshes.new('structure.meshBond254')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond254', mesh)
ob1.data.transform([[ 0.2610, 0.0000, -0.9653, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.9653, 0.0000, 0.2610, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.9479, 0.3187, 0.0000, 0.0000], \
[ -0.3187, 0.9479, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -3.7258, 3.9339, -0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond254')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond255')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond255', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.1649, 0.9863, 0.0000, 0.0000], \
[ -0.9863, -0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 4.2139, 4.8237, 7.7800)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond255')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.8583)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4291))
mesh = bpy.data.meshes.new('structure.meshBond256')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond256', mesh)
ob1.data.transform([[ -0.7795, 0.0000, -0.6263, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.6263, 0.0000, -0.7795, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.2960, 0.9552, 0.0000, 0.0000], \
[ -0.9552, 0.2960, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -3.7258, 3.9339, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond256')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond257')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond257', mesh)
ob1.data.transform([[ 0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, 0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, -0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( -1.0793, 4.8237, 0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond257')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.9597)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.4799))
mesh = bpy.data.meshes.new('structure.meshBond258')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond258', mesh)
ob1.data.transform([[ 0.0000, 0.0000, -1.0000, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 1.0000, 0.0000, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.1649, -0.9863, 0.0000, 0.0000], \
[ 0.9863, -0.1649, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 1.5673, 3.9339, 4.0788)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond258')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond259')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond259', mesh)
ob1.data.transform([[ -0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, -0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 3.1960, 0.1944, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond259')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7062)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3531))
mesh = bpy.data.meshes.new('structure.meshBond260')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond260', mesh)
ob1.data.transform([[ -0.5443, 0.0000, -0.8389, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8389, 0.0000, -0.5443, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.9324, 0.3613, 0.0000, 0.0000], \
[ -0.3613, 0.9324, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 4.7437, 3.1136, 5.5519)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond260')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 3.0378)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.5189))
mesh = bpy.data.meshes.new('structure.meshBond261')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond261', mesh)
ob1.data.transform([[ 0.4849, 0.0000, -0.8746, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.8746, 0.0000, 0.4849, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ -0.6131, -0.7900, 0.0000, 0.0000], \
[ 0.7900, -0.6131, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 8.4891, 6.0328, -1.8506)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond261')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
bm = bmesh.new()
bmesh.ops.create_cone(bm, cap_ends = True, cap_tris = True, segments = 32, diameter1 = 0.0500, diameter2 = 0.0500, depth = 2.7318)
bmesh.ops.translate(bm, verts=bm.verts, vec = (0, 0, 1.3659))
mesh = bpy.data.meshes.new('structure.meshBond262')
bm.to_mesh(mesh)
ob1 = bpy.data.objects.new('structure.Bond262', mesh)
ob1.data.transform([[ 0.8156, 0.0000, -0.5786, 0.0000], \
[ 0.0000, 1.0000, 0.0000, 0.0000], \
[ 0.5786, 0.0000, 0.8156, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.data.transform([[ 0.6440, -0.7650, 0.0000, 0.0000], \
[ 0.7650, 0.6440, 0.0000, 0.0000], \
[ 0.0000, 0.0000, 1.0000, 0.0000], \
[ 0.0000, 0.0000, 0.0000, 1.0000]])
ob1.location = ( 6.3724, 1.0147, -0.3775)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Bond262')
mat.diffuse_color = (0.0, 0.0, 0.0)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
# Bond cylinders 263-288, refactored from 26 copy-pasted generated blocks
# into a data table plus a single loop that issues the identical Blender
# API calls, in the same order, with the same object/mesh/material names.
#
# Each spec is (depth, half_depth, ya, yb, zc, zd, location):
#   - a thin cylinder (cone with equal 0.05 end diameters) of length `depth`
#     is built along +Z and shifted by `half_depth` so its base sits at the
#     origin;
#   - it is then rotated about Y by [[ya, 0, -yb], [0, 1, 0], [yb, 0, ya]]
#     and about Z by [[zc, zd, 0], [-zd, zc, 0], [0, 0, 1]] (the only two
#     matrix shapes the generator ever emits);
#   - finally the object is placed at `location`.
#
# NOTE: half_depth is stored explicitly rather than computed as depth / 2,
# because the generator rounded the two values independently
# (e.g. depth 2.9597 pairs with 1.4799, not 1.47985).
_BOND_SPECS = [
    (2.8924, 1.4462, -0.2610, 0.9653, 0.9479, 0.3187, (1.5673, -1.9045, 4.0788)),    # Bond263
    (2.8924, 1.4462, -0.2610, 0.9653, 0.9479, 0.3187, (-3.7258, -1.9045, 4.0788)),   # Bond264
    (2.7318, 1.3659, 0.8156, 0.5786, 0.6440, -0.7650, (8.4891, 0.1944, -1.8506)),    # Bond265
    (3.0378, 1.5189, 0.4849, 0.8746, -0.6131, -0.7900, (3.7258, 1.9045, 7.7800)),    # Bond266
    (2.7318, 1.3659, 0.8156, 0.5786, 0.6440, -0.7650, (-2.0971, 0.1944, 5.5519)),    # Bond267
    (3.0378, 1.5189, -0.4849, 0.8746, -0.6131, 0.7900, (6.3724, 1.0147, 7.0250)),    # Bond268
    (2.7318, 1.3659, 0.8156, 0.5786, 0.6440, -0.7650, (1.0793, 1.0147, 7.0250)),     # Bond269
    (2.9597, 1.4799, 0.0000, 1.0000, -0.1649, 0.9863, (-1.0793, -1.0147, 7.7800)),   # Bond270
    (2.9597, 1.4799, 0.0000, 1.0000, 0.1649, -0.9863, (1.0793, 6.8531, -0.3775)),    # Bond271
    (3.0378, 1.5189, 0.4849, 0.8746, -0.6131, -0.7900, (9.0190, 7.7429, 7.7800)),    # Bond272
    (2.9597, 1.4799, 0.0000, 1.0000, -0.1649, 0.9863, (4.2139, -1.0147, 0.3775)),    # Bond273
    (2.7318, 1.3659, 0.8156, 0.5786, 0.6440, -0.7650, (8.4891, 0.1944, 5.5519)),     # Bond274
    (2.9597, 1.4799, 0.0000, 1.0000, 0.1649, -0.9863, (6.3724, 6.8531, -0.3775)),    # Bond275
    (2.8583, 1.4291, -0.7795, 0.6263, 0.2960, -0.9552, (4.2139, 4.8237, 7.7800)),    # Bond276
    (2.7062, 1.3531, -0.5443, 0.8389, -0.9324, 0.3613, (9.5070, -1.0147, 3.3237)),   # Bond277
    (2.8924, 1.4462, -0.2610, 0.9653, -0.9479, -0.3187, (-1.5673, 1.9045, 7.7800)),  # Bond278
    (2.7062, 1.3531, 0.5443, 0.8389, -0.9324, 0.3613, (8.4891, 6.0328, -1.8506)),    # Bond279
    (2.7062, 1.3531, -0.5443, 0.8389, -0.9324, -0.3613, (0.5494, 2.7248, 9.2531)),   # Bond280
    (2.9597, 1.4799, 0.0000, 1.0000, 0.1649, 0.9863, (3.7258, 1.9045, 7.7800)),      # Bond281
    (2.8583, 1.4291, -0.7795, 0.6263, 0.2960, -0.9552, (5.8426, 2.7248, 9.2531)),    # Bond282
    (2.7062, 1.3531, 0.5443, 0.8389, -0.9324, 0.3613, (9.5070, -1.0147, 0.3775)),    # Bond283
    (2.8924, 1.4462, 0.2610, 0.9653, 0.9479, 0.3187, (1.5673, 3.9339, -0.3775)),     # Bond284
    (2.7318, 1.3659, -0.8156, 0.5786, 0.6440, -0.7650, (6.3724, 6.8531, 4.0788)),    # Bond285
    (2.7062, 1.3531, -0.5443, 0.8389, -0.9324, -0.3613, (1.5673, 3.9339, -0.3775)),  # Bond286
    (2.7318, 1.3659, -0.8156, 0.5786, 0.6440, -0.7650, (-2.0971, 6.0328, 5.5519)),   # Bond287
    (3.0378, 1.5189, 0.4849, 0.8746, -0.6131, -0.7900, (3.1960, 0.1944, 5.5519)),    # Bond288
]

for _i, (_depth, _half, _ya, _yb, _zc, _zd, _loc) in enumerate(_BOND_SPECS, 263):
    # Thin cylinder along +Z with its base at the origin.
    bm = bmesh.new()
    bmesh.ops.create_cone(bm, cap_ends=True, cap_tris=True, segments=32,
                          diameter1=0.0500, diameter2=0.0500, depth=_depth)
    bmesh.ops.translate(bm, verts=bm.verts, vec=(0, 0, _half))
    mesh = bpy.data.meshes.new('structure.meshBond%d' % _i)
    bm.to_mesh(mesh)
    ob1 = bpy.data.objects.new('structure.Bond%d' % _i, mesh)
    # Orient the cylinder: rotation about Y ...
    ob1.data.transform([[_ya, 0.0000, -_yb, 0.0000],
                        [0.0000, 1.0000, 0.0000, 0.0000],
                        [_yb, 0.0000, _ya, 0.0000],
                        [0.0000, 0.0000, 0.0000, 1.0000]])
    # ... followed by rotation about Z.
    ob1.data.transform([[_zc, _zd, 0.0000, 0.0000],
                        [-_zd, _zc, 0.0000, 0.0000],
                        [0.0000, 0.0000, 1.0000, 0.0000],
                        [0.0000, 0.0000, 0.0000, 1.0000]])
    ob1.location = _loc
    bpy.context.scene.objects.link(ob1)  # Blender <2.8 scene-link API
    mat = bpy.data.materials.new('structure.material.Bond%d' % _i)
    mat.diffuse_color = (0.0, 0.0, 0.0)
    mat.specular_color = (0, 0, 0)
    ob1.data.materials.append(mat)
# Triangular faces 001-023, refactored from 23 copy-pasted generated blocks
# into a vertex table plus a single loop. The loop issues the identical
# Blender API calls, in the same order, with the same zero-padded
# object/mesh/material names ('structure.Face001' ...).
# Every face is a single triangle over three explicit vertices, rendered
# with the same semi-transparent pink material.
_FACE_VERTS = [
    [(6.8604, -1.9045, 7.0250), (9.5070, -1.0147, 7.7800), (8.4891, 0.1944, 5.5519)],    # Face001
    [(1.0793, 6.8531, 7.0250), (1.5673, 3.9339, 7.0250), (3.1960, 6.0328, 5.5519)],      # Face002
    [(9.0190, 7.7429, 0.3775), (6.3724, 6.8531, -0.3775), (7.3903, 5.6440, 1.8506)],     # Face003
    [(3.7258, 1.9045, 7.7800), (1.0793, 1.0147, 7.0250), (2.0971, -0.1944, 9.2531)],     # Face004
    [(1.5673, 3.9339, -0.3775), (4.2139, 4.8237, 0.3775), (3.1960, 6.0328, -1.8506)],    # Face005
    [(6.8604, 3.9339, -0.3775), (9.5070, 4.8237, 0.3775), (7.3903, 5.6440, 1.8506)],     # Face006
    [(6.8604, -1.9045, 7.0250), (9.5070, -1.0147, 7.7800), (7.3903, -0.1944, 9.2531)],   # Face007
    [(1.5673, 3.9339, 7.0250), (4.2139, 4.8237, 7.7800), (2.0971, 5.6440, 9.2531)],      # Face008
    [(-1.5673, 1.9045, 3.3237), (-4.2139, 1.0147, 4.0788), (-3.1960, -0.1944, 1.8506)],  # Face009
    [(6.3724, 6.8531, -0.3775), (6.8604, 3.9339, -0.3775), (8.4891, 6.0328, -1.8506)],   # Face010
    [(-1.5673, 1.9045, 7.7800), (-4.2139, 1.0147, 7.0250), (-3.1960, -0.1944, 9.2531)],  # Face011
    [(9.0190, 7.7429, 7.7800), (6.3724, 6.8531, 7.0250), (8.4891, 6.0328, 5.5519)],      # Face012
    [(-1.0793, 4.8237, 7.7800), (-1.5673, 7.7429, 7.7800), (-3.1960, 5.6440, 9.2531)],   # Face013
    [(-1.0793, 4.8237, 0.3775), (-1.5673, 7.7429, 0.3775), (-2.0971, 6.0328, -1.8506)],  # Face014
    [(-3.7258, 3.9339, 4.0788), (-1.0793, 4.8237, 3.3237), (-2.0971, 6.0328, 5.5519)],   # Face015
    [(-1.5673, 1.9045, 3.3237), (-1.0793, 4.8237, 3.3237), (-0.5494, 3.1136, 5.5519)],   # Face016
    [(-3.7258, 3.9339, 7.0250), (-1.0793, 4.8237, 7.7800), (-3.1960, 5.6440, 9.2531)],   # Face017
    [(-4.2139, 1.0147, 4.0788), (-3.7258, -1.9045, 4.0788), (-2.0971, 0.1944, 5.5519)],  # Face018
    [(6.8604, 3.9339, -0.3775), (6.3724, 1.0147, -0.3775), (5.8426, 2.7248, 1.8506)],    # Face019
    [(1.5673, 3.9339, 4.0788), (1.0793, 1.0147, 4.0788), (-0.5494, 3.1136, 5.5519)],     # Face020
    [(6.8604, 3.9339, 4.0788), (9.5070, 4.8237, 3.3237), (8.4891, 6.0328, 5.5519)],      # Face021
    [(1.5673, 3.9339, 4.0788), (1.0793, 1.0147, 4.0788), (0.5494, 2.7248, 1.8506)],      # Face022
    [(4.2139, -1.0147, 7.7800), (3.7258, 1.9045, 7.7800), (2.0971, -0.1944, 9.2531)],    # Face023
]

for _i, _verts in enumerate(_FACE_VERTS, 1):
    # One single-triangle mesh per face; no edges, one (0, 1, 2) polygon.
    mesh_data = bpy.data.meshes.new('structure.meshFace%03d' % _i)
    mesh_data.from_pydata(_verts, [], [(0, 1, 2)])
    mesh_data.update()
    ob1 = bpy.data.objects.new('structure.Face%03d' % _i, mesh_data)
    bpy.context.scene.objects.link(ob1)  # Blender <2.8 scene-link API
    mat = bpy.data.materials.new('structure.material.Face%03d' % _i)
    mat.use_transparency = True
    mat.alpha = 0.7000
    mat.diffuse_color = (0.8, 0.1, 0.4)
    mat.specular_color = (0, 0, 0)
    ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace024')
mesh_data.from_pydata([( 4.2139, 4.8237, 3.3237), ( 6.8604, 3.9339, 4.0788), ( 5.8426, 2.7248, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face024', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face024')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace025')
mesh_data.from_pydata([( 4.2139, -1.0147, 0.3775), ( 3.7258, 1.9045, 0.3775), ( 2.0971, -0.1944, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face025', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face025')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace026')
mesh_data.from_pydata([( 1.0793, 6.8531, 4.0788), ( 1.5673, 3.9339, 4.0788), ( 2.0971, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face026', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face026')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace027')
mesh_data.from_pydata([( -1.5673, 1.9045, 7.7800), ( -4.2139, 1.0147, 7.0250), ( -2.0971, 0.1944, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face027', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face027')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace028')
mesh_data.from_pydata([( 3.7258, 1.9045, 0.3775), ( 1.0793, 1.0147, -0.3775), ( 2.0971, -0.1944, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face028', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face028')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace029')
mesh_data.from_pydata([( 1.0793, 1.0147, 4.0788), ( -1.5673, 1.9045, 3.3237), ( -0.5494, 3.1136, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face029', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face029')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace030')
mesh_data.from_pydata([( 1.0793, 6.8531, -0.3775), ( 1.5673, 3.9339, -0.3775), ( 2.0971, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face030', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face030')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace031')
mesh_data.from_pydata([( 9.5070, -1.0147, 3.3237), ( 9.0190, 1.9045, 3.3237), ( 8.4891, 0.1944, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face031', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face031')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace032')
mesh_data.from_pydata([( -3.7258, -1.9045, 7.0250), ( -1.0793, -1.0147, 7.7800), ( -2.0971, 0.1944, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face032', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face032')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace033')
mesh_data.from_pydata([( 1.0793, 1.0147, 4.0788), ( -1.5673, 1.9045, 3.3237), ( 0.5494, 2.7248, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face033', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face033')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace034')
mesh_data.from_pydata([( -1.5673, 1.9045, 0.3775), ( -4.2139, 1.0147, -0.3775), ( -3.1960, -0.1944, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face034', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face034')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace035')
mesh_data.from_pydata([( 1.5673, -1.9045, 7.0250), ( 4.2139, -1.0147, 7.7800), ( 2.0971, -0.1944, 9.2531)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face035', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face035')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace036')
mesh_data.from_pydata([( 6.3724, 6.8531, 4.0788), ( 6.8604, 3.9339, 4.0788), ( 8.4891, 6.0328, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face036', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face036')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace037')
mesh_data.from_pydata([( 4.2139, 4.8237, 0.3775), ( 6.8604, 3.9339, -0.3775), ( 4.7437, 3.1136, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face037', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face037')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace038')
mesh_data.from_pydata([( -1.0793, -1.0147, 3.3237), ( -1.5673, 1.9045, 3.3237), ( -2.0971, 0.1944, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face038', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face038')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace039')
mesh_data.from_pydata([( 6.3724, 6.8531, 7.0250), ( 6.8604, 3.9339, 7.0250), ( 7.3903, 5.6440, 9.2531)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face039', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face039')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace040')
mesh_data.from_pydata([( 3.7258, 1.9045, 7.7800), ( 4.2139, 4.8237, 7.7800), ( 5.8426, 2.7248, 9.2531)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face040', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face040')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace041')
mesh_data.from_pydata([( 4.2139, -1.0147, 0.3775), ( 3.7258, 1.9045, 0.3775), ( 3.1960, 0.1944, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face041', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face041')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace042')
mesh_data.from_pydata([( 4.2139, 4.8237, 3.3237), ( 3.7258, 7.7429, 3.3237), ( 3.1960, 6.0328, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face042', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face042')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace043')
mesh_data.from_pydata([( -3.7258, 3.9339, -0.3775), ( -1.0793, 4.8237, 0.3775), ( -3.1960, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face043', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face043')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace044')
mesh_data.from_pydata([( 4.2139, 4.8237, 3.3237), ( 6.8604, 3.9339, 4.0788), ( 4.7437, 3.1136, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face044', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face044')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace045')
mesh_data.from_pydata([( -1.0793, 4.8237, 7.7800), ( 1.5673, 3.9339, 7.0250), ( -0.5494, 3.1136, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face045', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face045')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace046')
mesh_data.from_pydata([( 6.3724, 1.0147, 4.0788), ( 6.8604, -1.9045, 4.0788), ( 8.4891, 0.1944, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face046', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face046')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace047')
mesh_data.from_pydata([( -1.5673, 1.9045, 3.3237), ( -4.2139, 1.0147, 4.0788), ( -2.0971, 0.1944, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face047', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face047')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace048')
mesh_data.from_pydata([( -1.0793, 4.8237, 3.3237), ( -1.5673, 7.7429, 3.3237), ( -2.0971, 6.0328, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face048', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face048')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace049')
mesh_data.from_pydata([( -1.5673, 1.9045, 7.7800), ( -1.0793, 4.8237, 7.7800), ( -0.5494, 3.1136, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face049', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face049')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace050')
mesh_data.from_pydata([( 9.0190, 7.7429, 3.3237), ( 6.3724, 6.8531, 4.0788), ( 7.3903, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face050', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face050')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace051')
mesh_data.from_pydata([( 6.8604, -1.9045, -0.3775), ( 9.5070, -1.0147, 0.3775), ( 7.3903, -0.1944, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face051', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face051')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace052')
mesh_data.from_pydata([( -3.7258, 3.9339, 7.0250), ( -1.0793, 4.8237, 7.7800), ( -2.0971, 6.0328, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face052', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face052')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace053')
mesh_data.from_pydata([( 9.5070, 4.8237, 0.3775), ( 9.0190, 7.7429, 0.3775), ( 7.3903, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face053', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face053')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace054')
mesh_data.from_pydata([( 1.5673, 3.9339, -0.3775), ( 1.0793, 1.0147, -0.3775), ( 0.5494, 2.7248, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face054', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face054')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace055')
mesh_data.from_pydata([( 9.5070, -1.0147, 0.3775), ( 9.0190, 1.9045, 0.3775), ( 8.4891, 0.1944, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face055', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face055')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace056')
mesh_data.from_pydata([( 9.5070, 4.8237, 7.7800), ( 9.0190, 7.7429, 7.7800), ( 7.3903, 5.6440, 9.2531)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face056', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face056')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace057')
mesh_data.from_pydata([( 6.3724, 1.0147, 7.0250), ( 3.7258, 1.9045, 7.7800), ( 4.7437, 3.1136, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face057', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face057')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace058')
mesh_data.from_pydata([( 6.3724, 6.8531, 4.0788), ( 6.8604, 3.9339, 4.0788), ( 7.3903, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face058', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face058')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace059')
mesh_data.from_pydata([( 9.5070, -1.0147, 0.3775), ( 9.0190, 1.9045, 0.3775), ( 7.3903, -0.1944, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face059', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face059')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace060')
mesh_data.from_pydata([( 3.7258, 1.9045, 7.7800), ( 4.2139, 4.8237, 7.7800), ( 4.7437, 3.1136, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face060', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face060')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace061')
mesh_data.from_pydata([( 6.8604, 3.9339, 7.0250), ( 6.3724, 1.0147, 7.0250), ( 4.7437, 3.1136, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face061', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face061')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace062')
mesh_data.from_pydata([( 9.5070, -1.0147, 7.7800), ( 9.0190, 1.9045, 7.7800), ( 8.4891, 0.1944, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face062', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face062')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace063')
mesh_data.from_pydata([( 4.2139, 4.8237, 7.7800), ( 6.8604, 3.9339, 7.0250), ( 4.7437, 3.1136, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face063', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face063')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace064')
mesh_data.from_pydata([( 4.2139, -1.0147, 7.7800), ( 3.7258, 1.9045, 7.7800), ( 3.1960, 0.1944, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face064', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face064')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace065')
mesh_data.from_pydata([( 9.5070, 4.8237, 3.3237), ( 9.0190, 7.7429, 3.3237), ( 7.3903, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face065', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face065')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace066')
mesh_data.from_pydata([( 9.5070, 4.8237, 3.3237), ( 9.0190, 7.7429, 3.3237), ( 8.4891, 6.0328, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face066', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face066')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace067')
mesh_data.from_pydata([( 6.8604, 3.9339, 7.0250), ( 9.5070, 4.8237, 7.7800), ( 8.4891, 6.0328, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face067', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face067')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace068')
mesh_data.from_pydata([( 9.0190, 1.9045, 0.3775), ( 6.3724, 1.0147, -0.3775), ( 8.4891, 0.1944, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face068', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face068')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace069')
mesh_data.from_pydata([( -3.7258, 3.9339, -0.3775), ( -1.0793, 4.8237, 0.3775), ( -2.0971, 6.0328, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face069', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face069')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace070')
mesh_data.from_pydata([( 6.8604, 3.9339, -0.3775), ( 6.3724, 1.0147, -0.3775), ( 4.7437, 3.1136, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face070', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face070')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace071')
mesh_data.from_pydata([( 3.7258, 1.9045, 0.3775), ( 1.0793, 1.0147, -0.3775), ( 3.1960, 0.1944, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face071', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face071')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace072')
mesh_data.from_pydata([( -4.2139, 6.8531, 4.0788), ( -3.7258, 3.9339, 4.0788), ( -3.1960, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face072', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face072')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace073')
mesh_data.from_pydata([( -4.2139, 6.8531, 7.0250), ( -3.7258, 3.9339, 7.0250), ( -2.0971, 6.0328, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face073', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face073')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace074')
mesh_data.from_pydata([( -1.5673, 1.9045, 3.3237), ( -1.0793, 4.8237, 3.3237), ( 0.5494, 2.7248, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face074', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face074')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace075')
mesh_data.from_pydata([( -3.7258, -1.9045, -0.3775), ( -1.0793, -1.0147, 0.3775), ( -2.0971, 0.1944, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face075', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face075')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace076')
mesh_data.from_pydata([( -1.5673, 1.9045, 7.7800), ( -1.0793, 4.8237, 7.7800), ( 0.5494, 2.7248, 9.2531)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face076', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face076')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace077')
mesh_data.from_pydata([( 3.7258, 7.7429, 3.3237), ( 1.0793, 6.8531, 4.0788), ( 3.1960, 6.0328, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face077', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face077')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace078')
mesh_data.from_pydata([( 9.0190, 7.7429, 3.3237), ( 6.3724, 6.8531, 4.0788), ( 8.4891, 6.0328, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face078', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face078')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace079')
mesh_data.from_pydata([( 9.5070, -1.0147, 3.3237), ( 9.0190, 1.9045, 3.3237), ( 7.3903, -0.1944, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face079', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face079')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace080')
mesh_data.from_pydata([( -1.0793, 4.8237, 7.7800), ( -1.5673, 7.7429, 7.7800), ( -2.0971, 6.0328, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face080', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face080')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
# Triangular faces 'structure.Face081' .. 'structure.Face151'.
#
# Every face is constructed identically: a 3-vertex mesh, an object linked
# into the current scene, and a fresh translucent pink material.  Only the
# vertex coordinates and the zero-padded face number differ between units,
# so the repetitive generated statements are collapsed into a data table
# plus a single loop.  The (index, vertices) pairs are kept in the original
# emission order so objects are created in exactly the same sequence.
# NOTE(review): uses the pre-2.80 Blender API (scene.objects.link,
# Material.use_transparency) consistently with the surrounding file.
_structure_faces = [
    (81, [(-3.7258, -1.9045, -0.3775), (-1.0793, -1.0147, 0.3775), (-3.1960, -0.1944, 1.8506)]),
    (82, [(1.0793, 1.0147, 7.0250), (-1.5673, 1.9045, 7.7800), (0.5494, 2.7248, 9.2531)]),
    (83, [(1.0793, 1.0147, 4.0788), (1.5673, -1.9045, 4.0788), (3.1960, 0.1944, 5.5519)]),
    (84, [(4.2139, 4.8237, 0.3775), (3.7258, 7.7429, 0.3775), (3.1960, 6.0328, -1.8506)]),
    (85, [(-3.7258, 3.9339, 4.0788), (-1.0793, 4.8237, 3.3237), (-3.1960, 5.6440, 1.8506)]),
    (86, [(3.7258, 1.9045, 0.3775), (4.2139, 4.8237, 0.3775), (5.8426, 2.7248, 1.8506)]),
    (87, [(1.0793, 1.0147, -0.3775), (1.5673, -1.9045, -0.3775), (3.1960, 0.1944, -1.8506)]),
    (88, [(3.7258, 1.9045, 3.3237), (1.0793, 1.0147, 4.0788), (3.1960, 0.1944, 5.5519)]),
    (89, [(1.5673, -1.9045, -0.3775), (4.2139, -1.0147, 0.3775), (2.0971, -0.1944, 1.8506)]),
    (90, [(-1.5673, 1.9045, 0.3775), (-1.0793, 4.8237, 0.3775), (0.5494, 2.7248, 1.8506)]),
    (91, [(3.7258, 1.9045, 0.3775), (4.2139, 4.8237, 0.3775), (4.7437, 3.1136, -1.8506)]),
    (92, [(6.8604, -1.9045, 4.0788), (9.5070, -1.0147, 3.3237), (8.4891, 0.1944, 5.5519)]),
    (93, [(9.0190, 7.7429, 0.3775), (6.3724, 6.8531, -0.3775), (8.4891, 6.0328, -1.8506)]),
    (94, [(6.3724, 1.0147, -0.3775), (6.8604, -1.9045, -0.3775), (8.4891, 0.1944, -1.8506)]),
    (95, [(4.2139, 4.8237, 7.7800), (3.7258, 7.7429, 7.7800), (2.0971, 5.6440, 9.2531)]),
    (96, [(6.3724, 1.0147, 4.0788), (3.7258, 1.9045, 3.3237), (5.8426, 2.7248, 1.8506)]),
    (97, [(6.8604, 3.9339, 7.0250), (9.5070, 4.8237, 7.7800), (7.3903, 5.6440, 9.2531)]),
    (98, [(3.7258, 7.7429, 0.3775), (1.0793, 6.8531, -0.3775), (2.0971, 5.6440, 1.8506)]),
    (99, [(-3.7258, -1.9045, 7.0250), (-1.0793, -1.0147, 7.7800), (-3.1960, -0.1944, 9.2531)]),
    (100, [(3.7258, 1.9045, 3.3237), (4.2139, 4.8237, 3.3237), (4.7437, 3.1136, 5.5519)]),
    (101, [(-1.0793, 4.8237, 3.3237), (1.5673, 3.9339, 4.0788), (-0.5494, 3.1136, 5.5519)]),
    (102, [(6.8604, 3.9339, 4.0788), (9.5070, 4.8237, 3.3237), (7.3903, 5.6440, 1.8506)]),
    (103, [(-1.0793, -1.0147, 7.7800), (-1.5673, 1.9045, 7.7800), (-2.0971, 0.1944, 5.5519)]),
    (104, [(4.2139, 4.8237, 7.7800), (6.8604, 3.9339, 7.0250), (5.8426, 2.7248, 9.2531)]),
    (105, [(-1.0793, 4.8237, 7.7800), (1.5673, 3.9339, 7.0250), (0.5494, 2.7248, 9.2531)]),
    (106, [(9.5070, 4.8237, 0.3775), (9.0190, 7.7429, 0.3775), (8.4891, 6.0328, -1.8506)]),
    (107, [(6.8604, 3.9339, 4.0788), (6.3724, 1.0147, 4.0788), (4.7437, 3.1136, 5.5519)]),
    (108, [(-1.5673, 7.7429, 7.7800), (-4.2139, 6.8531, 7.0250), (-2.0971, 6.0328, 5.5519)]),
    (109, [(-4.2139, 6.8531, -0.3775), (-3.7258, 3.9339, -0.3775), (-2.0971, 6.0328, -1.8506)]),
    (110, [(9.5070, -1.0147, 7.7800), (9.0190, 1.9045, 7.7800), (7.3903, -0.1944, 9.2531)]),
    (111, [(1.5673, -1.9045, 4.0788), (4.2139, -1.0147, 3.3237), (2.0971, -0.1944, 1.8506)]),
    (112, [(6.8604, -1.9045, 4.0788), (9.5070, -1.0147, 3.3237), (7.3903, -0.1944, 1.8506)]),
    (113, [(6.3724, 1.0147, 4.0788), (3.7258, 1.9045, 3.3237), (4.7437, 3.1136, 5.5519)]),
    (114, [(3.7258, 7.7429, 7.7800), (1.0793, 6.8531, 7.0250), (3.1960, 6.0328, 5.5519)]),
    (115, [(4.2139, -1.0147, 3.3237), (3.7258, 1.9045, 3.3237), (2.0971, -0.1944, 1.8506)]),
    (116, [(1.5673, 3.9339, 7.0250), (1.0793, 1.0147, 7.0250), (0.5494, 2.7248, 9.2531)]),
    (117, [(6.3724, 1.0147, 7.0250), (6.8604, -1.9045, 7.0250), (8.4891, 0.1944, 5.5519)]),
    (118, [(-4.2139, 6.8531, -0.3775), (-3.7258, 3.9339, -0.3775), (-3.1960, 5.6440, 1.8506)]),
    (119, [(-1.5673, 7.7429, 3.3237), (-4.2139, 6.8531, 4.0788), (-2.0971, 6.0328, 5.5519)]),
    (120, [(-3.7258, -1.9045, 4.0788), (-1.0793, -1.0147, 3.3237), (-3.1960, -0.1944, 1.8506)]),
    (121, [(1.0793, 1.0147, 7.0250), (1.5673, -1.9045, 7.0250), (3.1960, 0.1944, 5.5519)]),
    (122, [(3.7258, 7.7429, 0.3775), (1.0793, 6.8531, -0.3775), (3.1960, 6.0328, -1.8506)]),
    (123, [(1.0793, 1.0147, 4.0788), (1.5673, -1.9045, 4.0788), (2.0971, -0.1944, 1.8506)]),
    (124, [(1.5673, -1.9045, 4.0788), (4.2139, -1.0147, 3.3237), (3.1960, 0.1944, 5.5519)]),
    (125, [(3.7258, 1.9045, 3.3237), (4.2139, 4.8237, 3.3237), (5.8426, 2.7248, 1.8506)]),
    (126, [(-1.5673, 7.7429, 7.7800), (-4.2139, 6.8531, 7.0250), (-3.1960, 5.6440, 9.2531)]),
    (127, [(-1.0793, -1.0147, 7.7800), (-1.5673, 1.9045, 7.7800), (-3.1960, -0.1944, 9.2531)]),
    (128, [(4.2139, -1.0147, 3.3237), (3.7258, 1.9045, 3.3237), (3.1960, 0.1944, 5.5519)]),
    (129, [(1.5673, 3.9339, 7.0250), (1.0793, 1.0147, 7.0250), (-0.5494, 3.1136, 5.5519)]),
    (130, [(1.0793, 6.8531, 7.0250), (1.5673, 3.9339, 7.0250), (2.0971, 5.6440, 9.2531)]),
    (131, [(1.0793, 1.0147, -0.3775), (-1.5673, 1.9045, 0.3775), (0.5494, 2.7248, 1.8506)]),
    (132, [(-4.2139, 1.0147, -0.3775), (-3.7258, -1.9045, -0.3775), (-2.0971, 0.1944, -1.8506)]),
    (133, [(6.8604, 3.9339, 4.0788), (6.3724, 1.0147, 4.0788), (5.8426, 2.7248, 1.8506)]),
    (134, [(-1.5673, 7.7429, 3.3237), (-4.2139, 6.8531, 4.0788), (-3.1960, 5.6440, 1.8506)]),
    (135, [(6.3724, 1.0147, 4.0788), (6.8604, -1.9045, 4.0788), (7.3903, -0.1944, 1.8506)]),
    (136, [(-4.2139, 1.0147, 7.0250), (-3.7258, -1.9045, 7.0250), (-2.0971, 0.1944, 5.5519)]),
    (137, [(4.2139, 4.8237, 0.3775), (3.7258, 7.7429, 0.3775), (2.0971, 5.6440, 1.8506)]),
    (138, [(4.2139, 4.8237, 0.3775), (6.8604, 3.9339, -0.3775), (5.8426, 2.7248, 1.8506)]),
    (139, [(1.5673, -1.9045, -0.3775), (4.2139, -1.0147, 0.3775), (3.1960, 0.1944, -1.8506)]),
    (140, [(-1.5673, 7.7429, 0.3775), (-4.2139, 6.8531, -0.3775), (-2.0971, 6.0328, -1.8506)]),
    (141, [(-4.2139, 1.0147, 4.0788), (-3.7258, -1.9045, 4.0788), (-3.1960, -0.1944, 1.8506)]),
    (142, [(9.0190, 1.9045, 0.3775), (6.3724, 1.0147, -0.3775), (7.3903, -0.1944, 1.8506)]),
    (143, [(9.0190, 1.9045, 3.3237), (6.3724, 1.0147, 4.0788), (7.3903, -0.1944, 1.8506)]),
    (144, [(6.3724, 6.8531, 7.0250), (6.8604, 3.9339, 7.0250), (8.4891, 6.0328, 5.5519)]),
    (145, [(-1.0793, -1.0147, 0.3775), (-1.5673, 1.9045, 0.3775), (-2.0971, 0.1944, -1.8506)]),
    (146, [(4.2139, 4.8237, 7.7800), (3.7258, 7.7429, 7.7800), (3.1960, 6.0328, 5.5519)]),
    (147, [(1.5673, 3.9339, -0.3775), (1.0793, 1.0147, -0.3775), (-0.5494, 3.1136, -1.8506)]),
    (148, [(-4.2139, 6.8531, 4.0788), (-3.7258, 3.9339, 4.0788), (-2.0971, 6.0328, 5.5519)]),
    (149, [(-1.0793, 4.8237, 3.3237), (-1.5673, 7.7429, 3.3237), (-3.1960, 5.6440, 1.8506)]),
    (150, [(9.0190, 7.7429, 7.7800), (6.3724, 6.8531, 7.0250), (7.3903, 5.6440, 9.2531)]),
    (151, [(9.5070, 4.8237, 7.7800), (9.0190, 7.7429, 7.7800), (8.4891, 6.0328, 5.5519)]),
]
for _face_index, _face_verts in _structure_faces:
    # Names reproduce the generated pattern exactly: zero-padded 3 digits.
    _face_name = 'Face{:03d}'.format(_face_index)
    # One triangle: three vertices, no explicit edges, a single face (0, 1, 2).
    mesh_data = bpy.data.meshes.new('structure.mesh' + _face_name)
    mesh_data.from_pydata(_face_verts, [], [(0, 1, 2)])
    mesh_data.update()
    ob1 = bpy.data.objects.new('structure.' + _face_name, mesh_data)
    bpy.context.scene.objects.link(ob1)
    # Shared material recipe: 70%-opaque pink, no specular highlight.
    mat = bpy.data.materials.new('structure.material.' + _face_name)
    mat.use_transparency = True
    mat.alpha = 0.7000
    mat.diffuse_color = (0.8, 0.1, 0.4)
    mat.specular_color = (0, 0, 0)
    ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace152')
mesh_data.from_pydata([( -1.0793, 4.8237, 0.3775), ( 1.5673, 3.9339, -0.3775), ( -0.5494, 3.1136, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face152', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face152')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace153')
mesh_data.from_pydata([( 6.3724, 1.0147, -0.3775), ( 3.7258, 1.9045, 0.3775), ( 4.7437, 3.1136, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face153', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face153')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace154')
mesh_data.from_pydata([( 1.5673, 3.9339, -0.3775), ( 4.2139, 4.8237, 0.3775), ( 2.0971, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face154', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face154')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace155')
mesh_data.from_pydata([( 3.7258, 7.7429, 3.3237), ( 1.0793, 6.8531, 4.0788), ( 2.0971, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face155', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face155')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace156')
mesh_data.from_pydata([( 6.3724, 1.0147, 7.0250), ( 3.7258, 1.9045, 7.7800), ( 5.8426, 2.7248, 9.2531)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face156', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face156')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace157')
mesh_data.from_pydata([( 1.0793, 1.0147, 7.0250), ( -1.5673, 1.9045, 7.7800), ( -0.5494, 3.1136, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face157', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face157')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace158')
mesh_data.from_pydata([( -1.0793, 4.8237, 0.3775), ( 1.5673, 3.9339, -0.3775), ( 0.5494, 2.7248, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face158', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face158')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace159')
mesh_data.from_pydata([( -1.5673, 7.7429, 0.3775), ( -4.2139, 6.8531, -0.3775), ( -3.1960, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face159', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face159')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace160')
mesh_data.from_pydata([( 1.0793, 6.8531, -0.3775), ( 1.5673, 3.9339, -0.3775), ( 3.1960, 6.0328, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face160', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face160')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace161')
mesh_data.from_pydata([( 1.5673, 3.9339, 4.0788), ( 4.2139, 4.8237, 3.3237), ( 3.1960, 6.0328, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face161', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face161')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace162')
mesh_data.from_pydata([( -1.0793, 4.8237, 3.3237), ( 1.5673, 3.9339, 4.0788), ( 0.5494, 2.7248, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face162', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face162')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace163')
mesh_data.from_pydata([( -4.2139, 6.8531, 7.0250), ( -3.7258, 3.9339, 7.0250), ( -3.1960, 5.6440, 9.2531)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face163', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face163')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace164')
mesh_data.from_pydata([( 3.7258, 7.7429, 7.7800), ( 1.0793, 6.8531, 7.0250), ( 2.0971, 5.6440, 9.2531)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face164', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face164')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace165')
mesh_data.from_pydata([( 6.3724, 1.0147, -0.3775), ( 6.8604, -1.9045, -0.3775), ( 7.3903, -0.1944, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face165', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face165')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace166')
mesh_data.from_pydata([( 6.3724, 1.0147, 7.0250), ( 6.8604, -1.9045, 7.0250), ( 7.3903, -0.1944, 9.2531)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face166', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face166')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace167')
mesh_data.from_pydata([( 6.3724, 6.8531, -0.3775), ( 6.8604, 3.9339, -0.3775), ( 7.3903, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face167', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face167')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace168')
mesh_data.from_pydata([( 3.7258, 1.9045, 3.3237), ( 1.0793, 1.0147, 4.0788), ( 2.0971, -0.1944, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face168', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face168')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace169')
mesh_data.from_pydata([( -1.5673, 1.9045, 0.3775), ( -1.0793, 4.8237, 0.3775), ( -0.5494, 3.1136, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face169', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face169')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace170')
mesh_data.from_pydata([( -4.2139, 1.0147, -0.3775), ( -3.7258, -1.9045, -0.3775), ( -3.1960, -0.1944, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face170', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face170')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace171')
mesh_data.from_pydata([( 1.5673, 3.9339, 4.0788), ( 4.2139, 4.8237, 3.3237), ( 2.0971, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face171', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face171')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace172')
mesh_data.from_pydata([( 1.0793, 1.0147, 7.0250), ( 1.5673, -1.9045, 7.0250), ( 2.0971, -0.1944, 9.2531)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face172', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face172')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace173')
mesh_data.from_pydata([( 3.7258, 1.9045, 7.7800), ( 1.0793, 1.0147, 7.0250), ( 3.1960, 0.1944, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face173', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face173')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace174')
mesh_data.from_pydata([( -3.7258, -1.9045, 4.0788), ( -1.0793, -1.0147, 3.3237), ( -2.0971, 0.1944, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face174', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face174')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace175')
mesh_data.from_pydata([( 9.0190, 1.9045, 7.7800), ( 6.3724, 1.0147, 7.0250), ( 7.3903, -0.1944, 9.2531)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face175', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face175')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace176')
mesh_data.from_pydata([( 6.8604, 3.9339, -0.3775), ( 9.5070, 4.8237, 0.3775), ( 8.4891, 6.0328, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face176', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face176')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace177')
mesh_data.from_pydata([( 1.0793, 1.0147, -0.3775), ( -1.5673, 1.9045, 0.3775), ( -0.5494, 3.1136, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face177', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face177')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace178')
mesh_data.from_pydata([( 4.2139, 4.8237, 3.3237), ( 3.7258, 7.7429, 3.3237), ( 2.0971, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face178', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face178')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace179')
mesh_data.from_pydata([( -1.0793, -1.0147, 0.3775), ( -1.5673, 1.9045, 0.3775), ( -3.1960, -0.1944, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face179', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face179')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace180')
mesh_data.from_pydata([( 1.0793, 6.8531, 4.0788), ( 1.5673, 3.9339, 4.0788), ( 3.1960, 6.0328, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face180', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face180')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace181')
mesh_data.from_pydata([( 9.0190, 1.9045, 7.7800), ( 6.3724, 1.0147, 7.0250), ( 8.4891, 0.1944, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face181', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face181')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace182')
mesh_data.from_pydata([( 6.8604, -1.9045, -0.3775), ( 9.5070, -1.0147, 0.3775), ( 8.4891, 0.1944, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face182', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face182')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace183')
mesh_data.from_pydata([( 9.0190, 1.9045, 3.3237), ( 6.3724, 1.0147, 4.0788), ( 8.4891, 0.1944, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face183', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face183')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace184')
mesh_data.from_pydata([( 1.0793, 1.0147, -0.3775), ( 1.5673, -1.9045, -0.3775), ( 2.0971, -0.1944, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face184', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face184')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace185')
mesh_data.from_pydata([( -1.0793, -1.0147, 3.3237), ( -1.5673, 1.9045, 3.3237), ( -3.1960, -0.1944, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face185', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face185')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace186')
mesh_data.from_pydata([( -4.2139, 1.0147, 7.0250), ( -3.7258, -1.9045, 7.0250), ( -3.1960, -0.1944, 9.2531)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face186', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face186')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace187')
mesh_data.from_pydata([( -1.5673, 1.9045, 0.3775), ( -4.2139, 1.0147, -0.3775), ( -2.0971, 0.1944, -1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face187', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face187')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace188')
mesh_data.from_pydata([( 1.5673, -1.9045, 7.0250), ( 4.2139, -1.0147, 7.7800), ( 3.1960, 0.1944, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face188', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face188')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace189')
mesh_data.from_pydata([( 1.5673, 3.9339, 7.0250), ( 4.2139, 4.8237, 7.7800), ( 3.1960, 6.0328, 5.5519)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face189', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face189')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace190')
mesh_data.from_pydata([( 6.8604, 3.9339, 7.0250), ( 6.3724, 1.0147, 7.0250), ( 5.8426, 2.7248, 9.2531)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face190', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face190')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace191')
mesh_data.from_pydata([( 6.3724, 1.0147, -0.3775), ( 3.7258, 1.9045, 0.3775), ( 5.8426, 2.7248, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face191', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face191')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
mesh_data = bpy.data.meshes.new('structure.meshFace192')
mesh_data.from_pydata([( -1.0793, 4.8237, 0.3775), ( -1.5673, 7.7429, 0.3775), ( -3.1960, 5.6440, 1.8506)], [], [(0, 1, 2)])
mesh_data.update()
ob1 = bpy.data.objects.new('structure.Face192', mesh_data)
bpy.context.scene.objects.link(ob1)
mat = bpy.data.materials.new('structure.material.Face192')
mat.use_transparency = True
mat.alpha = 0.7000
mat.diffuse_color = (0.8, 0.1, 0.4)
mat.specular_color = (0, 0, 0)
ob1.data.materials.append(mat)
# Restrict the selection to the atom spheres, then smooth-shade the
# selected objects; everything else (faces, etc.) is deselected.
for scene_object in bpy.data.objects:
    scene_object.select = scene_object.name.startswith('structure.Atom')
bpy.ops.object.shade_smooth()
|
qifeigit/scikit-learn | refs/heads/master | sklearn/metrics/tests/test_pairwise.py | 105 | import numpy as np
from numpy import linalg
from scipy.sparse import dok_matrix, csr_matrix, issparse
from scipy.spatial.distance import cosine, cityblock, minkowski, wminkowski
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_true
from sklearn.externals.six import iteritems
from sklearn.metrics.pairwise import euclidean_distances
from sklearn.metrics.pairwise import manhattan_distances
from sklearn.metrics.pairwise import linear_kernel
from sklearn.metrics.pairwise import chi2_kernel, additive_chi2_kernel
from sklearn.metrics.pairwise import polynomial_kernel
from sklearn.metrics.pairwise import rbf_kernel
from sklearn.metrics.pairwise import sigmoid_kernel
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.metrics.pairwise import cosine_distances
from sklearn.metrics.pairwise import pairwise_distances
from sklearn.metrics.pairwise import pairwise_distances_argmin_min
from sklearn.metrics.pairwise import pairwise_distances_argmin
from sklearn.metrics.pairwise import pairwise_kernels
from sklearn.metrics.pairwise import PAIRWISE_KERNEL_FUNCTIONS
from sklearn.metrics.pairwise import PAIRWISE_DISTANCE_FUNCTIONS
from sklearn.metrics.pairwise import PAIRED_DISTANCES
from sklearn.metrics.pairwise import check_pairwise_arrays
from sklearn.metrics.pairwise import check_paired_arrays
from sklearn.metrics.pairwise import _parallel_pairwise
from sklearn.metrics.pairwise import paired_distances
from sklearn.metrics.pairwise import paired_euclidean_distances
from sklearn.metrics.pairwise import paired_manhattan_distances
from sklearn.preprocessing import normalize
def test_pairwise_distances():
    """Exercise the pairwise_distances helper: string metrics vs. the
    dedicated functions, callable metrics, precomputed input, sparse
    matrices, and error cases."""
    rng = np.random.RandomState(0)
    # Euclidean distance should be equivalent to calling the function.
    X = rng.random_sample((5, 4))
    S = pairwise_distances(X, metric="euclidean")
    S2 = euclidean_distances(X)
    assert_array_almost_equal(S, S2)
    # Euclidean distance, with Y != X.
    Y = rng.random_sample((2, 4))
    S = pairwise_distances(X, Y, metric="euclidean")
    S2 = euclidean_distances(X, Y)
    assert_array_almost_equal(S, S2)
    # Test with tuples as X and Y (array-likes, not just ndarrays).
    X_tuples = tuple([tuple([v for v in row]) for row in X])
    Y_tuples = tuple([tuple([v for v in row]) for row in Y])
    S2 = pairwise_distances(X_tuples, Y_tuples, metric="euclidean")
    assert_array_almost_equal(S, S2)
    # "cityblock" uses sklearn metric, cityblock (function) is scipy.spatial.
    S = pairwise_distances(X, metric="cityblock")
    S2 = pairwise_distances(X, metric=cityblock)
    assert_equal(S.shape[0], S.shape[1])
    assert_equal(S.shape[0], X.shape[0])
    assert_array_almost_equal(S, S2)
    # The manhattan metric should be equivalent to cityblock.
    S = pairwise_distances(X, Y, metric="manhattan")
    S2 = pairwise_distances(X, Y, metric=cityblock)
    assert_equal(S.shape[0], X.shape[0])
    assert_equal(S.shape[1], Y.shape[0])
    assert_array_almost_equal(S, S2)
    # Low-level function for manhattan can divide in blocks to avoid
    # using too much memory during the broadcasting
    S3 = manhattan_distances(X, Y, size_threshold=10)
    assert_array_almost_equal(S, S3)
    # Test cosine as a string metric versus cosine callable
    # "cosine" uses sklearn metric, cosine (function) is scipy.spatial
    S = pairwise_distances(X, Y, metric="cosine")
    S2 = pairwise_distances(X, Y, metric=cosine)
    assert_equal(S.shape[0], X.shape[0])
    assert_equal(S.shape[1], Y.shape[0])
    assert_array_almost_equal(S, S2)
    # Tests that precomputed metric returns pointer to, and not copy of, X.
    S = np.dot(X, X.T)
    S2 = pairwise_distances(S, metric="precomputed")
    assert_true(S is S2)
    # Test with sparse X and Y,
    # currently only supported for Euclidean, L1 and cosine.
    X_sparse = csr_matrix(X)
    Y_sparse = csr_matrix(Y)
    S = pairwise_distances(X_sparse, Y_sparse, metric="euclidean")
    S2 = euclidean_distances(X_sparse, Y_sparse)
    assert_array_almost_equal(S, S2)
    S = pairwise_distances(X_sparse, Y_sparse, metric="cosine")
    S2 = cosine_distances(X_sparse, Y_sparse)
    assert_array_almost_equal(S, S2)
    # Mixed sparse formats (csc/bsr/coo) should also be accepted.
    S = pairwise_distances(X_sparse, Y_sparse.tocsc(), metric="manhattan")
    S2 = manhattan_distances(X_sparse.tobsr(), Y_sparse.tocoo())
    assert_array_almost_equal(S, S2)
    S2 = manhattan_distances(X, Y)
    assert_array_almost_equal(S, S2)
    # Test with scipy.spatial.distance metric, with a kwd
    kwds = {"p": 2.0}
    S = pairwise_distances(X, Y, metric="minkowski", **kwds)
    S2 = pairwise_distances(X, Y, metric=minkowski, **kwds)
    assert_array_almost_equal(S, S2)
    # same with Y = None
    kwds = {"p": 2.0}
    S = pairwise_distances(X, metric="minkowski", **kwds)
    S2 = pairwise_distances(X, metric=minkowski, **kwds)
    assert_array_almost_equal(S, S2)
    # Test that scipy distance metrics throw an error if sparse matrix given
    assert_raises(TypeError, pairwise_distances, X_sparse, metric="minkowski")
    assert_raises(TypeError, pairwise_distances, X, Y_sparse,
                  metric="minkowski")
    # Test that a value error is raised if the metric is unknown
    assert_raises(ValueError, pairwise_distances, X, Y, metric="blah")
def check_pairwise_parallel(func, metric, kwds):
    """Check that ``func`` (pairwise_distances or pairwise_kernels) returns
    the same result with n_jobs=2 as with n_jobs=1, for both dense and
    sparse inputs, and that both settings fail consistently when a metric
    does not support the input type."""
    rng = np.random.RandomState(0)
    for make_data in (np.array, csr_matrix):
        X = make_data(rng.random_sample((5, 4)))
        Y = make_data(rng.random_sample((3, 4)))
        try:
            S = func(X, metric=metric, n_jobs=1, **kwds)
        except (TypeError, ValueError) as exc:
            # Not all metrics support sparse input
            # ValueError may be triggered by bad callable
            if make_data is csr_matrix:
                # Parallel execution must raise the same exception type.
                assert_raises(type(exc), func, X, metric=metric,
                              n_jobs=2, **kwds)
                continue
            else:
                raise
        S2 = func(X, metric=metric, n_jobs=2, **kwds)
        assert_array_almost_equal(S, S2)
        S = func(X, Y, metric=metric, n_jobs=1, **kwds)
        S2 = func(X, Y, metric=metric, n_jobs=2, **kwds)
        assert_array_almost_equal(S, S2)
def test_pairwise_parallel():
    """Yield parallel-consistency checks over a selection of metrics."""
    weighted_kwds = {'w': np.arange(1, 5).astype('double'), 'p': 1}
    cases = [
        (pairwise_distances, 'euclidean', {}),
        (pairwise_distances, wminkowski, weighted_kwds),
        (pairwise_distances, 'wminkowski', weighted_kwds),
        (pairwise_kernels, 'polynomial', {'degree': 1}),
        (pairwise_kernels, callable_rbf_kernel, {'gamma': .1}),
    ]
    for case in cases:
        yield (check_pairwise_parallel,) + case
def test_pairwise_callable_nonstrict_metric():
    """paired_distances should allow a callable metric where
    metric(x, x) != 0.

    Knowing that the callable is a strict metric would allow the diagonal
    to be left uncalculated and set to 0.
    """
    constant_metric = lambda u, v: 5
    result = pairwise_distances([[1]], metric=constant_metric)
    assert_equal(result[0, 0], 5)
def callable_rbf_kernel(x, y, **kwds):
    """Callable version of pairwise.rbf_kernel for single samples."""
    x_2d = np.atleast_2d(x)
    y_2d = np.atleast_2d(y)
    return rbf_kernel(x_2d, y_2d, **kwds)
def test_pairwise_kernels():
    """Exercise the pairwise_kernels helper: string metrics against the
    registered kernel functions, tuple and sparse inputs, and callable
    metrics with keyword arguments."""
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    Y = rng.random_sample((2, 4))
    # Test with all metrics that should be in PAIRWISE_KERNEL_FUNCTIONS.
    test_metrics = ["rbf", "sigmoid", "polynomial", "linear", "chi2",
                    "additive_chi2"]
    for metric in test_metrics:
        function = PAIRWISE_KERNEL_FUNCTIONS[metric]
        # Test with Y=None
        K1 = pairwise_kernels(X, metric=metric)
        K2 = function(X)
        assert_array_almost_equal(K1, K2)
        # Test with Y=Y
        K1 = pairwise_kernels(X, Y=Y, metric=metric)
        K2 = function(X, Y=Y)
        assert_array_almost_equal(K1, K2)
        # Test with tuples as X and Y (array-likes, not just ndarrays).
        X_tuples = tuple([tuple([v for v in row]) for row in X])
        Y_tuples = tuple([tuple([v for v in row]) for row in Y])
        K2 = pairwise_kernels(X_tuples, Y_tuples, metric=metric)
        assert_array_almost_equal(K1, K2)
        # Test with sparse X and Y
        X_sparse = csr_matrix(X)
        Y_sparse = csr_matrix(Y)
        if metric in ["chi2", "additive_chi2"]:
            # these don't support sparse matrices yet
            assert_raises(ValueError, pairwise_kernels,
                          X_sparse, Y=Y_sparse, metric=metric)
            continue
        K1 = pairwise_kernels(X_sparse, Y=Y_sparse, metric=metric)
        assert_array_almost_equal(K1, K2)
    # Test with a callable function, with given keywords.
    metric = callable_rbf_kernel
    kwds = {}
    kwds['gamma'] = 0.1
    K1 = pairwise_kernels(X, Y=Y, metric=metric, **kwds)
    K2 = rbf_kernel(X, Y=Y, **kwds)
    assert_array_almost_equal(K1, K2)
    # callable function, X=Y
    K1 = pairwise_kernels(X, Y=X, metric=metric, **kwds)
    K2 = rbf_kernel(X, Y=X, **kwds)
    assert_array_almost_equal(K1, K2)
def test_pairwise_kernels_filter_param():
    """With filter_params=True, keyword arguments the kernel does not accept
    are silently dropped; without it they must raise a TypeError."""
    rng = np.random.RandomState(0)
    data_left = rng.random_sample((5, 4))
    data_right = rng.random_sample((2, 4))
    expected = rbf_kernel(data_left, data_right, gamma=0.1)
    params = {"gamma": 0.1, "blabla": ":)"}
    computed = pairwise_kernels(data_left, data_right, metric="rbf",
                                filter_params=True, **params)
    assert_array_almost_equal(expected, computed)
    assert_raises(TypeError, pairwise_kernels, data_left, data_right,
                  "rbf", **params)
def test_paired_distances():
    """Exercise the paired_distances helper against the registered paired
    distance functions, sparse input, a callable metric, and the
    length-mismatch error case."""
    rng = np.random.RandomState(0)
    # Euclidean distance should be equivalent to calling the function.
    X = rng.random_sample((5, 4))
    # Euclidean distance, with Y != X.
    Y = rng.random_sample((5, 4))
    for metric, func in iteritems(PAIRED_DISTANCES):
        S = paired_distances(X, Y, metric=metric)
        S2 = func(X, Y)
        assert_array_almost_equal(S, S2)
        S3 = func(csr_matrix(X), csr_matrix(Y))
        assert_array_almost_equal(S, S3)
        if metric in PAIRWISE_DISTANCE_FUNCTIONS:
            # Check that the pairwise_distances implementation
            # gives the same value
            distances = PAIRWISE_DISTANCE_FUNCTIONS[metric](X, Y)
            distances = np.diag(distances)
            assert_array_almost_equal(distances, S)
    # Check the callable implementation
    S = paired_distances(X, Y, metric='manhattan')
    S2 = paired_distances(X, Y, metric=lambda x, y: np.abs(x - y).sum(axis=0))
    assert_array_almost_equal(S, S2)
    # Test that a ValueError is raised when the lengths of X and Y differ.
    Y = rng.random_sample((3, 4))
    assert_raises(ValueError, paired_distances, X, Y)
def test_pairwise_distances_argmin_min():
    """Check pairwise minimum distances computation for any metric.

    Covers dense/sparse inputs, sklearn metrics, scipy metrics given as a
    callable or a string, and compares the chunked implementation against a
    naive one.
    """
    X = [[0], [1]]
    Y = [[-1], [2]]

    Xsp = dok_matrix(X)
    Ysp = csr_matrix(Y, dtype=np.float32)

    # euclidean metric
    D, E = pairwise_distances_argmin_min(X, Y, metric="euclidean")
    D2 = pairwise_distances_argmin(X, Y, metric="euclidean")
    # NOTE(review): the original asserted `D == [0, 1]` twice; the
    # accidental duplicate has been removed.
    assert_array_almost_equal(D, [0, 1])
    assert_array_almost_equal(D2, [0, 1])
    assert_array_almost_equal(E, [1., 1.])

    # sparse matrix case
    Dsp, Esp = pairwise_distances_argmin_min(Xsp, Ysp, metric="euclidean")
    assert_array_equal(Dsp, D)
    assert_array_equal(Esp, E)
    # We don't want np.matrix here
    assert_equal(type(Dsp), np.ndarray)
    assert_equal(type(Esp), np.ndarray)

    # Non-euclidean sklearn metric
    D, E = pairwise_distances_argmin_min(X, Y, metric="manhattan")
    D2 = pairwise_distances_argmin(X, Y, metric="manhattan")
    assert_array_almost_equal(D, [0, 1])
    assert_array_almost_equal(D2, [0, 1])
    assert_array_almost_equal(E, [1., 1.])
    D, E = pairwise_distances_argmin_min(Xsp, Ysp, metric="manhattan")
    D2 = pairwise_distances_argmin(Xsp, Ysp, metric="manhattan")
    assert_array_almost_equal(D, [0, 1])
    assert_array_almost_equal(E, [1., 1.])

    # Non-euclidean Scipy distance (callable)
    D, E = pairwise_distances_argmin_min(X, Y, metric=minkowski,
                                         metric_kwargs={"p": 2})
    assert_array_almost_equal(D, [0, 1])
    assert_array_almost_equal(E, [1., 1.])

    # Non-euclidean Scipy distance (string)
    D, E = pairwise_distances_argmin_min(X, Y, metric="minkowski",
                                         metric_kwargs={"p": 2})
    assert_array_almost_equal(D, [0, 1])
    assert_array_almost_equal(E, [1., 1.])

    # Compare the chunked implementation with a naive argmin over the full
    # distance matrix.
    rng = np.random.RandomState(0)
    X = rng.randn(97, 149)
    Y = rng.randn(111, 149)

    dist = pairwise_distances(X, Y, metric="manhattan")
    dist_orig_ind = dist.argmin(axis=0)
    dist_orig_val = dist[dist_orig_ind, range(len(dist_orig_ind))]

    dist_chunked_ind, dist_chunked_val = pairwise_distances_argmin_min(
        X, Y, axis=0, metric="manhattan", batch_size=50)
    np.testing.assert_almost_equal(dist_orig_ind, dist_chunked_ind, decimal=7)
    np.testing.assert_almost_equal(dist_orig_val, dist_chunked_val, decimal=7)
def test_euclidean_distances():
    """Pairwise Euclidean distances for dense and sparse inputs."""
    X = [[0]]
    Y = [[1], [2]]
    expected = [[1., 2.]]
    assert_array_almost_equal(euclidean_distances(X, Y), expected)

    # The sparse code path must produce the same result.
    assert_array_almost_equal(
        euclidean_distances(csr_matrix(X), csr_matrix(Y)), expected)
# Paired distances

def test_paired_euclidean_distances():
    """Paired Euclidean distances of two aligned point sets."""
    left = [[0], [0]]
    right = [[1], [2]]
    assert_array_almost_equal(
        paired_euclidean_distances(left, right), [1., 2.])
def test_paired_manhattan_distances():
    """Paired Manhattan distances of two aligned point sets."""
    left = [[0], [0]]
    right = [[1], [2]]
    assert_array_almost_equal(
        paired_manhattan_distances(left, right), [1., 2.])
def test_chi_square_kernel():
    """Check chi^2 kernels against an explicit per-pair computation."""
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    Y = rng.random_sample((10, 4))
    K_add = additive_chi2_kernel(X, Y)
    gamma = 0.1
    K = chi2_kernel(X, Y, gamma=gamma)
    # np.float was a deprecated alias of the builtin float (removed in
    # NumPy 1.20); np.float64 is the equivalent concrete dtype.
    assert_equal(K.dtype, np.float64)
    for i, x in enumerate(X):
        for j, y in enumerate(Y):
            chi2 = -np.sum((x - y) ** 2 / (x + y))
            chi2_exp = np.exp(gamma * chi2)
            assert_almost_equal(K_add[i, j], chi2)
            assert_almost_equal(K[i, j], chi2_exp)

    # check diagonal is ones for data with itself
    K = chi2_kernel(Y)
    assert_array_equal(np.diag(K), 1)
    # check off-diagonal is < 1 but > 0:
    assert_true(np.all(K > 0))
    assert_true(np.all(K - np.diag(np.diag(K)) < 1))

    # check that float32 is preserved
    X = rng.random_sample((5, 4)).astype(np.float32)
    Y = rng.random_sample((10, 4)).astype(np.float32)
    K = chi2_kernel(X, Y)
    assert_equal(K.dtype, np.float32)

    # check integer type gets converted,
    # check that zeros are handled
    X = rng.random_sample((10, 4)).astype(np.int32)
    K = chi2_kernel(X, X)
    assert_true(np.isfinite(K).all())
    assert_equal(K.dtype, np.float64)

    # check that kernel of similar things is greater than dissimilar ones
    X = [[.3, .7], [1., 0]]
    Y = [[0, 1], [.9, .1]]
    K = chi2_kernel(X, Y)
    assert_greater(K[0, 0], K[0, 1])
    assert_greater(K[1, 1], K[1, 0])

    # test negative input
    assert_raises(ValueError, chi2_kernel, [[0, -1]])
    assert_raises(ValueError, chi2_kernel, [[0, -1]], [[-1, -1]])
    assert_raises(ValueError, chi2_kernel, [[0, 1]], [[-1, -1]])

    # different n_features in X and Y
    assert_raises(ValueError, chi2_kernel, [[0, 1]], [[.2, .2, .6]])

    # sparse matrices
    assert_raises(ValueError, chi2_kernel, csr_matrix(X), csr_matrix(Y))
    assert_raises(ValueError, additive_chi2_kernel,
                  csr_matrix(X), csr_matrix(Y))
def test_kernel_symmetry():
    """Every valid kernel matrix must equal its own transpose."""
    rnd = np.random.RandomState(0)
    data = rnd.random_sample((5, 4))
    kernels = (linear_kernel, polynomial_kernel, rbf_kernel,
               sigmoid_kernel, cosine_similarity)
    for make_kernel in kernels:
        gram = make_kernel(data, data)
        assert_array_almost_equal(gram, gram.T, 15)
def test_kernel_sparse():
    """Kernels must give identical results on dense and sparse input."""
    rnd = np.random.RandomState(0)
    dense = rnd.random_sample((5, 4))
    sparse = csr_matrix(dense)
    for make_kernel in (linear_kernel, polynomial_kernel, rbf_kernel,
                        sigmoid_kernel, cosine_similarity):
        assert_array_almost_equal(make_kernel(dense, dense),
                                  make_kernel(sparse, sparse))
def test_linear_kernel():
    """The diagonal of a linear Gram matrix holds squared norms."""
    rnd = np.random.RandomState(0)
    data = rnd.random_sample((5, 4))
    gram = linear_kernel(data, data)
    # flat[::6] walks the diagonal of the 5x5 Gram matrix.
    expected = [linalg.norm(row) ** 2 for row in data]
    assert_array_almost_equal(gram.flat[::6], expected)
def test_rbf_kernel():
    """An RBF kernel of a set with itself has a unit diagonal."""
    rnd = np.random.RandomState(0)
    data = rnd.random_sample((5, 4))
    gram = rbf_kernel(data, data)
    # flat[::6] selects the diagonal of the 5x5 kernel matrix.
    assert_array_almost_equal(gram.flat[::6], np.ones(5))
def test_cosine_similarity_sparse_output():
    """``dense_output=False`` must yield a sparse similarity matrix."""
    rnd = np.random.RandomState(0)
    left = csr_matrix(rnd.random_sample((5, 4)))
    right = csr_matrix(rnd.random_sample((3, 4)))

    sparse_sim = cosine_similarity(left, right, dense_output=False)
    assert_true(issparse(sparse_sim))

    # The sparse result must match the dense cosine kernel.
    dense_sim = pairwise_kernels(left, Y=right, metric="cosine")
    assert_array_almost_equal(sparse_sim.todense(), dense_sim)
def test_cosine_similarity():
    """The cosine kernel equals a linear kernel on L2-normalized data."""
    rnd = np.random.RandomState(0)
    X = rnd.random_sample((5, 4))
    Y = rnd.random_sample((3, 4))
    Xcsr = csr_matrix(X)
    Ycsr = csr_matrix(Y)

    for a, b in ((X, None), (X, Y), (Xcsr, None), (Xcsr, Ycsr)):
        cosine_gram = pairwise_kernels(a, Y=b, metric="cosine")
        # After L2 normalization the linear kernel must coincide with it.
        a = normalize(a)
        if b is not None:
            b = normalize(b)
        linear_gram = pairwise_kernels(a, Y=b, metric="linear")
        assert_array_almost_equal(cosine_gram, linear_gram)
def test_check_dense_matrices():
    """With XB=None the pairwise check must hand back XA for both outputs."""
    XA = np.resize(np.arange(40), (5, 8))
    checked_a, checked_b = check_pairwise_arrays(XA, None)
    # Same object, not merely an equal copy.
    assert_true(checked_a is checked_b)
    assert_array_equal(XA, checked_a)
def test_check_XB_returned():
    """Valid XA/XB pairs must pass through the checks unchanged."""
    # Note the second dimension of XB matches XA's.
    XA = np.resize(np.arange(40), (5, 8))
    XB = np.resize(np.arange(32), (4, 8))
    checked_a, checked_b = check_pairwise_arrays(XA, XB)
    assert_array_equal(XA, checked_a)
    assert_array_equal(XB, checked_b)

    # Paired checks additionally require matching sample counts.
    XB = np.resize(np.arange(40), (5, 8))
    checked_a, checked_b = check_paired_arrays(XA, XB)
    assert_array_equal(XA, checked_a)
    assert_array_equal(XB, checked_b)
def test_check_different_dimensions():
    """Feature-dimension mismatches must raise a ValueError."""
    XA = np.resize(np.arange(45), (5, 9))
    XB = np.resize(np.arange(32), (4, 8))
    assert_raises(ValueError, check_pairwise_arrays, XA, XB)

    # Paired arrays must also match in the number of samples.
    XB = np.resize(np.arange(4 * 9), (4, 9))
    assert_raises(ValueError, check_paired_arrays, XA, XB)
def test_check_invalid_dimensions():
    """1-D input arrays are rejected by the pairwise checks."""
    XA = np.arange(45)
    XB = np.resize(np.arange(32), (4, 8))
    assert_raises(ValueError, check_pairwise_arrays, XA, XB)

    XA = np.resize(np.arange(45), (5, 9))
    XB = np.arange(32)
    assert_raises(ValueError, check_pairwise_arrays, XA, XB)
def test_check_sparse_arrays():
    """Sparse inputs must come back as equivalent sparse matrices."""
    rnd = np.random.RandomState(0)
    XA_sparse = csr_matrix(rnd.random_sample((5, 4)))
    XB_sparse = csr_matrix(rnd.random_sample((5, 4)))

    checked_a, checked_b = check_pairwise_arrays(XA_sparse, XB_sparse)
    # '==' on csr matrices is unreliable, so compare via the difference.
    assert_true(issparse(checked_a))
    assert_equal(abs(XA_sparse - checked_a).sum(), 0)
    assert_true(issparse(checked_b))
    assert_equal(abs(XB_sparse - checked_b).sum(), 0)

    # Passing the same sparse matrix twice must also round-trip.
    checked_a, checked_a2 = check_pairwise_arrays(XA_sparse, XA_sparse)
    assert_true(issparse(checked_a))
    assert_equal(abs(XA_sparse - checked_a).sum(), 0)
    assert_true(issparse(checked_a2))
    assert_equal(abs(checked_a2 - checked_a).sum(), 0)
def tuplify(X):
    """Recursively convert a numpy array into nested tuples."""
    if len(X.shape) > 1:
        # Multi-dimensional: tuplify each sub-array.
        return tuple(tuplify(row) for row in X)
    # 1-D: a flat tuple of the contents.
    return tuple(X)
def test_check_tuple_input():
    """Tuple-of-tuples input must be accepted and round-trip unchanged."""
    rnd = np.random.RandomState(0)
    XA_tuples = tuplify(rnd.random_sample((5, 4)))
    XB_tuples = tuplify(rnd.random_sample((5, 4)))
    checked_a, checked_b = check_pairwise_arrays(XA_tuples, XB_tuples)
    assert_array_equal(XA_tuples, checked_a)
    assert_array_equal(XB_tuples, checked_b)
def test_check_preserve_type():
    """Ensure float32 is preserved, and mixed dtypes upcast to float64.

    Uses np.float64 instead of the np.float alias, which was deprecated and
    removed in NumPy 1.20 (np.float was always the builtin float, i.e.
    float64, so behavior is unchanged).
    """
    XA = np.resize(np.arange(40), (5, 8)).astype(np.float32)
    XB = np.resize(np.arange(40), (5, 8)).astype(np.float32)

    XA_checked, XB_checked = check_pairwise_arrays(XA, None)
    assert_equal(XA_checked.dtype, np.float32)

    # both float32
    XA_checked, XB_checked = check_pairwise_arrays(XA, XB)
    assert_equal(XA_checked.dtype, np.float32)
    assert_equal(XB_checked.dtype, np.float32)

    # mismatched A
    XA_checked, XB_checked = check_pairwise_arrays(XA.astype(np.float64),
                                                   XB)
    assert_equal(XA_checked.dtype, np.float64)
    assert_equal(XB_checked.dtype, np.float64)

    # mismatched B
    XA_checked, XB_checked = check_pairwise_arrays(XA,
                                                   XB.astype(np.float64))
    assert_equal(XA_checked.dtype, np.float64)
    assert_equal(XB_checked.dtype, np.float64)
|
lavish205/olympia | refs/heads/master | src/olympia/translations/tests/test_models.py | 1 | # -*- coding: utf-8 -*-
import re
import django
from django.conf import settings
from django.db import connections, reset_queries
from django.test import TransactionTestCase
from django.test.utils import override_settings
from django.utils import translation
from django.utils.functional import lazy
import jinja2
import multidb
import pytest
from mock import patch
from pyquery import PyQuery as pq
from olympia.amo.tests import BaseTestCase
from olympia.translations.models import (
LinkifiedTranslation, NoLinksNoMarkupTranslation, NoLinksTranslation,
PurifiedTranslation, Translation, TranslationSequence)
from olympia.translations.query import order_by_translation
from olympia.translations.tests.testapp.models import (
FancyModel, TranslatedModel, UntranslatedModel)
pytestmark = pytest.mark.django_db
def ids(qs):
    """Collect the ``id`` attribute of each object in *qs*, in order."""
    collected = []
    for obj in qs:
        collected.append(obj.id)
    return collected
class TranslationFixturelessTestCase(BaseTestCase):
    "We want to be able to rollback stuff."

    def test_whitespace(self):
        # Translation.save() is expected to strip surrounding whitespace
        # from the localized string.
        t = Translation(localized_string=' khaaaaaan! ', id=999)
        t.save()
        assert 'khaaaaaan!' == t.localized_string
class TranslationSequenceTestCase(BaseTestCase):
    """
    Make sure automatic translation sequence generation works
    as expected.
    """

    def test_empty_translations_seq(self):
        """Make sure we can handle an empty translation sequence table."""
        TranslationSequence.objects.all().delete()
        newtrans = Translation.new('abc', 'en-us')
        newtrans.save()
        assert newtrans.id > 0, (
            'Empty translation table should still generate an ID.')

    def test_single_translation_sequence(self):
        """Make sure we only ever have one translation sequence."""
        TranslationSequence.objects.all().delete()
        assert TranslationSequence.objects.count() == 0
        # Saving repeatedly must never create a second sequence row.
        for i in range(5):
            newtrans = Translation.new(str(i), 'en-us')
            newtrans.save()
            assert TranslationSequence.objects.count() == 1

    def test_translation_sequence_increases(self):
        """Make sure translation sequence increases monotonically."""
        newtrans1 = Translation.new('abc', 'en-us')
        newtrans1.save()
        newtrans2 = Translation.new('def', 'de')
        newtrans2.save()
        assert newtrans2.pk > newtrans1.pk, (
            'Translation sequence needs to keep increasing.')
class TranslationTestCase(BaseTestCase):
    """End-to-end behavior of translated model fields.

    Covers fetching/creating/updating translations, locale fallback,
    sorting, and the purified/linkified field variants.
    """
    fixtures = ['testapp/test_models.json']

    def setUp(self):
        # Neutralize the outgoing-URL redirector so tests control it
        # explicitly, and pin the active locale to en-US.
        super(TranslationTestCase, self).setUp()
        self.redirect_url = settings.REDIRECT_URL
        self.redirect_secret_key = settings.REDIRECT_SECRET_KEY
        settings.REDIRECT_URL = None
        settings.REDIRECT_SECRET_KEY = 'sekrit'
        translation.activate('en-US')

    def tearDown(self):
        # Restore the settings mutated in setUp.
        settings.REDIRECT_URL = self.redirect_url
        settings.REDIRECT_SECRET_KEY = self.redirect_secret_key
        super(TranslationTestCase, self).tearDown()

    def test_meta_translated_fields(self):
        # _meta.translated_fields should list exactly the translated fields.
        assert not hasattr(UntranslatedModel._meta, 'translated_fields')

        assert set(TranslatedModel._meta.translated_fields) == (
            set([TranslatedModel._meta.get_field('no_locale'),
                 TranslatedModel._meta.get_field('name'),
                 TranslatedModel._meta.get_field('description')]))

        assert set(FancyModel._meta.translated_fields) == (
            set([FancyModel._meta.get_field('purified'),
                 FancyModel._meta.get_field('linkified')]))

    def test_fetch_translations(self):
        """Basic check of fetching translations in the current locale."""
        o = TranslatedModel.objects.get(id=1)
        self.trans_eq(o.name, 'some name', 'en-US')
        self.trans_eq(o.description, 'some description', 'en-US')

    def test_fetch_no_translations(self):
        """Make sure models with no translations aren't harmed."""
        o = UntranslatedModel.objects.get(id=1)
        assert o.number == 17

    def test_fetch_translation_de_locale(self):
        """Check that locale fallbacks work."""
        try:
            translation.activate('de')
            o = TranslatedModel.objects.get(id=1)
            self.trans_eq(o.name, 'German!! (unst unst)', 'de')
            # description has no de translation, so it falls back to en-US.
            self.trans_eq(o.description, 'some description', 'en-US')
        finally:
            translation.deactivate()

    def test_create_translation(self):
        o = TranslatedModel.objects.create(name='english name')

        def get_model():
            return TranslatedModel.objects.get(id=o.id)

        self.trans_eq(o.name, 'english name', 'en-US')
        assert o.description is None

        # Make sure the translation id is stored on the model, not the autoid.
        assert o.name.id == o.name_id

        # Check that a different locale creates a new row with the same id.
        translation.activate('de')
        german = get_model()
        self.trans_eq(o.name, 'english name', 'en-US')

        german.name = u'Gemütlichkeit name'
        german.description = u'clöüserw description'
        german.save()

        self.trans_eq(german.name, u'Gemütlichkeit name', 'de')
        self.trans_eq(german.description, u'clöüserw description', 'de')

        # ids should be the same, autoids are different.
        assert o.name.id == german.name.id
        assert o.name.autoid != german.name.autoid

        # Check that de finds the right translation.
        fresh_german = get_model()
        self.trans_eq(fresh_german.name, u'Gemütlichkeit name', 'de')
        self.trans_eq(fresh_german.description, u'clöüserw description', 'de')

        # Check that en-US has the right translations.
        translation.deactivate()
        english = get_model()
        self.trans_eq(english.name, 'english name', 'en-US')
        english.debug = True
        assert english.description is None

        english.description = 'english description'
        english.save()

        fresh_english = get_model()
        self.trans_eq(
            fresh_english.description, 'english description', 'en-US')
        assert fresh_english.description.id == fresh_german.description.id

    def test_update_translation(self):
        o = TranslatedModel.objects.get(id=1)
        translation_id = o.name.autoid

        o.name = 'new name'
        o.save()

        o = TranslatedModel.objects.get(id=1)
        self.trans_eq(o.name, 'new name', 'en-US')
        # Make sure it was an update, not an insert.
        assert o.name.autoid == translation_id

    def test_create_with_dict(self):
        # Set translations with a dict.
        strings = {'en-US': 'right language', 'de': 'wrong language'}
        o = TranslatedModel.objects.create(name=strings)

        # Make sure we get the English text since we're in en-US.
        self.trans_eq(o.name, 'right language', 'en-US')

        # Check that de was set.
        translation.activate('de')
        o = TranslatedModel.objects.get(id=o.id)
        self.trans_eq(o.name, 'wrong language', 'de')

        # We're in de scope, so we should see the de text.
        de = TranslatedModel.objects.create(name=strings)
        self.trans_eq(o.name, 'wrong language', 'de')

        # Make sure en-US was still set.
        translation.deactivate()
        o = TranslatedModel.objects.get(id=de.id)
        self.trans_eq(o.name, 'right language', 'en-US')

    def test_update_with_dict(self):
        def get_model():
            return TranslatedModel.objects.get(id=1)

        # There's existing en-US and de strings.
        strings = {'de': None, 'fr': 'oui'}

        # Don't try checking that the model's name value is en-US. It will be
        # one of the other locales, but we don't know which one. You just set
        # the name to a dict, deal with it.
        m = get_model()
        m.name = strings
        m.save()

        # en-US was not touched.
        self.trans_eq(get_model().name, 'some name', 'en-US')

        # de was updated to NULL, so it falls back to en-US.
        translation.activate('de')
        self.trans_eq(get_model().name, 'some name', 'en-US')

        # fr was added.
        translation.activate('fr')
        self.trans_eq(get_model().name, 'oui', 'fr')

    def test_dict_bad_locale(self):
        # Unknown locales in the dict ('xxx') are silently dropped.
        m = TranslatedModel.objects.get(id=1)
        m.name = {'de': 'oof', 'xxx': 'bam', 'es': 'si'}
        m.save()

        ts = Translation.objects.filter(id=m.name_id)
        assert sorted(ts.values_list('locale', flat=True)) == (
            ['de', 'en-US', 'es'])

    def test_sorting(self):
        """Test translation comparisons in Python code."""
        b = Translation.new('bbbb', 'de')
        a = Translation.new('aaaa', 'de')
        c = Translation.new('cccc', 'de')
        assert sorted([c, a, b]) == [a, b, c]

    def test_sorting_en(self):
        q = TranslatedModel.objects.all()
        expected = [4, 1, 3]

        assert ids(order_by_translation(q, 'name')) == expected
        assert ids(order_by_translation(q, '-name')) == (
            list(reversed(expected)))

    def test_order_by_translations_query_uses_left_outer_join(self):
        translation.activate('de')
        qs = TranslatedModel.objects.all()
        query = unicode(order_by_translation(qs, 'name').query)

        # There should be 2 LEFT OUTER JOIN to find translations matching
        # current language and fallback.
        joins = re.findall('LEFT OUTER JOIN `translations`', query)
        assert len(joins) == 2

    def test_sorting_mixed(self):
        translation.activate('de')
        q = TranslatedModel.objects.all()
        expected = [1, 4, 3]

        assert ids(order_by_translation(q, 'name')) == expected
        assert ids(order_by_translation(q, '-name')) == (
            list(reversed(expected)))

    def test_sorting_by_field(self):
        field = TranslatedModel._meta.get_field('default_locale')
        # Fake a get_fallback classmethod returning the default_locale field.
        fallback = classmethod(lambda cls: field)
        with patch.object(TranslatedModel, 'get_fallback',
                          fallback, create=True):
            translation.activate('de')
            qs = TranslatedModel.objects.all()
            expected = [3, 1, 4]

            assert ids(order_by_translation(qs, 'name')) == expected
            assert ids(order_by_translation(qs, '-name')) == (
                list(reversed(expected)))

    def test_new_purified_field(self):
        # This is not a full test of the html sanitizing. We expect the
        # underlying bleach library to have full tests.
        s = '<a id=xx href="http://xxx.com">yay</a> <i>http://yyy.com</i>'
        m = FancyModel.objects.create(purified=s)

        doc = pq(m.purified.localized_string_clean)
        assert doc('a[href="http://xxx.com"][rel="nofollow"]')[0].text == 'yay'
        assert doc('a[href="http://yyy.com"][rel="nofollow"]')[0].text == (
            'http://yyy.com')
        assert m.purified.localized_string == s

    def test_new_linkified_field(self):
        s = '<a id=xx href="http://xxx.com">yay</a> <i>http://yyy.com</i>'
        m = FancyModel.objects.create(linkified=s)

        doc = pq(m.linkified.localized_string_clean)
        assert doc('a[href="http://xxx.com"][rel="nofollow"]')[0].text == 'yay'
        assert doc('a[href="http://yyy.com"][rel="nofollow"]')[0].text == (
            'http://yyy.com')
        # The <i> tag is not allowed for linkified fields: it is escaped.
        assert not doc('i')
        assert '<i>' in m.linkified.localized_string_clean
        assert m.linkified.localized_string == s

    def test_update_purified_field(self):
        m = FancyModel.objects.get(id=1)
        s = '<a id=xx href="http://xxx.com">yay</a> <i>http://yyy.com</i>'
        m.purified = s
        m.save()

        doc = pq(m.purified.localized_string_clean)
        assert doc('a[href="http://xxx.com"][rel="nofollow"]')[0].text == 'yay'
        assert doc('a[href="http://yyy.com"][rel="nofollow"]')[0].text == (
            'http://yyy.com')
        assert m.purified.localized_string == s

    def test_update_linkified_field(self):
        m = FancyModel.objects.get(id=1)
        s = '<a id=xx href="http://xxx.com">yay</a> <i>http://yyy.com</i>'
        m.linkified = s
        m.save()

        doc = pq(m.linkified.localized_string_clean)
        assert doc('a[href="http://xxx.com"][rel="nofollow"]')[0].text == 'yay'
        assert doc('a[href="http://yyy.com"][rel="nofollow"]')[0].text == (
            'http://yyy.com')
        assert '<i>' in m.linkified.localized_string_clean
        assert m.linkified.localized_string == s

    def test_purified_field_str(self):
        # Stringifying a purified field yields the cleaned html.
        m = FancyModel.objects.get(id=1)

        stringified = u'%s' % m.purified

        doc = pq(stringified)
        assert doc('a[href="http://yyy.com"][rel="nofollow"]')[0].text == (
            'http://yyy.com')
        assert doc('i')[0].text == 'x'

    def test_linkified_field_str(self):
        # Stringifying a linkified field keeps links but escapes markup.
        m = FancyModel.objects.get(id=1)

        stringified = u'%s' % m.linkified

        doc = pq(stringified)
        assert doc('a[href="http://yyy.com"][rel="nofollow"]')[0].text == (
            'http://yyy.com')
        assert not doc('i')
        assert '<i>' in stringified

    def test_purifed_linkified_fields_in_template(self):
        # Jinja2 rendering must use the cleaned strings via __html__().
        m = FancyModel.objects.get(id=1)
        env = jinja2.Environment()

        t = env.from_string('{{ m.purified }}=={{ m.linkified }}')
        s = t.render({'m': m})
        assert s == u'%s==%s' % (m.purified.localized_string_clean,
                                 m.linkified.localized_string_clean)

    def test_outgoing_url(self):
        """
        Make sure linkified field is properly bounced off our outgoing URL
        redirector.
        """
        s = 'I like http://example.org/awesomepage.html .'
        with self.settings(REDIRECT_URL='http://example.com/'):
            m = FancyModel.objects.create(linkified=s)
            """
            assert m.linkified.localized_string_clean == (
                'I like <a rel="nofollow" href="http://example.com/'
                '40979175e3ef6d7a9081085f3b99f2f05447b22ba790130517dd62b7ee59ef94/'
                'http%3A//example.org/'
                'awesomepage.html">http://example.org/awesomepage'
                '.html</a> .')
            """
            doc = pq(m.linkified.localized_string_clean)
            link = doc('a')[0]
            assert link.attrib['href'] == (
                "http://example.com/40979175e3ef6d7a9081085f3b99f2f05447b22ba7"
                "90130517dd62b7ee59ef94/http%3A//example.org/awesomepage.html")
            assert link.attrib['rel'] == "nofollow"
            assert link.text == "http://example.org/awesomepage.html"
            assert m.linkified.localized_string == s

    def test_require_locale(self):
        obj = TranslatedModel.objects.get(id=1)
        assert unicode(obj.no_locale) == 'blammo'
        assert obj.no_locale.locale == 'en-US'

        # Switch the translation to a locale we wouldn't pick up by default.
        obj.no_locale.locale = 'fr'
        obj.no_locale.save()

        obj = TranslatedModel.objects.get(id=1)
        assert unicode(obj.no_locale) == 'blammo'
        assert obj.no_locale.locale == 'fr'

    def test_delete_set_null(self):
        """
        Test that deleting a translation sets the corresponding FK to NULL,
        if it was the only translation for this field.
        """
        obj = TranslatedModel.objects.get(id=1)
        trans_id = obj.description.id
        assert Translation.objects.filter(id=trans_id).count() == 1

        obj.description.delete()

        obj = TranslatedModel.objects.get(id=1)
        assert obj.description_id is None
        assert obj.description is None
        assert not Translation.objects.filter(id=trans_id).exists()

    @patch.object(TranslatedModel, 'get_fallback', create=True)
    def test_delete_keep_other_translations(self, get_fallback):
        # To make sure both translations for the name are used, set the
        # fallback to the second locale, which is 'de'.
        get_fallback.return_value = 'de'

        obj = TranslatedModel.objects.get(id=1)

        orig_name_id = obj.name.id
        assert obj.name.locale.lower() == 'en-us'
        assert Translation.objects.filter(id=orig_name_id).count() == 2

        obj.name.delete()

        obj = TranslatedModel.objects.get(id=1)
        assert Translation.objects.filter(id=orig_name_id).count() == 1

        # We shouldn't have set name_id to None.
        assert obj.name_id == orig_name_id

        # We should find a Translation.
        assert obj.name.id == orig_name_id
        assert obj.name.locale == 'de'
class TranslationMultiDbTests(TransactionTestCase):
    """Query-count checks for translations across multiple databases."""
    fixtures = ['testapp/test_models.json']

    def setUp(self):
        super(TranslationMultiDbTests, self).setUp()
        translation.activate('en-US')

    def tearDown(self):
        self.cleanup_fake_connections()
        super(TranslationMultiDbTests, self).tearDown()

    def reset_queries(self):
        # Django does a separate SQL query once per connection on MySQL, see
        # https://code.djangoproject.com/ticket/16809 ; This pollutes the
        # queries counts, so we initialize a connection cursor early ourselves
        # before resetting queries to avoid this.
        for con in django.db.connections:
            connections[con].cursor()
        reset_queries()

    @property
    def mocked_dbs(self):
        # Two fake slaves that are really copies of the default database.
        return {
            'default': settings.DATABASES['default'],
            'slave-1': settings.DATABASES['default'].copy(),
            'slave-2': settings.DATABASES['default'].copy(),
        }

    def cleanup_fake_connections(self):
        with patch.object(django.db.connections, 'databases', self.mocked_dbs):
            for key in ('default', 'slave-1', 'slave-2'):
                connections[key].close()

    @override_settings(DEBUG=True)
    def test_translations_queries(self):
        # Make sure we are in a clean environment.
        self.reset_queries()
        TranslatedModel.objects.get(pk=1)
        # One query for the object, one for its translations.
        assert len(connections['default'].queries) == 2

    @override_settings(DEBUG=True)
    @patch('multidb.get_slave', lambda: 'slave-2')
    def test_translations_reading_from_multiple_db(self):
        with patch.object(django.db.connections, 'databases', self.mocked_dbs):
            # Make sure we are in a clean environment.
            self.reset_queries()
            TranslatedModel.objects.get(pk=1)
            assert len(connections['default'].queries) == 0
            assert len(connections['slave-1'].queries) == 0
            assert len(connections['slave-2'].queries) == 2

    @override_settings(DEBUG=True)
    @patch('multidb.get_slave', lambda: 'slave-2')
    @pytest.mark.xfail(reason='Needs django-queryset-transform patch to work')
    def test_translations_reading_from_multiple_db_using(self):
        with patch.object(django.db.connections, 'databases', self.mocked_dbs):
            # Make sure we are in a clean environment.
            self.reset_queries()
            TranslatedModel.objects.using('slave-1').get(pk=1)
            assert len(connections['default'].queries) == 0
            assert len(connections['slave-1'].queries) == 2
            assert len(connections['slave-2'].queries) == 0

    @override_settings(DEBUG=True)
    @patch('multidb.get_slave', lambda: 'slave-2')
    def test_translations_reading_from_multiple_db_pinning(self):
        with patch.object(django.db.connections, 'databases', self.mocked_dbs):
            # Make sure we are in a clean environment.
            self.reset_queries()
            # Pinning to master must route both queries to 'default'.
            with multidb.pinning.use_master:
                TranslatedModel.objects.get(pk=1)
                assert len(connections['default'].queries) == 2
                assert len(connections['slave-1'].queries) == 0
                assert len(connections['slave-2'].queries) == 0
class PurifiedTranslationTest(BaseTestCase):
    """Sanitization behavior of PurifiedTranslation.__html__()."""

    def test_output(self):
        assert isinstance(PurifiedTranslation().__html__(), unicode)

    def test_raw_text(self):
        # Surrounding whitespace is stripped from plain text.
        s = u' This is some text '
        x = PurifiedTranslation(localized_string=s)
        assert x.__html__() == 'This is some text'

    def test_allowed_tags(self):
        s = u'<b>bold text</b> or <code>code</code>'
        x = PurifiedTranslation(localized_string=s)
        assert x.__html__() == u'<b>bold text</b> or <code>code</code>'

    def test_forbidden_tags(self):
        s = u'<script>some naughty xss</script>'
        x = PurifiedTranslation(localized_string=s)
        assert x.__html__() == '<script>some naughty xss</script>'

    def test_internal_link(self):
        # Internal (addons.mozilla.org) links keep their href.
        s = u'<b>markup</b> <a href="http://addons.mozilla.org/foo">bar</a>'
        x = PurifiedTranslation(localized_string=s)
        doc = pq(x.__html__())
        links = doc('a[href="http://addons.mozilla.org/foo"][rel="nofollow"]')
        assert links[0].text == 'bar'
        assert doc('b')[0].text == 'markup'

    @patch('olympia.amo.urlresolvers.get_outgoing_url')
    def test_external_link(self, get_outgoing_url_mock):
        # External links are rewritten through the outgoing redirector.
        get_outgoing_url_mock.return_value = 'http://external.url'
        s = u'<b>markup</b> <a href="http://example.com">bar</a>'
        x = PurifiedTranslation(localized_string=s)
        doc = pq(x.__html__())
        links = doc('a[href="http://external.url"][rel="nofollow"]')
        assert links[0].text == 'bar'
        assert doc('b')[0].text == 'markup'

    @patch('olympia.amo.urlresolvers.get_outgoing_url')
    def test_external_text_link(self, get_outgoing_url_mock):
        # Bare URLs in text are linkified and rewritten too.
        get_outgoing_url_mock.return_value = 'http://external.url'
        s = u'<b>markup</b> http://example.com'
        x = PurifiedTranslation(localized_string=s)
        doc = pq(x.__html__())
        links = doc('a[href="http://external.url"][rel="nofollow"]')
        assert links[0].text == 'http://example.com'
        assert doc('b')[0].text == 'markup'
class LinkifiedTranslationTest(BaseTestCase):
    """LinkifiedTranslation allows links only; other markup is escaped."""

    @patch('olympia.amo.urlresolvers.get_outgoing_url')
    def test_allowed_tags(self, get_outgoing_url_mock):
        get_outgoing_url_mock.return_value = 'http://external.url'
        s = u'<a href="http://example.com">bar</a>'
        x = LinkifiedTranslation(localized_string=s)
        doc = pq(x.__html__())
        links = doc('a[href="http://external.url"][rel="nofollow"]')
        assert links[0].text == 'bar'

    def test_forbidden_tags(self):
        # Anything other than <a> is escaped, including <b>.
        s = u'<script>some naughty xss</script> <b>bold</b>'
        x = LinkifiedTranslation(localized_string=s)
        assert x.__html__() == (
            '<script>some naughty xss</script> '
            '<b>bold</b>')
class NoLinksTranslationTest(BaseTestCase):
    """NoLinksTranslation keeps allowed markup but strips every link."""

    def test_allowed_tags(self):
        s = u'<b>bold text</b> or <code>code</code>'
        x = NoLinksTranslation(localized_string=s)
        assert x.__html__() == u'<b>bold text</b> or <code>code</code>'

    def test_forbidden_tags(self):
        s = u'<script>some naughty xss</script>'
        x = NoLinksTranslation(localized_string=s)
        assert x.__html__() == '<script>some naughty xss</script>'

    def test_links_stripped(self):
        # Link with markup.
        s = u'a <a href="http://example.com">link</a> with markup'
        x = NoLinksTranslation(localized_string=s)
        assert x.__html__() == u'a with markup'

        # Text link.
        s = u'a text http://example.com link'
        x = NoLinksTranslation(localized_string=s)
        assert x.__html__() == u'a text link'

        # Text link, markup link, allowed tags, forbidden tags and bad markup.
        s = (u'a <a href="http://example.com">link</a> with markup, a text '
             u'http://example.com link, <b>with allowed tags</b>, '
             u'<script>forbidden tags</script> and <http://bad.markup.com')
        x = NoLinksTranslation(localized_string=s)
        assert x.__html__() == (
            u'a with markup, a text link, '
            u'<b>with allowed tags</b>, '
            u'<script>forbidden tags</script> and')
class NoLinksNoMarkupTranslationTest(BaseTestCase):
    """NoLinksNoMarkupTranslation strips links and escapes all markup."""

    def test_forbidden_tags(self):
        s = u'<script>some naughty xss</script> <b>bold</b>'
        x = NoLinksNoMarkupTranslation(localized_string=s)
        assert x.__html__() == (
            '<script>some naughty xss</script> '
            '<b>bold</b>')

    def test_links_stripped(self):
        # Link with markup.
        s = u'a <a href="http://example.com">link</a> with markup'
        x = NoLinksNoMarkupTranslation(localized_string=s)
        assert x.__html__() == u'a with markup'

        # Text link.
        s = u'a text http://example.com link'
        x = NoLinksNoMarkupTranslation(localized_string=s)
        assert x.__html__() == u'a text link'

        # Text link, markup link, forbidden tags and bad markup.
        s = (u'a <a href="http://example.com">link</a> with markup, a text '
             u'http://example.com link, <b>with forbidden tags</b>, '
             u'<script>forbidden tags</script> and <http://bad.markup.com')
        x = NoLinksNoMarkupTranslation(localized_string=s)
        assert x.__html__() == (
            u'a with markup, a text link, '
            u'<b>with forbidden tags</b>, '
            u'<script>forbidden tags</script> and')
def test_translation_bool():
    """Only a non-blank localized string makes a Translation truthy."""
    def make(string):
        return Translation(localized_string=string)

    assert bool(make('text')) is True
    # Whitespace-only, empty and missing strings are all falsy.
    assert bool(make(' ')) is False
    assert bool(make('')) is False
    assert bool(make(None)) is False
def test_translation_unicode():
    """unicode() of a Translation yields its string, or '' for None."""
    def make(string):
        return Translation(localized_string=string)

    assert unicode(make('hello')) == 'hello'
    assert unicode(make(None)) == ''
def test_comparison_with_lazy():
    # Compares a Translation with a django lazy string in both directions.
    # NOTE(review): the comparison results are discarded, so this test only
    # proves that neither comparison raises. Consider asserting the expected
    # equality -- TODO confirm the intended contract before tightening.
    x = Translation(localized_string='xxx')
    lazy_u = lazy(lambda x: x, unicode)
    x == lazy_u('xxx')
    lazy_u('xxx') == x
|
ivan-fedorov/intellij-community | refs/heads/master | python/testData/inspections/PyTypeCheckerInspection/DictLiterals.py | 49 | def test():
xs = {'foo': 1, 'bar': 2}
for v in xs.values():
print(v + <warning descr="Expected type 'Number', got 'None' instead">None</warning>)
for k in xs.keys():
print(k + <warning descr="Expected type 'Union[str, unicode]', got 'None' instead">None</warning>)
for k in xs:
print(k + <warning descr="Expected type 'Union[str, unicode]', got 'None' instead">None</warning>)
|
Justin-Yuan/Image2Music-Generator | refs/heads/master | library/jython2.5.3/Lib/zlib.py | 82 | """
The functions in this module allow compression and decompression using the
zlib library, which is based on GNU zip.
adler32(string[, start]) -- Compute an Adler-32 checksum.
compress(string[, level]) -- Compress string, with compression level in 1-9.
compressobj([level]) -- Return a compressor object.
crc32(string[, start]) -- Compute a CRC-32 checksum.
decompress(string,[wbits],[bufsize]) -- Decompresses a compressed string.
decompressobj([wbits]) -- Return a decompressor object.
'wbits' is window buffer size.
Compressor objects support compress() and flush() methods; decompressor
objects support decompress() and flush().
"""
import array
import binascii
import jarray
from java.util.zip import Adler32, Deflater, Inflater, DataFormatException
from java.lang import Long, String
from cStringIO import StringIO
class error(Exception):
    """Raised for compression and decompression failures (same name as CPython's ``zlib.error``)."""
    pass
# Constants mirroring CPython's zlib module so client code can use the
# same names against this Jython implementation.
DEFLATED = 8
MAX_WBITS = 15
DEF_MEM_LEVEL = 8
ZLIB_VERSION = "1.1.3"
Z_BEST_COMPRESSION = 9
Z_BEST_SPEED = 1
Z_FILTERED = 1
Z_HUFFMAN_ONLY = 2
Z_DEFAULT_COMPRESSION = -1
Z_DEFAULT_STRATEGY = 0

# Most options are removed because java does not support them
# Z_NO_FLUSH = 0
# Z_SYNC_FLUSH = 2
# Z_FULL_FLUSH = 3
Z_FINISH = 4
# Only Z_FINISH is accepted by compressobj.flush() below.
_valid_flush_modes = (Z_FINISH,)
def adler32(s, value=1):
    """Compute an Adler-32 checksum of *s* via java.util.zip.Adler32.

    Unlike CPython, only the default start value of 1 is supported because
    the Java checksum object cannot be seeded.
    """
    if value != 1:
        raise ValueError, "adler32 only support start value of 1"
    checksum = Adler32()
    # iso-8859-1 maps each char 0-255 to the same byte value.
    checksum.update(String.getBytes(s, 'iso-8859-1'))
    # Truncate the java long to a (possibly negative) 32-bit int,
    # matching CPython 2's signed return value.
    return Long(checksum.getValue()).intValue()
def crc32(string, value=0):
    """Compute a CRC-32 checksum of *string*, seeded with *value*.

    Thin delegation to ``binascii.crc32``, which provides the same
    semantics as CPython's ``zlib.crc32``.
    """
    checksum = binascii.crc32(string, value)
    return checksum
def compress(string, level=6):
    """Compress *string* in one shot using java.util.zip.Deflater.

    *level* must be within 1..9 (Z_BEST_SPEED..Z_BEST_COMPRESSION);
    raises ``error`` otherwise.
    """
    if level < Z_BEST_SPEED or level > Z_BEST_COMPRESSION:
        raise error, "Bad compression level"
    # Second Deflater argument False/0 -> emit the zlib header.
    deflater = Deflater(level, 0)
    string = _to_input(string)
    deflater.setInput(string, 0, len(string))
    deflater.finish()
    return _get_deflate_data(deflater)
def decompress(string, wbits=0, bufsize=16384):
    """Decompress *string* in one shot using java.util.zip.Inflater.

    Only the sign of *wbits* is honoured: a negative value selects raw
    (headerless) deflate ("nowrap" mode). *bufsize* is accepted for CPython
    API compatibility but is not used by this implementation.
    """
    inflater = Inflater(wbits < 0)
    inflater.setInput(_to_input(string))
    return _get_inflate_data(inflater)
class compressobj:
    """Streaming compression object backed by java.util.zip.Deflater.

    Only the sign of *wbits* matters: a negative value produces a raw
    (headerless) deflate stream, as in CPython; *method* and *memLevel*
    are accepted for API compatibility only.
    """
    # all jython uses wbits for is deciding whether to skip the header if it's negative
    def __init__(self, level=6, method=DEFLATED, wbits=MAX_WBITS,
                 memLevel=0, strategy=0):
        if abs(wbits) > MAX_WBITS or abs(wbits) < 8:
            raise ValueError, "Invalid initialization option"
        self.deflater = Deflater(level, wbits < 0)
        self.deflater.setStrategy(strategy)
        if wbits < 0:
            # Drain any initial deflater output in raw mode -- presumably to
            # discard header bytes; TODO confirm against Jython upstream.
            _get_deflate_data(self.deflater)
        self._ended = False

    def compress(self, string):
        """Feed *string* in; return whatever compressed bytes are available now."""
        if self._ended:
            raise error("compressobj may not be used after flush(Z_FINISH)")
        string = _to_input(string)
        self.deflater.setInput(string, 0, len(string))
        return _get_deflate_data(self.deflater)

    def flush(self, mode=Z_FINISH):
        """Finish the stream and return the remaining compressed bytes.

        Only Z_FINISH is supported (see _valid_flush_modes); after a
        Z_FINISH flush the object may not be used again.
        """
        if self._ended:
            raise error("compressobj may not be used after flush(Z_FINISH)")
        if mode not in _valid_flush_modes:
            raise ValueError, "Invalid flush option"
        self.deflater.finish()
        last = _get_deflate_data(self.deflater)
        if mode == Z_FINISH:
            self.deflater.end()
            self._ended = True
        return last
class decompressobj:
    """Streaming decompression object backed by java.util.zip.Inflater.

    Only the sign of *wbits* matters: a negative value selects raw
    (headerless) deflate input, as in CPython.
    """
    # all jython uses wbits for is deciding whether to skip the header if it's negative
    def __init__(self, wbits=MAX_WBITS):
        if abs(wbits) > MAX_WBITS or abs(wbits) < 8:
            raise ValueError, "Invalid initialization option"
        self.inflater = Inflater(wbits < 0)
        self.unused_data = ""
        self._ended = False

    def decompress(self, string, max_length=0):
        """Decompress *string*, returning at most *max_length* bytes if non-zero.

        Leftover input is exposed via ``unconsumed_tail`` (when max_length
        was hit) or ``unused_data`` (when the stream ended early).
        """
        if self._ended:
            raise error("decompressobj may not be used after flush()")
        # unused_data is always "" until inflation is finished; then it is
        # the unused bytes of the input;
        # unconsumed_tail is whatever input was not used because max_length
        # was exceeded before inflation finished.
        # Thus, at most one of {unused_data, unconsumed_tail} may be non-empty.
        self.unused_data = ""
        self.unconsumed_tail = ""
        if max_length < 0:
            raise ValueError("max_length must be a positive integer")
        string = _to_input(string)
        self.inflater.setInput(string)
        inflated = _get_inflate_data(self.inflater, max_length)
        r = self.inflater.getRemaining()
        if r:
            if max_length:
                self.unconsumed_tail = string[-r:]
            else:
                self.unused_data = string[-r:]
        return inflated

    def flush(self, length=None):
        """Return any remaining decompressed data and release the inflater.

        The object may not be used after this call.
        """
        if self._ended:
            raise error("decompressobj may not be used after flush()")
        if length is None:
            length = 0
        elif length <= 0:
            raise ValueError('length must be greater than zero')
        last = _get_inflate_data(self.inflater, length)
        self.inflater.end()
        return last
def _to_input(string):
return string.tostring() if isinstance(string, array.array) else string
def _get_deflate_data(deflater):
    """Drain all currently-available compressed bytes from *deflater*."""
    # 1 KiB working buffer shared across iterations.
    buf = jarray.zeros(1024, 'b')
    s = StringIO()
    while not deflater.finished():
        l = deflater.deflate(buf)
        if l == 0:
            # No more output available right now.
            break
        # String(bytes, hibyte, offset, count): wrap the l valid bytes.
        s.write(String(buf, 0, 0, l))
    s.seek(0)
    return s.read()
def _get_inflate_data(inflater, max_length=0):
    """Drain decompressed bytes from *inflater*.

    Stops after *max_length* bytes when it is non-zero. Java's
    DataFormatException (corrupt input) is translated into this module's
    ``error``.
    """
    buf = jarray.zeros(1024, 'b')
    s = StringIO()
    total = 0
    while not inflater.finished():
        try:
            if max_length:
                # Never read past the caller's limit.
                l = inflater.inflate(buf, 0, min(1024, max_length - total))
            else:
                l = inflater.inflate(buf)
        except DataFormatException, e:
            raise error(str(e))
        if l == 0:
            # No more output available right now.
            break
        total += l
        s.write(String(buf, 0, 0, l))
        if max_length and total == max_length:
            break
    s.seek(0)
    return s.read()
|
JaviMerino/workload-automation | refs/heads/master | wlauto/utils/types.py | 2 | # Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Routines for doing various type conversions. These usually embody some higher-level
semantics than are present in standard Python types (e.g. ``boolean`` will convert the
string ``"false"`` to ``False``, where as non-empty strings are usually considered to be
``True``).
A lot of these are intended to specify type conversions declaratively in places like
``Parameter``'s ``kind`` argument. These are basically "hacks" around the fact that Python
is not the best language to use for configuration.
"""
import os
import re
import math
import shlex
from collections import defaultdict
from wlauto.utils.misc import isiterable, to_identifier
def identifier(text):
    """Converts text to a valid Python identifier by replacing all
    whitespace and punctuation."""
    # Thin wrapper so the conversion API in this module is uniform; the
    # actual work is done by wlauto.utils.misc.to_identifier.
    return to_identifier(text)
def boolean(value):
    """
    Returns bool represented by the value. This is different from
    calling the builtin bool() in that it will interpret string representations.
    e.g. boolean('0') and boolean('false') will both yield False.
    """
    false_strings = ['', '0', 'n', 'no']
    if isinstance(value, basestring):
        value = value.lower()
        # 'false'.startswith(value) also rejects every prefix of "false"
        # ('f', 'fa', 'fal', ...), including the empty string.
        if value in false_strings or 'false'.startswith(value):
            return False
    # Non-strings (and truthy strings) fall back to regular truthiness.
    return bool(value)
def integer(value):
    """Convert *value* to an int.

    String inputs are parsed with base auto-detection (base 0), so binary
    ('0b...'), octal and hex ('0x...') representations are understood.
    """
    if isinstance(value, basestring):
        return int(value, 0)
    return int(value)
def numeric(value):
    """Convert *value* to an int when the conversion is exact, else a float.

    Raises ``ValueError`` if *value* has no straightforward numeric
    conversion.
    """
    if isinstance(value, int):
        return value
    try:
        as_float = float(value)
    except ValueError:
        raise ValueError('Not numeric: {}'.format(value))
    # NaN and infinities have no integer form; hand them back as floats.
    if math.isnan(as_float) or math.isinf(as_float):
        return as_float
    as_int = int(as_float)
    # Best-effort: collapse whole-valued floats down to ints.
    return as_int if as_int == as_float else as_float
def list_of_strs(value):
    """Convert every element of the iterable *value* to a string.

    Raises ``ValueError`` if *value* is not iterable.
    """
    if isiterable(value):
        return map(str, value)
    raise ValueError(value)


list_of_strings = list_of_strs
def list_of_ints(value):
    """Convert every element of the iterable *value* to an ``int``.

    Raises ``ValueError`` if *value* is not iterable.
    """
    if isiterable(value):
        return map(int, value)
    raise ValueError(value)


list_of_integers = list_of_ints
def list_of_numbers(value):
    """Convert every element of the iterable *value* to a number
    (``int`` or ``float`` depending on the element -- see ``numeric()``).

    Raises ``ValueError`` if *value* is not iterable.
    """
    if isiterable(value):
        return map(numeric, value)
    raise ValueError(value)
def list_of_bools(value, interpret_strings=True):
    """Convert every element of the iterable *value* to a ``bool``.

    Raises ``ValueError`` if *value* is not iterable.

    .. note:: By default the ``boolean()`` conversion is applied, so strings
              such as ``"0"`` or ``"false"`` become ``False``. Pass
              ``interpret_strings=False`` to use plain ``bool()`` instead.
    """
    if not isiterable(value):
        raise ValueError(value)
    convert = boolean if interpret_strings else bool
    return map(convert, value)
def list_of(type_):
    """Generate a ``list`` subclass for the given type.

    Every element added to an instance (at construction, via ``append``,
    ``extend`` or item assignment) is coerced to ``type_``; the coercion
    raises ``ValueError`` (or whatever ``type_`` raises) on failure.
    """
    def __init__(self, values):
        list.__init__(self, map(type_, values))

    def append(self, value):
        list.append(self, type_(value))

    def extend(self, other):
        list.extend(self, map(type_, other))

    def __setitem__(self, idx, value):
        list.__setitem__(self, idx, type_(value))

    namespace = {
        "__init__": __init__,
        "__setitem__": __setitem__,
        "append": append,
        "extend": extend,
    }
    return type('list_of_{}s'.format(type_.__name__), (list, ), namespace)
def list_or_string(value):
    """
    Converts the value into a list of strings. If the value is not iterable,
    a one-element list with stringified value will be returned.
    """
    if isinstance(value, basestring):
        return [value]
    else:
        try:
            return list(value)
        except (ValueError, TypeError):
            # BUG FIX: list() signals a non-iterable with TypeError, not
            # ValueError, so the documented one-element fallback never
            # triggered for e.g. ints -- the TypeError escaped to the caller.
            return [str(value)]
def list_or_caseless_string(value):
    """
    Converts the value into a list of ``caseless_string``'s. If the value is
    not iterable, a one-element list with stringified value will be returned.
    """
    if isinstance(value, basestring):
        return [caseless_string(value)]
    else:
        try:
            return map(caseless_string, value)
        except (ValueError, TypeError):
            # BUG FIX: map()/iteration raise TypeError (not ValueError) for
            # non-iterables, so the documented one-element fallback never
            # triggered -- the TypeError escaped to the caller.
            return [caseless_string(value)]
def list_or(type_):
    """
    Generate a "list or" type. Instances accept either a single value or a
    list of values and produce a list of the specified ``type_``, converting
    the single value or each element of the list as appropriate.
    """
    list_type = list_of(type_)

    class list_or_type(list_type):
        def __init__(self, value):
            # pylint: disable=non-parent-init-called,super-init-not-called
            if isiterable(value):
                list_type.__init__(self, value)
            else:
                # Wrap the scalar so the list_of initializer can convert it.
                list_type.__init__(self, [value])
    return list_or_type


# Common pre-built "list or" conversions.
list_or_integer = list_or(integer)
list_or_number = list_or(numeric)
list_or_bool = list_or(boolean)
# The type of a compiled pattern object (not directly exposed by `re`).
regex_type = type(re.compile(''))


def regex(value):
    """Regular expression conversion.

    A string is compiled with no flags; to use flags, pass an already
    compiled pattern, which is returned unchanged.
    """
    return value if isinstance(value, regex_type) else re.compile(value)
# Per-name counter state backing counter()/reset_counter().
__counters = defaultdict(int)


def reset_counter(name=None):
    """Reset the named counter (the default counter when *name* is None) to zero."""
    __counters[name] = 0


def counter(name=None):
    """Return the next value of an auto-incrementing counter (like an SQL
    AUTO INCREMENT field).

    Each *name* increments independently; counts start at 1, not 0.
    """
    __counters[name] += 1
    return __counters[name]
class caseless_string(str):
    """
    Just like built-in Python string except case-insensitive on comparisons.
    However, the case is preserved otherwise.
    """

    def __eq__(self, other):
        if isinstance(other, basestring):
            other = other.lower()
        return self.lower() == other

    def __ne__(self, other):
        return not self.__eq__(other)

    def __cmp__(self, other):
        # BUG FIX: the isinstance() arguments were swapped
        # (isinstance(basestring, other)), which raised
        # "isinstance() arg 2 must be a class" for any string operand
        # instead of lower-casing it.
        if isinstance(other, basestring):
            other = other.lower()
        return cmp(self.lower(), other)

    def __hash__(self):
        # Strings that compare equal ignoring case must hash equally,
        # otherwise instances misbehave as dict keys / set members given
        # the case-insensitive __eq__ above.
        return hash(self.lower())

    def format(self, *args, **kwargs):
        return caseless_string(super(caseless_string, self).format(*args, **kwargs))
class arguments(list):
    """
    Represents command line arguments to be passed to a program.

    Accepts an iterable (each element stringified), a single string (split
    with shell-like rules), None (empty argument list), or any other value
    (stringified into a one-element list).
    """
    def __init__(self, value=None):
        if isiterable(value):
            super(arguments, self).__init__(map(str, value))
        elif isinstance(value, basestring):
            # On Windows, use non-POSIX splitting so backslash paths survive.
            posix = os.name != 'nt'
            super(arguments, self).__init__(shlex.split(value, posix=posix))
        elif value is None:
            super(arguments, self).__init__()
        else:
            super(arguments, self).__init__([str(value)])

    def append(self, value):
        # Everything stored is coerced to str.
        return super(arguments, self).append(str(value))

    def extend(self, values):
        return super(arguments, self).extend(map(str, values))

    def __str__(self):
        # Space-joined form suitable for display/logging.
        return ' '.join(self)
|
wenhuizhang/neutron | refs/heads/master | neutron/extensions/dhcpagentscheduler.py | 29 | # Copyright (c) 2013 OpenStack Foundation.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from neutron.api import extensions
from neutron.api.v2 import base
from neutron.api.v2 import resource
from neutron.common import constants
from neutron.common import exceptions
from neutron.common import rpc as n_rpc
from neutron.extensions import agent
from neutron import manager
from neutron import policy
from neutron import wsgi
# Resource names for the two scheduler sub-resources exposed by this
# extension: networks hosted by a given agent, and agents hosting a
# given network (see Dhcpagentscheduler.get_resources()).
DHCP_NET = 'dhcp-network'
DHCP_NETS = DHCP_NET + 's'
DHCP_AGENT = 'dhcp-agent'
DHCP_AGENTS = DHCP_AGENT + 's'
class NetworkSchedulerController(wsgi.Controller):
    """Controller for the dhcp-networks sub-resource of an agent.

    Each handler enforces the matching policy rule before delegating to
    the core plugin.
    """

    def index(self, request, **kwargs):
        # GET: list networks scheduled to the agent.
        plugin = manager.NeutronManager.get_plugin()
        policy.enforce(request.context,
                       "get_%s" % DHCP_NETS,
                       {})
        return plugin.list_networks_on_dhcp_agent(
            request.context, kwargs['agent_id'])

    def create(self, request, body, **kwargs):
        # POST: schedule the network given in the body onto the agent,
        # then emit an RPC notification about the new binding.
        plugin = manager.NeutronManager.get_plugin()
        policy.enforce(request.context,
                       "create_%s" % DHCP_NET,
                       {})
        agent_id = kwargs['agent_id']
        network_id = body['network_id']
        result = plugin.add_network_to_dhcp_agent(request.context, agent_id,
                                                  network_id)
        notify(request.context, 'dhcp_agent.network.add', network_id, agent_id)
        return result

    def delete(self, request, id, **kwargs):
        # DELETE: unschedule network `id` from the agent and notify.
        plugin = manager.NeutronManager.get_plugin()
        policy.enforce(request.context,
                       "delete_%s" % DHCP_NET,
                       {})
        agent_id = kwargs['agent_id']
        result = plugin.remove_network_from_dhcp_agent(request.context,
                                                       agent_id, id)
        notify(request.context, 'dhcp_agent.network.remove', id, agent_id)
        return result
class DhcpAgentsHostingNetworkController(wsgi.Controller):
    """Controller for the dhcp-agents sub-resource of a network
    (read-only: lists the DHCP agents hosting the network)."""

    def index(self, request, **kwargs):
        plugin = manager.NeutronManager.get_plugin()
        policy.enforce(request.context,
                       "get_%s" % DHCP_AGENTS,
                       {})
        return plugin.list_dhcp_agents_hosting_network(
            request.context, kwargs['network_id'])
class Dhcpagentscheduler(extensions.ExtensionDescriptor):
    """Extension class supporting dhcp agent scheduler.
    """

    @classmethod
    def get_name(cls):
        return "DHCP Agent Scheduler"

    @classmethod
    def get_alias(cls):
        return constants.DHCP_AGENT_SCHEDULER_EXT_ALIAS

    @classmethod
    def get_description(cls):
        return "Schedule networks among dhcp agents"

    @classmethod
    def get_updated(cls):
        return "2013-02-07T10:00:00-00:00"

    @classmethod
    def get_resources(cls):
        """Returns Ext Resources."""
        exts = []
        # /agents/<agent_id>/dhcp-networks
        parent = dict(member_name="agent",
                      collection_name="agents")
        controller = resource.Resource(NetworkSchedulerController(),
                                       base.FAULT_MAP)
        exts.append(extensions.ResourceExtension(
            DHCP_NETS, controller, parent))
        # /networks/<network_id>/dhcp-agents
        parent = dict(member_name="network",
                      collection_name="networks")
        controller = resource.Resource(DhcpAgentsHostingNetworkController(),
                                       base.FAULT_MAP)
        exts.append(extensions.ResourceExtension(
            DHCP_AGENTS, controller, parent))
        return exts

    def get_extended_resources(self, version):
        # This extension adds resources, not new attributes.
        return {}
class InvalidDHCPAgent(agent.AgentNotFound):
    """The referenced agent is not a (currently enabled) DHCP agent."""
    message = _("Agent %(id)s is not a valid DHCP Agent or has been disabled")


class NetworkHostedByDHCPAgent(exceptions.Conflict):
    """Raised when scheduling a network onto an agent that already hosts it."""
    message = _("The network %(network_id)s has been already hosted"
                " by the DHCP Agent %(agent_id)s.")


class NetworkNotHostedByDhcpAgent(exceptions.Conflict):
    """Raised when unscheduling a network from an agent that does not host it."""
    message = _("The network %(network_id)s is not hosted"
                " by the DHCP agent %(agent_id)s.")
class DhcpAgentSchedulerPluginBase(object):
    """REST API to operate the DHCP agent scheduler.

    All of method must be in an admin context.
    """

    # BUG FIX: without an ABC metaclass the @abc.abstractmethod decorators
    # below are inert -- an incomplete plugin would still instantiate.
    # (Python 2 metaclass declaration, matching this file's Python 2 idioms.)
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def add_network_to_dhcp_agent(self, context, id, network_id):
        pass

    @abc.abstractmethod
    def remove_network_from_dhcp_agent(self, context, id, network_id):
        pass

    @abc.abstractmethod
    def list_networks_on_dhcp_agent(self, context, id):
        pass

    @abc.abstractmethod
    def list_dhcp_agents_hosting_network(self, context, network_id):
        pass
def notify(context, action, network_id, agent_id):
    """Emit an INFO-level RPC notification about an agent/network binding change."""
    payload = {'agent': {'id': agent_id, 'network_id': network_id}}
    n_rpc.get_notifier('network').info(context, action, payload)
|
vmax-feihu/hue | refs/heads/master | desktop/core/ext-py/tablib-0.10.0/tablib/packages/yaml3/scanner.py | 235 |
# Scanner produces tokens of the following types:
# STREAM-START
# STREAM-END
# DIRECTIVE(name, value)
# DOCUMENT-START
# DOCUMENT-END
# BLOCK-SEQUENCE-START
# BLOCK-MAPPING-START
# BLOCK-END
# FLOW-SEQUENCE-START
# FLOW-MAPPING-START
# FLOW-SEQUENCE-END
# FLOW-MAPPING-END
# BLOCK-ENTRY
# FLOW-ENTRY
# KEY
# VALUE
# ALIAS(value)
# ANCHOR(value)
# TAG(value)
# SCALAR(value, plain, style)
#
# Read comments in the Scanner code for more details.
#
__all__ = ['Scanner', 'ScannerError']
from .error import MarkedYAMLError
from .tokens import *
class ScannerError(MarkedYAMLError):
    """Raised for tokenization problems; carries context/problem marks via MarkedYAMLError."""
    pass
class SimpleKey:
    """Record describing a potential simple key (see the simple keys
    treatment in Scanner below)."""
    # See below simple keys treatment.
    def __init__(self, token_number, required, index, line, column, mark):
        self.token_number = token_number   # position in the overall token stream
        self.required = required           # a ':' MUST follow, else it is an error
        self.index = index                 # stream offset where the key starts
        self.line = line                   # line where the key starts
        self.column = column               # column where the key starts
        self.mark = mark                   # Mark for error reporting
class Scanner:
def __init__(self):
    """Initialize the scanner."""
    # It is assumed that Scanner and Reader will have a common descendant.
    # Reader do the dirty work of checking for BOM and converting the
    # input data to Unicode. It also adds NUL to the end.
    #
    # Reader supports the following methods
    #   self.peek(i=0)       # peek the next i-th character
    #   self.prefix(l=1)     # peek the next l characters
    #   self.forward(l=1)    # read the next l characters and move the pointer.

    # Had we reached the end of the stream?
    self.done = False

    # The number of unclosed '{' and '['. `flow_level == 0` means block
    # context.
    self.flow_level = 0

    # List of processed tokens that are not yet emitted.
    self.tokens = []

    # Add the STREAM-START token.
    self.fetch_stream_start()

    # Number of tokens that were emitted through the `get_token` method.
    self.tokens_taken = 0

    # The current indentation level.
    self.indent = -1

    # Past indentation levels.
    self.indents = []

    # Variables related to simple keys treatment.

    # A simple key is a key that is not denoted by the '?' indicator.
    # Example of simple keys:
    #   ---
    #   block simple key: value
    #   ? not a simple key:
    #   : { flow simple key: value }
    # We emit the KEY token before all keys, so when we find a potential
    # simple key, we try to locate the corresponding ':' indicator.
    # Simple keys should be limited to a single line and 1024 characters.

    # Can a simple key start at the current position? A simple key may
    # start:
    # - at the beginning of the line, not counting indentation spaces
    #       (in block context),
    # - after '{', '[', ',' (in the flow context),
    # - after '?', ':', '-' (in the block context).
    # In the block context, this flag also signifies if a block collection
    # may start at the current position.
    self.allow_simple_key = True

    # Keep track of possible simple keys. This is a dictionary. The key
    # is `flow_level`; there can be no more that one possible simple key
    # for each level. The value is a SimpleKey record:
    #   (token_number, required, index, line, column, mark)
    # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow),
    # '[', or '{' tokens.
    self.possible_simple_keys = {}
# Public methods.
def check_token(self, *choices):
# Check if the next token is one of the given types.
while self.need_more_tokens():
self.fetch_more_tokens()
if self.tokens:
if not choices:
return True
for choice in choices:
if isinstance(self.tokens[0], choice):
return True
return False
def peek_token(self):
# Return the next token, but do not delete if from the queue.
while self.need_more_tokens():
self.fetch_more_tokens()
if self.tokens:
return self.tokens[0]
def get_token(self):
# Return the next token.
while self.need_more_tokens():
self.fetch_more_tokens()
if self.tokens:
self.tokens_taken += 1
return self.tokens.pop(0)
# Private methods.
def need_more_tokens(self):
    """Decide whether more tokens must be fetched before the head of the
    queue can be handed out.

    Returns False when the stream is done, True when the queue is empty or
    the queued head may still be a pending simple key; otherwise falls off
    the end (implicit None, which is falsy -- callers only test truthiness).
    """
    if self.done:
        return False
    if not self.tokens:
        return True
    # The current token may be a potential simple key, so we
    # need to look further.
    self.stale_possible_simple_keys()
    if self.next_possible_simple_key() == self.tokens_taken:
        return True
def fetch_more_tokens(self):
    """Scan forward and append the next token(s) to self.tokens.

    Dispatches on the next character to the appropriate fetch_* method;
    raises ScannerError when no token can start at the current position.
    """
    # Eat whitespaces and comments until we reach the next token.
    self.scan_to_next_token()

    # Remove obsolete possible simple keys.
    self.stale_possible_simple_keys()

    # Compare the current indentation and column. It may add some tokens
    # and decrease the current indentation level.
    self.unwind_indent(self.column)

    # Peek the next character.
    ch = self.peek()

    # Is it the end of stream?
    if ch == '\0':
        return self.fetch_stream_end()

    # Is it a directive?
    if ch == '%' and self.check_directive():
        return self.fetch_directive()

    # Is it the document start?
    if ch == '-' and self.check_document_start():
        return self.fetch_document_start()

    # Is it the document end?
    if ch == '.' and self.check_document_end():
        return self.fetch_document_end()

    # TODO: support for BOM within a stream.
    #if ch == '\uFEFF':
    #    return self.fetch_bom()    <-- issue BOMToken

    # Note: the order of the following checks is NOT significant.

    # Is it the flow sequence start indicator?
    if ch == '[':
        return self.fetch_flow_sequence_start()

    # Is it the flow mapping start indicator?
    if ch == '{':
        return self.fetch_flow_mapping_start()

    # Is it the flow sequence end indicator?
    if ch == ']':
        return self.fetch_flow_sequence_end()

    # Is it the flow mapping end indicator?
    if ch == '}':
        return self.fetch_flow_mapping_end()

    # Is it the flow entry indicator?
    if ch == ',':
        return self.fetch_flow_entry()

    # Is it the block entry indicator?
    if ch == '-' and self.check_block_entry():
        return self.fetch_block_entry()

    # Is it the key indicator?
    if ch == '?' and self.check_key():
        return self.fetch_key()

    # Is it the value indicator?
    if ch == ':' and self.check_value():
        return self.fetch_value()

    # Is it an alias?
    if ch == '*':
        return self.fetch_alias()

    # Is it an anchor?
    if ch == '&':
        return self.fetch_anchor()

    # Is it a tag?
    if ch == '!':
        return self.fetch_tag()

    # Is it a literal scalar?
    if ch == '|' and not self.flow_level:
        return self.fetch_literal()

    # Is it a folded scalar?
    if ch == '>' and not self.flow_level:
        return self.fetch_folded()

    # Is it a single quoted scalar?
    if ch == '\'':
        return self.fetch_single()

    # Is it a double quoted scalar?
    if ch == '\"':
        return self.fetch_double()

    # It must be a plain scalar then.
    if self.check_plain():
        return self.fetch_plain()

    # No? It's an error. Let's produce a nice error message.
    raise ScannerError("while scanning for the next token", None,
            "found character %r that cannot start any token" % ch,
            self.get_mark())
# Simple keys treatment.
def next_possible_simple_key(self):
# Return the number of the nearest possible simple key. Actually we
# don't need to loop through the whole dictionary. We may replace it
# with the following code:
# if not self.possible_simple_keys:
# return None
# return self.possible_simple_keys[
# min(self.possible_simple_keys.keys())].token_number
min_token_number = None
for level in self.possible_simple_keys:
key = self.possible_simple_keys[level]
if min_token_number is None or key.token_number < min_token_number:
min_token_number = key.token_number
return min_token_number
def stale_possible_simple_keys(self):
    """Discard saved simple-key candidates that can no longer be valid.

    Raises ScannerError when a *required* candidate goes stale before its
    ':' was found.
    """
    # Remove entries that are no longer possible simple keys. According to
    # the YAML specification, simple keys
    # - should be limited to a single line,
    # - should be no longer than 1024 characters.
    # Disabling this procedure will allow simple keys of any length and
    # height (may cause problems if indentation is broken though).
    for level in list(self.possible_simple_keys):
        key = self.possible_simple_keys[level]
        if key.line != self.line  \
                or self.index-key.index > 1024:
            if key.required:
                raise ScannerError("while scanning a simple key", key.mark,
                        "could not found expected ':'", self.get_mark())
            del self.possible_simple_keys[level]
def save_possible_simple_key(self):
    """Record the current position as a possible simple-key start.

    Called for ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.
    """
    # The next token may start a simple key. We check if it's possible
    # and save its position. This function is called for
    #   ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.

    # Check if a simple key is required at the current position.
    required = not self.flow_level and self.indent == self.column

    # A simple key is required only if it is the first token in the current
    # line. Therefore it is always allowed.
    assert self.allow_simple_key or not required

    # The next token might be a simple key. Let's save it's number and
    # position.
    if self.allow_simple_key:
        self.remove_possible_simple_key()
        token_number = self.tokens_taken+len(self.tokens)
        key = SimpleKey(token_number, required,
                self.index, self.line, self.column, self.get_mark())
        self.possible_simple_keys[self.flow_level] = key

def remove_possible_simple_key(self):
    """Drop the saved simple-key candidate at the current flow level.

    Raises ScannerError if that candidate was required (its ':' is now
    provably missing). Note the required check happens before deletion.
    """
    # Remove the saved possible key position at the current flow level.
    if self.flow_level in self.possible_simple_keys:
        key = self.possible_simple_keys[self.flow_level]

        if key.required:
            raise ScannerError("while scanning a simple key", key.mark,
                    "could not found expected ':'", self.get_mark())

        del self.possible_simple_keys[self.flow_level]
# Indentation functions.
def unwind_indent(self, column):
    """Pop indentation levels greater than *column*, emitting a BLOCK-END
    token for each one popped. No-op in flow context.
    """
    ## In flow context, tokens should respect indentation.
    ## Actually the condition should be `self.indent >= column` according to
    ## the spec. But this condition will prohibit intuitively correct
    ## constructions such as
    ## key : {
    ## }
    #if self.flow_level and self.indent > column:
    #    raise ScannerError(None, None,
    #            "invalid intendation or unclosed '[' or '{'",
    #            self.get_mark())

    # In the flow context, indentation is ignored. We make the scanner less
    # restrictive then specification requires.
    if self.flow_level:
        return

    # In block context, we may need to issue the BLOCK-END tokens.
    while self.indent > column:
        mark = self.get_mark()
        self.indent = self.indents.pop()
        self.tokens.append(BlockEndToken(mark, mark))
def add_indent(self, column):
# Check if we need to increase indentation.
if self.indent < column:
self.indents.append(self.indent)
self.indent = column
return True
return False
# Fetchers.
def fetch_stream_start(self):
    """Emit the STREAM-START token (always the first token)."""
    # We always add STREAM-START as the first token and STREAM-END as the
    # last token.

    # Read the token.
    mark = self.get_mark()

    # Add STREAM-START.
    self.tokens.append(StreamStartToken(mark, mark,
        encoding=self.encoding))

def fetch_stream_end(self):
    """Emit the STREAM-END token and mark the scanner as done."""
    # Set the current intendation to -1.
    self.unwind_indent(-1)

    # Reset simple keys.
    self.remove_possible_simple_key()
    self.allow_simple_key = False
    self.possible_simple_keys = {}

    # Read the token.
    mark = self.get_mark()

    # Add STREAM-END.
    self.tokens.append(StreamEndToken(mark, mark))

    # The steam is finished.
    self.done = True

def fetch_directive(self):
    """Scan a '%...' directive and emit the DIRECTIVE token."""
    # Set the current intendation to -1.
    self.unwind_indent(-1)

    # Reset simple keys.
    self.remove_possible_simple_key()
    self.allow_simple_key = False

    # Scan and add DIRECTIVE.
    self.tokens.append(self.scan_directive())
def fetch_document_start(self):
    """Emit DOCUMENT-START for a '---' indicator."""
    self.fetch_document_indicator(DocumentStartToken)

def fetch_document_end(self):
    """Emit DOCUMENT-END for a '...' indicator."""
    self.fetch_document_indicator(DocumentEndToken)

def fetch_document_indicator(self, TokenClass):
    """Shared worker: consume the 3-character indicator and emit TokenClass."""
    # Set the current intendation to -1.
    self.unwind_indent(-1)

    # Reset simple keys. Note that there could not be a block collection
    # after '---'.
    self.remove_possible_simple_key()
    self.allow_simple_key = False

    # Add DOCUMENT-START or DOCUMENT-END.
    start_mark = self.get_mark()
    self.forward(3)
    end_mark = self.get_mark()
    self.tokens.append(TokenClass(start_mark, end_mark))
def fetch_flow_sequence_start(self):
    """Emit FLOW-SEQUENCE-START for '['."""
    self.fetch_flow_collection_start(FlowSequenceStartToken)

def fetch_flow_mapping_start(self):
    """Emit FLOW-MAPPING-START for '{'."""
    self.fetch_flow_collection_start(FlowMappingStartToken)

def fetch_flow_collection_start(self, TokenClass):
    """Shared worker for '[' / '{': enter flow context and emit TokenClass."""
    # '[' and '{' may start a simple key.
    self.save_possible_simple_key()

    # Increase the flow level.
    self.flow_level += 1

    # Simple keys are allowed after '[' and '{'.
    self.allow_simple_key = True

    # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(TokenClass(start_mark, end_mark))

def fetch_flow_sequence_end(self):
    """Emit FLOW-SEQUENCE-END for ']'."""
    self.fetch_flow_collection_end(FlowSequenceEndToken)

def fetch_flow_mapping_end(self):
    """Emit FLOW-MAPPING-END for '}'."""
    self.fetch_flow_collection_end(FlowMappingEndToken)

def fetch_flow_collection_end(self, TokenClass):
    """Shared worker for ']' / '}': leave flow context and emit TokenClass."""
    # Reset possible simple key on the current level.
    self.remove_possible_simple_key()

    # Decrease the flow level.
    self.flow_level -= 1

    # No simple keys after ']' or '}'.
    self.allow_simple_key = False

    # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(TokenClass(start_mark, end_mark))
def fetch_flow_entry(self):
    """Emit FLOW-ENTRY for ',' inside a flow collection."""
    # Simple keys are allowed after ','.
    self.allow_simple_key = True

    # Reset possible simple key on the current level.
    self.remove_possible_simple_key()

    # Add FLOW-ENTRY.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(FlowEntryToken(start_mark, end_mark))
def fetch_block_entry(self):
    """Emit BLOCK-ENTRY for '-', opening a BLOCK-SEQUENCE-START first
    when this entry begins a new block sequence."""
    # Block context needs additional checks.
    if not self.flow_level:

        # Are we allowed to start a new entry?
        if not self.allow_simple_key:
            raise ScannerError(None, None,
                    "sequence entries are not allowed here",
                    self.get_mark())

        # We may need to add BLOCK-SEQUENCE-START.
        if self.add_indent(self.column):
            mark = self.get_mark()
            self.tokens.append(BlockSequenceStartToken(mark, mark))

    # It's an error for the block entry to occur in the flow context,
    # but we let the parser detect this.
    else:
        pass

    # Simple keys are allowed after '-'.
    self.allow_simple_key = True

    # Reset possible simple key on the current level.
    self.remove_possible_simple_key()

    # Add BLOCK-ENTRY.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(BlockEntryToken(start_mark, end_mark))
def fetch_key(self):
    """Emit KEY for an explicit '?' key indicator, opening a
    BLOCK-MAPPING-START first when this key begins a new block mapping."""
    # Block context needs additional checks.
    if not self.flow_level:

        # Are we allowed to start a key (not nessesary a simple)?
        if not self.allow_simple_key:
            raise ScannerError(None, None,
                    "mapping keys are not allowed here",
                    self.get_mark())

        # We may need to add BLOCK-MAPPING-START.
        if self.add_indent(self.column):
            mark = self.get_mark()
            self.tokens.append(BlockMappingStartToken(mark, mark))

    # Simple keys are allowed after '?' in the block context.
    self.allow_simple_key = not self.flow_level

    # Reset possible simple key on the current level.
    self.remove_possible_simple_key()

    # Add KEY.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(KeyToken(start_mark, end_mark))
    def fetch_value(self):
        """Handle a ':' value indicator and emit a VALUE token.

        If a simple key is pending on the current level, a KEY token (and,
        when a new block mapping starts, a BLOCK-MAPPING-START token) is
        inserted retroactively before it; otherwise the ':' belongs to a
        complex key.
        """
        # Do we determine a simple key?
        if self.flow_level in self.possible_simple_keys:
            # Add KEY.
            key = self.possible_simple_keys[self.flow_level]
            del self.possible_simple_keys[self.flow_level]
            self.tokens.insert(key.token_number-self.tokens_taken,
                    KeyToken(key.mark, key.mark))
            # If this key starts a new block mapping, we need to add
            # BLOCK-MAPPING-START.
            if not self.flow_level:
                if self.add_indent(key.column):
                    self.tokens.insert(key.token_number-self.tokens_taken,
                            BlockMappingStartToken(key.mark, key.mark))
            # There cannot be two simple keys one after another.
            self.allow_simple_key = False
        # It must be a part of a complex key.
        else:
            # Block context needs additional checks.
            # (Do we really need them? They will be caught by the parser
            # anyway.)
            if not self.flow_level:
                # We are allowed to start a complex value if and only if
                # we can start a simple key.
                if not self.allow_simple_key:
                    raise ScannerError(None, None,
                            "mapping values are not allowed here",
                            self.get_mark())
            # If this value starts a new block mapping, we need to add
            # BLOCK-MAPPING-START.  It will be detected as an error later by
            # the parser.
            if not self.flow_level:
                if self.add_indent(self.column):
                    mark = self.get_mark()
                    self.tokens.append(BlockMappingStartToken(mark, mark))
        # Simple keys are allowed after ':' in the block context.
        self.allow_simple_key = not self.flow_level
        # Reset possible simple key on the current level.
        self.remove_possible_simple_key()
        # Add VALUE.
        start_mark = self.get_mark()
        self.forward()
        end_mark = self.get_mark()
        self.tokens.append(ValueToken(start_mark, end_mark))
def fetch_alias(self):
# ALIAS could be a simple key.
self.save_possible_simple_key()
# No simple keys after ALIAS.
self.allow_simple_key = False
# Scan and add ALIAS.
self.tokens.append(self.scan_anchor(AliasToken))
def fetch_anchor(self):
# ANCHOR could start a simple key.
self.save_possible_simple_key()
# No simple keys after ANCHOR.
self.allow_simple_key = False
# Scan and add ANCHOR.
self.tokens.append(self.scan_anchor(AnchorToken))
def fetch_tag(self):
# TAG could start a simple key.
self.save_possible_simple_key()
# No simple keys after TAG.
self.allow_simple_key = False
# Scan and add TAG.
self.tokens.append(self.scan_tag())
def fetch_literal(self):
self.fetch_block_scalar(style='|')
def fetch_folded(self):
self.fetch_block_scalar(style='>')
def fetch_block_scalar(self, style):
# A simple key may follow a block scalar.
self.allow_simple_key = True
# Reset possible simple key on the current level.
self.remove_possible_simple_key()
# Scan and add SCALAR.
self.tokens.append(self.scan_block_scalar(style))
def fetch_single(self):
self.fetch_flow_scalar(style='\'')
def fetch_double(self):
self.fetch_flow_scalar(style='"')
def fetch_flow_scalar(self, style):
# A flow scalar could be a simple key.
self.save_possible_simple_key()
# No simple keys after flow scalars.
self.allow_simple_key = False
# Scan and add SCALAR.
self.tokens.append(self.scan_flow_scalar(style))
def fetch_plain(self):
# A plain scalar could be a simple key.
self.save_possible_simple_key()
# No simple keys after plain scalars. But note that `scan_plain` will
# change this flag if the scan is finished at the beginning of the
# line.
self.allow_simple_key = False
# Scan and add SCALAR. May change `allow_simple_key`.
self.tokens.append(self.scan_plain())
# Checkers.
def check_directive(self):
# DIRECTIVE: ^ '%' ...
# The '%' indicator is already checked.
if self.column == 0:
return True
def check_document_start(self):
# DOCUMENT-START: ^ '---' (' '|'\n')
if self.column == 0:
if self.prefix(3) == '---' \
and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
return True
def check_document_end(self):
# DOCUMENT-END: ^ '...' (' '|'\n')
if self.column == 0:
if self.prefix(3) == '...' \
and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
return True
def check_block_entry(self):
# BLOCK-ENTRY: '-' (' '|'\n')
return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
def check_key(self):
# KEY(flow context): '?'
if self.flow_level:
return True
# KEY(block context): '?' (' '|'\n')
else:
return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
def check_value(self):
# VALUE(flow context): ':'
if self.flow_level:
return True
# VALUE(block context): ':' (' '|'\n')
else:
return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
def check_plain(self):
# A plain scalar may start with any non-space character except:
# '-', '?', ':', ',', '[', ']', '{', '}',
# '#', '&', '*', '!', '|', '>', '\'', '\"',
# '%', '@', '`'.
#
# It may also start with
# '-', '?', ':'
# if it is followed by a non-space character.
#
# Note that we limit the last rule to the block context (except the
# '-' character) because we want the flow context to be space
# independent.
ch = self.peek()
return ch not in '\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \
or (self.peek(1) not in '\0 \t\r\n\x85\u2028\u2029'
and (ch == '-' or (not self.flow_level and ch in '?:')))
# Scanners.
    def scan_to_next_token(self):
        """Skip spaces, comments and line breaks until the next token.

        In the block context a consumed line break makes a simple key
        legal again.  A BOM is stripped only at the very start of the
        stream.
        """
        # We ignore spaces, line breaks and comments.
        # If we find a line break in the block context, we set the flag
        # `allow_simple_key` on.
        # The byte order mark is stripped if it's the first character in the
        # stream. We do not yet support BOM inside the stream as the
        # specification requires. Any such mark will be considered as a part
        # of the document.
        #
        # TODO: We need to make tab handling rules more sane. A good rule is
        #   Tabs cannot precede tokens
        #   BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END,
        #   KEY(block), VALUE(block), BLOCK-ENTRY
        # So the checking code is
        #   if <TAB>:
        #       self.allow_simple_keys = False
        # We also need to add the check for `allow_simple_keys == True` to
        # `unwind_indent` before issuing BLOCK-END.
        # Scanners for block, flow, and plain scalars need to be modified.
        if self.index == 0 and self.peek() == '\uFEFF':
            self.forward()
        found = False
        while not found:
            # Skip spaces.
            while self.peek() == ' ':
                self.forward()
            # Skip a comment up to the end of the line.
            if self.peek() == '#':
                while self.peek() not in '\0\r\n\x85\u2028\u2029':
                    self.forward()
            # Consume a line break, if any; otherwise we reached a token.
            if self.scan_line_break():
                if not self.flow_level:
                    self.allow_simple_key = True
            else:
                found = True
    def scan_directive(self):
        """Scan a '%YAML ...' or '%TAG ...' directive (or skip an unknown
        one) and return the DirectiveToken."""
        # See the specification for details.
        start_mark = self.get_mark()
        self.forward()
        name = self.scan_directive_name(start_mark)
        value = None
        if name == 'YAML':
            value = self.scan_yaml_directive_value(start_mark)
            end_mark = self.get_mark()
        elif name == 'TAG':
            value = self.scan_tag_directive_value(start_mark)
            end_mark = self.get_mark()
        else:
            # Unknown directive: skip its arguments to the end of the line.
            end_mark = self.get_mark()
            while self.peek() not in '\0\r\n\x85\u2028\u2029':
                self.forward()
        self.scan_directive_ignored_line(start_mark)
        return DirectiveToken(name, value, start_mark, end_mark)
    def scan_directive_name(self, start_mark):
        """Scan the directive name: one or more alphanumeric/'-'/'_'
        characters terminated by a space, break or EOF."""
        # See the specification for details.
        length = 0
        ch = self.peek(length)
        while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
                or ch in '-_':
            length += 1
            ch = self.peek(length)
        if not length:
            raise ScannerError("while scanning a directive", start_mark,
                    "expected alphabetic or numeric character, but found %r"
                    % ch, self.get_mark())
        value = self.prefix(length)
        self.forward(length)
        ch = self.peek()
        if ch not in '\0 \r\n\x85\u2028\u2029':
            raise ScannerError("while scanning a directive", start_mark,
                    "expected alphabetic or numeric character, but found %r"
                    % ch, self.get_mark())
        return value
    def scan_yaml_directive_value(self, start_mark):
        """Scan the '<major>.<minor>' version pair of a %YAML directive
        and return it as an (int, int) tuple."""
        # See the specification for details.
        while self.peek() == ' ':
            self.forward()
        major = self.scan_yaml_directive_number(start_mark)
        if self.peek() != '.':
            raise ScannerError("while scanning a directive", start_mark,
                    "expected a digit or '.', but found %r" % self.peek(),
                    self.get_mark())
        self.forward()
        minor = self.scan_yaml_directive_number(start_mark)
        if self.peek() not in '\0 \r\n\x85\u2028\u2029':
            raise ScannerError("while scanning a directive", start_mark,
                    "expected a digit or ' ', but found %r" % self.peek(),
                    self.get_mark())
        return (major, minor)
    def scan_yaml_directive_number(self, start_mark):
        """Scan one non-negative decimal component of the YAML version."""
        # See the specification for details.
        ch = self.peek()
        if not ('0' <= ch <= '9'):
            raise ScannerError("while scanning a directive", start_mark,
                    "expected a digit, but found %r" % ch, self.get_mark())
        length = 0
        while '0' <= self.peek(length) <= '9':
            length += 1
        value = int(self.prefix(length))
        self.forward(length)
        return value
    def scan_tag_directive_value(self, start_mark):
        """Scan the '<handle> <prefix>' pair of a %TAG directive."""
        # See the specification for details.
        while self.peek() == ' ':
            self.forward()
        handle = self.scan_tag_directive_handle(start_mark)
        while self.peek() == ' ':
            self.forward()
        prefix = self.scan_tag_directive_prefix(start_mark)
        return (handle, prefix)
    def scan_tag_directive_handle(self, start_mark):
        """Scan the tag handle of a %TAG directive; it must be followed by
        a space."""
        # See the specification for details.
        value = self.scan_tag_handle('directive', start_mark)
        ch = self.peek()
        if ch != ' ':
            raise ScannerError("while scanning a directive", start_mark,
                    "expected ' ', but found %r" % ch, self.get_mark())
        return value
    def scan_tag_directive_prefix(self, start_mark):
        """Scan the tag prefix URI of a %TAG directive; it must be
        followed by a space, break or EOF."""
        # See the specification for details.
        value = self.scan_tag_uri('directive', start_mark)
        ch = self.peek()
        if ch not in '\0 \r\n\x85\u2028\u2029':
            raise ScannerError("while scanning a directive", start_mark,
                    "expected ' ', but found %r" % ch, self.get_mark())
        return value
    def scan_directive_ignored_line(self, start_mark):
        """Skip trailing spaces and an optional comment after a directive,
        then consume the terminating line break."""
        # See the specification for details.
        while self.peek() == ' ':
            self.forward()
        if self.peek() == '#':
            while self.peek() not in '\0\r\n\x85\u2028\u2029':
                self.forward()
        ch = self.peek()
        if ch not in '\0\r\n\x85\u2028\u2029':
            raise ScannerError("while scanning a directive", start_mark,
                    "expected a comment or a line break, but found %r"
                    % ch, self.get_mark())
        self.scan_line_break()
    def scan_anchor(self, TokenClass):
        """Scan an '&anchor' or '*alias' name and return a TokenClass
        token; names are restricted to ASCII alphanumerics, '-' and '_'.
        """
        # The specification does not restrict characters for anchors and
        # aliases. This may lead to problems, for instance, the document:
        #   [ *alias, value ]
        # can be interpreted in two ways, as
        #   [ "value" ]
        # and
        #   [ *alias , "value" ]
        # Therefore we restrict aliases to numbers and ASCII letters.
        start_mark = self.get_mark()
        indicator = self.peek()
        # '*' introduces an alias, '&' an anchor; only the error messages
        # differ.
        if indicator == '*':
            name = 'alias'
        else:
            name = 'anchor'
        self.forward()
        length = 0
        ch = self.peek(length)
        while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
                or ch in '-_':
            length += 1
            ch = self.peek(length)
        if not length:
            raise ScannerError("while scanning an %s" % name, start_mark,
                    "expected alphabetic or numeric character, but found %r"
                    % ch, self.get_mark())
        value = self.prefix(length)
        self.forward(length)
        ch = self.peek()
        if ch not in '\0 \t\r\n\x85\u2028\u2029?:,]}%@`':
            raise ScannerError("while scanning an %s" % name, start_mark,
                    "expected alphabetic or numeric character, but found %r"
                    % ch, self.get_mark())
        end_mark = self.get_mark()
        return TokenClass(value, start_mark, end_mark)
def scan_tag(self):
# See the specification for details.
start_mark = self.get_mark()
ch = self.peek(1)
if ch == '<':
handle = None
self.forward(2)
suffix = self.scan_tag_uri('tag', start_mark)
if self.peek() != '>':
raise ScannerError("while parsing a tag", start_mark,
"expected '>', but found %r" % self.peek(),
self.get_mark())
self.forward()
elif ch in '\0 \t\r\n\x85\u2028\u2029':
handle = None
suffix = '!'
self.forward()
else:
length = 1
use_handle = False
while ch not in '\0 \r\n\x85\u2028\u2029':
if ch == '!':
use_handle = True
break
length += 1
ch = self.peek(length)
handle = '!'
if use_handle:
handle = self.scan_tag_handle('tag', start_mark)
else:
handle = '!'
self.forward()
suffix = self.scan_tag_uri('tag', start_mark)
ch = self.peek()
if ch not in '\0 \r\n\x85\u2028\u2029':
raise ScannerError("while scanning a tag", start_mark,
"expected ' ', but found %r" % ch, self.get_mark())
value = (handle, suffix)
end_mark = self.get_mark()
return TagToken(value, start_mark, end_mark)
    def scan_block_scalar(self, style):
        """Scan a literal ('|') or folded ('>') block scalar and return
        its SCALAR token, with folding and chomping applied."""
        # See the specification for details.
        if style == '>':
            folded = True
        else:
            folded = False
        chunks = []
        start_mark = self.get_mark()
        # Scan the header.
        self.forward()
        chomping, increment = self.scan_block_scalar_indicators(start_mark)
        self.scan_block_scalar_ignored_line(start_mark)
        # Determine the indentation level and go to the first non-empty line.
        min_indent = self.indent+1
        if min_indent < 1:
            min_indent = 1
        if increment is None:
            # No explicit indentation indicator: auto-detect from content.
            breaks, max_indent, end_mark = self.scan_block_scalar_indentation()
            indent = max(min_indent, max_indent)
        else:
            indent = min_indent+increment-1
            breaks, end_mark = self.scan_block_scalar_breaks(indent)
        line_break = ''
        # Scan the inner part of the block scalar.
        while self.column == indent and self.peek() != '\0':
            chunks.extend(breaks)
            leading_non_space = self.peek() not in ' \t'
            length = 0
            while self.peek(length) not in '\0\r\n\x85\u2028\u2029':
                length += 1
            chunks.append(self.prefix(length))
            self.forward(length)
            line_break = self.scan_line_break()
            breaks, end_mark = self.scan_block_scalar_breaks(indent)
            if self.column == indent and self.peek() != '\0':
                # Unfortunately, folding rules are ambiguous.
                #
                # This is the folding according to the specification:
                if folded and line_break == '\n' \
                        and leading_non_space and self.peek() not in ' \t':
                    if not breaks:
                        chunks.append(' ')
                else:
                    chunks.append(line_break)
                # This is Clark Evans's interpretation (also in the spec
                # examples):
                #
                #if folded and line_break == '\n':
                #    if not breaks:
                #        if self.peek() not in ' \t':
                #            chunks.append(' ')
                #    else:
                #        chunks.append(line_break)
                #else:
                #    chunks.append(line_break)
            else:
                break
        # Chomp the tail: keep the final break unless stripping ('-'),
        # and keep the trailing empty lines only when keeping ('+').
        if chomping is not False:
            chunks.append(line_break)
        if chomping is True:
            chunks.extend(breaks)
        # We are done.
        return ScalarToken(''.join(chunks), False, start_mark, end_mark,
                style)
    def scan_block_scalar_indicators(self, start_mark):
        """Scan the optional chomping ('+'/'-') and indentation (1-9)
        indicators of a block scalar header, in either order.

        Returns (chomping, increment): chomping is True ('+'), False ('-')
        or None (default clip); increment is an int or None.
        """
        # See the specification for details.
        chomping = None
        increment = None
        ch = self.peek()
        if ch in '+-':
            if ch == '+':
                chomping = True
            else:
                chomping = False
            self.forward()
            ch = self.peek()
            if ch in '0123456789':
                increment = int(ch)
                if increment == 0:
                    raise ScannerError("while scanning a block scalar", start_mark,
                            "expected indentation indicator in the range 1-9, but found 0",
                            self.get_mark())
                self.forward()
        elif ch in '0123456789':
            increment = int(ch)
            if increment == 0:
                raise ScannerError("while scanning a block scalar", start_mark,
                        "expected indentation indicator in the range 1-9, but found 0",
                        self.get_mark())
            self.forward()
            ch = self.peek()
            if ch in '+-':
                if ch == '+':
                    chomping = True
                else:
                    chomping = False
                self.forward()
        ch = self.peek()
        if ch not in '\0 \r\n\x85\u2028\u2029':
            raise ScannerError("while scanning a block scalar", start_mark,
                    "expected chomping or indentation indicators, but found %r"
                    % ch, self.get_mark())
        return chomping, increment
    def scan_block_scalar_ignored_line(self, start_mark):
        """Skip trailing spaces and an optional comment after a block
        scalar header, then consume the line break."""
        # See the specification for details.
        while self.peek() == ' ':
            self.forward()
        if self.peek() == '#':
            while self.peek() not in '\0\r\n\x85\u2028\u2029':
                self.forward()
        ch = self.peek()
        if ch not in '\0\r\n\x85\u2028\u2029':
            raise ScannerError("while scanning a block scalar", start_mark,
                    "expected a comment or a line break, but found %r" % ch,
                    self.get_mark())
        self.scan_line_break()
    def scan_block_scalar_indentation(self):
        """Find the maximal indentation of the leading empty lines of a
        block scalar.

        Returns (breaks, max_indent, end_mark) where breaks are the
        consumed line breaks.
        """
        # See the specification for details.
        chunks = []
        max_indent = 0
        end_mark = self.get_mark()
        while self.peek() in ' \r\n\x85\u2028\u2029':
            if self.peek() != ' ':
                chunks.append(self.scan_line_break())
                end_mark = self.get_mark()
            else:
                self.forward()
                if self.column > max_indent:
                    max_indent = self.column
        return chunks, max_indent, end_mark
    def scan_block_scalar_breaks(self, indent):
        """Consume empty lines (skipping up to ``indent`` leading spaces
        on each) and return (breaks, end_mark)."""
        # See the specification for details.
        chunks = []
        end_mark = self.get_mark()
        while self.column < indent and self.peek() == ' ':
            self.forward()
        while self.peek() in '\r\n\x85\u2028\u2029':
            chunks.append(self.scan_line_break())
            end_mark = self.get_mark()
            while self.column < indent and self.peek() == ' ':
                self.forward()
        return chunks, end_mark
    def scan_flow_scalar(self, style):
        """Scan a single- or double-quoted scalar and return its SCALAR
        token."""
        # See the specification for details.
        # Note that we relax the indentation rules for quoted scalars:
        # they don't need to adhere to the indentation because the quotes
        # clearly mark their beginning and end.  Therefore we are less
        # restrictive than the specification requires; we only need to
        # check that document separators are not included in scalars.
        if style == '"':
            double = True
        else:
            double = False
        chunks = []
        start_mark = self.get_mark()
        quote = self.peek()
        self.forward()
        # Alternate runs of non-space content and folded whitespace until
        # the closing quote.
        chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
        while self.peek() != quote:
            chunks.extend(self.scan_flow_scalar_spaces(double, start_mark))
            chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
        self.forward()
        end_mark = self.get_mark()
        return ScalarToken(''.join(chunks), False, start_mark, end_mark,
                style)
ESCAPE_REPLACEMENTS = {
'0': '\0',
'a': '\x07',
'b': '\x08',
't': '\x09',
'\t': '\x09',
'n': '\x0A',
'v': '\x0B',
'f': '\x0C',
'r': '\x0D',
'e': '\x1B',
' ': '\x20',
'\"': '\"',
'\\': '\\',
'N': '\x85',
'_': '\xA0',
'L': '\u2028',
'P': '\u2029',
}
ESCAPE_CODES = {
'x': 2,
'u': 4,
'U': 8,
}
    def scan_flow_scalar_non_spaces(self, double, start_mark):
        """Scan the non-whitespace portion of a quoted scalar, resolving
        '' (single-quoted) and backslash (double-quoted) escapes."""
        # See the specification for details.
        chunks = []
        while True:
            # Take a run of ordinary characters verbatim.
            length = 0
            while self.peek(length) not in '\'\"\\\0 \t\r\n\x85\u2028\u2029':
                length += 1
            if length:
                chunks.append(self.prefix(length))
                self.forward(length)
            ch = self.peek()
            if not double and ch == '\'' and self.peek(1) == '\'':
                # '' inside a single-quoted scalar is an escaped quote.
                chunks.append('\'')
                self.forward(2)
            elif (double and ch == '\'') or (not double and ch in '\"\\'):
                # These characters are literal in the current quoting style.
                chunks.append(ch)
                self.forward()
            elif double and ch == '\\':
                self.forward()
                ch = self.peek()
                if ch in self.ESCAPE_REPLACEMENTS:
                    chunks.append(self.ESCAPE_REPLACEMENTS[ch])
                    self.forward()
                elif ch in self.ESCAPE_CODES:
                    # \xXX, \uXXXX or \UXXXXXXXX numeric escape.
                    length = self.ESCAPE_CODES[ch]
                    self.forward()
                    for k in range(length):
                        if self.peek(k) not in '0123456789ABCDEFabcdef':
                            raise ScannerError("while scanning a double-quoted scalar", start_mark,
                                    "expected escape sequence of %d hexdecimal numbers, but found %r" %
                                        (length, self.peek(k)), self.get_mark())
                    code = int(self.prefix(length), 16)
                    chunks.append(chr(code))
                    self.forward(length)
                elif ch in '\r\n\x85\u2028\u2029':
                    # An escaped line break is folded away.
                    self.scan_line_break()
                    chunks.extend(self.scan_flow_scalar_breaks(double, start_mark))
                else:
                    raise ScannerError("while scanning a double-quoted scalar", start_mark,
                            "found unknown escape character %r" % ch, self.get_mark())
            else:
                return chunks
    def scan_flow_scalar_spaces(self, double, start_mark):
        """Scan whitespace inside a quoted scalar, folding line breaks
        according to the flow folding rules."""
        # See the specification for details.
        chunks = []
        length = 0
        while self.peek(length) in ' \t':
            length += 1
        whitespaces = self.prefix(length)
        self.forward(length)
        ch = self.peek()
        if ch == '\0':
            raise ScannerError("while scanning a quoted scalar", start_mark,
                    "found unexpected end of stream", self.get_mark())
        elif ch in '\r\n\x85\u2028\u2029':
            line_break = self.scan_line_break()
            breaks = self.scan_flow_scalar_breaks(double, start_mark)
            if line_break != '\n':
                chunks.append(line_break)
            elif not breaks:
                # A single '\n' folds into one space.
                chunks.append(' ')
            chunks.extend(breaks)
        else:
            chunks.append(whitespaces)
        return chunks
    def scan_flow_scalar_breaks(self, double, start_mark):
        """Consume empty lines inside a quoted scalar, rejecting document
        separators, and return the collected line breaks."""
        # See the specification for details.
        chunks = []
        while True:
            # Instead of checking indentation, we check for document
            # separators.
            prefix = self.prefix(3)
            if (prefix == '---' or prefix == '...') \
                    and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
                raise ScannerError("while scanning a quoted scalar", start_mark,
                        "found unexpected document separator", self.get_mark())
            while self.peek() in ' \t':
                self.forward()
            if self.peek() in '\r\n\x85\u2028\u2029':
                chunks.append(self.scan_line_break())
            else:
                return chunks
    def scan_plain(self):
        """Scan a plain (unquoted) scalar and return its SCALAR token."""
        # See the specification for details.
        # We add an additional restriction for the flow context:
        #   plain scalars in the flow context cannot contain ',', ':' and '?'.
        # We also keep track of the `allow_simple_key` flag here.
        # Indentation rules are loosened for the flow context.
        chunks = []
        start_mark = self.get_mark()
        end_mark = start_mark
        indent = self.indent+1
        # We allow zero indentation for scalars, but then we need to check for
        # document separators at the beginning of the line.
        #if indent == 0:
        #    indent = 1
        spaces = []
        while True:
            length = 0
            if self.peek() == '#':
                break
            # Measure the next run of scalar characters.
            while True:
                ch = self.peek(length)
                if ch in '\0 \t\r\n\x85\u2028\u2029' \
                        or (not self.flow_level and ch == ':' and
                            self.peek(length+1) in '\0 \t\r\n\x85\u2028\u2029') \
                        or (self.flow_level and ch in ',:?[]{}'):
                    break
                length += 1
            # It's not clear what we should do with ':' in the flow context.
            if (self.flow_level and ch == ':'
                    and self.peek(length+1) not in '\0 \t\r\n\x85\u2028\u2029,[]{}'):
                self.forward(length)
                raise ScannerError("while scanning a plain scalar", start_mark,
                    "found unexpected ':'", self.get_mark(),
                    "Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.")
            if length == 0:
                break
            self.allow_simple_key = False
            chunks.extend(spaces)
            chunks.append(self.prefix(length))
            self.forward(length)
            end_mark = self.get_mark()
            spaces = self.scan_plain_spaces(indent, start_mark)
            # Stop at a comment, a document separator, or (in block
            # context) when the indentation drops below the scalar's.
            if not spaces or self.peek() == '#' \
                    or (not self.flow_level and self.column < indent):
                break
        return ScalarToken(''.join(chunks), True, start_mark, end_mark)
    def scan_plain_spaces(self, indent, start_mark):
        """Consume spaces and line breaks inside a plain scalar.

        Returns the folded whitespace chunks, or None when a document
        separator ('---'/'...') terminates the scalar.  A consumed line
        break re-enables simple keys.
        """
        # See the specification for details.
        # The specification is really confusing about tabs in plain scalars.
        # We just forbid them completely. Do not use tabs in YAML!
        chunks = []
        length = 0
        while self.peek(length) in ' ':
            length += 1
        whitespaces = self.prefix(length)
        self.forward(length)
        ch = self.peek()
        if ch in '\r\n\x85\u2028\u2029':
            line_break = self.scan_line_break()
            self.allow_simple_key = True
            prefix = self.prefix(3)
            if (prefix == '---' or prefix == '...') \
                    and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
                return
            breaks = []
            while self.peek() in ' \r\n\x85\u2028\u2029':
                if self.peek() == ' ':
                    self.forward()
                else:
                    breaks.append(self.scan_line_break())
                    prefix = self.prefix(3)
                    if (prefix == '---' or prefix == '...') \
                            and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
                        return
            # Fold the breaks: a single '\n' becomes one space.
            if line_break != '\n':
                chunks.append(line_break)
            elif not breaks:
                chunks.append(' ')
            chunks.extend(breaks)
        elif whitespaces:
            chunks.append(whitespaces)
        return chunks
    def scan_tag_handle(self, name, start_mark):
        """Scan a '!' or '!word!' tag handle and return it; ``name`` is
        only used in error messages."""
        # See the specification for details.
        # For some strange reasons, the specification does not allow '_' in
        # tag handles. I have allowed it anyway.
        ch = self.peek()
        if ch != '!':
            raise ScannerError("while scanning a %s" % name, start_mark,
                    "expected '!', but found %r" % ch, self.get_mark())
        length = 1
        ch = self.peek(length)
        if ch != ' ':
            while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
                    or ch in '-_':
                length += 1
                ch = self.peek(length)
            if ch != '!':
                self.forward(length)
                raise ScannerError("while scanning a %s" % name, start_mark,
                        "expected '!', but found %r" % ch, self.get_mark())
            length += 1
        value = self.prefix(length)
        self.forward(length)
        return value
    def scan_tag_uri(self, name, start_mark):
        """Scan a tag URI, decoding %xx escapes; the URI itself is not
        validated.  ``name`` is only used in error messages."""
        # See the specification for details.
        # Note: we do not check if URI is well-formed.
        chunks = []
        length = 0
        ch = self.peek(length)
        while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
                or ch in '-;/?:@&=+$,_.!~*\'()[]%':
            if ch == '%':
                # Flush accumulated characters, then decode the escape run.
                chunks.append(self.prefix(length))
                self.forward(length)
                length = 0
                chunks.append(self.scan_uri_escapes(name, start_mark))
            else:
                length += 1
            ch = self.peek(length)
        if length:
            chunks.append(self.prefix(length))
            self.forward(length)
            length = 0
        if not chunks:
            raise ScannerError("while parsing a %s" % name, start_mark,
                    "expected URI, but found %r" % ch, self.get_mark())
        return ''.join(chunks)
    def scan_uri_escapes(self, name, start_mark):
        """Decode a run of %xx escapes as UTF-8 and return the resulting
        string; ``name`` is only used in error messages."""
        # See the specification for details.
        codes = []
        mark = self.get_mark()
        while self.peek() == '%':
            self.forward()
            for k in range(2):
                if self.peek(k) not in '0123456789ABCDEFabcdef':
                    raise ScannerError("while scanning a %s" % name, start_mark,
                            "expected URI escape sequence of 2 hexdecimal numbers, but found %r"
                            % self.peek(k), self.get_mark())
            codes.append(int(self.prefix(2), 16))
            self.forward(2)
        try:
            value = bytes(codes).decode('utf-8')
        except UnicodeDecodeError as exc:
            raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark)
        return value
def scan_line_break(self):
# Transforms:
# '\r\n' : '\n'
# '\r' : '\n'
# '\n' : '\n'
# '\x85' : '\n'
# '\u2028' : '\u2028'
# '\u2029 : '\u2029'
# default : ''
ch = self.peek()
if ch in '\r\n\x85':
if self.prefix(2) == '\r\n':
self.forward(2)
else:
self.forward()
return '\n'
elif ch in '\u2028\u2029':
self.forward()
return ch
return ''
#try:
# import psyco
# psyco.bind(Scanner)
#except ImportError:
# pass
|
SlimRoms/android_external_chromium_org | refs/heads/lp5.0 | tools/telemetry/telemetry/web_perf/__init__.py | 99 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
The web_perf module provides utilities and measurements for benchmarking the
performance of web apps.
"""
|
AndreyPopovNew/asuswrt-merlin-rt-n | refs/heads/master | release/src/router/samba36/lib/dnspython/dns/rdtypes/IN/__init__.py | 250 | # Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Class IN rdata type classes."""
# Names of the rdata type submodules implemented for the IN (Internet)
# DNS class; ``from dns.rdtypes.IN import *`` exposes exactly these.
__all__ = [
    'A',
    'AAAA',
    'APL',
    'DHCID',
    'KX',
    'NAPTR',
    'NSAP',
    'NSAP_PTR',
    'PX',
    'SRV',
    'WKS',
]
|
openelections/openelections-core | refs/heads/dev | openelex/us/or/datasource.py | 1 | """
Oregon has CSV files containing precinct-level results for each county and all offices
for all years back to 2000. All of the files are pre-processed and available on Github at
https://github.com/openelections/openelections-data-or.
For regular primary and general elections, there are statewide county-level files. Each county has a
precinct-level results file. Special and runoff elections for non-statewide offices are contained in a single file for each office.
"""
from future import standard_library
standard_library.install_aliases()
from os.path import join
import json
import datetime
import urllib.parse
from openelex import PROJECT_ROOT
from openelex.base.datasource import BaseDatasource
from openelex.lib import build_github_url, build_raw_github_url
class Datasource(BaseDatasource):
    """Datasource for Oregon (OR) election results.

    Results come from pre-processed CSV files hosted on GitHub
    (openelections/openelections-data-or): statewide files for regular
    elections, one precinct-level file per county, and one file per
    office for special/runoff elections.
    """

    # PUBLIC INTERFACE
    def mappings(self, year=None):
        """Return array of dicts containing source url and
        standardized filename for raw results file, along
        with other pieces of metadata
        """
        mappings = []
        for yr, elecs in list(self.elections(year).items()):
            mappings.extend(self._build_metadata(yr, elecs))
        return mappings

    def target_urls(self, year=None):
        "Get list of source data urls, optionally filtered by year"
        return [item['raw_url'] for item in self.mappings(year)]

    def filename_url_pairs(self, year=None):
        """Return (generated_filename, url_to_fetch) pairs, optionally
        filtered by year."""
        return [(mapping['generated_filename'], self._url_for_fetch(mapping))
                for mapping in self.mappings(year)]

    def _url_for_fetch(self, mapping):
        """Return the URL a mapping should be downloaded from.

        Prefers the pre-processed GitHub URL, falling back to the raw
        source URL when the pre-processed one is empty or missing.

        NOTE(fix): this method was originally defined TWICE in this class;
        the second (truthiness-based) definition silently shadowed the
        first (try/except KeyError) one.  The duplicate has been removed
        and the surviving truthiness behavior kept, with ``.get`` covering
        the missing-key case the shadowed version handled.
        """
        if mapping.get('pre_processed_url'):
            return mapping['pre_processed_url']
        else:
            return mapping['raw_url']

    def mappings_for_url(self, url):
        """Return all mappings whose raw source URL equals ``url``."""
        return [mapping for mapping in self.mappings() if mapping['raw_url'] == url]

    # PRIVATE METHODS
    def _build_metadata(self, year, elections):
        """Build one metadata dict per results file for ``elections``.

        Each election is matched against the url_paths CSV rows on date
        and special-election status.  Rows without a county produce a
        single statewide file; rows with a county produce a per-county
        precinct-level file.
        """
        meta = []
        # Unused except as a cheap sanity check that ``year`` is numeric.
        year_int = int(year)
        for election in elections:
            # Match url_paths rows on date and special status.
            if election['special']:
                results = [x for x in self._url_paths() if x['date'] == election['start_date'] and x['special'] == True]
            else:
                results = [x for x in self._url_paths() if x['date'] == election['start_date'] and x['special'] == False]
            for result in results:
                if result['url']:
                    raw_url = result['url']
                else:
                    raw_url = None
                if result['county'] == '':
                    # Statewide results file.
                    generated_filename = self._generate_filename(election['start_date'], election['race_type'], result)
                    ocd_id = 'ocd-division/country:us/state:or'
                    name = "Oregon"
                else:
                    # County-level, precinct-granularity results file.
                    generated_filename = self._generate_county_filename(election['start_date'], result)
                    ocd_id = 'ocd-division/country:us/state:or/county:%s' % result['county'].lower().replace(" ", "_")
                    name = result['county']
                meta.append({
                    "generated_filename": generated_filename,
                    "raw_url": raw_url,
                    "pre_processed_url": build_raw_github_url(self.state, election['start_date'][0:4], generated_filename),
                    "ocd_id": ocd_id,
                    "name": name,
                    "election": election['slug']
                })
        return meta

    def _generate_filename(self, start_date, election_type, result):
        """Build the standardized filename for a statewide results file:
        ``YYYYMMDD__or__[special__]<type>__<office>[__<district>].csv``."""
        if result['district'] == '':
            office = result['office'].lower().replace(' ', '_')
        else:
            office = result['office'].lower().replace(' ', '_') + '__' + result['district']
        if result['special']:
            election_type = 'special__' + election_type
        bits = [
            start_date.replace('-', ''),
            self.state.lower(),
            election_type,
            office
        ]
        # Offices can be blank for statewide general/primary files.
        if office == '':
            bits.remove(office)
        name = "__".join(bits) + '.csv'
        return name

    def _generate_county_filename(self, start_date, result):
        """Build the standardized filename for a county precinct file:
        ``YYYYMMDD__or__[<party>__]<type>__<county>__precinct.csv``."""
        bits = [
            start_date.replace('-', ''),
            self.state,
        ]
        if result['party']:
            bits.append(result['party'].lower())
        bits.extend([
            result['race_type'].lower(),
            result['county'].replace(' ', '_').lower()
        ])
        bits.append('precinct')
        filename = "__".join(bits) + '.csv'
        return filename

    def _jurisdictions(self):
        """Oregon counties"""
        m = self.jurisdiction_mappings()
        mappings = [x for x in m if x['county'] != ""]
        return mappings
|
splunk/splunk-webframework | refs/heads/master | contrib/requests/requests/packages/chardet/langthaimodel.py | 235 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import constants
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# The following result for thai was collected from a limited sample (1M).
# Character Mapping Table:
# Index: TIS-620 byte value; value: frequency order of that character
# (lower = more frequent), or one of the special codes 252-255 listed above.
TIS620CharToOrderMap = ( \
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40
188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50
253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60
96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70
209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, # 80
223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, # 90
236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, # a0
49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, # b0
45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, # c0
22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, # d0
11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, # e0
68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, # f0
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 92.6386%
# first 1024 sequences:7.3177%
# rest sequences: 1.0230%
# negative sequences: 0.0436%
ThaiLangModel = ( \
0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)
# Model descriptor consumed by chardet's single-byte charset prober.
TIS620ThaiModel = {
    'charToOrderMap': TIS620CharToOrderMap,
    'precedenceMatrix': ThaiLangModel,
    'mTypicalPositiveRatio': 0.926386,
    # Plain False instead of constants.False: the values are identical under
    # Python 2 (constants.False aliases the builtin), and "constants.False"
    # is a SyntaxError under Python 3 where False is a keyword.
    'keepEnglishLetter': False,
    'charsetName': "TIS-620"
}
|
willthames/ansible | refs/heads/devel | lib/ansible/modules/cloud/cloudstack/cs_volume.py | 66 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, Jefferson Girão <jefferson@girao.net>
# (c) 2015, René Moser <mail@renemoser.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_volume
short_description: Manages volumes on Apache CloudStack based clouds.
description:
- Create, destroy, attach, detach volumes.
version_added: "2.1"
author:
- "Jefferson Girão (@jeffersongirao)"
- "René Moser (@resmo)"
options:
name:
description:
- Name of the volume.
- C(name) can only contain ASCII letters.
required: true
account:
description:
- Account the volume is related to.
required: false
default: null
custom_id:
description:
- Custom id to the resource.
- Allowed to Root Admins only.
required: false
default: null
disk_offering:
description:
- Name of the disk offering to be used.
- Required one of C(disk_offering), C(snapshot) if volume is not already C(state=present).
required: false
default: null
display_volume:
description:
- Whether to display the volume to the end user or not.
- Allowed to Root Admins only.
required: false
default: true
domain:
description:
- Name of the domain the volume to be deployed in.
required: false
default: null
max_iops:
description:
- Max iops
required: false
default: null
min_iops:
description:
- Min iops
required: false
default: null
project:
description:
- Name of the project the volume to be deployed in.
required: false
default: null
size:
description:
- Size of disk in GB
required: false
default: null
snapshot:
description:
- The snapshot name for the disk volume.
- Required one of C(disk_offering), C(snapshot) if volume is not already C(state=present).
required: false
default: null
force:
description:
- Force removal of volume even it is attached to a VM.
- Considered on C(state=absent) only.
required: false
default: false
shrink_ok:
description:
- Whether to allow to shrink the volume.
required: false
default: false
vm:
description:
- Name of the virtual machine to attach the volume to.
required: false
default: null
zone:
description:
- Name of the zone in which the volume should be deployed.
- If not set, default zone is used.
required: false
default: null
state:
description:
- State of the volume.
required: false
default: 'present'
choices: [ 'present', 'absent', 'attached', 'detached' ]
poll_async:
description:
- Poll async jobs until job has finished.
required: false
default: true
tags:
description:
- List of tags. Tags are a list of dictionaries having keys C(key) and C(value).
- "To delete all tags, set a empty list e.g. C(tags: [])."
required: false
default: null
aliases: [ 'tag' ]
version_added: "2.4"
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# Create volume within project, zone with specified storage options
- local_action:
module: cs_volume
name: web-vm-1-volume
project: Integration
zone: ch-zrh-ix-01
disk_offering: PerfPlus Storage
size: 20
# Create/attach volume to instance
- local_action:
module: cs_volume
name: web-vm-1-volume
disk_offering: PerfPlus Storage
size: 20
vm: web-vm-1
state: attached
# Detach volume
- local_action:
module: cs_volume
name: web-vm-1-volume
state: detached
# Remove volume
- local_action:
module: cs_volume
name: web-vm-1-volume
state: absent
'''
RETURN = '''
id:
description: ID of the volume.
returned: success
type: string
sample:
name:
description: Name of the volume.
returned: success
type: string
sample: web-volume-01
display_name:
description: Display name of the volume.
returned: success
type: string
sample: web-volume-01
group:
description: Group the volume belongs to
returned: success
type: string
sample: web
domain:
description: Domain the volume belongs to
returned: success
type: string
sample: example domain
project:
description: Project the volume belongs to
returned: success
type: string
sample: Production
zone:
description: Name of zone the volume is in.
returned: success
type: string
sample: ch-gva-2
created:
description: Date the volume was created.
returned: success
type: string
sample: 2014-12-01T14:57:57+0100
attached:
description: Date the volume was attached.
returned: success
type: string
sample: 2014-12-01T14:57:57+0100
type:
description: Disk volume type.
returned: success
type: string
sample: DATADISK
size:
description: Size of disk volume.
returned: success
type: string
sample: 20
vm:
description: Name of the vm the volume is attached to (not returned when detached)
returned: success
type: string
sample: web-01
state:
description: State of the volume
returned: success
type: string
sample: Attached
device_id:
description: Id of the device on user vm the volume is attached to (not returned when detached)
returned: success
type: string
sample: 1
'''
# import cloudstack common
from ansible.module_utils.cloudstack import *
class AnsibleCloudStackVolume(AnsibleCloudStack):
    """Manage CloudStack data volumes: create, resize, attach, detach, delete."""

    def __init__(self, module):
        super(AnsibleCloudStackVolume, self).__init__(module)
        # Map CloudStack API result keys to the keys returned to the user.
        self.returns = {
            'group': 'group',
            'attached': 'attached',
            'vmname': 'vm',
            'deviceid': 'device_id',
            'type': 'type',
            'size': 'size',
        }
        # Cache for the looked-up volume dict.
        self.volume = None

    # TODO implement in cloudstack utils
    def get_disk_offering(self, key=None):
        """Return the disk offering (or the field *key* of it) matching the
        disk_offering param by displaytext, name or id; fail if not found.
        Returns None when no disk_offering param was given."""
        disk_offering = self.module.params.get('disk_offering')
        if not disk_offering:
            return None

        # Do not add domain filter for disk offering listing.
        disk_offerings = self.cs.listDiskOfferings()
        if disk_offerings:
            for d in disk_offerings['diskoffering']:
                if disk_offering in [d['displaytext'], d['name'], d['id']]:
                    return self._get_by_key(key, d)
        self.module.fail_json(msg="Disk offering '%s' not found" % disk_offering)

    def get_volume(self):
        """Return (and cache) the DATADISK volume matching the name param,
        case-insensitively, or None if it does not exist."""
        if not self.volume:
            args = {
                'account': self.get_account(key='name'),
                'domainid': self.get_domain(key='id'),
                'projectid': self.get_project(key='id'),
                'zoneid': self.get_zone(key='id'),
                'displayvolume': self.module.params.get('display_volume'),
                # Only data disks are managed by this module.
                'type': 'DATADISK',
            }
            volumes = self.cs.listVolumes(**args)
            if volumes:
                volume_name = self.module.params.get('name')
                for v in volumes['volume']:
                    if volume_name.lower() == v['name'].lower():
                        self.volume = v
                        break
        return self.volume

    def get_snapshot(self, key=None):
        """Return the snapshot (or the field *key* of it) matching the
        snapshot param; fail the module if it does not exist.
        Returns None when no snapshot param was given."""
        snapshot = self.module.params.get('snapshot')
        if not snapshot:
            return None

        args = {
            'name': snapshot,
            'account': self.get_account('name'),
            'domainid': self.get_domain('id'),
            'projectid': self.get_project('id'),
        }
        snapshots = self.cs.listSnapshots(**args)
        if snapshots:
            return self._get_by_key(key, snapshots['snapshot'][0])
        self.module.fail_json(msg="Snapshot with name %s not found" % snapshot)

    def present_volume(self):
        """Ensure the volume exists: resize it when present, otherwise create
        it from a disk offering or a snapshot."""
        volume = self.get_volume()
        if volume:
            volume = self.update_volume(volume)
        else:
            disk_offering_id = self.get_disk_offering(key='id')
            snapshot_id = self.get_snapshot(key='id')

            if not disk_offering_id and not snapshot_id:
                self.module.fail_json(msg="Required one of: disk_offering,snapshot")

            self.result['changed'] = True

            args = {
                'name': self.module.params.get('name'),
                'account': self.get_account(key='name'),
                'domainid': self.get_domain(key='id'),
                'diskofferingid': disk_offering_id,
                'displayvolume': self.module.params.get('display_volume'),
                'maxiops': self.module.params.get('max_iops'),
                'miniops': self.module.params.get('min_iops'),
                'projectid': self.get_project(key='id'),
                'size': self.module.params.get('size'),
                'snapshotid': snapshot_id,
                'zoneid': self.get_zone(key='id'),
            }
            if not self.module.check_mode:
                res = self.cs.createVolume(**args)
                if 'errortext' in res:
                    self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
                poll_async = self.module.params.get('poll_async')
                if poll_async:
                    volume = self.poll_job(res, 'volume')
        if volume:
            volume = self.ensure_tags(resource=volume, resource_type='Volume')
            self.volume = volume

        return volume

    def attached_volume(self):
        """Ensure the volume exists and is attached to the requested VM,
        detaching it first if it is attached to a different VM."""
        volume = self.present_volume()

        if volume:
            if volume.get('virtualmachineid') != self.get_vm(key='id'):
                self.result['changed'] = True

                if not self.module.check_mode:
                    volume = self.detached_volume()

            if 'attached' not in volume:
                self.result['changed'] = True

                args = {
                    'id': volume['id'],
                    'virtualmachineid': self.get_vm(key='id'),
                    'deviceid': self.module.params.get('device_id'),
                }
                if not self.module.check_mode:
                    res = self.cs.attachVolume(**args)
                    if 'errortext' in res:
                        self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
                    poll_async = self.module.params.get('poll_async')
                    if poll_async:
                        volume = self.poll_job(res, 'volume')
        return volume

    def detached_volume(self):
        """Ensure the volume exists and is not attached to any VM."""
        volume = self.present_volume()

        if volume:
            if 'attached' not in volume:
                return volume

            self.result['changed'] = True

            if not self.module.check_mode:
                res = self.cs.detachVolume(id=volume['id'])
                # Bug fix: check the API response for errors, not the volume
                # dict (which never contains 'errortext'), so detach failures
                # are actually reported.
                if 'errortext' in res:
                    self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
                poll_async = self.module.params.get('poll_async')
                if poll_async:
                    volume = self.poll_job(res, 'volume')
        return volume

    def absent_volume(self):
        """Ensure the volume is removed, detaching it first (only allowed
        with force=true when it is attached)."""
        volume = self.get_volume()

        if volume:
            if 'attached' in volume and not self.module.params.get('force'):
                self.module.fail_json(msg="Volume '%s' is attached, use force=true for detaching and removing the volume." % volume.get('name'))

            self.result['changed'] = True
            if not self.module.check_mode:
                volume = self.detached_volume()

                res = self.cs.deleteVolume(id=volume['id'])
                if 'errortext' in res:
                    self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
                poll_async = self.module.params.get('poll_async')
                if poll_async:
                    res = self.poll_job(res, 'volume')

        return volume

    def update_volume(self, volume):
        """Resize the volume when the offering, iops or size params differ
        from the current state; return the (possibly updated) volume."""
        args_resize = {
            'id': volume['id'],
            'diskofferingid': self.get_disk_offering(key='id'),
            'maxiops': self.module.params.get('max_iops'),
            'miniops': self.module.params.get('min_iops'),
            'size': self.module.params.get('size'),
        }

        # The API returns the size in bytes; compare in GB like the size arg.
        # Floor division keeps the result an int on Python 3 as well
        # (plain '/' would yield a float and break the comparison there).
        volume_copy = volume.copy()
        volume_copy['size'] = volume_copy['size'] // (2 ** 30)

        if self.has_changed(args_resize, volume_copy):
            self.result['changed'] = True

            if not self.module.check_mode:
                args_resize['shrinkok'] = self.module.params.get('shrink_ok')
                res = self.cs.resizeVolume(**args_resize)
                if 'errortext' in res:
                    self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
                poll_async = self.module.params.get('poll_async')
                if poll_async:
                    volume = self.poll_job(res, 'volume')
                self.volume = volume

        return volume
def main():
    """Module entry point: build the argument spec, dispatch on state and
    return the resulting volume facts."""
    argument_spec = cs_argument_spec()
    argument_spec.update(dict(
        name=dict(required=True),
        disk_offering=dict(default=None),
        display_volume=dict(type='bool', default=None),
        max_iops=dict(type='int', default=None),
        min_iops=dict(type='int', default=None),
        size=dict(type='int', default=None),
        snapshot=dict(default=None),
        vm=dict(default=None),
        device_id=dict(type='int', default=None),
        custom_id=dict(default=None),
        force=dict(type='bool', default=False),
        shrink_ok=dict(type='bool', default=False),
        state=dict(choices=['present', 'absent', 'attached', 'detached'], default='present'),
        zone=dict(default=None),
        domain=dict(default=None),
        account=dict(default=None),
        project=dict(default=None),
        poll_async=dict(type='bool', default=True),
        tags=dict(type='list', aliases=['tag'], default=None),
    ))

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=cs_required_together(),
        mutually_exclusive=(
            ['snapshot', 'disk_offering'],
        ),
        supports_check_mode=True
    )

    try:
        acs_vol = AnsibleCloudStackVolume(module)

        state = module.params.get('state')
        if state == 'absent':
            volume = acs_vol.absent_volume()
        elif state == 'attached':
            volume = acs_vol.attached_volume()
        elif state == 'detached':
            volume = acs_vol.detached_volume()
        else:
            volume = acs_vol.present_volume()

        result = acs_vol.get_result(volume)

    except CloudStackException as e:
        module.fail_json(msg='CloudStackException: %s' % str(e))

    module.exit_json(**result)


# import module snippets
from ansible.module_utils.basic import *

if __name__ == '__main__':
    main()
|
johankaito/fufuka | refs/heads/master | node_modules/kafka-node/node_modules/snappy/node_modules/pangyp/gyp/pylib/gyp/MSVSNew.py | 601 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""New implementation of Visual Studio project generation."""
import os
import random
import gyp.common
# hashlib is supplied as of Python 2.5 as the replacement interface for md5
# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
# available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
# preserving 2.4 compatibility.
try:
import hashlib
_new_md5 = hashlib.md5
except ImportError:
import md5
_new_md5 = md5.new
# Initialize random number generator
random.seed()
# GUIDs for project types
ENTRY_TYPE_GUIDS = {
'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
}
#------------------------------------------------------------------------------
# Helper functions
def MakeGuid(name, seed='msvs_new'):
  """Return a GUID-shaped string derived deterministically from name and seed.

  Args:
    name: Target name.
    seed: Seed for the MD5 hash.

  Returns:
    A GUID-like string computed from the name and seed.

  The value depends only on the inputs, so the same name/seed pair always
  yields the same GUID: projects and solutions that refer to each other can
  compute each other's GUIDs explicitly, and a target's GUID does not change
  when its project is rebuilt.
  """
  # Hash seed+name and lay the first 32 hex digits out in 8-4-4-4-12 groups.
  digest = _new_md5(str(seed) + str(name)).hexdigest().upper()
  groups = (digest[:8], digest[8:12], digest[12:16], digest[16:20],
            digest[20:32])
  return '{%s-%s-%s-%s-%s}' % groups
#------------------------------------------------------------------------------
class MSVSSolutionEntry(object):
  """Base class for solution entries (projects and folders).

  Subclasses provide a ``name`` attribute and a ``get_guid()`` method.
  """

  def __cmp__(self, other):
    # Sort by name then guid (so things are in order on vs2008).
    # Computed as (a > b) - (a < b) instead of the cmp() builtin, which no
    # longer exists on Python 3; the result is identical on Python 2.
    a = (self.name, self.get_guid())
    b = (other.name, other.get_guid())
    return (a > b) - (a < b)

  def __lt__(self, other):
    # Rich comparison so sorted() keeps working on Python 3, where __cmp__
    # is ignored.  Same ordering key as __cmp__.
    return (self.name, self.get_guid()) < (other.name, other.get_guid())
class MSVSFolder(MSVSSolutionEntry):
  """Folder in a Visual Studio project or solution."""

  def __init__(self, path, name=None, entries=None, guid=None, items=None):
    """Initializes the folder.

    Args:
      path: Full path to the folder.
      name: Name of the folder; defaults to the last component of path.
      entries: List of Folder/Project entries nested inside this folder.
          May be None, if the folder is empty.
      guid: GUID to use for folder, if not None.
      items: List of solution items to include in the folder project.  May be
          None, if the folder does not directly contain items.
    """
    # Fall back to the last path component when no explicit name is given.
    self.name = name or os.path.basename(path)
    self.path = path
    self.guid = guid

    # Defensive copies of the caller's lists (empty lists when None).
    self.entries = sorted(list(entries or []))
    self.items = list(items or [])

    self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']

  def get_guid(self):
    """Return the folder's GUID, deriving it from the path on first use."""
    if self.guid is None:
      # Use consistent guids for folders (so things don't regenerate).
      self.guid = MakeGuid(self.path, seed='msvs_folder')
    return self.guid
#------------------------------------------------------------------------------
class MSVSProject(MSVSSolutionEntry):
  """Visual Studio project."""

  def __init__(self, path, name=None, dependencies=None, guid=None,
               spec=None, build_file=None, config_platform_overrides=None,
               fixpath_prefix=None):
    """Initializes the project.

    Args:
      path: Absolute path to the project file.
      name: Name of project; defaults to the base name of the project file.
      dependencies: List of other Project objects this project depends on.
      guid: GUID to use for project, if not None.
      spec: Dictionary specifying how to build this project.
      build_file: Filename of the .gyp file that the vcproj file comes from.
      config_platform_overrides: optional dict of configuration platforms to
          use in place of the default for this target.
      fixpath_prefix: the path used to adjust the behavior of _fixpath.
    """
    self.path = path
    self.guid = guid
    self.spec = spec
    self.build_file = build_file

    # Use the project filename when no name was specified.
    self.name = name or os.path.splitext(os.path.basename(path))[0]

    # Defensive copy of the caller's dependency list (empty when None).
    self.dependencies = list(dependencies or [])

    self.entry_type_guid = ENTRY_TYPE_GUIDS['project']
    self.config_platform_overrides = config_platform_overrides or {}
    self.fixpath_prefix = fixpath_prefix
    self.msbuild_toolset = None

  def set_dependencies(self, dependencies):
    """Replace the dependency list (copied; None means empty)."""
    self.dependencies = list(dependencies or [])

  def get_guid(self):
    """Return the project's GUID, deriving it from the name on first use."""
    if self.guid is None:
      # TODO(rspangler): Deriving the GUID from the name is fragile:
      # 1. The filename sans path is ambiguous (foo/unittest.vcproj vs
      #    bar/unittest.vcproj).
      # 2. A path would have to be relative to $SOURCE_ROOT so the GUID is
      #    identical regardless of which .sln includes it.
      # 3. The GUID must be stable across builder invocations so changing a
      #    project does not force a solution rebuild.
      # 4. Pre-built project files would need their GUID read from the file.
      self.guid = MakeGuid(self.name)
    return self.guid

  def set_msbuild_toolset(self, msbuild_toolset):
    """Record the MSBuild toolset to emit for this project."""
    self.msbuild_toolset = msbuild_toolset
#------------------------------------------------------------------------------
class MSVSSolution:
  """Visual Studio solution."""

  def __init__(self, path, version, entries=None, variants=None,
               websiteProperties=True):
    """Initializes the solution.

    Args:
      path: Path to solution file.
      version: Format version to emit.
      entries: List of entries in solution.  May contain Folder or Project
          objects.  May be None, if the folder is empty.
      variants: List of build variant strings.  If none, a default list will
          be used.
      websiteProperties: Flag to decide if the website properties section
          is generated.
    """
    self.path = path
    self.websiteProperties = websiteProperties
    self.version = version

    # Copy passed lists (or set to empty lists)
    self.entries = list(entries or [])

    if variants:
      # Copy passed list
      self.variants = variants[:]
    else:
      # Use default
      self.variants = ['Debug|Win32', 'Release|Win32']
    # TODO(rspangler): Need to be able to handle a mapping of solution config
    # to project config.  Should we be able to handle variants being a dict,
    # or add a separate variant_map variable?  If it's a dict, we can't
    # guarantee the order of variants since dict keys aren't ordered.

    # TODO(rspangler): Automatically write to disk for now; should delay until
    # node-evaluation time.
    self.Write()

  def Write(self, writer=gyp.common.WriteOnDiff):
    """Writes the solution file to disk.

    Raises:
      IndexError: An entry appears multiple times.
    """
    # Walk the entry tree and collect all the folders and projects.
    # Breadth-first over the entry lists; the visited-set makes shared
    # entries safe to reach from multiple folders.
    all_entries = set()
    entries_to_check = self.entries[:]
    while entries_to_check:
      e = entries_to_check.pop(0)

      # If this entry has been visited, nothing to do.
      if e in all_entries:
        continue

      all_entries.add(e)

      # If this is a folder, check its entries too.
      if isinstance(e, MSVSFolder):
        entries_to_check += e.entries

    # Sorted via MSVSSolutionEntry ordering (name, then guid).
    all_entries = sorted(all_entries)

    # Open file and print header
    f = writer(self.path)
    f.write('Microsoft Visual Studio Solution File, '
            'Format Version %s\r\n' % self.version.SolutionVersion())
    f.write('# %s\r\n' % self.version.Description())

    # Project entries
    sln_root = os.path.split(self.path)[0]
    for e in all_entries:
      relative_path = gyp.common.RelativePath(e.path, sln_root)
      # msbuild does not accept an empty folder_name.
      # use '.' in case relative_path is empty.
      folder_name = relative_path.replace('/', '\\') or '.'
      f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
          e.entry_type_guid,  # Entry type GUID
          e.name,  # Folder name
          folder_name,  # Folder name (again)
          e.get_guid(),  # Entry GUID
      ))

      # TODO(rspangler): Need a way to configure this stuff
      if self.websiteProperties:
        f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
                '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
                '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
                '\tEndProjectSection\r\n')

      if isinstance(e, MSVSFolder):
        if e.items:
          f.write('\tProjectSection(SolutionItems) = preProject\r\n')
          for i in e.items:
            f.write('\t\t%s = %s\r\n' % (i, i))
          f.write('\tEndProjectSection\r\n')

      if isinstance(e, MSVSProject):
        if e.dependencies:
          f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
          for d in e.dependencies:
            f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
          f.write('\tEndProjectSection\r\n')

      f.write('EndProject\r\n')

    # Global section
    f.write('Global\r\n')

    # Configurations (variants)
    f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
    for v in self.variants:
      f.write('\t\t%s = %s\r\n' % (v, v))
    f.write('\tEndGlobalSection\r\n')

    # Sort config guids for easier diffing of solution changes.
    config_guids = []
    config_guids_overrides = {}
    for e in all_entries:
      if isinstance(e, MSVSProject):
        config_guids.append(e.get_guid())
        config_guids_overrides[e.get_guid()] = e.config_platform_overrides
    config_guids.sort()

    f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
    for g in config_guids:
      for v in self.variants:
        nv = config_guids_overrides[g].get(v, v)
        # Pick which project configuration to build for this solution
        # configuration.
        f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
            g,  # Project GUID
            v,  # Solution build configuration
            nv,  # Project build config for that solution config
        ))

        # Enable project in this solution configuration.
        f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
            g,  # Project GUID
            v,  # Solution build configuration
            nv,  # Project build config for that solution config
        ))
    f.write('\tEndGlobalSection\r\n')

    # TODO(rspangler): Should be able to configure this stuff too (though I've
    # never seen this be any different)
    f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
    f.write('\t\tHideSolutionNode = FALSE\r\n')
    f.write('\tEndGlobalSection\r\n')

    # Folder mappings
    # Omit this section if there are no folders
    if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
      f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
      for e in all_entries:
        if not isinstance(e, MSVSFolder):
          continue  # Does not apply to projects, only folders
        for subentry in e.entries:
          f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
      f.write('\tEndGlobalSection\r\n')

    f.write('EndGlobal\r\n')

    f.close()
|
johnkeepmoving/oss-ftp | refs/heads/master | python27/win32/Lib/distutils/msvccompiler.py | 250 | """distutils.msvccompiler
Contains MSVCCompiler, an implementation of the abstract CCompiler class
for the Microsoft Visual Studio.
"""
# Written by Perry Stoll
# hacked by Robin Becker and Thomas Heller to do a better job of
# finding DevStudio (through the registry)
__revision__ = "$Id$"
import sys
import os
import string
from distutils.errors import (DistutilsExecError, DistutilsPlatformError,
CompileError, LibError, LinkError)
from distutils.ccompiler import CCompiler, gen_lib_options
from distutils import log
# Locate a registry-access module: prefer the standard _winreg, fall back to
# the PyWin32 win32api/win32con pair.  _can_read_reg records whether either
# was importable; the Reg* aliases below present one uniform API regardless
# of which module won.
_can_read_reg = 0
try:
    import _winreg

    _can_read_reg = 1
    hkey_mod = _winreg

    RegOpenKeyEx = _winreg.OpenKeyEx
    RegEnumKey = _winreg.EnumKey
    RegEnumValue = _winreg.EnumValue
    RegError = _winreg.error

except ImportError:
    try:
        import win32api
        import win32con
        _can_read_reg = 1
        hkey_mod = win32con

        RegOpenKeyEx = win32api.RegOpenKeyEx
        RegEnumKey = win32api.RegEnumKey
        RegEnumValue = win32api.RegEnumValue
        RegError = win32api.error
    except ImportError:
        # Neither registry module is available; compiler paths cannot be
        # discovered and get_msvc_paths() will return [].
        log.info("Warning: Can't read registry to find the "
                 "necessary compiler setting\n"
                 "Make sure that Python modules _winreg, "
                 "win32api or win32con are installed.")
        pass

if _can_read_reg:
    # Registry roots searched, in this order, for compiler settings.
    HKEYS = (hkey_mod.HKEY_USERS,
             hkey_mod.HKEY_CURRENT_USER,
             hkey_mod.HKEY_LOCAL_MACHINE,
             hkey_mod.HKEY_CLASSES_ROOT)
def read_keys(base, key):
    """Return list of registry keys under base\\key, or None if unreadable."""
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    subkeys = []
    index = 0
    # RegEnumKey raises RegError once the index runs past the last subkey.
    while 1:
        try:
            subkeys.append(RegEnumKey(handle, index))
        except RegError:
            return subkeys
        index = index + 1
def read_values(base, key):
    """Return dict of registry keys and values.

    All names are converted to lowercase.  Returns None when the key
    cannot be opened.
    """
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    values = {}
    index = 0
    # RegEnumValue raises RegError when the index is exhausted.
    while 1:
        try:
            name, value, value_type = RegEnumValue(handle, index)
        except RegError:
            return values
        values[convert_mbcs(name.lower())] = convert_mbcs(value)
        index = index + 1
def convert_mbcs(s):
    """Re-encode s via the 'mbcs' codec when possible; otherwise return it
    unchanged (non-string values and values the codec rejects pass through).
    """
    encoder = getattr(s, "encode", None)
    if encoder is None:
        return s
    try:
        return encoder("mbcs")
    except UnicodeError:
        return s
class MacroExpander:
    """Expand "$(...)" macros (e.g. "$(VCInstallDir)") in strings, using
    values looked up in the Windows registry for a given MSVC version."""

    def __init__(self, version):
        # version -- MSVC version number, as returned by get_build_version()
        self.macros = {}
        self.load_macros(version)

    def set_macro(self, macro, path, key):
        # Record the first value of 'key' found under 'path' across the
        # registry roots as the expansion of "$(macro)".  Raises KeyError
        # if the subkey exists but lacks 'key'.
        for base in HKEYS:
            d = read_values(base, path)
            if d:
                self.macros["$(%s)" % macro] = d[key]
                break

    def load_macros(self, version):
        # Populate the macro table with the VS/VC/.NET install locations.
        vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
        net = r"Software\Microsoft\.NETFramework"
        self.set_macro("FrameworkDir", net, "installroot")
        try:
            # Framework SDK registry key name changed after VS 7.0.
            if version > 7.0:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
            else:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
        except KeyError:
            # set_macro raises KeyError when the registry value is missing,
            # i.e. the matching Visual Studio is not installed.
            raise DistutilsPlatformError, \
                  ("""Python was built with Visual Studio 2003;
extensions must be built with a compiler than can generate compatible binaries.
Visual Studio 2003 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")

        p = r"Software\Microsoft\NET Framework Setup\Product"
        for base in HKEYS:
            try:
                h = RegOpenKeyEx(base, p)
            except RegError:
                continue
            key = RegEnumKey(h, 0)
            d = read_values(base, r"%s\%s" % (p, key))
            self.macros["$(FrameworkVersion)"] = d["version"]

    def sub(self, s):
        # Textually replace every known "$(...)" macro occurring in s.
        for k, v in self.macros.items():
            s = string.replace(s, k, v)
        return s
def get_build_version():
    """Return the version of MSVC that was used to build Python.

    For Python 2.3 and up, the version number is included in
    sys.version.  For earlier versions, assume the compiler is MSVC 6.
    """
    marker = "MSC v."
    pos = sys.version.find(marker)
    if pos == -1:
        # No MSC marker at all: assume the oldest supported compiler.
        return 6
    pos = pos + len(marker)
    msc_ver, rest = sys.version[pos:].split(" ", 1)
    major_version = int(msc_ver[:-2]) - 6
    minor_version = int(msc_ver[2:3]) / 10.0
    # I don't think paths are affected by minor version in version 6
    if major_version == 6:
        minor_version = 0
    if major_version >= 6:
        return major_version + minor_version
    # else we don't know what version of the compiler this is
    return None
def get_build_architecture():
    """Return the processor architecture.

    Possible results are "Intel", "Itanium", or "AMD64".
    """
    marker = " bit ("
    start = sys.version.find(marker)
    if start == -1:
        # No architecture tag in sys.version: default to x86.
        return "Intel"
    end = sys.version.find(")", start)
    return sys.version[start + len(marker):end]
def normalize_and_reduce_paths(paths):
    """Return a list of normalized paths with duplicates removed.

    The current order of paths is maintained.
    """
    # Paths are normalized so things like:  /a and /a/  aren't both preserved.
    seen = set()
    reduced_paths = []
    for p in paths:
        np = os.path.normpath(p)
        # Set membership keeps this O(n) overall; the original scanned the
        # result list per element (O(n**2), noted by its own XXX comment).
        if np not in seen:
            seen.add(np)
            reduced_paths.append(np)
    return reduced_paths
class MSVCCompiler (CCompiler) :
    """Concrete class that implements an interface to Microsoft Visual C++,
       as defined by the CCompiler abstract class."""

    compiler_type = 'msvc'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = (_c_extensions + _cpp_extensions +
                      _rc_extensions + _mc_extensions)
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__ (self, verbose=0, dry_run=0, force=0):
        CCompiler.__init__ (self, verbose, dry_run, force)
        # Which MSVC built this Python, and for which architecture; these
        # select the registry root and compiler options used below.
        self.__version = get_build_version()
        self.__arch = get_build_architecture()
        if self.__arch == "Intel":
            # x86
            if self.__version >= 7:
                self.__root = r"Software\Microsoft\VisualStudio"
                self.__macros = MacroExpander(self.__version)
            else:
                self.__root = r"Software\Microsoft\Devstudio"
            self.__product = "Visual Studio version %s" % self.__version
        else:
            # Win64. Assume this was built with the platform SDK
            self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)

        # Expensive setup (locating cl.exe etc.) is deferred to initialize().
        self.initialized = False

    def initialize(self):
        """Locate the compiler executables and set up option lists."""
        self.__paths = []
        if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
            # Assume that the SDK set up everything alright; don't try to be
            # smarter
            self.cc = "cl.exe"
            self.linker = "link.exe"
            self.lib = "lib.exe"
            self.rc = "rc.exe"
            self.mc = "mc.exe"
        else:
            self.__paths = self.get_msvc_paths("path")

            if len (self.__paths) == 0:
                raise DistutilsPlatformError, \
                      ("Python was built with %s, "
                       "and extensions need to be built with the same "
                       "version of the compiler, but it isn't installed." % self.__product)

            self.cc = self.find_exe("cl.exe")
            self.linker = self.find_exe("link.exe")
            self.lib = self.find_exe("lib.exe")
            self.rc = self.find_exe("rc.exe")   # resource compiler
            self.mc = self.find_exe("mc.exe")   # message compiler
            self.set_path_env_var('lib')
            self.set_path_env_var('include')

            # extend the MSVC path with the current path
            try:
                for p in string.split(os.environ['path'], ';'):
                    self.__paths.append(p)
            except KeyError:
                pass
            self.__paths = normalize_and_reduce_paths(self.__paths)
            os.environ['path'] = string.join(self.__paths, ';')

        self.preprocess_options = None
        if self.__arch == "Intel":
            self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GX' ,
                                     '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
                                          '/Z7', '/D_DEBUG']
        else:
            # Win64
            self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' ,
                                     '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
                                          '/Z7', '/D_DEBUG']

        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
        if self.__version >= 7:
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
                ]
        else:
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG'
                ]
        self.ldflags_static = [ '/nologo']

        self.initialized = True

    # -- Worker methods ------------------------------------------------

    def object_filenames (self,
                          source_filenames,
                          strip_dir=0,
                          output_dir=''):
        # Copied from ccompiler.py, extended to return .res as 'object'-file
        # for .rc input file
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            (base, ext) = os.path.splitext (src_name)
            base = os.path.splitdrive(base)[1] # Chop off the drive
            base = base[os.path.isabs(base):]  # If abs, chop off leading /
            if ext not in self.src_extensions:
                # Better to raise an exception instead of silently continuing
                # and later complain about sources and targets having
                # different lengths
                raise CompileError ("Don't know how to compile %s" % src_name)
            if strip_dir:
                base = os.path.basename (base)
            if ext in self._rc_extensions:
                obj_names.append (os.path.join (output_dir,
                                                base + self.res_extension))
            elif ext in self._mc_extensions:
                obj_names.append (os.path.join (output_dir,
                                                base + self.res_extension))
            else:
                obj_names.append (os.path.join (output_dir,
                                                base + self.obj_extension))
        return obj_names

    # object_filenames ()

    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):
        """Compile C/C++/.rc/.mc sources to object (or .res) files."""

        if not self.initialized: self.initialize()
        macros, objects, extra_postargs, pp_opts, build = \
                self._setup_compile(output_dir, macros, include_dirs, sources,
                                    depends, extra_postargs)

        compile_opts = extra_preargs or []
        compile_opts.append ('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                # Object is up to date / not scheduled for rebuild.
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn ([self.rc] + pp_opts +
                                [output_opt] + [input_opt])
                except DistutilsExecError, msg:
                    raise CompileError, msg
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.

                h_dir = os.path.dirname (src)
                rc_dir = os.path.dirname (obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn ([self.mc] +
                                ['-h', h_dir, '-r', rc_dir] + [src])
                    base, _ = os.path.splitext (os.path.basename (src))
                    rc_file = os.path.join (rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn ([self.rc] +
                                ["/fo" + obj] + [rc_file])

                except DistutilsExecError, msg:
                    raise CompileError, msg
                continue
            else:
                # how to handle this file?
                raise CompileError (
                    "Don't know how to compile %s to %s" % \
                    (src, obj))

            output_opt = "/Fo" + obj
            try:
                self.spawn ([self.cc] + compile_opts + pp_opts +
                            [input_opt, output_opt] +
                            extra_postargs)
            except DistutilsExecError, msg:
                raise CompileError, msg

        return objects

    # compile ()

    def create_static_lib (self,
                           objects,
                           output_libname,
                           output_dir=None,
                           debug=0,
                           target_lang=None):
        """Run lib.exe to archive 'objects' into a static library."""

        if not self.initialized: self.initialize()
        (objects, output_dir) = self._fix_object_args (objects, output_dir)
        output_filename = \
            self.library_filename (output_libname, output_dir=output_dir)

        if self._need_link (objects, output_filename):
            lib_args = objects + ['/OUT:' + output_filename]
            if debug:
                pass                    # XXX what goes here?
            try:
                self.spawn ([self.lib] + lib_args)
            except DistutilsExecError, msg:
                raise LibError, msg
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # create_static_lib ()

    def link (self,
              target_desc,
              objects,
              output_filename,
              output_dir=None,
              libraries=None,
              library_dirs=None,
              runtime_library_dirs=None,
              export_symbols=None,
              debug=0,
              extra_preargs=None,
              extra_postargs=None,
              build_temp=None,
              target_lang=None):
        """Run link.exe to produce an executable or shared library."""

        if not self.initialized: self.initialize()
        (objects, output_dir) = self._fix_object_args (objects, output_dir)
        (libraries, library_dirs, runtime_library_dirs) = \
            self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)

        if runtime_library_dirs:
            self.warn ("I don't know what to do with 'runtime_library_dirs': "
                       + str (runtime_library_dirs))

        lib_opts = gen_lib_options (self,
                                    library_dirs, runtime_library_dirs,
                                    libraries)
        if output_dir is not None:
            output_filename = os.path.join (output_dir, output_filename)

        if self._need_link (objects, output_filename):

            if target_desc == CCompiler.EXECUTABLE:
                # Executables don't take the /DLL switch (first flag).
                if debug:
                    ldflags = self.ldflags_shared_debug[1:]
                else:
                    ldflags = self.ldflags_shared[1:]
            else:
                if debug:
                    ldflags = self.ldflags_shared_debug
                else:
                    ldflags = self.ldflags_shared

            export_opts = []
            for sym in (export_symbols or []):
                export_opts.append("/EXPORT:" + sym)

            ld_args = (ldflags + lib_opts + export_opts +
                       objects + ['/OUT:' + output_filename])

            # The MSVC linker generates .lib and .exp files, which cannot be
            # suppressed by any linker switches. The .lib files may even be
            # needed! Make sure they are generated in the temporary build
            # directory. Since they have different names for debug and release
            # builds, they can go into the same directory.
            if export_symbols is not None:
                (dll_name, dll_ext) = os.path.splitext(
                    os.path.basename(output_filename))
                implib_file = os.path.join(
                    os.path.dirname(objects[0]),
                    self.library_filename(dll_name))
                ld_args.append ('/IMPLIB:' + implib_file)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath (os.path.dirname (output_filename))
            try:
                self.spawn ([self.linker] + ld_args)
            except DistutilsExecError, msg:
                raise LinkError, msg

        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # link ()

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options() function, in
    # ccompiler.py.

    def library_dir_option (self, dir):
        return "/LIBPATH:" + dir

    def runtime_library_dir_option (self, dir):
        raise DistutilsPlatformError, \
              "don't know how to set runtime library search path for MSVC++"

    def library_option (self, lib):
        return self.library_filename (lib)

    def find_library_file (self, dirs, lib, debug=0):
        # Prefer a debugging library if found (and requested), but deal
        # with it if we don't have one.
        if debug:
            try_names = [lib + "_d", lib]
        else:
            try_names = [lib]
        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename (name))
                if os.path.exists(libfile):
                    return libfile
        else:
            # NOTE: this is a for-else; since the loop has no 'break' it
            # always runs when no library was returned above.
            # Oops, didn't find it in *any* of 'dirs'
            return None

    # find_library_file ()

    # Helper methods for using the MSVC registry settings

    def find_exe(self, exe):
        """Return path to an MSVC executable program.

        Tries to find the program in several places: first, one of the
        MSVC program search paths from the registry; next, the directories
        in the PATH environment variable.  If any of those work, return an
        absolute path that is known to exist.  If none of them work, just
        return the original program name, 'exe'.
        """

        for p in self.__paths:
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        # didn't find it; try existing path
        for p in string.split(os.environ['Path'],';'):
            fn = os.path.join(os.path.abspath(p),exe)
            if os.path.isfile(fn):
                return fn

        return exe

    def get_msvc_paths(self, path, platform='x86'):
        """Get a list of devstudio directories (include, lib or path).

        Return a list of strings.  The list will be empty if unable to
        access the registry or appropriate registry keys not found.
        """

        if not _can_read_reg:
            return []

        path = path + " dirs"
        if self.__version >= 7:
            key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
                   % (self.__root, self.__version))
        else:
            key = (r"%s\6.0\Build System\Components\Platforms"
                   r"\Win32 (%s)\Directories" % (self.__root, platform))

        for base in HKEYS:
            d = read_values(base, key)
            if d:
                if self.__version >= 7:
                    # VS7+ stores values containing $(...) macros.
                    return string.split(self.__macros.sub(d[path]), ";")
                else:
                    return string.split(d[path], ";")
        # MSVC 6 seems to create the registry entries we need only when
        # the GUI is run.
        if self.__version == 6:
            for base in HKEYS:
                if read_values(base, r"%s\6.0" % self.__root) is not None:
                    self.warn("It seems you have Visual Studio 6 installed, "
                        "but the expected registry settings are not present.\n"
                        "You must at least run the Visual Studio GUI once "
                        "so that these entries are created.")
                    break
        return []

    def set_path_env_var(self, name):
        """Set environment variable 'name' to an MSVC path type value.

        This is equivalent to a SET command prior to execution of spawned
        commands.
        """

        if name == "lib":
            p = self.get_msvc_paths("library")
        else:
            p = self.get_msvc_paths(name)
        if p:
            os.environ[name] = string.join(p, ';')
# If this Python was itself built with MSVC 8.0 or newer, replace the class
# defined above with the implementation from distutils.msvc9compiler; the
# legacy class stays importable as OldMSVCCompiler.
if get_build_version() >= 8.0:
    log.debug("Importing new compiler from distutils.msvc9compiler")
    OldMSVCCompiler = MSVCCompiler
    from distutils.msvc9compiler import MSVCCompiler
    # get_build_architecture not really relevant now we support cross-compile
    from distutils.msvc9compiler import MacroExpander
|
0-wiz-0/audacity | refs/heads/master | lib-src/lv2/sratom/waflib/extras/autowaf.py | 176 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import glob
import os
import subprocess
import sys
from waflib import Configure,Context,Logs,Node,Options,Task,Utils
from waflib.TaskGen import feature,before,after
# Module state: g_is_child marks this autowaf instance as driven from a
# parent project's build (see set_recursive()/is_child()); g_step tracks how
# far set-up has progressed so set_options() and configure() run only once.
global g_is_child
g_is_child = False

global g_step
g_step = 0
@feature('c', 'cxx')
@after('apply_incpaths')
def include_config_h(self):
    # Add the build directory (where generated config headers live) to the
    # include path of every C/C++ task generator.
    self.env.append_value('INCPATHS', self.bld.bldnode.abspath())
def set_options(opt, debug_by_default=False):
    "Add standard autowaf options to the parser (runs at most once via g_step)"
    global g_step
    if g_step > 0:
        return

    # Install directory options: move waf's own --prefix/--destdir into the
    # same option group so all path options display together.
    dirs_options = opt.add_option_group('Installation directories', '')
    for k in ('--prefix', '--destdir'):
        option = opt.parser.get_option(k)
        if option:
            opt.parser.remove_option(k)
            dirs_options.add_option(option)
    dirs_options.add_option('--bindir', type='string', help="Executable programs [Default: PREFIX/bin]")
    dirs_options.add_option('--configdir', type='string', help="Configuration data [Default: PREFIX/etc]")
    dirs_options.add_option('--datadir', type='string', help="Shared data [Default: PREFIX/share]")
    dirs_options.add_option('--includedir', type='string', help="Header files [Default: PREFIX/include]")
    dirs_options.add_option('--libdir', type='string', help="Libraries [Default: PREFIX/lib]")
    dirs_options.add_option('--mandir', type='string', help="Manual pages [Default: DATADIR/man]")
    dirs_options.add_option('--docdir', type='string', help="HTML documentation [Default: DATADIR/doc]")

    # Either --optimize (when debug is the default) or --debug, never both.
    if debug_by_default:
        opt.add_option('--optimize', action='store_false', default=True, dest='debug', help="Build optimized binaries")
    else:
        opt.add_option('--debug', action='store_true', default=False, dest='debug', help="Build debuggable binaries")
    opt.add_option('--pardebug', action='store_true', default=False, dest='pardebug', help="Build parallel-installable debuggable libraries with D suffix")

    opt.add_option('--grind', action='store_true', default=False, dest='grind', help="Run tests in valgrind")
    opt.add_option('--strict', action='store_true', default=False, dest='strict', help="Use strict compiler flags and show all warnings")
    opt.add_option('--ultra-strict', action='store_true', default=False, dest='ultra_strict', help="Use even stricter compiler flags (likely to trigger many warnings in library headers)")
    opt.add_option('--docs', action='store_true', default=False, dest='docs', help="Build documentation - requires doxygen")

    # LV2 bundle install location (user, system, or explicit directory).
    opt.add_option('--lv2-user', action='store_true', default=False, dest='lv2_user', help="Install LV2 bundles to user location")
    opt.add_option('--lv2-system', action='store_true', default=False, dest='lv2_system', help="Install LV2 bundles to system location")
    dirs_options.add_option('--lv2dir', type='string', help="LV2 bundles [Default: LIBDIR/lv2]")
    g_step = 1
def check_header(conf, lang, name, define='', mandatory=True):
    "Check for a header in the given language ('c' or 'cxx')"
    includes = ''
    if sys.platform == "darwin":
        # MacPorts default include location.
        includes = '/opt/local/include'
    if lang == 'c':
        check_func = conf.check_cc
    elif lang == 'cxx':
        check_func = conf.check_cxx
    else:
        Logs.error("Unknown header language `%s'" % lang)
        return
    if define != '':
        check_func(header_name=name, includes=includes, define_name=define, mandatory=mandatory)
    else:
        check_func(header_name=name, includes=includes, mandatory=mandatory)
def nameify(name):
    "Map a package name to an identifier-safe form ('/'->'_', '++'->'PP', ...)"
    result = name
    for old, new in (('/', '_'), ('++', 'PP'), ('-', '_'), ('.', '_')):
        result = result.replace(old, new)
    return result
def define(conf, var_name, value):
    # Record value both as a config define (for generated headers) and as a
    # conf.env entry (so wscripts can read it back).
    conf.define(var_name, value)
    conf.env[var_name] = value
def check_pkg(conf, name, **args):
    """Check for a pkg-config package, unless it was already found locally
    or an equally-strict check already succeeded."""
    if args['uselib_store'].lower() in conf.env['AUTOWAF_LOCAL_LIBS']:
        # Library is built in this source tree; no external check needed.
        return
    class CheckType:
        OPTIONAL = 1
        MANDATORY = 2
    var_name = 'CHECKED_' + nameify(args['uselib_store'])
    check = not var_name in conf.env
    mandatory = not 'mandatory' in args or args['mandatory']
    if not check and 'atleast_version' in args:
        # Re-check if a newer version is required than previously checked.
        checked_version = conf.env['VERSION_' + name]
        if checked_version and checked_version < args['atleast_version']:
            check = True;
    if not check and mandatory and conf.env[var_name] == CheckType.OPTIONAL:
        # Re-check if the earlier check was optional but this one is not.
        check = True;
    if check:
        found = None
        pkg_var_name = 'PKG_' + name.replace('-', '_')
        pkg_name = name
        if conf.env.PARDEBUG:
            # Try the parallel-debug variant ("<name>D") first, optionally.
            args['mandatory'] = False
            found = conf.check_cfg(package=pkg_name + 'D', args="--cflags --libs", **args)
            if found:
                pkg_name += 'D'
            if mandatory:
                args['mandatory'] = True
        if not found:
            found = conf.check_cfg(package=pkg_name, args="--cflags --libs", **args)
        if found:
            conf.env[pkg_var_name] = pkg_name
        if 'atleast_version' in args:
            conf.env['VERSION_' + name] = args['atleast_version']
    # Remember how strict this check was, for the re-check logic above.
    if mandatory:
        conf.env[var_name] = CheckType.MANDATORY
    else:
        conf.env[var_name] = CheckType.OPTIONAL
def normpath(path):
    "Normalize a path, using forward slashes even on Windows"
    normalized = os.path.normpath(path)
    if sys.platform == 'win32':
        normalized = normalized.replace('\\', '/')
    return normalized
def configure(conf):
    "Global autowaf configuration: directories, flags, doc tools (runs once)"
    global g_step
    if g_step > 1:
        return
    def append_cxx_flags(flags):
        # Apply the same flags to both C and C++ builds.
        conf.env.append_value('CFLAGS', flags)
        conf.env.append_value('CXXFLAGS', flags)
    print('')
    display_header('Global Configuration')

    if Options.options.docs:
        conf.load('doxygen')

    conf.env['DOCS'] = Options.options.docs
    conf.env['DEBUG'] = Options.options.debug or Options.options.pardebug
    conf.env['PARDEBUG'] = Options.options.pardebug
    conf.env['PREFIX'] = normpath(os.path.abspath(os.path.expanduser(conf.env['PREFIX'])))

    def config_dir(var, opt, default):
        # Use the command-line value when given, else the default.
        if opt:
            conf.env[var] = normpath(opt)
        else:
            conf.env[var] = normpath(default)

    opts = Options.options
    prefix = conf.env['PREFIX']
    config_dir('BINDIR', opts.bindir, os.path.join(prefix, 'bin'))
    config_dir('SYSCONFDIR', opts.configdir, os.path.join(prefix, 'etc'))
    config_dir('DATADIR', opts.datadir, os.path.join(prefix, 'share'))
    config_dir('INCLUDEDIR', opts.includedir, os.path.join(prefix, 'include'))
    config_dir('LIBDIR', opts.libdir, os.path.join(prefix, 'lib'))
    config_dir('MANDIR', opts.mandir, os.path.join(conf.env['DATADIR'], 'man'))
    config_dir('DOCDIR', opts.docdir, os.path.join(conf.env['DATADIR'], 'doc'))

    # LV2 bundle directory: explicit --lv2dir wins, then per-user or
    # system-wide platform conventions, else LIBDIR/lv2.
    if Options.options.lv2dir:
        conf.env['LV2DIR'] = Options.options.lv2dir
    elif Options.options.lv2_user:
        if sys.platform == "darwin":
            conf.env['LV2DIR'] = os.path.join(os.getenv('HOME'), 'Library/Audio/Plug-Ins/LV2')
        elif sys.platform == "win32":
            conf.env['LV2DIR'] = os.path.join(os.getenv('APPDATA'), 'LV2')
        else:
            conf.env['LV2DIR'] = os.path.join(os.getenv('HOME'), '.lv2')
    elif Options.options.lv2_system:
        if sys.platform == "darwin":
            conf.env['LV2DIR'] = '/Library/Audio/Plug-Ins/LV2'
        elif sys.platform == "win32":
            conf.env['LV2DIR'] = os.path.join(os.getenv('COMMONPROGRAMFILES'), 'LV2')
        else:
            conf.env['LV2DIR'] = os.path.join(conf.env['LIBDIR'], 'lv2')
    else:
        conf.env['LV2DIR'] = os.path.join(conf.env['LIBDIR'], 'lv2')
    conf.env['LV2DIR'] = normpath(conf.env['LV2DIR'])

    if Options.options.docs:
        doxygen = conf.find_program('doxygen')
        if not doxygen:
            conf.fatal("Doxygen is required to build with --docs")
        dot = conf.find_program('dot')
        if not dot:
            conf.fatal("Graphviz (dot) is required to build with --docs")

    # Compiler flags: debug vs optimized, MSVC vs GCC-style.
    if Options.options.debug:
        if conf.env['MSVC_COMPILER']:
            conf.env['CFLAGS'] = ['/Od', '/Zi', '/MTd']
            conf.env['CXXFLAGS'] = ['/Od', '/Zi', '/MTd']
            conf.env['LINKFLAGS'] = ['/DEBUG']
        else:
            conf.env['CFLAGS'] = ['-O0', '-g']
            conf.env['CXXFLAGS'] = ['-O0', '-g']
    else:
        if conf.env['MSVC_COMPILER']:
            conf.env['CFLAGS'] = ['/MD']
            conf.env['CXXFLAGS'] = ['/MD']
        append_cxx_flags(['-DNDEBUG'])

    if Options.options.ultra_strict:
        Options.options.strict = True
        conf.env.append_value('CFLAGS', ['-Wredundant-decls', '-Wstrict-prototypes', '-Wmissing-prototypes', '-Wcast-qual'])
        conf.env.append_value('CXXFLAGS', ['-Wcast-qual'])

    if Options.options.strict:
        conf.env.append_value('CFLAGS', ['-pedantic', '-Wshadow'])
        conf.env.append_value('CXXFLAGS', ['-ansi', '-Wnon-virtual-dtor', '-Woverloaded-virtual'])
        append_cxx_flags(['-Wall', '-Wcast-align', '-Wextra', '-Wmissing-declarations', '-Wno-unused-parameter', '-Wstrict-overflow', '-Wundef', '-Wwrite-strings', '-fstrict-overflow'])
        # Add GCC-only warnings when the compiler is not clang.
        if not conf.check_cc(fragment='''
#ifndef __clang__
#error
#endif
int main() { return 0; }''', features='c', mandatory=False, execute=False, msg='Checking for clang'):
            append_cxx_flags(['-Wlogical-op', '-Wsuggest-attribute=noreturn', '-Wunsafe-loop-optimizations'])

    if not conf.env['MSVC_COMPILER']:
        append_cxx_flags(['-fshow-column'])

    conf.env.prepend_value('CFLAGS', '-I' + os.path.abspath('.'))
    conf.env.prepend_value('CXXFLAGS', '-I' + os.path.abspath('.'))

    display_msg(conf, "Install prefix", conf.env['PREFIX'])
    display_msg(conf, "Debuggable build", str(conf.env['DEBUG']))
    display_msg(conf, "Build documentation", str(conf.env['DOCS']))
    print('')

    g_step = 2
def set_c99_mode(conf):
    "Set the C compiler to C99 mode ('-TP' when using MSVC)"
    if conf.env.MSVC_COMPILER:
        flag = '-TP'
    else:
        flag = '-std=c99'
    conf.env.append_unique('CFLAGS', [flag])
def set_local_lib(conf, name, has_objects):
    """Register a library that lives in this source tree.

    Defines HAVE_<NAME> and records the library in AUTOWAF_LOCAL_LIBS
    (linkable objects) or AUTOWAF_LOCAL_HEADERS (header-only), which
    use_lib() and check_pkg() consult to avoid external lookups.
    """
    var_name = 'HAVE_' + nameify(name.upper())
    define(conf, var_name, 1)
    if has_objects:
        # Lazily create the registry dicts; conf.env values default to
        # non-dict placeholders.
        if type(conf.env['AUTOWAF_LOCAL_LIBS']) != dict:
            conf.env['AUTOWAF_LOCAL_LIBS'] = {}
        conf.env['AUTOWAF_LOCAL_LIBS'][name.lower()] = True
    else:
        if type(conf.env['AUTOWAF_LOCAL_HEADERS']) != dict:
            conf.env['AUTOWAF_LOCAL_HEADERS'] = {}
        conf.env['AUTOWAF_LOCAL_HEADERS'][name.lower()] = True
def append_property(obj, key, val):
    "Append val to obj's attribute 'key', creating the attribute if missing"
    try:
        existing = getattr(obj, key)
    except AttributeError:
        setattr(obj, key, val)
    else:
        setattr(obj, key, existing + val)
def use_lib(bld, obj, libs):
    """Make task generator 'obj' use each library named in 'libs'
    (whitespace-separated), whether local to this tree or external."""
    abssrcdir = os.path.abspath('.')
    libs_list = libs.split()
    for l in libs_list:
        in_headers = l.lower() in bld.env['AUTOWAF_LOCAL_HEADERS']
        in_libs = l.lower() in bld.env['AUTOWAF_LOCAL_LIBS']
        if in_libs:
            # Local library with objects: link against the in-tree target.
            append_property(obj, 'use', ' lib%s ' % l.lower())
            append_property(obj, 'framework', bld.env['FRAMEWORK_' + l])
        if in_headers or in_libs:
            # Local headers: add the library's source dir as an include path.
            inc_flag = '-iquote ' + os.path.join(abssrcdir, l.lower())
            for f in ['CFLAGS', 'CXXFLAGS']:
                if not inc_flag in bld.env[f]:
                    bld.env.prepend_value(f, inc_flag)
        else:
            # External library found via pkg-config.
            append_property(obj, 'uselib', ' ' + l)
@feature('c', 'cxx')
@before('apply_link')
def version_lib(self):
    # Runs for every C/C++ task generator before linking.
    if sys.platform == 'win32':
        self.vnum = None  # no versioned library names on Windows
    if self.env['PARDEBUG']:
        # Parallel-debug build: suffix library targets with 'D'.
        applicable = ['cshlib', 'cxxshlib', 'cstlib', 'cxxstlib']
        if [x for x in applicable if x in self.features]:
            self.target = self.target + 'D'
def set_lib_env(conf, name, version):
    'Set up environment for local library as if found via pkg-config.'
    NAME = name.upper()
    major_ver = version.split('.')[0]
    pkg_var_name = 'PKG_' + name.replace('-', '_') + '_' + major_ver
    # Library name is versioned by major number, e.g. "foo-1" ("foo-1D"
    # for parallel-debug builds).
    lib_name = '%s-%s' % (name, major_ver)
    if conf.env.PARDEBUG:
        lib_name += 'D'
    conf.env[pkg_var_name] = lib_name
    conf.env['INCLUDES_' + NAME] = ['${INCLUDEDIR}/%s-%s' % (name, major_ver)]
    conf.env['LIBPATH_' + NAME] = [conf.env.LIBDIR]
    conf.env['LIB_' + NAME] = [lib_name]
def display_header(title):
    """Print a bold section header line."""
    Logs.pprint('BOLD', title)
def display_msg(conf, msg, status=None, color=None):
    """Print an aligned "  * msg : status" configuration summary line.

    When 'color' is given it is used as-is; otherwise it is derived from
    'status' (green for true-ish, yellow for false-ish, cyan by default).
    The original unconditionally overwrote 'color', so the parameter was
    dead; honour a caller-supplied value instead (default behavior is
    unchanged since no caller in this file passes it).
    """
    if color is None:
        color = 'CYAN'
        if type(status) == bool and status or status == "True":
            color = 'GREEN'
        elif type(status) == bool and not status or status == "False":
            color = 'YELLOW'
    Logs.pprint('BOLD', " *", sep='')
    Logs.pprint('NORMAL', "%s" % msg.ljust(conf.line_just - 3), sep='')
    Logs.pprint('BOLD', ":", sep='')
    Logs.pprint(color, status)
def link_flags(env, lib):
    "Return the linker flags for 'lib', formatted with env's LIB_ST pattern"
    pattern = env['LIB_ST']
    return ' '.join([pattern % entry for entry in env['LIB_' + lib]])
def compile_flags(env, lib):
    "Return include flags for 'lib', formatted with env's CPPPATH_ST pattern"
    pattern = env['CPPPATH_ST']
    return ' '.join([pattern % entry for entry in env['INCLUDES_' + lib]])
def set_recursive():
    """Mark this autowaf session as running inside a parent project's build."""
    global g_is_child
    g_is_child = True
def is_child():
    """Return True if set_recursive() was called (nested project build)."""
    global g_is_child
    return g_is_child
def build_pc(bld, name, version, version_suffix, libs, subst_dict=None):
    '''Build a pkg-config file for a library.

    name -- uppercase variable name       (e.g. 'SOMENAME')
    version -- version string             (e.g. '1.2.3')
    version_suffix -- name version suffix (e.g. '2')
    libs -- string/list of dependencies   (e.g. 'LIBFOO GLIB')

    The original default was a mutable {} that this function mutates, so
    substitutions from one call leaked into every subsequent default-arg
    call; default to None and build a fresh dict per call.  A caller-passed
    dict is still updated in place, as before.
    '''
    if subst_dict is None:
        subst_dict = {}
    pkg_prefix = bld.env['PREFIX']
    if pkg_prefix[-1] == '/':
        pkg_prefix = pkg_prefix[:-1]
    # Target file name: <name>[-<suffix>][D].pc
    target = name.lower()
    if version_suffix != '':
        target += '-' + version_suffix
    if bld.env['PARDEBUG']:
        target += 'D'
    target += '.pc'
    # Express libdir/includedir relative to the pkg-config prefix variables.
    libdir = bld.env['LIBDIR']
    if libdir.startswith(pkg_prefix):
        libdir = libdir.replace(pkg_prefix, '${exec_prefix}')
    includedir = bld.env['INCLUDEDIR']
    if includedir.startswith(pkg_prefix):
        includedir = includedir.replace(pkg_prefix, '${prefix}')
    obj = bld(features='subst',
              source='%s.pc.in' % name.lower(),
              target=target,
              install_path=os.path.join(bld.env['LIBDIR'], 'pkgconfig'),
              exec_prefix='${prefix}',
              PREFIX=pkg_prefix,
              EXEC_PREFIX='${prefix}',
              LIBDIR=libdir,
              INCLUDEDIR=includedir)
    if type(libs) != list:
        libs = libs.split()
    # Substitution variables for the .pc template: version and, per
    # dependency, its link and compile flags.
    subst_dict[name + '_VERSION'] = version
    subst_dict[name + '_MAJOR_VERSION'] = version[0:version.find('.')]
    for i in libs:
        subst_dict[i + '_LIBS'] = link_flags(bld.env, i)
        lib_cflags = compile_flags(bld.env, i)
        if lib_cflags == '':
            lib_cflags = ' '
        subst_dict[i + '_CFLAGS'] = lib_cflags
    obj.__dict__.update(subst_dict)
def build_dir(name, subdir):
    "Return the build path of subdir, nested under name for child builds"
    parts = ['build']
    if is_child():
        parts.append(name)
    parts.append(subdir)
    return os.path.join(*parts)
def make_simple_dox(name):
    """Clean up Doxygen output for a single-group project.

    Rewrites the generated HTML into a standalone index.html (via sed) and
    strips NAME_API markers from generated man pages.  The working
    directory is now restored via try/finally: the original left the
    process chdir'd into the build tree on the early return (missing
    group page) and on error.
    """
    name = name.lower()
    NAME = name.upper()
    top = os.getcwd()
    try:
        os.chdir(build_dir(name, 'doc/html'))
        page = 'group__%s.html' % name
        if not os.path.exists(page):
            return
        # Apply the same sed substitutions as before to de-doxygen the page.
        for i in [['%s_API ' % NAME, ''],
                  ['%s_DEPRECATED ' % NAME, ''],
                  ['group__%s.html' % name, ''],
                  [' ', ''],
                  ['<script.*><\/script>', ''],
                  ['<hr\/><a name="details" id="details"><\/a><h2>.*<\/h2>', ''],
                  ['<link href=\"tabs.css\" rel=\"stylesheet\" type=\"text\/css\"\/>', ''],
                  ['<img class=\"footer\" src=\"doxygen.png\" alt=\"doxygen\"\/>', 'Doxygen']]:
            os.system("sed -i 's/%s/%s/g' %s" % (i[0], i[1], page))
        os.rename('group__%s.html' % name, 'index.html')
        for i in (glob.glob('*.png') + glob.glob('*.html')
                  + glob.glob('*.js') + glob.glob('*.css')):
            if i != 'index.html' and i != 'style.css':
                os.remove(i)
        os.chdir(top)
        os.chdir(build_dir(name, 'doc/man/man3'))
        for i in glob.glob('*.3'):
            os.system("sed -i 's/%s_API //' %s" % (NAME, i))
        for i in glob.glob('_*'):
            os.remove(i)
    except Exception as e:
        Logs.error("Failed to fix up %s documentation: %s" % (name, e))
    finally:
        # Always return to the original directory.
        os.chdir(top)
def build_dox(bld, name, version, srcdir, blddir, outdir='', versioned=True):
    """Build and install Doxygen documentation (no-op unless --docs)."""
    if not bld.env['DOCS']:
        return
    # Child projects keep their docs under their own subdirectory.
    if is_child():
        src_dir = os.path.join(srcdir, name.lower())
        doc_dir = os.path.join(blddir, name.lower(), 'doc')
    else:
        src_dir = srcdir
        doc_dir = os.path.join(blddir, 'doc')
    # Generate the Doxyfile from its template, substituting version/paths.
    subst_tg = bld(features='subst',
                   source='doc/reference.doxygen.in',
                   target='doc/reference.doxygen',
                   install_path='',
                   name='doxyfile')
    subst_dict = {name + '_VERSION': version, name + '_SRCDIR': os.path.abspath(src_dir), name + '_DOC_DIR': os.path.abspath(doc_dir)}
    subst_tg.__dict__.update(subst_dict)
    subst_tg.post()
    # Run Doxygen on the generated Doxyfile.
    docs = bld(features='doxygen', doxyfile='doc/reference.doxygen')
    docs.post()
    # Install HTML (optionally under a major-versioned directory) and man pages.
    outname = name.lower()
    if versioned:
        outname += '-%d' % int(version[0:version.find('.')])
    bld.install_files(os.path.join('${DOCDIR}', outname, outdir, 'html'), bld.path.get_bld().ant_glob('doc/html/*'))
    for i in range(1, 8):
        bld.install_files('${MANDIR}/man%d' % i, bld.path.get_bld().ant_glob('doc/man/man%d/*' % i, excl='**/_*'))
def build_version_files(header_path,source_path,domain,major,minor,micro):
    """Write <domain> version numbers as a C source file and matching header.

    The source file defines int <domain>_{major,minor,micro}_version; the
    header declares them (plus an extern <domain>_revision string) behind an
    include guard.  On I/O failure the error is logged and the process exits
    with status -1.  Returns None.
    """
    header_path=os.path.abspath(header_path)
    source_path=os.path.abspath(source_path)

    source_text = (
        "int " + domain + "_major_version = " + str(major) + ";\n"
        "int " + domain + "_minor_version = " + str(minor) + ";\n"
        "int " + domain + "_micro_version = " + str(micro) + ";\n"
    )
    try:
        # Fix: use 'with' so the handle is closed even if write() fails.
        with open(source_path, 'w') as out:
            out.write(source_text)
    except IOError:
        Logs.error('Failed to open %s for writing\n' % source_path)
        sys.exit(-1)

    guard = "__" + domain + "_version_h__"
    header_text = (
        "#ifndef " + guard + "\n"
        "#define " + guard + "\n"
        "extern const char* " + domain + "_revision;\n"
        "extern int " + domain + "_major_version;\n"
        "extern int " + domain + "_minor_version;\n"
        "extern int " + domain + "_micro_version;\n"
        "#endif /* " + guard + " */\n"
    )
    try:
        with open(header_path, 'w') as out:
            out.write(header_text)
    except IOError:
        # Fix: this path used Logs.warn while the source path used Logs.error,
        # yet both are fatal — use error for consistency.
        Logs.error('Failed to open %s for writing\n' % header_path)
        sys.exit(-1)
def build_i18n_pot(bld,srcdir,dir,name,sources,copyright_holder=None):
    """Run xgettext over *sources* to (re)generate the <name>.pot template."""
    Logs.info('Generating pot file from %s'%name)
    pot_file = '%s.pot' % name
    xgettext_cmd = [
        'xgettext',
        '--keyword=_',
        '--keyword=N_',
        '--keyword=S_',
        '--from-code=UTF-8',
        '-o', pot_file,
    ]
    if copyright_holder:
        xgettext_cmd.append('--copyright-holder="%s"' % copyright_holder)
    xgettext_cmd.extend(sources)
    Logs.info('Updating ' + pot_file)
    subprocess.call(xgettext_cmd, cwd=os.path.join(srcdir, dir))
def build_i18n_po(bld,srcdir,dir,name,sources,copyright_holder=None):
    """Merge new strings from the <name>.pot template into every po/*.po catalogue."""
    previous_cwd = os.getcwd()
    os.chdir(os.path.join(srcdir, dir))
    template = '%s.pot' % name
    for catalogue in glob.glob('po/*.po'):
        Logs.info('Updating ' + catalogue)
        subprocess.call(['msgmerge', '--update', catalogue, template])
    os.chdir(previous_cwd)
def build_i18n_mo(bld,srcdir,dir,name,sources,copyright_holder=None):
    """Compile every po/*.po catalogue into its binary .mo counterpart with msgfmt."""
    pwd = os.getcwd()
    os.chdir(os.path.join(srcdir, dir))
    try:
        # Fix: dropped the unused 'pot_file' local (computed but never read).
        for po_file in glob.glob('po/*.po'):
            mo_file = po_file.replace('.po', '.mo')
            # Fix: the log line reported the source .po; report the file
            # actually being generated (.mo).
            Logs.info('Generating ' + mo_file)
            subprocess.call(['msgfmt', '-c', '-f', '-o', mo_file, po_file])
    finally:
        # Fix: restore the original working directory even if msgfmt/glob fails.
        os.chdir(pwd)
def build_i18n(bld,srcdir,dir,name,sources,copyright_holder=None):
    """Full i18n pipeline: extract (.pot), merge (.po), then compile (.mo)."""
    for step in (build_i18n_pot, build_i18n_po, build_i18n_mo):
        step(bld, srcdir, dir, name, sources, copyright_holder)
def cd_to_build_dir(ctx,appname):
    """Enter the waf build directory and announce it in make-compatible form.

    From a top-level wscript each child project builds in build/<appname>;
    otherwise the project builds directly in build/.
    """
    # Fix: dropped the unused local 'orig_dir' (computed but never read;
    # cd_to_orig_dir() navigates back with relative paths instead).
    top_level=(len(ctx.stack_path)>1)
    if top_level:
        os.chdir(os.path.join('build',appname))
    else:
        os.chdir('build')
    Logs.pprint('GREEN',"Waf: Entering directory `%s'"%os.path.abspath(os.getcwd()))
def cd_to_orig_dir(ctx,child):
    """Step back out of the build directory entered by cd_to_build_dir().

    A child project sits two levels deep (build/<appname>); otherwise one.
    """
    target = os.path.join('..', '..') if child else '..'
    os.chdir(target)
def pre_test(ctx,appname,dirs=['src']):
    """Zero lcov coverage counters before a test run.

    Enters the build directory (stays there for post_test) and runs
    'lcov -z' over *dirs*, logging output to lcov-clear.log.  A missing
    lcov is tolerated with a warning.

    NOTE: the mutable default for 'dirs' is kept for interface
    compatibility; it is only read, never mutated, so it is harmless.
    """
    diropts=''
    for i in dirs:
        diropts+=' -d '+i
    cd_to_build_dir(ctx,appname)
    # Fix: manage the log file with 'with' instead of manual close().
    with open('lcov-clear.log','w') as clear_log:
        try:
            subprocess.call(('lcov %s -z'%diropts).split(),stdout=clear_log,stderr=clear_log)
        except Exception:
            # Fix: was a bare 'except:', which would also swallow
            # SystemExit/KeyboardInterrupt.
            Logs.warn('Failed to run lcov, no coverage report will be generated')
def post_test(ctx,appname,dirs=['src'],remove=['*boost*','c++*']):
    """Capture lcov coverage after a test run and render an HTML report.

    Captures counters ('lcov -c'), strips patterns in *remove* from the
    capture, renders HTML into coverage/ with genhtml, then leaves the
    build directory entered by pre_test() and prints the report location.

    NOTE(review): indentation reconstructed from upstream autowaf — the
    directory-leaving block is assumed to live in the 'finally' clause;
    confirm against the original file.
    """
    diropts=''
    for i in dirs:
        diropts+=' -d '+i
    coverage_log=open('lcov-coverage.log','w')
    coverage_lcov=open('coverage.lcov','w')
    coverage_stripped_lcov=open('coverage-stripped.lcov','w')
    try:
        try:
            # g_is_child is a module-level flag; child projects are one
            # directory deeper, so lcov's base directory moves up one level.
            base='.'
            if g_is_child:
                base='..'
            # Capture counters, strip unwanted patterns, then render HTML.
            subprocess.call(('lcov -c %s -b %s'%(diropts,base)).split(),stdout=coverage_lcov,stderr=coverage_log)
            subprocess.call(['lcov','--remove','coverage.lcov']+remove,stdout=coverage_stripped_lcov,stderr=coverage_log)
            if not os.path.isdir('coverage'):
                os.makedirs('coverage')
            subprocess.call('genhtml -o coverage coverage-stripped.lcov'.split(),stdout=coverage_log,stderr=coverage_log)
        except:
            Logs.warn('Failed to run lcov, no coverage report will be generated')
    finally:
        coverage_stripped_lcov.close()
        coverage_lcov.close()
        coverage_log.close()

        print('')
        Logs.pprint('GREEN',"Waf: Leaving directory `%s'"%os.path.abspath(os.getcwd()))
        top_level=(len(ctx.stack_path)>1)
        if top_level:
            cd_to_orig_dir(ctx,top_level)

    print('')
    Logs.pprint('BOLD','Coverage:',sep='')
    print('<file://%s>\n\n'%os.path.abspath('coverage/index.html'))
def run_test(ctx,appname,test,desired_status=0,dirs=['src'],name='',header=False):
    """Run one test command (string or argv list) through the shell.

    Prints the command, optionally a header, then a colored pass/fail line.
    Returns True when the command exits with *desired_status*, else False.
    """
    s=test
    if isinstance(test, list):
        # Bug fix: this was ' '.join(i) — 'i' is undefined at this point, so
        # every list-valued test raised NameError before it could run.
        # Also replaced 'type(test)==type([])' with the isinstance idiom.
        s=' '.join(test)
    if header:
        Logs.pprint('BOLD','** Test',sep='')
    Logs.pprint('NORMAL','%s'%s)
    cmd=test
    if Options.options.grind:
        # NOTE(review): assumes 'test' is a string here; a list-valued test
        # under --grind would raise TypeError — confirm callers.
        cmd='valgrind '+test
    if subprocess.call(cmd,shell=True)==desired_status:
        Logs.pprint('GREEN','** Pass %s'%name)
        return True
    else:
        Logs.pprint('RED','** FAIL %s'%name)
        return False
def run_tests(ctx,appname,tests,desired_status=0,dirs=['src'],name='*',headers=False):
    """Run each command in *tests* via run_test() and print a summary line.

    Each test's own string doubles as its display name.  Returns None.
    """
    # Fix: removed the dead 'diropts' accumulation — it was built from
    # 'dirs' but never used anywhere in this function.
    failures=0
    for i in tests:
        if not run_test(ctx,appname,i,desired_status,dirs,i,headers):
            failures+=1
    print('')
    if failures==0:
        Logs.pprint('GREEN','** Pass: All %s.%s tests passed'%(appname,name))
    else:
        Logs.pprint('RED','** FAIL: %d %s.%s tests failed'%(failures,appname,name))
def run_ldconfig(ctx):
    """After a real 'install' (no DESTDIR/--destdir), refresh the linker cache.

    Runs /sbin/ldconfig on the configured LIBDIR at most once per build
    context; any failure is silently ignored (ldconfig may be unavailable
    or require privileges the user lacks).
    """
    if(ctx.cmd=='install'and not ctx.env['RAN_LDCONFIG']and ctx.env['LIBDIR']and not'DESTDIR'in os.environ and not Options.options.destdir):
        try:
            Logs.info("Waf: Running `/sbin/ldconfig %s'"%ctx.env['LIBDIR'])
            subprocess.call(['/sbin/ldconfig',ctx.env['LIBDIR']])
            ctx.env['RAN_LDCONFIG']=True
        except Exception:
            # Fix: was a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit.
            pass
def write_news(name,in_files,out_file,top_entries=None,extra_entries=None):
    """Generate a Debian-changelog-style NEWS file from DOAP RDF metadata.

    Parses *in_files* (Turtle/N3) plus any rdfs:seeAlso *.ttl references,
    walks each doap:release of the project, and writes one changelog entry
    per complete release (revision, date, blame and changeset present) to
    *out_file*, newest first.  Optionally accumulates per-file-release item
    summaries into *top_entries* and appends *extra_entries* items.
    """
    import rdflib
    import textwrap
    from time import strftime,strptime
    # RDF namespaces used by the project metadata.
    doap=rdflib.Namespace('http://usefulinc.com/ns/doap#')
    dcs=rdflib.Namespace('http://ontologi.es/doap-changeset#')
    rdfs=rdflib.Namespace('http://www.w3.org/2000/01/rdf-schema#')
    foaf=rdflib.Namespace('http://xmlns.com/foaf/0.1/')
    rdf=rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
    m=rdflib.ConjunctiveGraph()
    try:
        for i in in_files:
            m.parse(i,format='n3')
    except:
        Logs.warn('Error parsing data, unable to generate NEWS')
        return
    proj=m.value(None,rdf.type,doap.Project)
    # Release metadata may live in referenced .ttl files; pull them in too.
    for f in m.triples([proj,rdfs.seeAlso,None]):
        if f[2].endswith('.ttl'):
            m.parse(f[2],format='n3')
    entries={}
    for r in m.triples([proj,doap.release,None]):
        release=r[2]
        revision=m.value(release,doap.revision,None)
        date=m.value(release,doap.created,None)
        blamee=m.value(release,dcs.blame,None)
        changeset=m.value(release,dcs.changeset,None)
        dist=m.value(release,doap['file-release'],None)
        if revision and date and blamee and changeset:
            entry='%s (%s) stable;\n'%(name,revision)
            for i in m.triples([changeset,dcs.item,None]):
                # Wrap each changeset item label to changelog width.
                item=textwrap.wrap(m.value(i[2],rdfs.label,None),width=79)
                entry+='\n * '+'\n '.join(item)
                # NOTE(review): this block reads 'item', so it is assumed to
                # sit inside the item loop ('item' would be unbound for an
                # empty changeset) — confirm against the original indentation.
                if dist and top_entries is not None:
                    if not str(dist)in top_entries:
                        top_entries[str(dist)]=[]
                    top_entries[str(dist)]+=['%s: %s'%(name,'\n '.join(item))]
            if extra_entries:
                for i in extra_entries[str(dist)]:
                    entry+='\n * '+i
            # Trailer line: " -- Author <email>  Date" (changelog format).
            entry+='\n\n --'
            blamee_name=m.value(blamee,foaf.name,None)
            blamee_mbox=m.value(blamee,foaf.mbox,None)
            if blamee_name and blamee_mbox:
                entry+=' %s <%s>'%(blamee_name,blamee_mbox.replace('mailto:',''))
            entry+=' %s\n\n'%(strftime('%a, %d %b %Y %H:%M:%S +0000',strptime(date,'%Y-%m-%d')))
            # Keyed by (date, revision) so sorting yields newest-first output.
            entries[(date,revision)]=entry
        else:
            Logs.warn('Ignored incomplete %s release description'%name)
    if len(entries)>0:
        news=open(out_file,'w')
        for e in sorted(entries.keys(),reverse=True):
            news.write(entries[e])
        news.close()
|
samueldotj/TeeRISC-Simulator | refs/heads/master | src/arch/x86/isa/insts/general_purpose/rotate_and_shift/__init__.py | 91 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
# Instruction categories provided by this package; each name is also a
# sibling module that defines a 'microcode' string.
categories = ["rotate",
              "shift"]

microcode = ""
# Concatenate the microcode of every category module into one blob.
# NOTE: Python 2 'exec' statement syntax; each module is bound to 'cat'.
for category in categories:
    exec "import %s as cat" % category
    microcode += cat.microcode
|
naresh21/synergetics-edx-platform | refs/heads/oxa/master.fic | openedx/core/djangoapps/cors_csrf/views.py | 10 | """Views for enabling cross-domain requests. """
import logging
import json
from django.conf import settings
from django.views.decorators.cache import cache_page
from django.http import HttpResponseNotFound
from edxmako.shortcuts import render_to_response
from .models import XDomainProxyConfiguration
log = logging.getLogger(__name__)
XDOMAIN_PROXY_CACHE_TIMEOUT = getattr(settings, 'XDOMAIN_PROXY_CACHE_TIMEOUT', 60 * 15)
@cache_page(XDOMAIN_PROXY_CACHE_TIMEOUT)
def xdomain_proxy(request):  # pylint: disable=unused-argument
    """Serve the xdomain proxy page.

    Internet Explorer 9 does not send cookies with CORS requests, so
    authenticated cross-domain POSTs (e.g. auto-enrolling a user from the
    marketing site via the enrollment API) cannot rely on CORS alone.

    The XDomain library [https://github.com/jpillora/xdomain] provides the
    workaround used here:

    1) This static page (JavaScript plus a domain whitelist) is served from
       the LMS domain.
    2) The remote page loads it in an invisible iframe.
    3) A JS shim on the remote page intercepts AJAX requests and relays them
       through the iframe; since the iframed page is same-domain, the
       request carries all the domain's cookies.

    The feature toggle and the whitelist are managed through the
    XDomainProxyConfiguration model in Django admin.
    """
    config = XDomainProxyConfiguration.current()
    if not config.enabled:
        return HttpResponseNotFound()

    # Blank or whitespace-only lines in the stored whitelist are ignored.
    allowed_domains = [
        line.strip()
        for line in config.whitelist.split("\n")
        if line.strip()
    ]
    if not allowed_domains:
        log.warning(
            u"No whitelist configured for cross-domain proxy. "
            u"You can configure the whitelist in Django Admin "
            u"using the XDomainProxyConfiguration model."
        )
        return HttpResponseNotFound()

    context = {
        'xdomain_masters': json.dumps({domain: '*' for domain in allowed_domains})
    }
    return render_to_response('cors_csrf/xdomain_proxy.html', context)
|
BizzCloud/PosBox | refs/heads/master | addons/hr_gamification/__openerp__.py | 62 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Addon manifest dictionary read by the OpenERP/Odoo module loader.
{
    'name': 'HR Gamification',
    'version': '1.0',
    'author': 'OpenERP SA',
    # 'hidden' keeps this glue module out of the apps listing.
    'category': 'hidden',
    'depends': ['gamification', 'hr'],
    'description': """Use the HR ressources for the gamification process.
The HR officer can now manage challenges and badges.
This allow the user to send badges to employees instead of simple users.
Badge received are displayed on the user profile.
""",
    # Data files loaded at install/update time, in order.
    'data': [
        'security/ir.model.access.csv',
        'security/gamification_security.xml',
        'wizard/grant_badge.xml',
        'views/gamification.xml',
        'views/hr_gamification.xml',
    ],
    # Installed automatically as soon as all dependencies are installed.
    'auto_install': True,
}
|
nitzmahone/ansible | refs/heads/devel | lib/ansible/plugins/lookup/__init__.py | 89 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from abc import abstractmethod
from ansible.errors import AnsibleFileNotFound
from ansible.plugins import AnsiblePlugin
from ansible.utils.display import Display
display = Display()
__all__ = ['LookupBase']
class LookupBase(AnsiblePlugin):
    """Base class for lookup plugins: holds the loader/templar handles and
    common helpers for flattening and combining term lists."""

    def __init__(self, loader=None, templar=None, **kwargs):
        super(LookupBase, self).__init__()
        self._loader = loader
        self._templar = templar
        # Backwards compat: self._display isn't really needed, just import the global display and use that.
        self._display = display

    def get_basedir(self, variables):
        """Return the role path when running inside a role, else the loader's basedir."""
        if 'role_path' in variables:
            return variables['role_path']
        return self._loader.get_basedir()

    @staticmethod
    def _flatten(terms):
        """Flatten one level of nested lists/tuples into a single flat list."""
        flat = []
        for term in terms:
            flat.extend(term if isinstance(term, (list, tuple)) else [term])
        return flat

    @staticmethod
    def _combine(a, b):
        """Cartesian product of *a* and *b*, each pair flattened one level."""
        return [LookupBase._flatten([x, y]) for x in a for y in b]

    @staticmethod
    def _flatten_hash_to_list(terms):
        """Turn a mapping into a list of {'key': k, 'value': v} dicts."""
        return [{'key': key, 'value': terms[key]} for key in terms]

    @abstractmethod
    def run(self, terms, variables=None, **kwargs):
        """
        When the playbook specifies a lookup, this method is run.  The
        lookup's arguments become this method's arguments, plus one extra
        keyword argument, ``variables``, holding the variables available to
        ansible at the time the lookup is templated.  For instance::

            "{{ lookup('url', 'https://toshio.fedorapeople.org/one.txt', validate_certs=True) }}"

        ends up calling the url lookup's run method like this::

            run(['https://toshio.fedorapeople.org/one.txt'], variables=available_variables, validate_certs=True)

        When a lookup is used for looping, the first argument is a list of
        terms; when called to return a value into a variable or parameter, a
        string argument is converted into a list.

        Errors encountered during execution should be reported by raising
        AnsibleError() with a message describing the error.

        Any returned string that could ever contain non-ascii must be
        converted to python's unicode type, since results are run through
        jinja2, which requires it.  Use::

            from ansible.module_utils._text import to_text
            result_string = to_text(result_string)
        """
        pass

    def find_file_in_search_path(self, myvars, subdir, needle, ignore_missing=False):
        """Return a file (needle) found on the task's expected search path, or None."""
        if 'ansible_search_path' in myvars:
            paths = myvars['ansible_search_path']
        else:
            paths = [self.get_basedir(myvars)]
        try:
            return self._loader.path_dwim_relative_stack(paths, subdir, needle)
        except AnsibleFileNotFound:
            if not ignore_missing:
                self._display.warning("Unable to find '%s' in expected paths (use -vvvvv to see paths)" % needle)
            return None
|
jmcorgan/gnuradio | refs/heads/master | gr-fec/python/fec/qa_polar_decoder_sc_systematic.py | 24 | #!/usr/bin/env python
#
# Copyright 2015 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, blocks
import fec_swig as fec
import numpy as np
from extended_decoder import extended_decoder
from polar.encoder import PolarEncoder
import polar.channel_construction as cc
# import os
# print('PID:', os.getpid())
# raw_input('tell me smth')
class test_polar_decoder_sc_systematic(gr_unittest.TestCase):
    """QA for fec.polar_decoder_sc_systematic: construction parameters and
    noiseless end-to-end decode of systematically encoded random data."""

    def setUp(self):
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def test_001_setup(self):
        block_size = 16
        num_info_bits = 8
        # Freeze the first (block_size - num_info_bits) positions.
        frozen_bit_positions = np.arange(block_size - num_info_bits)
        polar_decoder = fec.polar_decoder_sc_systematic.make(block_size, num_info_bits, frozen_bit_positions)
        self.assertEqual(num_info_bits, polar_decoder.get_output_size())
        self.assertEqual(block_size, polar_decoder.get_input_size())
        self.assertFloatTuplesAlmostEqual((float(num_info_bits) / block_size, ), (polar_decoder.rate(), ))
        # The frame size is fixed by the code construction; changing it must fail.
        self.assertFalse(polar_decoder.set_frame_size(10))

    def test_002_one_vector(self):
        # Decode a single rate-1/2, 16-bit frame and compare to the info bits.
        block_power = 4
        block_size = 2 ** block_power
        num_info_bits = block_size // 2
        frozen_bit_positions = cc.frozen_bit_positions(block_size, num_info_bits, 0.0)
        bits, gr_data = self.generate_test_data(block_size, num_info_bits, frozen_bit_positions, 1, False)
        polar_decoder = fec.polar_decoder_sc_systematic.make(block_size, num_info_bits, frozen_bit_positions)
        src = blocks.vector_source_f(gr_data, False)
        dec_block = extended_decoder(polar_decoder, None)
        snk = blocks.vector_sink_b(1)
        self.tb.connect(src, dec_block)
        self.tb.connect(dec_block, snk)
        self.tb.run()
        res = np.array(snk.data()).astype(dtype=int)
        self.assertTupleEqual(tuple(res), tuple(bits))

    def test_003_stream(self):
        # Same as test_002 but with three consecutive 256-bit frames.
        nframes = 3
        block_power = 8
        block_size = 2 ** block_power
        num_info_bits = block_size // 2
        frozen_bit_positions = cc.frozen_bit_positions(block_size, num_info_bits, 0.0)
        bits, gr_data = self.generate_test_data(block_size, num_info_bits, frozen_bit_positions, nframes, False)
        polar_decoder = fec.polar_decoder_sc_systematic.make(block_size, num_info_bits, frozen_bit_positions)
        src = blocks.vector_source_f(gr_data, False)
        dec_block = extended_decoder(polar_decoder, None)
        snk = blocks.vector_sink_b(1)
        self.tb.connect(src, dec_block)
        self.tb.connect(dec_block, snk)
        self.tb.run()
        res = np.array(snk.data()).astype(dtype=int)
        self.assertTupleEqual(tuple(res), tuple(bits))

    def generate_test_data(self, block_size, num_info_bits, frozen_bit_positions, nframes, onlyones):
        # Systematically encode nframes of (random or all-ones) info bits and
        # map the codeword bits to noiseless bipolar samples (0/1 -> -1.0/+1.0).
        frozen_bit_values = np.zeros(block_size - num_info_bits, dtype=int)
        encoder = PolarEncoder(block_size, num_info_bits, frozen_bit_positions, frozen_bit_values)
        bits = np.array([], dtype=int)
        data = np.array([], dtype=int)
        for n in range(nframes):
            if onlyones:
                b = np.ones(num_info_bits, dtype=int)
            else:
                b = np.random.randint(2, size=num_info_bits)
            d = encoder.encode_systematic(b)
            bits = np.append(bits, b)
            data = np.append(data, d)
        gr_data = 2.0 * data - 1.0
        return bits, gr_data


if __name__ == '__main__':
    gr_unittest.run(test_polar_decoder_sc_systematic)
|
lehinevych/cfme_tests | refs/heads/master | scripts/install_snmp_listener.py | 4 | #!/usr/bin/env python2
"""SSH into a running appliance and install SNMP listener."""
import argparse
import requests
import sys
from utils.conf import credentials
from utils.path import scripts_data_path
from utils.ssh import SSHClient
def main():
    """Copy the SNMP listener to the appliance, enable it at boot, start it,
    open its firewall port, and verify it answers.  Exits non-zero on failure."""
    parser = argparse.ArgumentParser(epilog=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('address', help='hostname or ip address of target appliance')
    args = parser.parse_args()

    ssh_kwargs = {
        'username': credentials['ssh']['username'],
        'password': credentials['ssh']['password'],
        'hostname': args.address
    }

    # Init SSH client
    client = SSHClient(**ssh_kwargs)
    snmp_path = scripts_data_path.join("snmp")

    # Copy the listener and its wrapper script to the appliance.
    print("Copying files")
    client.put_file(snmp_path.join("snmp_listen.rb").strpath, "/root/snmp_listen.rb")
    client.put_file(snmp_path.join("snmp_listen.sh").strpath, "/root/snmp_listen.sh")

    # Enable after startup (idempotent: only append if not already present).
    print("Enabling after startup")
    status = client.run_command("grep 'snmp_listen[.]sh' /etc/rc.local")[0]
    if status != 0:
        client.run_command("echo 'cd /root/ && ./snmp_listen.sh start' >> /etc/rc.local")
    # Fix: these checks used 'assert', which is silently stripped under
    # 'python -O'; fail explicitly instead.
    if client.run_command("grep 'snmp_listen[.]sh' /etc/rc.local")[0] != 0:
        sys.exit("Could not enable!")

    # Run!
    print("Starting listener")
    if client.run_command("cd /root/ && ./snmp_listen.sh start")[0] != 0:
        sys.exit("Could not start!")

    # Open the port if not opened
    print("Opening the port in iptables")
    status = client.run_command("grep '--dport 8765' /etc/sysconfig/iptables")[0]
    if status != 0:
        # append after the 5432 entry
        client.run_command(
            "sed -i '/--dport 5432/a -A INPUT -p tcp -m tcp --dport 8765 -j ACCEPT' "
            "/etc/sysconfig/iptables"
        )
        client.run_command("service iptables restart")

    # Check if accessible
    try:
        requests.get("http://{}:8765/".format(args.address))
    except requests.exceptions.ConnectionError:
        print("Could not detect running listener!")
        # Fix: use sys.exit; the 'exit' builtin is intended for interactive use.
        sys.exit(2)


if __name__ == '__main__':
    sys.exit(main())
|
kchodorow/tensorflow | refs/heads/master | tensorflow/contrib/learn/python/learn/tests/__init__.py | 118 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""tf.learn tests."""
# TODO(ptucker): Move these to the packages of the units under test.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
|
Korkki/django-base-template | refs/heads/master | manage.py | 30 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Default to the dev settings; '{{ project_name }}' is substituted by
    # 'django-admin startproject' when this template file is rendered.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings.dev")

    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
tayfun/django | refs/heads/master | tests/user_commands/tests.py | 205 | import os
from django.apps import apps
from django.core import management
from django.core.management import BaseCommand, CommandError, find_commands
from django.core.management.utils import find_command, popen_wrapper
from django.db import connection
from django.test import SimpleTestCase, ignore_warnings, override_settings
from django.test.utils import captured_stderr, captured_stdout, extend_sys_path
from django.utils import translation
from django.utils._os import upath
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.six import StringIO
# A minimal set of apps to avoid system checks running on all apps.
@override_settings(
    INSTALLED_APPS=[
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'user_commands',
    ],
)
class CommandTests(SimpleTestCase):
    """Exercise custom management commands: invocation, option parsing,
    locale handling, command discovery (including eggs) and the
    check-framework hook."""

    def test_command(self):
        out = StringIO()
        management.call_command('dance', stdout=out)
        self.assertIn("I don't feel like dancing Rock'n'Roll.\n", out.getvalue())

    def test_command_style(self):
        out = StringIO()
        management.call_command('dance', style='Jive', stdout=out)
        self.assertIn("I don't feel like dancing Jive.\n", out.getvalue())
        # Passing options as arguments also works (thanks argparse)
        management.call_command('dance', '--style', 'Jive', stdout=out)
        self.assertIn("I don't feel like dancing Jive.\n", out.getvalue())

    def test_language_preserved(self):
        # Running a command must not clobber the active translation.
        out = StringIO()
        with translation.override('fr'):
            management.call_command('dance', stdout=out)
            self.assertEqual(translation.get_language(), 'fr')

    def test_explode(self):
        """ Test that an unknown command raises CommandError """
        self.assertRaises(CommandError, management.call_command, ('explode',))

    def test_system_exit(self):
        """ Exception raised in a command should raise CommandError with
        call_command, but SystemExit when run from command line
        """
        with self.assertRaises(CommandError):
            management.call_command('dance', example="raise")
        with captured_stderr() as stderr, self.assertRaises(SystemExit):
            management.ManagementUtility(['manage.py', 'dance', '--example=raise']).execute()
        self.assertIn("CommandError", stderr.getvalue())

    def test_deactivate_locale_set(self):
        # Deactivate translation when set to true
        out = StringIO()
        with translation.override('pl'):
            management.call_command('leave_locale_alone_false', stdout=out)
            self.assertEqual(out.getvalue(), "")

    def test_configured_locale_preserved(self):
        # Leaves locale from settings when set to false
        out = StringIO()
        with translation.override('pl'):
            management.call_command('leave_locale_alone_true', stdout=out)
            self.assertEqual(out.getvalue(), "pl\n")

    def test_find_command_without_PATH(self):
        """
        find_command should still work when the PATH environment variable
        doesn't exist (#22256).
        """
        current_path = os.environ.pop('PATH', None)
        try:
            self.assertIsNone(find_command('_missing_'))
        finally:
            # Restore PATH so subsequent tests are unaffected.
            if current_path is not None:
                os.environ['PATH'] = current_path

    def test_discover_commands_in_eggs(self):
        """
        Test that management commands can also be loaded from Python eggs.
        """
        egg_dir = '%s/eggs' % os.path.dirname(upath(__file__))
        egg_name = '%s/basic.egg' % egg_dir
        with extend_sys_path(egg_name):
            with self.settings(INSTALLED_APPS=['commandegg']):
                cmds = find_commands(os.path.join(apps.get_app_config('commandegg').path, 'management'))
        self.assertEqual(cmds, ['eggcommand'])

    def test_call_command_option_parsing(self):
        """
        When passing the long option name to call_command, the available option
        key is the option dest name (#22985).
        """
        out = StringIO()
        management.call_command('dance', stdout=out, opt_3=True)
        self.assertIn("option3", out.getvalue())
        self.assertNotIn("opt_3", out.getvalue())
        self.assertNotIn("opt-3", out.getvalue())

    @ignore_warnings(category=RemovedInDjango110Warning)
    def test_optparse_compatibility(self):
        """
        optparse should be supported during Django 1.8/1.9 releases.
        """
        out = StringIO()
        management.call_command('optparse_cmd', stdout=out)
        self.assertEqual(out.getvalue(), "All right, let's dance Rock'n'Roll.\n")

        # Simulate command line execution
        with captured_stdout() as stdout, captured_stderr():
            management.execute_from_command_line(['django-admin', 'optparse_cmd'])
        self.assertEqual(stdout.getvalue(), "All right, let's dance Rock'n'Roll.\n")

    def test_calling_a_command_with_only_empty_parameter_should_ends_gracefully(self):
        out = StringIO()
        management.call_command('hal', "--empty", stdout=out)
        self.assertIn("Dave, I can't do that.\n", out.getvalue())

    def test_calling_command_with_app_labels_and_parameters_should_be_ok(self):
        out = StringIO()
        management.call_command('hal', 'myapp', "--verbosity", "3", stdout=out)
        self.assertIn("Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue())

    def test_calling_command_with_parameters_and_app_labels_at_the_end_should_be_ok(self):
        out = StringIO()
        management.call_command('hal', "--verbosity", "3", "myapp", stdout=out)
        self.assertIn("Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue())

    def test_calling_a_command_with_no_app_labels_and_parameters_should_raise_a_command_error(self):
        out = StringIO()
        with self.assertRaises(CommandError):
            management.call_command('hal', stdout=out)

    def test_output_transaction(self):
        # Commands with output_transaction wrap output in BEGIN/COMMIT SQL.
        out = StringIO()
        management.call_command('transaction', stdout=out, no_color=True)
        output = out.getvalue().strip()
        self.assertTrue(output.startswith(connection.ops.start_transaction_sql()))
        self.assertTrue(output.endswith(connection.ops.end_transaction_sql()))

    def test_call_command_no_checks(self):
        """
        By default, call_command should not trigger the check framework, unless
        specifically asked.
        """
        self.counter = 0

        def patched_check(self_, **kwargs):
            self.counter = self.counter + 1

        saved_check = BaseCommand.check
        BaseCommand.check = patched_check
        try:
            management.call_command("dance", verbosity=0)
            self.assertEqual(self.counter, 0)
            management.call_command("dance", verbosity=0, skip_checks=False)
            self.assertEqual(self.counter, 1)
        finally:
            # Always restore the real check method, even if assertions fail.
            BaseCommand.check = saved_check
class UtilsTests(SimpleTestCase):
    """Tests for django.core.management.utils helpers."""

    def test_no_existent_external_program(self):
        # popen_wrapper must raise CommandError when the executable is missing.
        with self.assertRaises(CommandError):
            popen_wrapper(['a_42_command_that_doesnt_exist_42'])
|
evanscottgray/ryu | refs/heads/master | ryu/contrib/ncclient/operations/rpc.py | 31 | # Copyright 2009 Shikhar Bhushan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from threading import Event, Lock
from uuid import uuid1
from ncclient.xml_ import *
from ncclient.transport import SessionListener
from errors import OperationError, TimeoutExpiredError, MissingCapabilityError
import logging
logger = logging.getLogger("ncclient.operations.rpc")
class RPCError(OperationError):
    """Represents an `rpc-error`. It is a type of :exc:`OperationError`
    and can be raised as such."""

    # Maps each qualified <rpc-error> child tag to the private attribute
    # that stores its value.
    tag_to_attr = {
        qualify("error-type"): "_type",
        qualify("error-tag"): "_tag",
        qualify("error-severity"): "_severity",
        qualify("error-info"): "_info",
        qualify("error-path"): "_path",
        qualify("error-message"): "_message"
    }

    def __init__(self, raw):
        self._raw = raw
        # Default every known field to None, then fill in whatever child
        # elements the server actually sent.
        for attr_name in RPCError.tag_to_attr.values():
            setattr(self, attr_name, None)
        for child in raw:
            attr_name = RPCError.tag_to_attr.get(child.tag, None)
            if attr_name is None:
                continue
            # error-info is preserved as raw XML; everything else as text.
            setattr(self, attr_name,
                    to_xml(child) if attr_name == "_info" else child.text)
        if self.message is not None:
            OperationError.__init__(self, self.message)
        else:
            OperationError.__init__(self, self.to_dict())

    def to_dict(self):
        """All error fields as a dict keyed by name (leading underscore dropped)."""
        return dict((name[1:], getattr(self, name))
                    for name in RPCError.tag_to_attr.values())

    @property
    def xml(self):
        "The `rpc-error` element as returned in XML."
        return self._raw

    @property
    def type(self):
        "Contents of the `error-type` element."
        return self._type

    @property
    def tag(self):
        "Contents of the `error-tag` element."
        return self._tag

    @property
    def severity(self):
        "Contents of the `error-severity` element."
        return self._severity

    @property
    def path(self):
        "Contents of the `error-path` element, or `None` if absent."
        return self._path

    @property
    def message(self):
        "Contents of the `error-message` element, or `None` if absent."
        return self._message

    @property
    def info(self):
        "XML string of the `error-info` element, or `None` if absent."
        return self._info
class RPCReply:
    """Represents an *rpc-reply*. Only concerns itself with whether the operation was successful.
    .. note::
        Some attributes trigger an implicit, one-time parse of the reply the
        first time they are accessed.
    """

    ERROR_CLS = RPCError
    "Subclasses can specify a different error class, but it should be a subclass of `RPCError`."

    def __init__(self, raw):
        self._raw = raw
        self._parsed = False
        self._root = None
        self._errors = []

    def __repr__(self):
        return self._raw

    def parse(self):
        "Parses the *rpc-reply*."
        if self._parsed:
            return
        root = self._root = to_ele(self._raw)  # the <rpc-reply> element
        # Per RFC 4741 an <ok/> tag is sent when there are no errors or warnings.
        if root.find(qualify("ok")) is None:
            first_error = root.find(qualify("rpc-error"))
            if first_error is not None:
                # Wrap every <rpc-error> element, not just the first one.
                for err_ele in root.getiterator(first_error.tag):
                    self._errors.append(self.ERROR_CLS(err_ele))
        self._parsing_hook(root)
        self._parsed = True

    def _parsing_hook(self, root):
        "No-op by default. Gets passed the *root* element for the reply."
        pass

    @property
    def xml(self):
        "*rpc-reply* element as returned."
        return self._raw

    @property
    def ok(self):
        "Boolean value indicating if there were no errors."
        return not self.errors  # `errors` parses on demand; empty list => True

    @property
    def error(self):
        "Returns the first :class:`RPCError` and `None` if there were no errors."
        self.parse()
        return self._errors[0] if self._errors else None

    @property
    def errors(self):
        "List of `RPCError` objects. Will be empty if there were no *rpc-error* elements in reply."
        self.parse()
        return self._errors
class RPCReplyListener(SessionListener):  # internal use

    creation_lock = Lock()

    # One listener instance per session -- maybe there is a better way??
    def __new__(cls, session):
        with RPCReplyListener.creation_lock:
            instance = session.get_listener_instance(cls)
            if instance is not None:
                return instance
            instance = object.__new__(cls)
            instance._lock = Lock()
            instance._id2rpc = {}
            session.add_listener(instance)
            return instance

    def register(self, id, rpc):
        # Remember which RPC object is waiting for the given message-id.
        with self._lock:
            self._id2rpc[id] = rpc

    def callback(self, root, raw):
        tag, attrs = root
        if tag != qualify("rpc-reply"):
            return
        if "message-id" not in attrs:
            raise OperationError("Could not find 'message-id' attribute in <rpc-reply>")
        msgid = attrs["message-id"]
        with self._lock:
            try:
                rpc = self._id2rpc[msgid]  # the corresponding rpc
                logger.debug("Delivering to %r" % rpc)
                rpc.deliver_reply(raw)
            except KeyError:
                raise OperationError("Unknown 'message-id': %s", msgid)
            # No catching of other exceptions: fail loudly if we must.
            else:
                # Delivery succeeded, so drop the reference to the RPC.
                del self._id2rpc[msgid]

    def errback(self, err):
        # A transport-level error dooms every pending RPC equally.
        try:
            for pending_rpc in self._id2rpc.values():
                pending_rpc.deliver_error(err)
        finally:
            self._id2rpc.clear()
class RaiseMode(object):
    """Constants controlling when an `rpc-error` in a reply is raised as
    an :exc:`RPCError` exception."""

    # Don't attempt to raise any type of `rpc-error` as :exc:`RPCError`.
    NONE = 0
    # Raise only when the `error-type` indicates it is an honest-to-god error.
    ERRORS = 1
    # Don't look at the `error-type`, always raise.
    ALL = 2
class RPC(object):
"""Base class for all operations, directly corresponding to *rpc* requests. Handles making the request, and taking delivery of the reply."""
DEPENDS = []
"""Subclasses can specify their dependencies on capabilities as a list of URI's or abbreviated names, e.g. ':writable-running'. These are verified at the time of instantiation. If the capability is not available, :exc:`MissingCapabilityError` is raised."""
REPLY_CLS = RPCReply
"By default :class:`RPCReply`. Subclasses can specify a :class:`RPCReply` subclass."
def __init__(self, session, async=False, timeout=30, raise_mode=RaiseMode.NONE):
"""
*session* is the :class:`~ncclient.transport.Session` instance
*async* specifies whether the request is to be made asynchronously, see :attr:`is_async`
*timeout* is the timeout for a synchronous request, see :attr:`timeout`
*raise_mode* specifies the exception raising mode, see :attr:`raise_mode`
"""
self._session = session
try:
for cap in self.DEPENDS:
self._assert(cap)
except AttributeError:
pass
self._async = async
self._timeout = timeout
self._raise_mode = raise_mode
self._id = uuid1().urn # Keeps things simple instead of having a class attr with running ID that has to be locked
self._listener = RPCReplyListener(session)
self._listener.register(self._id, self)
self._reply = None
self._error = None
self._event = Event()
def _wrap(self, subele):
# internal use
ele = new_ele("rpc", {"message-id": self._id})
ele.append(subele)
return to_xml(ele)
def _request(self, op):
"""Implementations of :meth:`request` call this method to send the request and process the reply.
In synchronous mode, blocks until the reply is received and returns :class:`RPCReply`. Depending on the :attr:`raise_mode` a `rpc-error` element in the reply may lead to an :exc:`RPCError` exception.
In asynchronous mode, returns immediately, returning `self`. The :attr:`event` attribute will be set when the reply has been received (see :attr:`reply`) or an error occured (see :attr:`error`).
*op* is the operation to be requested as an :class:`~xml.etree.ElementTree.Element`
"""
logger.info('Requesting %r' % self.__class__.__name__)
req = self._wrap(op)
self._session.send(req)
if self._async:
logger.debug('Async request, returning %r', self)
return self
else:
logger.debug('Sync request, will wait for timeout=%r' % self._timeout)
self._event.wait(self._timeout)
if self._event.isSet():
if self._error:
# Error that prevented reply delivery
raise self._error
self._reply.parse()
if self._reply.error is not None:
# <rpc-error>'s [ RPCError ]
if self._raise_mode == RaiseMode.ALL:
raise self._reply.error
elif (self._raise_mode == RaiseMode.ERRORS and self._reply.error.type == "error"):
raise self._reply.error
return self._reply
else:
raise TimeoutExpiredError
def request(self):
"""Subclasses must implement this method. Typically only the request needs to be built as an
:class:`~xml.etree.ElementTree.Element` and everything else can be handed off to
:meth:`_request`."""
pass
def _assert(self, capability):
"""Subclasses can use this method to verify that a capability is available with the NETCONF
server, before making a request that requires it. A :exc:`MissingCapabilityError` will be
raised if the capability is not available."""
if capability not in self._session.server_capabilities:
raise MissingCapabilityError('Server does not support [%s]' % capability)
def deliver_reply(self, raw):
# internal use
self._reply = self.REPLY_CLS(raw)
self._event.set()
def deliver_error(self, err):
# internal use
self._error = err
self._event.set()
@property
def reply(self):
":class:`RPCReply` element if reply has been received or `None`"
return self._reply
@property
def error(self):
""":exc:`Exception` type if an error occured or `None`.
.. note::
This represents an error which prevented a reply from being received. An *rpc-error*
does not fall in that category -- see `RPCReply` for that.
"""
return self._error
@property
def id(self):
"The *message-id* for this RPC."
return self._id
@property
def session(self):
"The `~ncclient.transport.Session` object associated with this RPC."
return self._session
@property
def event(self):
""":class:`~threading.Event` that is set when reply has been received or when an error preventing
delivery of the reply occurs.
"""
return self._event
def __set_async(self, async=True):
self._async = async
if async and not session.can_pipeline:
raise UserWarning('Asynchronous mode not supported for this device/session')
def __set_raise_mode(self, mode):
assert(choice in ("all", "errors", "none"))
self._raise_mode = mode
def __set_timeout(self, timeout):
self._timeout = timeout
raise_mode = property(fget=lambda self: self._raise_mode, fset=__set_raise_mode)
"""Depending on this exception raising mode, an `rpc-error` in the reply may be raised as an :exc:`RPCError` exception. Valid values are the constants defined in :class:`RaiseMode`. """
is_async = property(fget=lambda self: self._async, fset=__set_async)
"""Specifies whether this RPC will be / was requested asynchronously. By default RPC's are synchronous."""
timeout = property(fget=lambda self: self._timeout, fset=__set_timeout)
"""Timeout in seconds for synchronous waiting defining how long the RPC request will block on a reply before raising :exc:`TimeoutExpiredError`.
Irrelevant for asynchronous usage.
"""
|
AISpace2/AISpace2 | refs/heads/master | aipython/probGraphicalModels.py | 1 | from .probFactors import Prob
from .probVariables import Variable
from .utilities import Displayable
# probGraphicalModels.py - Graphical Models and Belief Networks
# AIFCA Python3 code Version 0.7.1 Documentation at http://aipython.org
# Artificial Intelligence: Foundations of Computational Agents
# http://artint.info
# Copyright David L Poole and Alan K Mackworth 2017.
# This work is licensed under a Creative Commons
# Attribution-NonCommercial-ShareAlike 4.0 International License.
# See: http://creativecommons.org/licenses/by-nc-sa/4.0/deed.en
class Graphical_model(object):
    """The class of graphical models.
    A graphical model consists of a set of variables and a set of factors.
    vars - a list of variables (name should be unique)
    factors - a list of factors
    positions - a dictionary that maps each variable's name into its (x,y)-position
    """

    def __init__(self, vars=None, factors=None, positions=None):
        """Store the variables, factors and display positions.

        BUG FIX: the original used a mutable default `positions={}`, so every
        instance created without positions shared (and could corrupt) the
        same dict. A None sentinel gives each instance its own fresh dict.
        """
        self.variables = vars
        self.factors = factors
        self.positions = {} if positions is None else positions
class Belief_network(Graphical_model):
    """The class of belief networks: a graphical model whose factors are all
    conditional probabilities (Prob)."""

    def __init__(self, vars=None, factors=None, positions=None):
        """
        vars - a list of variables (name should be unique)
        factors - a list of factors; each must be a Prob
        positions - a dictionary that maps each variable's name into its (x,y)-position

        BUG FIX: the original used a mutable default `positions={}` shared by
        every instance. Normalize to a fresh dict *before* delegating, so the
        parent never stores the shared default (or None).
        """
        Graphical_model.__init__(self, vars, factors,
                                 {} if positions is None else positions)
        # Validation only (stripped under -O): every factor must be a
        # conditional probability.
        if factors:
            assert all(isinstance(f, Prob) for f in factors)
class Inference_method(Displayable):
    """The abstract interface implemented by graphical-model inference methods."""

    def query(self, qvar, obs={}):
        # Subclasses must override: compute the posterior of query variable
        # `qvar` given the observations in `obs`.
        msg = "Inference_method query"
        raise NotImplementedError(msg)
# --- Small example belief networks used by the AIspace tools ---

# A network with no variables and no factors.
bn_empty = Belief_network([], [])
# Shared two-value domain; note the values are the *strings* "True"/"False".
boolean = ["True", "False"]
# Chain network A -> B -> C.
A = Variable("A", boolean)
B = Variable("B", boolean)
C = Variable("C", boolean)
f_a = Prob(A, [], [0.6, 0.4])
f_b = Prob(B, [A], [0.1, 0.9, 0.8, 0.2])
f_c = Prob(C, [B], [0.5, 0.5, 0.7, 0.3])
bn_simple1 = Belief_network([A, B, C], [f_a, f_b, f_c], positions={
    "A": (474, 199),
    "B": (427, 310),
    "C": (546, 295)})
# Seven-variable example. NOTE: this REBINDS A, B, C from above;
# bn_simple1 keeps references to the earlier Variable objects.
B = Variable('B', boolean)
A = Variable('A', boolean)
C = Variable('C', boolean)
E = Variable('E', boolean)
D = Variable('D', boolean)
F = Variable('F', boolean)
G = Variable('G', boolean)
f_B = Prob(B, [], [0.7, 0.3])
f_A = Prob(A, [B], [0.88, 0.12, 0.38, 0.62])
f_C = Prob(C, [B, D], [0.93, 0.07, 0.33, 0.67, 0.53, 0.47, 0.83, 0.17])
f_E = Prob(E, [], [0.91, 0.09])
f_D = Prob(D, [E], [0.04, 0.96, 0.84, 0.16])
f_F = Prob(F, [C], [0.45, 0.55, 0.85, 0.15])
f_G = Prob(G, [F], [0.26, 0.74, 0.96, 0.04])
bn_simple2 = Belief_network(
    vars=[B, A, C, E, D, F, G],
    factors=[f_B, f_A, f_C, f_E, f_D, f_F, f_G],
    # NOTE(review): positions is an empty list here, not a dict as the
    # class docstring says -- confirm downstream consumers accept this.
    positions=[])
# An example with domain length > 2
C100 = Variable("C100", ["c103", "c110", "c121"])
M200 = Variable("M200", ["m200", "m221"])
C200 = Variable("C200", ["c210", "c221"])
C300 = Variable("C300", ["c310", "c313", "c314", "c320"])
f_c100 = Prob(C100, [], [0.2, 0.4, 0.4])
f_m200 = Prob(M200, [], [0.5, 0.5])
f_c200 = Prob(C200, [C100], [0.5, 0.5, 0.8, 0.2, 0.2, 0.8])
f_c300 = Prob(C300, [C200, M200], [0.25, 0.25, 0.25, 0.25, 0.30,
                                   0.30, 0.10, 0.30, 0.20, 0.20, 0.40, 0.20, 0.10, 0.10, 0.70, 0.10])
bn_simple3 = Belief_network([C100, M200, C200, C300], [
    f_c100, f_m200, f_c200, f_c300])
# Sprinkler / rain "grass wet" example.
Season = Variable("Season", ["summer", "winter"])
Sprinkler = Variable("Sprinkler", ["on", "off"])
Rained = Variable("Rained", boolean)
Grass_wet = Variable("Grass wet", boolean)
Grass_shiny = Variable("Grass shiny", boolean)
Shoes_wet = Variable("Shoes wet", boolean)
f_season = Prob(Season, [], [0.5, 0.5])
f_sprinkler = Prob(Sprinkler, [Season], [0.9, 0.1, 0.05, 0.95])
f_rained = Prob(Rained, [Season], [0.3, 0.7, 0.8, 0.2])
f_wet = Prob(Grass_wet, [Sprinkler, Rained], [
    0, 1, 0.9, 0.1, 0.8, 0.2, 0.98, 0.02])
f_shiny = Prob(Grass_shiny, [Grass_wet], [0.05, 0.95, 0.7, 0.3])
f_shoes = Prob(Shoes_wet, [Grass_wet], [0.08, 0.92, 0.65, 0.35])
bn_grass_watering = Belief_network([Season, Sprinkler, Rained, Grass_wet, Grass_shiny, Shoes_wet], [
    f_season, f_sprinkler, f_rained, f_wet, f_shiny, f_shoes])
# Bayesian network report of leaving example from
# Poole and Mackworth, Artificial Intelligence, 2010 http://artint.info
# This is Example 6.10 (page 236) shown in Figure 6.1
Report = Variable('Report', boolean)
Tampering = Variable('Tampering', boolean)
Alarm = Variable('Alarm', boolean)
Fire = Variable('Fire', boolean)
Smoke = Variable('Smoke', boolean)
Leaving = Variable('Leaving', boolean)
f_report = Prob(Report, [Leaving], [0.75, 0.25, 0.01, 0.99])
f_tampering = Prob(Tampering, [], [0.02, 0.98])
f_alarm = Prob(Alarm, [Tampering, Fire], [
    0.5, 0.5, 0.85, 0.15, 0.99, 0.01, 0.0, 1.0])
f_fire = Prob(Fire, [], [0.01, 0.99])
f_smoke = Prob(Smoke, [Fire], [0.9, 0.1, 0.01, 0.99])
f_leaving = Prob(Leaving, [Alarm], [0.88, 0.12, 0.0, 1.0])
bn_fire_alarm = Belief_network(
    vars=[Report, Tampering, Alarm, Fire, Smoke, Leaving],
    factors=[f_report, f_tampering, f_alarm, f_fire, f_smoke, f_leaving],
    positions=[])
# Simple medical-diagnosis network (influenza / smoking -> symptoms).
Fever = Variable('Fever', boolean)
Smokes = Variable('Smokes', boolean)
Coughing = Variable('Coughing', boolean)
Bronchitis = Variable('Bronchitis', boolean)
Wheezing = Variable('Wheezing', boolean)
Sore_Throat = Variable('Sore_Throat', boolean)
Influenza = Variable('Influenza', boolean)
f_fever = Prob(Fever, [Influenza], [0.9, 0.1, 0.05, 0.95])
f_smokes = Prob(Smokes, [], [0.2, 0.8])
f_coughing = Prob(Coughing, [Bronchitis], [0.8, 0.2, 0.07, 0.93])
f_bronchitis = Prob(Bronchitis, [Influenza, Smokes], [
    0.99, 0.01, 0.9, 0.1, 0.7, 0.3, 1.0E-4, 0.9999])
f_wheezing = Prob(Wheezing, [Bronchitis], [0.6, 0.4, 0.001, 0.999])
f_sore_throat = Prob(Sore_Throat, [Influenza], [0.3, 0.7, 0.001, 0.999])
f_influenza = Prob(Influenza, [], [0.05, 0.95])
bn_diagnosis = Belief_network(
    vars=[Fever, Smokes, Coughing, Bronchitis,
          Wheezing, Sore_Throat, Influenza],
    factors=[f_fever, f_smokes, f_coughing, f_bronchitis,
             f_wheezing, f_sore_throat, f_influenza],
    positions=[])
# Ten-variable network for conditional-independence exercises.
# NOTE: rebinds B, A, C, E, D, F, G and their f_* factors from bn_simple2.
B = Variable('B', boolean)
A = Variable('A', boolean)
I = Variable('I', boolean)
C = Variable('C', boolean)
J = Variable('J', boolean)
H = Variable('H', boolean)
E = Variable('E', boolean)
D = Variable('D', boolean)
F = Variable('F', boolean)
G = Variable('G', boolean)
f_B = Prob(B, [C], [0.9, 0.1, 0.4, 0.6])
f_A = Prob(A, [B], [0.7, 0.3, 0.4, 0.6])
f_I = Prob(I, [H], [0.8, 0.2, 0.1, 0.9])
f_C = Prob(C, [], [0.5, 0.5])
f_J = Prob(J, [], [0.3, 0.7])
f_H = Prob(H, [G, J], [0.8, 0.2, 0.3, 0.7, 0.5, 0.5, 0.1, 0.9])
f_E = Prob(E, [C], [0.7, 0.3, 0.2, 0.8])
f_D = Prob(D, [B, E], [0.3, 0.7, 0.5, 0.5, 0.2, 0.8, 0.9, 0.1])
f_F = Prob(F, [E, G], [0.9, 0.1, 0.2, 0.8, 0.4, 0.6, 0.7, 0.3])
f_G = Prob(G, [], [0.2, 0.8])
bn_conditional_independence = Belief_network(
    vars=[B, A, I, C, J, H, E, D, F, G],
    factors=[f_B, f_A, f_I, f_C, f_J, f_H, f_E, f_D, f_F, f_G],
    positions=[])
# "Car starting" diagnostic network: many multi-valued variables whose CPTs
# are listed row-major over parent-value combinations.
Spark_Plugs = Variable('Spark_Plugs', ['okay', 'too_wide', 'fouled'])
Distributer_OK = Variable('Distributer_OK', boolean)
Alternator_OK = Variable('Alternator_OK', boolean)
Starter_Motor_OK = Variable('Starter_Motor_OK', boolean)
Car_Cranks = Variable('Car_Cranks', boolean)
Battery_Voltage = Variable('Battery_Voltage', ['strong', 'weak', 'dead'])
Voltage_at_Plug = Variable('Voltage_at_Plug', ['strong', 'weak', 'dead'])
Air_Filter_Clean = Variable('Air_Filter_Clean', boolean)
Charging_System_OK = Variable('Charging_System_OK', boolean)
Spark_Quality = Variable('Spark_Quality', ['good', 'bad', 'very_bad'])
Air_System_OK = Variable('Air_System_OK', boolean)
Headlights = Variable('Headlights', ['bright', 'dim', 'off'])
Main_Fuse_OK = Variable('Main_Fuse_OK', boolean)
Starter_System_OK = Variable('Starter_System_OK', boolean)
Spark_Adequate = Variable('Spark_Adequate', boolean)
Car_Starts = Variable('Car_Starts', boolean)
Spark_Timing = Variable('Spark_Timing', ['good', 'bad', 'very_bad'])
Battery_Age = Variable('Battery_Age', ['new', 'old', 'very_old'])
Fuel_System_OK = Variable('Fuel_System_OK', boolean)
f_Spark_Plugs = Prob(Spark_Plugs, [], [0.99, 0.003, 0.007])
f_Distributer_OK = Prob(Distributer_OK, [], [0.99, 0.01])
f_Alternator_OK = Prob(Alternator_OK, [], [0.9997, 3.0E-4])
f_Starter_Motor_OK = Prob(Starter_Motor_OK, [], [0.992, 0.008])
f_Car_Cranks = Prob(Car_Cranks, [Starter_System_OK], [0.98, 0.02, 0.0, 1.0])
f_Battery_Voltage = Prob(Battery_Voltage, [Charging_System_OK, Battery_Age], [
    0.999, 8.0E-4, 2.0E-4, 0.99, 0.008, 0.002, 0.6, 0.3, 0.1, 0.8, 0.15, 0.05, 0.05, 0.3, 0.65, 0.002, 0.1, 0.898])
f_Voltage_at_Plug = Prob(Voltage_at_Plug, [Battery_Voltage, Main_Fuse_OK, Distributer_OK], [0.98, 0.015, 0.005, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0,
    0.0, 0.0, 1.0, 0.1, 0.8, 0.1, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0])
f_Air_Filter_Clean = Prob(Air_Filter_Clean, [], [0.9, 0.1])
f_Charging_System_OK = Prob(Charging_System_OK, [Alternator_OK], [
    0.995, 0.005, 0.0, 1.0])
f_Spark_Quality = Prob(Spark_Quality, [Voltage_at_Plug, Spark_Plugs], [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
    0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.5, 0.5, 0.0, 0.2, 0.8, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0])
f_Air_System_OK = Prob(Air_System_OK, [Air_Filter_Clean], [0.9, 0.1, 0.3, 0.7])
f_Headlights = Prob(Headlights, [Voltage_at_Plug], [
    0.98, 0.015, 0.005, 0.05, 0.9, 0.05, 0.0, 0.0, 1.0])
f_Main_Fuse_OK = Prob(Main_Fuse_OK, [], [0.999, 0.001])
f_Starter_System_OK = Prob(Starter_System_OK, [Battery_Voltage, Main_Fuse_OK, Starter_Motor_OK], [
    0.998, 0.002, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.72, 0.28, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
f_Spark_Adequate = Prob(Spark_Adequate, [Spark_Timing, Spark_Quality], [
    0.99, 0.01, 0.5, 0.5, 0.1, 0.9, 0.5, 0.5, 0.05, 0.95, 0.01, 0.99, 0.1, 0.9, 0.01, 0.99, 0.0, 1.0])
f_Car_Starts = Prob(Car_Starts, [Car_Cranks, Fuel_System_OK, Air_System_OK, Spark_Adequate], [1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0,
    1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
f_Spark_Timing = Prob(Spark_Timing, [Distributer_OK], [
    0.97, 0.02, 0.01, 0.2, 0.3, 0.5])
f_Battery_Age = Prob(Battery_Age, [], [0.4, 0.4, 0.2])
f_Fuel_System_OK = Prob(Fuel_System_OK, [], [0.9, 0.1])
bn_car_starting = Belief_network(
    vars=[Spark_Plugs, Distributer_OK, Alternator_OK, Starter_Motor_OK, Car_Cranks, Battery_Voltage, Voltage_at_Plug, Air_Filter_Clean, Charging_System_OK,
          Spark_Quality, Air_System_OK, Headlights, Main_Fuse_OK, Starter_System_OK, Spark_Adequate, Car_Starts, Spark_Timing, Battery_Age, Fuel_System_OK],
    factors=[f_Spark_Plugs, f_Distributer_OK, f_Alternator_OK, f_Starter_Motor_OK, f_Car_Cranks, f_Battery_Voltage, f_Voltage_at_Plug, f_Air_Filter_Clean, f_Charging_System_OK,
             f_Spark_Quality, f_Air_System_OK, f_Headlights, f_Main_Fuse_OK, f_Starter_System_OK, f_Spark_Adequate, f_Car_Starts, f_Spark_Timing, f_Battery_Age, f_Fuel_System_OK],
    positions=[])
# Electrical wiring diagnosis network (lights, switches, circuit breakers);
# w* are wires, s*_st/s*_pos are switch states/positions, l*_lit lights.
p1 = Variable('p1', boolean)
w2 = Variable('w2', ['live', 'dead'])
w1 = Variable('w1', ['live', 'dead'])
cb1_st = Variable('cb1_st', ['on', 'off'])
l1_lit = Variable('l1_lit', boolean)
cb2_st = Variable('cb2_st', ['on', 'off'])
s3_pos = Variable('s3_pos', ['up', 'down'])
w6 = Variable('w6', ['live', 'dead'])
l2_st = Variable('l2_st', ['ok', 'intermittent', 'broken'])
s2_pos = Variable('s2_pos', ['up', 'down'])
w3 = Variable('w3', ['live', 'dead'])
l1_st = Variable('l1_st', ['ok', 'intermittent', 'broken'])
s3_st = Variable('s3_st', ['ok', 'upside_down',
                           'short', 'intermittent', 'broken'])
outside_power = Variable('outside_power', ['on', 'off'])
s2_st = Variable('s2_st', ['ok', 'upside_down',
                           'short', 'intermittent', 'broken'])
w0 = Variable('w0', ['live', 'dead'])
l2_lit = Variable('l2_lit', boolean)
w4 = Variable('w4', ['live', 'dead'])
s1_pos = Variable('s1_pos', ['up', 'down'])
p2 = Variable('p2', boolean)
s1_st = Variable('s1_st', ['ok', 'upside_down',
                           'short', 'intermittent', 'broken'])
f_p1 = Prob(p1, [w3], [1.0, 0.0, 0.0, 1.0])
f_w2 = Prob(w2, [w3, s1_st, s1_pos], [0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.4, 0.6, 0.4, 0.6, 0.2, 0.8, 0.2, 0.8, 0.0,
    1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
f_w1 = Prob(w1, [w3, s1_st, s1_pos], [1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.6, 0.4, 0.4, 0.6, 0.4, 0.6, 0.01, 0.99,
    0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
f_cb1_st = Prob(cb1_st, [], [0.999, 0.001])
f_l1_lit = Prob(l1_lit, [w0, l1_st], [1.0, 0.0, 0.7,
    0.3, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
f_cb2_st = Prob(cb2_st, [], [0.999, 0.001])
f_s3_pos = Prob(s3_pos, [], [0.8, 0.2])
f_w6 = Prob(w6, [outside_power, cb2_st], [
    1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
f_l2_st = Prob(l2_st, [], [0.9, 0.03, 0.07])
f_s2_pos = Prob(s2_pos, [], [0.5, 0.5])
f_w3 = Prob(w3, [outside_power, cb1_st], [
    1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
f_l1_st = Prob(l1_st, [], [0.9, 0.07, 0.03])
f_s3_st = Prob(s3_st, [], [0.9, 0.01, 0.04, 0.03, 0.02])
f_outside_power = Prob(outside_power, [], [0.98, 0.02])
f_s2_st = Prob(s2_st, [], [0.9, 0.01, 0.04, 0.03, 0.02])
f_w0 = Prob(w0, [w1, w2, s2_st, s2_pos], [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.8, 0.2, 0.8, 0.2, 0.4, 0.6, 0.4, 0.6, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.4, 0.6, 0.4, 0.6, 0.2, 0.8, 0.2, 0.8,
    0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.4, 0.6, 0.4, 0.6, 0.2, 0.8, 0.2, 0.8, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
f_l2_lit = Prob(l2_lit, [w4, l2_st], [1.0, 0.0, 0.6,
    0.4, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
f_w4 = Prob(w4, [w3, s3_pos, s3_st], [1.0, 0.0, 0.0, 1.0, 0.4, 0.6, 0.2, 0.8, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.4, 0.6, 0.2,
    0.8, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
f_s1_pos = Prob(s1_pos, [], [0.5, 0.5])
f_p2 = Prob(p2, [w6], [1.0, 0.0, 0.0, 1.0])
f_s1_st = Prob(s1_st, [], [0.9, 0.01, 0.04, 0.03, 0.02])
bn_electrical_diagnosis = Belief_network(
    vars=[p1, w2, w1, cb1_st, l1_lit, cb2_st, s3_pos, w6, l2_st, s2_pos, w3,
          l1_st, s3_st, outside_power, s2_st, w0, l2_lit, w4, s1_pos, p2, s1_st],
    factors=[f_p1, f_w2, f_w1, f_cb1_st, f_l1_lit, f_cb2_st, f_s3_pos, f_w6, f_l2_st, f_s2_pos, f_w3,
             f_l1_st, f_s3_st, f_outside_power, f_s2_st, f_w0, f_l2_lit, f_w4, f_s1_pos, f_p2, f_s1_st],
    positions=[])
# Large patient-monitoring network (presumably the classic ALARM network --
# TODO confirm); CPT rows are listed over parent-value combinations.
PRESS = Variable('PRESS', ['ZERO', 'LOW', 'NORMAL', 'HIGH'])
SHUNT = Variable('SHUNT', ['NORMAL', 'HIGH'])
STROKEVOLUME = Variable('STROKEVOLUME', ['LOW', 'NORMAL', 'HIGH'])
FIO2 = Variable('FIO2', ['LOW', 'NORMAL'])
INTUBATION = Variable('INTUBATION', ['NORMAL', 'ESOPHAGEAL', 'ONESIDED'])
MINVOLSET = Variable('MINVOLSET', ['LOW', 'NORMAL', 'HIGH'])
HR = Variable('HR', ['LOW', 'NORMAL', 'HIGH'])
VENTTUBE = Variable('VENTTUBE', ['ZERO', 'LOW', 'NORMAL', 'HIGH'])
DISCONNECT = Variable('DISCONNECT', ['TRUE', 'FALSE'])
CO = Variable('CO', ['LOW', 'NORMAL', 'HIGH'])
PAP = Variable('PAP', ['LOW', 'NORMAL', 'HIGH'])
MINVOL = Variable('MINVOL', ['ZERO', 'LOW', 'NORMAL', 'HIGH'])
HYPOVOLEMIA = Variable('HYPOVOLEMIA', ['TRUE', 'FALSE'])
CVP = Variable('CVP', ['LOW', 'NORMAL', 'HIGH'])
PULMEMBOLUS = Variable('PULMEMBOLUS', ['TRUE', 'FALSE'])
ARTCO2 = Variable('ARTCO2', ['LOW', 'NORMAL', 'HIGH'])
HRBP = Variable('HRBP', ['LOW', 'NORMAL', 'HIGH'])
TPR = Variable('TPR', ['LOW', 'NORMAL', 'HIGH'])
INSUFFANESTH = Variable('INSUFFANESTH', ['TRUE', 'FALSE'])
VENTALV = Variable('VENTALV', ['ZERO', 'LOW', 'NORMAL', 'HIGH'])
HREKG = Variable('HREKG', ['LOW', 'NORMAL', 'HIGH'])
ANAPHYLAXIS = Variable('ANAPHYLAXIS', ['TRUE', 'FALSE'])
HRSAT = Variable('HRSAT', ['LOW', 'NORMAL', 'HIGH'])
EXPCO2 = Variable('EXPCO2', ['ZERO', 'LOW', 'NORMAL', 'HIGH'])
ERRCAUTER = Variable('ERRCAUTER', ['TRUE', 'FALSE'])
SAO2 = Variable('SAO2', ['LOW', 'NORMAL', 'HIGH'])
PVSAT = Variable('PVSAT', ['LOW', 'NORMAL', 'HIGH'])
LVFAILURE = Variable('LVFAILURE', ['TRUE', 'FALSE'])
BP = Variable('BP', ['LOW', 'NORMAL', 'HIGH'])
ERRLOWOUTPUT = Variable('ERRLOWOUTPUT', ['TRUE', 'FALSE'])
CATECHOL = Variable('CATECHOL', ['NORMAL', 'HIGH'])
PCWP = Variable('PCWP', ['LOW', 'NORMAL', 'HIGH'])
KINKEDTUBE = Variable('KINKEDTUBE', ['TRUE', 'FALSE'])
VENTMACH = Variable('VENTMACH', ['ZERO', 'LOW', 'NORMAL', 'HIGH'])
HISTORY = Variable('HISTORY', ['TRUE', 'FALSE'])
LVEDVOLUME = Variable('LVEDVOLUME', ['LOW', 'NORMAL', 'HIGH'])
VENTLUNG = Variable('VENTLUNG', ['ZERO', 'LOW', 'NORMAL', 'HIGH'])
f_PRESS = Prob(PRESS, [KINKEDTUBE, INTUBATION, VENTTUBE], [0.97, 0.01, 0.01, 0.01, 0.01, 0.3, 0.49, 0.2, 0.01, 0.01, 0.08, 0.9, 0.01, 0.01, 0.01, 0.97, 0.97, 0.01, 0.01, 0.01, 0.01, 0.29, 0.3, 0.4, 0.01, 0.01, 0.08, 0.9, 0.01, 0.01, 0.01, 0.97, 0.97, 0.01, 0.01, 0.01, 0.4, 0.58, 0.01, 0.01, 0.2, 0.75, 0.04, 0.01,
    0.2, 0.7, 0.09, 0.01, 0.97, 0.01, 0.01, 0.01, 0.1, 0.84, 0.05, 0.01, 0.05, 0.25, 0.25, 0.45, 0.01, 0.15, 0.25, 0.59, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.97, 0.01, 0.01, 0.01, 0.01, 0.9, 0.08, 0.01, 0.01, 0.01, 0.38, 0.6, 0.01, 0.01, 0.01, 0.97])
f_SHUNT = Prob(SHUNT, [PULMEMBOLUS, INTUBATION], [
    0.1, 0.9, 0.01, 0.99, 0.95, 0.05, 0.1, 0.9, 0.95, 0.05, 0.05, 0.95])
f_STROKEVOLUME = Prob(STROKEVOLUME, [HYPOVOLEMIA, LVFAILURE], [
    0.98, 0.01, 0.01, 0.95, 0.04, 0.01, 0.5, 0.49, 0.01, 0.05, 0.9, 0.05])
f_FIO2 = Prob(FIO2, [], [0.05, 0.95])
f_INTUBATION = Prob(INTUBATION, [], [0.92, 0.03, 0.05])
f_MINVOLSET = Prob(MINVOLSET, [], [0.05, 0.9, 0.05])
f_HR = Prob(HR, [CATECHOL], [0.05, 0.9, 0.05, 0.01, 0.09, 0.9])
f_VENTTUBE = Prob(VENTTUBE, [DISCONNECT, VENTMACH], [0.97, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01,
    0.97, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97])
f_DISCONNECT = Prob(DISCONNECT, [], [0.1, 0.9])
f_CO = Prob(CO, [STROKEVOLUME, HR], [0.98, 0.01, 0.01, 0.95, 0.04, 0.01, 0.3, 0.69, 0.01, 0.95, 0.04,
    0.01, 0.04, 0.95, 0.01, 0.01, 0.3, 0.69, 0.8, 0.19, 0.01, 0.01, 0.04, 0.95, 0.01, 0.01, 0.98])
f_PAP = Prob(PAP, [PULMEMBOLUS], [0.01, 0.19, 0.8, 0.05, 0.9, 0.05])
f_MINVOL = Prob(MINVOL, [INTUBATION, VENTLUNG], [0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.97, 0.01, 0.01, 0.01,
    0.6, 0.38, 0.01, 0.01, 0.5, 0.48, 0.01, 0.01, 0.5, 0.48, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97])
f_HYPOVOLEMIA = Prob(HYPOVOLEMIA, [], [0.2, 0.8])
f_CVP = Prob(CVP, [LVEDVOLUME], [0.95, 0.04, 0.01,
    0.04, 0.95, 0.01, 0.01, 0.29, 0.7])
f_PULMEMBOLUS = Prob(PULMEMBOLUS, [], [0.01, 0.99])
f_ARTCO2 = Prob(ARTCO2, [VENTALV], [0.01, 0.01, 0.98,
    0.01, 0.01, 0.98, 0.04, 0.92, 0.04, 0.9, 0.09, 0.01])
f_HRBP = Prob(HRBP, [ERRLOWOUTPUT, HR], [0.98, 0.01, 0.01, 0.4, 0.59, 0.01,
    0.3, 0.4, 0.3, 0.98, 0.01, 0.01, 0.01, 0.98, 0.01, 0.01, 0.01, 0.98])
f_TPR = Prob(TPR, [ANAPHYLAXIS], [0.98, 0.01, 0.01, 0.3, 0.4, 0.3])
f_INSUFFANESTH = Prob(INSUFFANESTH, [], [0.1, 0.9])
f_VENTALV = Prob(VENTALV, [INTUBATION, VENTLUNG], [0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.97, 0.01, 0.01, 0.01,
    0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.97, 0.01, 0.01, 0.01, 0.03, 0.95, 0.01, 0.01, 0.01, 0.94, 0.04, 0.01, 0.01, 0.88, 0.1, 0.01])
f_HREKG = Prob(HREKG, [ERRCAUTER, HR], [0.33333334, 0.33333333, 0.33333333, 0.33333334, 0.33333333,
    0.33333333, 0.33333334, 0.33333333, 0.33333333, 0.98, 0.01, 0.01, 0.01, 0.98, 0.01, 0.01, 0.01, 0.98])
f_ANAPHYLAXIS = Prob(ANAPHYLAXIS, [], [0.01, 0.99])
f_HRSAT = Prob(HRSAT, [ERRCAUTER, HR], [0.33333334, 0.33333333, 0.33333333, 0.33333334, 0.33333333,
    0.33333333, 0.33333334, 0.33333333, 0.33333333, 0.98, 0.01, 0.01, 0.01, 0.98, 0.01, 0.01, 0.01, 0.98])
f_EXPCO2 = Prob(EXPCO2, [VENTLUNG, ARTCO2], [0.97, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01,
    0.01, 0.01, 0.97, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97])
f_ERRCAUTER = Prob(ERRCAUTER, [], [0.1, 0.9])
f_SAO2 = Prob(SAO2, [PVSAT, SHUNT], [0.98, 0.01, 0.01, 0.01, 0.98, 0.01,
    0.01, 0.01, 0.98, 0.98, 0.01, 0.01, 0.98, 0.01, 0.01, 0.69, 0.3, 0.01])
f_PVSAT = Prob(PVSAT, [FIO2, VENTALV], [1.0, 0.0, 0.0, 0.99, 0.01, 0.0, 0.95, 0.04, 0.01,
    0.95, 0.04, 0.01, 1.0, 0.0, 0.0, 0.95, 0.04, 0.01, 0.01, 0.95, 0.04, 0.01, 0.01, 0.98])
f_LVFAILURE = Prob(LVFAILURE, [], [0.05, 0.95])
f_BP = Prob(BP, [TPR, CO], [0.98, 0.01, 0.01, 0.98, 0.01, 0.01, 0.3, 0.6, 0.1, 0.98, 0.01,
    0.01, 0.1, 0.85, 0.05, 0.05, 0.4, 0.55, 0.9, 0.09, 0.01, 0.05, 0.2, 0.75, 0.01, 0.09, 0.9])
f_ERRLOWOUTPUT = Prob(ERRLOWOUTPUT, [], [0.05, 0.95])
f_CATECHOL = Prob(CATECHOL, [INSUFFANESTH, TPR, SAO2, ARTCO2], [0.01, 0.99, 0.01, 0.99, 0.7, 0.3, 0.01, 0.99, 0.05, 0.95, 0.7, 0.3, 0.01, 0.99, 0.05, 0.95, 0.7, 0.3, 0.01, 0.99, 0.01, 0.99, 0.7, 0.3, 0.01, 0.99, 0.05, 0.95, 0.7, 0.3, 0.01, 0.99, 0.05, 0.95, 0.7, 0.3, 0.01, 0.99, 0.01, 0.99, 0.1, 0.9, 0.01, 0.99, 0.01, 0.99, 0.1, 0.9, 0.01, 0.99,
    0.01, 0.99, 0.1, 0.9, 0.01, 0.99, 0.05, 0.95, 0.95, 0.05, 0.01, 0.99, 0.05, 0.95, 0.95, 0.05, 0.05, 0.95, 0.05, 0.95, 0.95, 0.05, 0.01, 0.99, 0.05, 0.95, 0.99, 0.01, 0.01, 0.99, 0.05, 0.95, 0.99, 0.01, 0.05, 0.95, 0.05, 0.95, 0.99, 0.01, 0.01, 0.99, 0.01, 0.99, 0.3, 0.7, 0.01, 0.99, 0.01, 0.99, 0.3, 0.7, 0.01, 0.99, 0.01, 0.99, 0.3, 0.7])
f_PCWP = Prob(PCWP, [LVEDVOLUME], [0.95, 0.04, 0.01,
    0.04, 0.95, 0.01, 0.01, 0.04, 0.95])
f_KINKEDTUBE = Prob(KINKEDTUBE, [], [0.04, 0.96])
f_VENTMACH = Prob(VENTMACH, [MINVOLSET], [
    0.05, 0.93, 0.01, 0.01, 0.05, 0.01, 0.93, 0.01, 0.05, 0.01, 0.01, 0.93])
f_HISTORY = Prob(HISTORY, [LVFAILURE], [0.9, 0.1, 0.01, 0.99])
f_LVEDVOLUME = Prob(LVEDVOLUME, [HYPOVOLEMIA, LVFAILURE], [
    0.95, 0.04, 0.01, 0.98, 0.01, 0.01, 0.01, 0.09, 0.9, 0.05, 0.9, 0.05])
f_VENTLUNG = Prob(VENTLUNG, [KINKEDTUBE, INTUBATION, VENTTUBE], [0.97, 0.01, 0.01, 0.01, 0.95, 0.03, 0.01, 0.01, 0.4, 0.58, 0.01, 0.01, 0.3, 0.68, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.95, 0.03, 0.01, 0.01, 0.5, 0.48, 0.01, 0.01, 0.3, 0.68, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01,
    0.01, 0.97, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97, 0.01, 0.01, 0.01, 0.01, 0.97])
bn_diagnosis_extended = Belief_network(
    vars=[PRESS, SHUNT, STROKEVOLUME, FIO2, INTUBATION, MINVOLSET, HR, VENTTUBE, DISCONNECT, CO, PAP, MINVOL, HYPOVOLEMIA, CVP, PULMEMBOLUS, ARTCO2, HRBP, TPR, INSUFFANESTH,
          VENTALV, HREKG, ANAPHYLAXIS, HRSAT, EXPCO2, ERRCAUTER, SAO2, PVSAT, LVFAILURE, BP, ERRLOWOUTPUT, CATECHOL, PCWP, KINKEDTUBE, VENTMACH, HISTORY, LVEDVOLUME, VENTLUNG],
    factors=[f_PRESS, f_SHUNT, f_STROKEVOLUME, f_FIO2, f_INTUBATION, f_MINVOLSET, f_HR, f_VENTTUBE, f_DISCONNECT, f_CO, f_PAP, f_MINVOL, f_HYPOVOLEMIA, f_CVP, f_PULMEMBOLUS, f_ARTCO2, f_HRBP, f_TPR, f_INSUFFANESTH,
             f_VENTALV, f_HREKG, f_ANAPHYLAXIS, f_HRSAT, f_EXPCO2, f_ERRCAUTER, f_SAO2, f_PVSAT, f_LVFAILURE, f_BP, f_ERRLOWOUTPUT, f_CATECHOL, f_PCWP, f_KINKEDTUBE, f_VENTMACH, f_HISTORY, f_LVEDVOLUME, f_VENTLUNG],
    positions=[])
# ---------------------------------------------------------------------------
# Random variables (name + ordered list of possible values) for the weather
# network assembled in bn_hailfinder below.  Value spellings are preserved
# exactly as in the original tables (including 'StronUp' in SubjVertMo).
# ---------------------------------------------------------------------------
ScnRelPlFcst = Variable('ScnRelPlFcst', ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K'])
AMInsWliScen = Variable('AMInsWliScen', ['LessUnstable', 'Average', 'MoreUnstable'])
SfcWndShfDis = Variable('SfcWndShfDis', ['DenvCyclone', 'E_W_N', 'E_W_S', 'MovingFtorOt', 'DryLine', 'None', 'Other'])
InsInMt = Variable('InsInMt', ['None', 'Weak', 'Strong'])
AMCINInScen = Variable('AMCINInScen', ['LessThanAve', 'Average', 'MoreThanAve'])
PlainsFcst = Variable('PlainsFcst', ['XNIL', 'SIG', 'SVR'])
CombClouds = Variable('CombClouds', ['Cloudy', 'PC', 'Clear'])
LIfr12ZDENSd = Variable('LIfr12ZDENSd', ['LIGt0', 'N1GtLIGt_4', 'N5GtLIGt_8', 'LILt_8'])
CombMoisture = Variable('CombMoisture', ['VeryWet', 'Wet', 'Neutral', 'Dry'])
AMInstabMt = Variable('AMInstabMt', ['None', 'Weak', 'Strong'])
CldShadeOth = Variable('CldShadeOth', ['Cloudy', 'PC', 'Clear'])
N0_7muVerMo = Variable('N0_7muVerMo', ['StrongUp', 'WeakUp', 'Neutral', 'Down'])
LLIW = Variable('LLIW', ['Unfavorable', 'Weak', 'Moderate', 'Strong'])
ScenRelAMIns = Variable('ScenRelAMIns', ['ABI', 'CDEJ', 'F', 'G', 'H', 'K'])
VISCloudCov = Variable('VISCloudCov', ['Cloudy', 'PC', 'Clear'])
ScenRelAMCIN = Variable('ScenRelAMCIN', ['AB', 'CThruK'])
MidLLapse = Variable('MidLLapse', ['CloseToDryAd', 'Steep', 'ModerateOrLe'])
MvmtFeatures = Variable('MvmtFeatures', ['StrongFront', 'MarkedUpper', 'OtherRapid', 'NoMajor'])
CapInScen = Variable('CapInScen', ['LessThanAve', 'Average', 'MoreThanAve'])
CldShadeConv = Variable('CldShadeConv', ['None', 'Some', 'Marked'])
IRCloudCover = Variable('IRCloudCover', ['Cloudy', 'PC', 'Clear'])
CapChange = Variable('CapChange', ['Decreasing', 'LittleChange', 'Increasing'])
QGVertMotion = Variable('QGVertMotion', ['StrongUp', 'WeakUp', 'Neutral', 'Down'])
LoLevMoistAd = Variable('LoLevMoistAd', ['StrongPos', 'WeakPos', 'Neutral', 'Negative'])
Scenario = Variable('Scenario', ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K'])
MountainFcst = Variable('MountainFcst', ['XNIL', 'SIG', 'SVR'])
CombVerMo = Variable('CombVerMo', ['StrongUp', 'WeakUp', 'Neutral', 'Down'])
Boundaries = Variable('Boundaries', ['None', 'Weak', 'Strong'])
MeanRH = Variable('MeanRH', ['VeryMoist', 'Average', 'Dry'])
Dewpoints = Variable('Dewpoints', ['LowEvrywhere', 'LowAtStation', 'LowSHighN', 'LowNHighS', 'LowMtsHighPl', 'HighEvrywher', 'Other'])
InsChange = Variable('InsChange', ['Decreasing', 'LittleChange', 'Increasing'])
N34StarFcst = Variable('N34StarFcst', ['XNIL', 'SIG', 'SVR'])
SynForcng = Variable('SynForcng', ['SigNegative', 'NegToPos', 'SigPositive', 'PosToNeg', 'LittleChange'])
CompPlFcst = Variable('CompPlFcst', ['IncCapDecIns', 'LittleChange', 'DecCapIncIns'])
Date = Variable('Date', ['May15_Jun14', 'Jun15_Jul1', 'Jul2_Jul15', 'Jul16_Aug10', 'Aug11_Aug20', 'Aug20_Sep15'])
WndHodograph = Variable('WndHodograph', ['DCVZFavor', 'StrongWest', 'Westerly', 'Other'])
ScenRel3_4 = Variable('ScenRel3_4', ['ACEFK', 'B', 'D', 'GJ', 'HI'])
R5Fcst = Variable('R5Fcst', ['XNIL', 'SIG', 'SVR'])
LatestCIN = Variable('LatestCIN', ['None', 'PartInhibit', 'Stifling', 'TotalInhibit'])
WindFieldMt = Variable('WindFieldMt', ['Westerly', 'LVorOther'])
RaoContMoist = Variable('RaoContMoist', ['VeryWet', 'Wet', 'Neutral', 'Dry'])
AreaMeso_ALS = Variable('AreaMeso_ALS', ['StrongUp', 'WeakUp', 'Neutral', 'Down'])
SatContMoist = Variable('SatContMoist', ['VeryWet', 'Wet', 'Neutral', 'Dry'])
MorningBound = Variable('MorningBound', ['None', 'Weak', 'Strong'])
RHRatio = Variable('RHRatio', ['MoistMDryL', 'DryMMoistL', 'Other'])
WindAloft = Variable('WindAloft', ['LV', 'SWQuad', 'NWQuad', 'AllElse'])
LowLLapse = Variable('LowLLapse', ['CloseToDryAd', 'Steep', 'ModerateOrLe', 'Stable'])
AMDewptCalPl = Variable('AMDewptCalPl', ['Instability', 'Neutral', 'Stability'])
SubjVertMo = Variable('SubjVertMo', ['StronUp', 'WeakUp', 'Neutral', 'Down'])
OutflowFrMt = Variable('OutflowFrMt', ['None', 'Weak', 'Strong'])
AreaMoDryAir = Variable('AreaMoDryAir', ['VeryWet', 'Wet', 'Neutral', 'Dry'])
MorningCIN = Variable('MorningCIN', ['None', 'PartInhibit', 'Stifling', 'TotalInhibit'])
TempDis = Variable('TempDis', ['QStationary', 'Moving', 'None', 'Other'])
InsSclInScen = Variable('InsSclInScen', ['LessUnstable', 'Average', 'MoreUnstable'])
WindFieldPln = Variable('WindFieldPln', ['LV', 'DenvCyclone', 'LongAnticyc', 'E_NE', 'SEQuad', 'WidespdDnsl'])
CurPropConv = Variable('CurPropConv', ['None', 'Slight', 'Moderate', 'Strong'])
# ---------------------------------------------------------------------------
# Conditional-probability factors for the severe-weather network below.  The
# variable set (Scenario, PlainsFcst, QGVertMotion, ...) matches the classic
# "Hailfinder" forecasting network -- NOTE(review): confirm provenance.
# Each Prob(child, parents, values) holds the child's conditional distribution
# flattened over all parent assignments; the numeric tables are reproduced
# unchanged.
# ---------------------------------------------------------------------------
f_ScnRelPlFcst = Prob(ScnRelPlFcst, [Scenario], [
    1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
    0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0])
f_AMInsWliScen = Prob(AMInsWliScen, [ScenRelAMIns, LIfr12ZDENSd, AMDewptCalPl], [
    0.6, 0.3, 0.1, 0.85, 0.13, 0.02, 0.95, 0.04, 0.01, 0.3, 0.3, 0.4, 0.5, 0.3, 0.2, 0.75, 0.2, 0.05, 0.06, 0.21, 0.73, 0.2, 0.4, 0.4, 0.5, 0.4, 0.1, 0.01, 0.04, 0.95, 0.05, 0.2, 0.75, 0.35, 0.35, 0.3, 0.4, 0.3, 0.3, 0.7, 0.2, 0.1, 0.9, 0.08, 0.02, 0.15, 0.3, 0.55, 0.25, 0.5, 0.25, 0.6, 0.3, 0.1, 0.03, 0.17, 0.8, 0.2, 0.3, 0.5, 0.45, 0.4, 0.15, 0.01, 0.04, 0.95, 0.05, 0.18, 0.77, 0.25, 0.4, 0.35, 0.35, 0.35, 0.3, 0.55, 0.4, 0.05, 0.85, 0.13, 0.02, 0.07, 0.38, 0.55, 0.2, 0.6, 0.2, 0.5, 0.43, 0.07, 0.0, 0.05, 0.95, 0.05, 0.35, 0.6, 0.25, 0.5, 0.25, 0.0, 0.02,
    0.98, 0.0, 0.05, 0.95, 0.04, 0.16, 0.8, 0.3, 0.4, 0.3, 0.5, 0.3, 0.2, 0.75, 0.2, 0.05, 0.15, 0.35, 0.5, 0.2, 0.6, 0.2, 0.15, 0.7, 0.15, 0.07, 0.23, 0.7, 0.13, 0.47, 0.4, 0.1, 0.75, 0.15, 0.02, 0.18, 0.8, 0.04, 0.26, 0.7, 0.07, 0.3, 0.63, 0.35, 0.45, 0.2, 0.4, 0.5, 0.1, 0.58, 0.4, 0.02, 0.1, 0.25, 0.65, 0.15, 0.45, 0.4, 0.4, 0.45, 0.15, 0.02, 0.18, 0.8, 0.05, 0.25, 0.7, 0.15, 0.35, 0.5, 0.01, 0.09, 0.9, 0.03, 0.17, 0.8, 0.08, 0.32, 0.6, 0.3, 0.55, 0.15, 0.4, 0.5, 0.1, 0.5, 0.43, 0.07, 0.1, 0.35, 0.55, 0.25, 0.5, 0.25, 0.3, 0.5, 0.2, 0.05, 0.22, 0.73, 0.1, 0.35, 0.55, 0.15, 0.35, 0.5, 0.02, 0.1, 0.88, 0.04, 0.16, 0.8, 0.1, 0.25, 0.65])
f_SfcWndShfDis = Prob(SfcWndShfDis, [Scenario], [
    0.65, 0.05, 0.1, 0.08, 0.04, 0.07, 0.01, 0.65, 0.05, 0.1, 0.1, 0.02, 0.07, 0.01, 0.0, 0.65, 0.2, 0.02, 0.06, 0.05, 0.02, 0.12, 0.02, 0.02, 0.02, 0.45, 0.27, 0.1, 0.06, 0.14, 0.04, 0.04, 0.25, 0.4,
    0.07, 0.1, 0.1, 0.1, 0.02, 0.0, 0.56, 0.12, 0.02, 0.05, 0.05, 0.0, 0.35, 0.33, 0.2, 0.01, 0.1, 0.15, 0.4, 0.0, 0.23, 0.11, 0.02, 0.1, 0.5, 0.3, 0.01, 0.02, 0.05, 0.06, 0.08, 0.04, 0.02, 0.6, 0.14, 0.06, 0.05, 0.13, 0.05, 0.39, 0.13, 0.15, 0.1])
f_InsInMt = Prob(InsInMt, [CldShadeOth, AMInstabMt], [
    0.9, 0.1, 0.0, 0.01, 0.4, 0.59, 0.0, 0.05, 0.95,
    0.6, 0.39, 0.01, 0.0, 0.4, 0.6, 0.0, 0.0, 1.0, 0.5, 0.35, 0.15, 0.0, 0.15, 0.85, 0.0, 0.0, 1.0])
f_AMCINInScen = Prob(AMCINInScen, [ScenRelAMCIN, MorningCIN], [
    1.0, 0.0, 0.0, 0.6, 0.37, 0.03, 0.25,
    0.45, 0.3, 0.0, 0.1, 0.9, 0.75, 0.25, 0.0, 0.3, 0.6, 0.1, 0.01, 0.4, 0.59, 0.0, 0.03, 0.97])
f_PlainsFcst = Prob(PlainsFcst, [CapInScen, InsSclInScen, CurPropConv, ScnRelPlFcst], [
    0.75, 0.2, 0.05, 0.75, 0.2, 0.05, 0.9, 0.08, 0.02, 0.9, 0.06, 0.04, 0.88, 0.1, 0.02, 0.92, 0.08, 0.0, 0.85, 0.13, 0.02, 1.0, 0.0, 0.0, 0.9, 0.08, 0.02, 0.9, 0.08, 0.02, 0.95, 0.04, 0.01, 0.7, 0.25, 0.05, 0.6, 0.33, 0.07, 0.82, 0.13, 0.05, 0.85, 0.1, 0.05, 0.82, 0.15, 0.03, 0.85, 0.14, 0.01, 0.8, 0.17, 0.03, 0.97, 0.02, 0.01, 0.88, 0.1, 0.02, 0.86, 0.1, 0.04, 0.88, 0.1, 0.02, 0.5, 0.4, 0.1, 0.45, 0.42, 0.13, 0.75, 0.18, 0.07, 0.75, 0.15, 0.1, 0.72, 0.22, 0.06, 0.78, 0.21, 0.01, 0.66, 0.27, 0.07, 0.88, 0.1, 0.02, 0.7, 0.22, 0.08, 0.78, 0.16, 0.06, 0.8, 0.16, 0.04, 0.4, 0.45, 0.15, 0.35, 0.45, 0.2, 0.6, 0.27, 0.13, 0.6, 0.22, 0.18, 0.55, 0.32, 0.13, 0.69, 0.29, 0.02, 0.54, 0.36, 0.1, 0.75, 0.2, 0.05, 0.55, 0.3, 0.15, 0.7, 0.22, 0.08, 0.7, 0.25, 0.05, 0.5, 0.3, 0.2, 0.6, 0.3, 0.1, 0.8, 0.14, 0.06, 0.85, 0.09, 0.06, 0.85, 0.1, 0.05, 0.88, 0.11, 0.01, 0.8, 0.17, 0.03, 0.92, 0.06, 0.02, 0.8, 0.12, 0.08, 0.75, 0.22, 0.03, 0.9, 0.08, 0.02, 0.3, 0.4, 0.3, 0.55, 0.34, 0.11, 0.7, 0.2, 0.1, 0.75, 0.15, 0.1, 0.62, 0.28, 0.1, 0.85, 0.14, 0.01, 0.75, 0.2, 0.05, 0.82, 0.14, 0.04, 0.6, 0.25, 0.15, 0.68, 0.22, 0.1, 0.82, 0.15, 0.03, 0.2, 0.45, 0.35, 0.4, 0.4, 0.2, 0.7, 0.2, 0.1, 0.65, 0.22, 0.13, 0.5, 0.34, 0.16, 0.74, 0.24, 0.02, 0.6, 0.3, 0.1, 0.67, 0.24, 0.09, 0.35, 0.4, 0.25, 0.6, 0.25, 0.15, 0.75, 0.2, 0.05, 0.16, 0.47, 0.37, 0.3, 0.45, 0.25, 0.45, 0.32, 0.23, 0.52, 0.26, 0.22, 0.35, 0.45, 0.2, 0.65, 0.32, 0.03, 0.48, 0.39, 0.13, 0.58, 0.3, 0.12, 0.25, 0.45, 0.3, 0.5, 0.28, 0.22, 0.65, 0.27, 0.08, 0.35, 0.2, 0.45, 0.45, 0.35, 0.2, 0.8, 0.1, 0.1, 0.72, 0.14, 0.14, 0.78, 0.15, 0.07, 0.86, 0.12, 0.02, 0.65, 0.25, 0.1, 0.85, 0.1, 0.05, 0.65, 0.2, 0.15, 0.72, 0.2, 0.08, 0.85, 0.1, 0.05, 0.3, 0.25, 0.45, 0.4, 0.36, 0.24, 0.65, 0.2, 0.15, 0.6, 0.2, 0.2, 0.6, 0.28, 0.12, 0.83, 0.14, 0.03, 0.45, 0.4, 0.15, 0.7, 0.18, 0.12, 0.55, 0.25, 0.2, 0.6, 0.25, 0.15, 0.72, 0.2, 0.08, 0.25, 0.28, 0.47, 0.3, 0.38,
    0.32, 0.45, 0.3, 0.25, 0.5, 0.25, 0.25, 0.4, 0.35, 0.25, 0.72, 0.24, 0.04, 0.25, 0.57, 0.18, 0.57, 0.28, 0.15, 0.25, 0.35, 0.4, 0.48, 0.26, 0.26, 0.6, 0.26, 0.14, 0.18, 0.3, 0.52, 0.2, 0.4, 0.4, 0.3, 0.3, 0.4, 0.4, 0.3, 0.3, 0.25, 0.48, 0.27, 0.63, 0.32, 0.05, 0.15, 0.63, 0.22, 0.4, 0.38, 0.22, 0.2, 0.37, 0.43, 0.3, 0.35, 0.35, 0.5, 0.32, 0.18, 0.75, 0.2, 0.05, 0.65, 0.3, 0.05, 0.9, 0.08, 0.02, 0.91, 0.05, 0.04, 0.85, 0.13, 0.02, 0.9, 0.1, 0.0, 0.84, 0.12, 0.04, 0.99, 0.01, 0.0, 0.88, 0.1, 0.02, 0.92, 0.06, 0.02, 0.96, 0.03, 0.01, 0.65, 0.25, 0.1, 0.58, 0.32, 0.1, 0.8, 0.15, 0.05, 0.85, 0.1, 0.05, 0.8, 0.16, 0.04, 0.83, 0.16, 0.01, 0.77, 0.17, 0.06, 0.93, 0.06, 0.01, 0.85, 0.12, 0.03, 0.85, 0.1, 0.05, 0.9, 0.08, 0.02, 0.45, 0.35, 0.2, 0.45, 0.35, 0.2, 0.7, 0.2, 0.1, 0.72, 0.17, 0.11, 0.7, 0.22, 0.08, 0.75, 0.24, 0.01, 0.62, 0.3, 0.08, 0.85, 0.12, 0.03, 0.75, 0.15, 0.1, 0.76, 0.17, 0.07, 0.8, 0.16, 0.04, 0.35, 0.4, 0.25, 0.35, 0.4, 0.25, 0.55, 0.3, 0.15, 0.55, 0.27, 0.18, 0.5, 0.35, 0.15, 0.65, 0.33, 0.02, 0.38, 0.5, 0.12, 0.7, 0.24, 0.06, 0.65, 0.2, 0.15, 0.67, 0.23, 0.1, 0.7, 0.25, 0.05, 0.35, 0.3, 0.35, 0.55, 0.3, 0.15, 0.82, 0.13, 0.05, 0.82, 0.1, 0.08, 0.75, 0.18, 0.07, 0.88, 0.11, 0.01, 0.75, 0.2, 0.05, 0.9, 0.07, 0.03, 0.7, 0.2, 0.1, 0.8, 0.15, 0.05, 0.9, 0.08, 0.02, 0.28, 0.37, 0.35, 0.48, 0.35, 0.17, 0.7, 0.2, 0.1, 0.7, 0.17, 0.13, 0.6, 0.29, 0.11, 0.82, 0.16, 0.02, 0.63, 0.3, 0.07, 0.8, 0.15, 0.05, 0.5,
    0.3, 0.2, 0.7, 0.2, 0.1, 0.8, 0.16, 0.04, 0.23, 0.4, 0.37, 0.38, 0.35, 0.27, 0.58, 0.25, 0.17, 0.55, 0.25, 0.2, 0.53, 0.32, 0.15, 0.73, 0.25, 0.02, 0.35, 0.53, 0.12, 0.65, 0.24, 0.11, 0.3, 0.4, 0.3, 0.6, 0.24, 0.16, 0.68, 0.24, 0.08, 0.18, 0.45, 0.37, 0.3, 0.35, 0.35, 0.45, 0.3, 0.25, 0.45, 0.3, 0.25, 0.35, 0.43, 0.22, 0.62, 0.35, 0.03, 0.2, 0.65, 0.15, 0.52, 0.33, 0.15, 0.23, 0.42, 0.35, 0.47, 0.3, 0.23, 0.55, 0.3, 0.15, 0.25, 0.15, 0.6, 0.45, 0.35, 0.2, 0.65, 0.2, 0.15, 0.55, 0.2, 0.25, 0.55, 0.25, 0.2, 0.81, 0.17, 0.02, 0.6, 0.28, 0.12, 0.8, 0.13, 0.07, 0.6, 0.2, 0.2, 0.75, 0.15, 0.1, 0.88, 0.08, 0.04, 0.22, 0.17, 0.61, 0.35, 0.37, 0.28, 0.45, 0.3, 0.25, 0.45, 0.25, 0.3, 0.48, 0.29, 0.23, 0.72, 0.25, 0.03, 0.43, 0.4, 0.17, 0.68, 0.2, 0.12, 0.35, 0.3, 0.35, 0.6, 0.2, 0.2, 0.74, 0.16, 0.1, 0.19, 0.18, 0.63, 0.25, 0.4, 0.35, 0.35, 0.3, 0.35, 0.35, 0.3, 0.35, 0.35, 0.35, 0.3, 0.65, 0.3, 0.05, 0.22, 0.58, 0.2, 0.45, 0.35, 0.2, 0.25, 0.34, 0.41, 0.48, 0.26, 0.26, 0.58, 0.25, 0.17, 0.15, 0.2, 0.65, 0.18, 0.4, 0.42, 0.25, 0.35, 0.4, 0.25, 0.35, 0.4, 0.25, 0.42, 0.33, 0.58, 0.36, 0.06, 0.13, 0.62, 0.25, 0.3, 0.45, 0.25, 0.22, 0.35, 0.43, 0.35, 0.32, 0.33, 0.5, 0.3, 0.2, 0.75, 0.2, 0.05, 0.75, 0.2, 0.05, 0.95, 0.04, 0.01, 0.93, 0.04, 0.03, 0.92, 0.06, 0.02, 0.87, 0.13, 0.0, 0.9, 0.06, 0.04, 0.98, 0.02, 0.0, 0.92, 0.06, 0.02, 0.95, 0.04, 0.01, 0.97, 0.02, 0.01, 0.6, 0.3, 0.1, 0.65, 0.28, 0.07, 0.9, 0.08, 0.02, 0.85, 0.1, 0.05, 0.82, 0.13, 0.05, 0.8, 0.19, 0.01, 0.8, 0.13, 0.07, 0.91, 0.08, 0.01, 0.85, 0.12, 0.03, 0.9, 0.08, 0.02, 0.93, 0.06, 0.01, 0.35, 0.4, 0.25, 0.45, 0.4, 0.15, 0.75, 0.19, 0.06, 0.7, 0.2, 0.1, 0.6, 0.3, 0.1, 0.72, 0.27, 0.01, 0.6, 0.3, 0.1, 0.8, 0.16, 0.04, 0.75, 0.17, 0.08, 0.75, 0.2, 0.05, 0.88, 0.1, 0.02, 0.2, 0.45, 0.35, 0.3, 0.45, 0.25, 0.55, 0.3, 0.15, 0.5, 0.3, 0.2, 0.45, 0.38, 0.17, 0.6, 0.38, 0.02, 0.28, 0.57, 0.15, 0.65, 0.28, 0.07, 0.63, 0.25, 0.12, 0.62, 0.28, 0.1, 0.8, 0.17, 0.03, 0.5, 0.2, 0.3, 0.6, 0.25, 0.15, 0.85, 0.1, 0.05, 0.85, 0.07,
    0.08, 0.75, 0.15, 0.1, 0.85, 0.14, 0.01, 0.75, 0.2, 0.05, 0.94, 0.05, 0.01, 0.65, 0.22, 0.13, 0.83, 0.1, 0.07, 0.93, 0.06, 0.01, 0.4, 0.28, 0.32, 0.5, 0.25, 0.25, 0.72, 0.18, 0.1, 0.65, 0.2, 0.15, 0.55, 0.3, 0.15, 0.78, 0.2, 0.02, 0.55, 0.35, 0.1, 0.85, 0.12, 0.03, 0.45, 0.3, 0.25, 0.73, 0.15, 0.12, 0.85, 0.12, 0.03, 0.3, 0.34, 0.36, 0.35, 0.35, 0.3, 0.55, 0.25, 0.2, 0.5, 0.27, 0.23, 0.4, 0.38, 0.22, 0.7, 0.28, 0.02, 0.35, 0.5, 0.15, 0.6, 0.25, 0.15, 0.35, 0.35, 0.3, 0.62, 0.22, 0.16, 0.7, 0.22, 0.08, 0.23, 0.4, 0.37, 0.25, 0.4, 0.35, 0.4, 0.3, 0.3, 0.4, 0.3, 0.3, 0.3, 0.45, 0.25, 0.57, 0.4, 0.03, 0.15, 0.65, 0.2, 0.5, 0.33, 0.17, 0.25, 0.36, 0.39, 0.5, 0.28, 0.22, 0.55, 0.3, 0.15, 0.4, 0.08, 0.52, 0.45, 0.25, 0.3, 0.75, 0.1, 0.15, 0.65, 0.15, 0.2, 0.52, 0.25, 0.23, 0.82, 0.16, 0.02, 0.65, 0.27, 0.08, 0.85, 0.09, 0.06, 0.5, 0.2, 0.3, 0.77, 0.1, 0.13, 0.9, 0.07, 0.03, 0.27, 0.1, 0.63, 0.35, 0.3, 0.35, 0.55, 0.22, 0.23, 0.45, 0.25, 0.3, 0.42, 0.3, 0.28, 0.74, 0.22, 0.04, 0.45, 0.4, 0.15, 0.77, 0.13, 0.1, 0.3, 0.25, 0.45, 0.68, 0.15, 0.17, 0.75, 0.15, 0.1, 0.15, 0.16, 0.69, 0.25, 0.3, 0.45, 0.4, 0.3, 0.3, 0.3, 0.3, 0.4, 0.25, 0.4, 0.35, 0.6, 0.34, 0.06, 0.18, 0.62, 0.2, 0.47, 0.3, 0.23, 0.25, 0.3, 0.45, 0.5, 0.22, 0.28, 0.5, 0.27, 0.23, 0.1, 0.2, 0.7, 0.2, 0.3, 0.5, 0.2, 0.4, 0.4, 0.23, 0.3, 0.47, 0.15, 0.45, 0.4, 0.5, 0.42, 0.08, 0.1, 0.65, 0.25, 0.28, 0.4, 0.32, 0.2, 0.32, 0.48, 0.3, 0.28, 0.42, 0.38, 0.32, 0.3])
f_CombClouds = Prob(CombClouds, [VISCloudCov, IRCloudCover], [
    0.95, 0.04, 0.01, 0.85, 0.13, 0.02, 0.8, 0.1,
    0.1, 0.45, 0.52, 0.03, 0.1, 0.8, 0.1, 0.05, 0.45, 0.5, 0.1, 0.4, 0.5, 0.02, 0.28, 0.7, 0.0, 0.02, 0.98])
f_LIfr12ZDENSd = Prob(LIfr12ZDENSd, [], [0.1, 0.52, 0.3, 0.08])
f_CombMoisture = Prob(CombMoisture, [SatContMoist, RaoContMoist], [
    0.9, 0.1, 0.0, 0.0, 0.6, 0.35, 0.05, 0.0, 0.3, 0.5, 0.2, 0.0, 0.25, 0.35, 0.25, 0.15, 0.55, 0.4, 0.05, 0.0, 0.15, 0.6, 0.2, 0.05, 0.05, 0.4, 0.45,
    0.1, 0.1, 0.3, 0.3, 0.3, 0.25, 0.3, 0.35, 0.1, 0.1, 0.35, 0.5, 0.05, 0.0, 0.15, 0.7, 0.15, 0.0, 0.1, 0.4, 0.5, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25])
f_AMInstabMt = Prob(AMInstabMt, [], [0.333333, 0.333333, 0.333334])
f_CldShadeOth = Prob(CldShadeOth, [AreaMeso_ALS, AreaMoDryAir, CombClouds], [
    1.0, 0.0, 0.0, 0.85, 0.15, 0.0, 0.25, 0.35, 0.4, 0.92, 0.08, 0.0, 0.7, 0.29, 0.01, 0.15, 0.4, 0.45, 0.88, 0.12, 0.0, 0.4, 0.5, 0.1, 0.1, 0.4, 0.5, 0.85, 0.14, 0.01, 0.55, 0.43, 0.02, 0.1, 0.25, 0.65, 0.95, 0.05, 0.0, 0.4, 0.55, 0.05, 0.05, 0.45, 0.5, 0.9, 0.09, 0.01, 0.25, 0.6, 0.15, 0.01, 0.3, 0.69, 0.85, 0.15, 0.0, 0.15, 0.75, 0.1, 0.0, 0.2, 0.8, 0.6, 0.39, 0.01,
    0.01, 0.9, 0.09, 0.0, 0.15, 0.85, 0.93, 0.07, 0.0, 0.2, 0.78, 0.02, 0.01, 0.29, 0.7, 0.8, 0.2, 0.0, 0.01, 0.89, 0.1, 0.0, 0.1, 0.9, 0.8, 0.18, 0.02, 0.03, 0.85, 0.12, 0.0, 0.05, 0.95, 0.78, 0.2, 0.02, 0.01, 0.74, 0.25, 0.0, 0.04, 0.96, 0.74, 0.25, 0.01, 0.0, 0.5, 0.5, 0.0, 0.1, 0.9, 0.65, 0.34, 0.01, 0.0, 0.4, 0.6, 0.0, 0.02, 0.98, 0.5, 0.48, 0.02, 0.01, 0.74, 0.25, 0.0, 0.01, 0.99, 0.42, 0.55, 0.03, 0.05, 0.65, 0.3, 0.0, 0.0, 1.0])
f_N0_7muVerMo = Prob(N0_7muVerMo, [], [0.25, 0.25, 0.25, 0.25])
f_LLIW = Prob(LLIW, [], [0.12, 0.32, 0.38, 0.18])
f_ScenRelAMIns = Prob(ScenRelAMIns, [Scenario], [
    1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
    0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0])
f_VISCloudCov = Prob(VISCloudCov, [], [0.1, 0.5, 0.4])
f_ScenRelAMCIN = Prob(ScenRelAMCIN, [Scenario], [
    1.0, 0.0, 1.0, 0.0, 0.0, 1.0,
    0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
f_MidLLapse = Prob(MidLLapse, [Scenario], [
    0.25, 0.55, 0.2, 0.25, 0.5, 0.25, 0.4, 0.38, 0.22, 0.43, 0.37, 0.2, 0.02,
    0.38, 0.6, 0.0, 0.1, 0.9, 0.84, 0.16, 0.0, 0.25, 0.31, 0.44, 0.41, 0.29, 0.3, 0.23, 0.42, 0.35, 0.16, 0.28, 0.56])
f_MvmtFeatures = Prob(MvmtFeatures, [Scenario], [
    0.25, 0.55, 0.2, 0.0, 0.05, 0.1, 0.1, 0.75, 0.1, 0.3, 0.3, 0.3, 0.18, 0.38, 0.34, 0.1, 0.02, 0.02,
    0.26, 0.7, 0.05, 0.07, 0.05, 0.83, 0.1, 0.25, 0.15, 0.5, 0.0, 0.6, 0.1, 0.3, 0.2, 0.1, 0.2, 0.5, 0.04, 0.0, 0.04, 0.92, 0.5, 0.35, 0.09, 0.06])
f_CapInScen = Prob(CapInScen, [CapChange, AMCINInScen], [
    1.0, 0.0, 0.0, 0.75, 0.25, 0.0, 0.3, 0.35, 0.35,
    0.98, 0.02, 0.0, 0.03, 0.94, 0.03, 0.0, 0.02, 0.98, 0.35, 0.35, 0.3, 0.0, 0.25, 0.75, 0.0, 0.0, 1.0])
f_CldShadeConv = Prob(CldShadeConv, [InsInMt, WndHodograph], [
    1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.3,
    0.6, 0.1, 0.2, 0.7, 0.1, 0.5, 0.46, 0.04, 0.8, 0.19, 0.01, 0.0, 0.3, 0.7, 0.0, 0.2, 0.8, 0.1, 0.5, 0.4, 0.5, 0.38, 0.12])
f_IRCloudCover = Prob(IRCloudCover, [], [0.15, 0.45, 0.4])
f_CapChange = Prob(CapChange, [CompPlFcst], [
    0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0])
f_QGVertMotion = Prob(QGVertMotion, [], [0.15, 0.15, 0.5, 0.2])
f_LoLevMoistAd = Prob(LoLevMoistAd, [], [0.12, 0.28, 0.3, 0.3])
f_Scenario = Prob(Scenario, [Date], [
    0.1, 0.16, 0.1, 0.08, 0.08, 0.01, 0.08, 0.1, 0.09, 0.03, 0.17, 0.05, 0.16, 0.09, 0.09, 0.12, 0.02, 0.13, 0.06, 0.07, 0.11, 0.1, 0.04, 0.13, 0.1, 0.08, 0.15, 0.03, 0.14, 0.04,
    0.06, 0.15, 0.08, 0.04, 0.13, 0.09, 0.07, 0.2, 0.08, 0.06, 0.05, 0.07, 0.13, 0.08, 0.04, 0.11, 0.1, 0.07, 0.17, 0.05, 0.1, 0.05, 0.07, 0.14, 0.1, 0.05, 0.11, 0.1, 0.08, 0.11, 0.02, 0.11, 0.06, 0.08, 0.11, 0.17])
f_MountainFcst = Prob(MountainFcst, [InsInMt], [
    1.0, 0.0, 0.0, 0.48, 0.5, 0.02, 0.2, 0.5, 0.3])
f_CombVerMo = Prob(CombVerMo, [N0_7muVerMo, SubjVertMo, QGVertMotion], [
    1.0, 0.0, 0.0, 0.0, 0.9, 0.1, 0.0, 0.0, 0.7, 0.2, 0.1, 0.0, 0.2, 0.5, 0.2, 0.1, 0.9, 0.1, 0.0, 0.0, 0.7, 0.3, 0.0, 0.0, 0.15, 0.7, 0.15, 0.0, 0.1, 0.35, 0.45, 0.1, 0.7, 0.2, 0.1, 0.0, 0.15, 0.7, 0.15, 0.0, 0.2, 0.6, 0.2, 0.0, 0.1, 0.2, 0.6, 0.1, 0.2, 0.5, 0.2, 0.1, 0.1, 0.35, 0.45, 0.1, 0.1, 0.2, 0.6, 0.1, 0.1, 0.1, 0.2, 0.6, 0.9, 0.1, 0.0, 0.0, 0.7, 0.3, 0.0, 0.0, 0.15, 0.7, 0.15, 0.0, 0.1, 0.35, 0.45, 0.1, 0.7, 0.3, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.7, 0.3, 0.0, 0.0, 0.2, 0.7, 0.1, 0.15, 0.7, 0.15, 0.0, 0.0, 0.7, 0.3, 0.0, 0.0, 0.3, 0.7, 0.0, 0.0, 0.15, 0.5, 0.35, 0.1, 0.35, 0.45, 0.1, 0.0, 0.2, 0.7, 0.1, 0.0,
    0.15, 0.5, 0.35, 0.0, 0.1, 0.2, 0.7, 0.7, 0.2, 0.1, 0.0, 0.15, 0.7, 0.15, 0.0, 0.2, 0.6, 0.2, 0.0, 0.1, 0.2, 0.6, 0.1, 0.15, 0.7, 0.15, 0.0, 0.0, 0.7, 0.3, 0.0, 0.0, 0.3, 0.7, 0.0, 0.0, 0.15, 0.5, 0.35, 0.2, 0.6, 0.2, 0.0, 0.0, 0.3, 0.7, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.7, 0.3, 0.1, 0.2, 0.6, 0.1, 0.0, 0.15, 0.5, 0.35, 0.0, 0.0, 0.7, 0.3, 0.0, 0.0, 0.3, 0.7, 0.2, 0.5, 0.2, 0.1, 0.1, 0.35, 0.45, 0.1, 0.1, 0.2, 0.6, 0.1, 0.1, 0.1, 0.2, 0.6, 0.1, 0.35, 0.45, 0.1, 0.0, 0.2, 0.7, 0.1, 0.0, 0.15, 0.5, 0.35, 0.0, 0.1, 0.2, 0.7, 0.1, 0.2, 0.6, 0.1, 0.0, 0.15, 0.5, 0.35, 0.0, 0.0, 0.7, 0.3, 0.0, 0.0, 0.3, 0.7, 0.1, 0.1, 0.2, 0.6, 0.0, 0.1, 0.2, 0.7, 0.0, 0.0, 0.3, 0.7, 0.0, 0.0, 0.0, 1.0])
f_Boundaries = Prob(Boundaries, [WndHodograph, OutflowFrMt, MorningBound], [
    0.5, 0.48, 0.02, 0.3, 0.5, 0.2, 0.1, 0.25, 0.65, 0.3, 0.63, 0.07, 0.1, 0.5, 0.4, 0.05, 0.2, 0.75, 0.0, 0.55, 0.45, 0.0, 0.4, 0.6, 0.0, 0.15, 0.85, 0.75, 0.22, 0.03, 0.45, 0.45, 0.1, 0.25, 0.4, 0.35, 0.15, 0.7, 0.15, 0.1, 0.75, 0.15, 0.05, 0.5, 0.45, 0.0, 0.5,
    0.5, 0.0, 0.4, 0.6, 0.0, 0.2, 0.8, 0.8, 0.18, 0.02, 0.35, 0.5, 0.15, 0.25, 0.35, 0.4, 0.15, 0.7, 0.15, 0.05, 0.8, 0.15, 0.05, 0.45, 0.5, 0.0, 0.7, 0.3, 0.0, 0.5, 0.5, 0.0, 0.2, 0.8, 0.7, 0.28, 0.02, 0.25, 0.6, 0.15, 0.05, 0.35, 0.6, 0.4, 0.55, 0.05, 0.2, 0.65, 0.15, 0.05, 0.3, 0.65, 0.02, 0.73, 0.25, 0.01, 0.5, 0.49, 0.01, 0.2, 0.79])
f_MeanRH = Prob(MeanRH, [Scenario], [
    0.33, 0.5, 0.17, 0.4, 0.4, 0.2, 0.05, 0.45, 0.5, 0.1, 0.5, 0.4, 0.05,
    0.65, 0.3, 1.0, 0.0, 0.0, 0.0, 0.07, 0.93, 0.4, 0.55, 0.05, 0.2, 0.45, 0.35, 0.05, 0.55, 0.4, 0.2, 0.4, 0.4])
f_Dewpoints = Prob(Dewpoints, [Scenario], [
    0.04, 0.05, 0.15, 0.05, 0.19, 0.3, 0.22, 0.05, 0.07, 0.15, 0.1, 0.3, 0.27, 0.06, 0.4, 0.25, 0.0, 0.15, 0.05, 0.02, 0.13, 0.13, 0.22, 0.18, 0.07, 0.34, 0.03, 0.03, 0.15, 0.2, 0.2, 0.18, 0.11, 0.11,
    0.05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.98, 0.02, 0.5, 0.27, 0.15, 0.02, 0.02, 0.0, 0.04, 0.0, 0.02, 0.1, 0.05, 0.5, 0.2, 0.13, 0.0, 0.02, 0.7, 0.0, 0.2, 0.04, 0.04, 0.1, 0.45, 0.1, 0.05, 0.26, 0.02, 0.02, 0.1, 0.1, 0.1, 0.2, 0.05, 0.1, 0.35])
f_InsChange = Prob(InsChange, [CompPlFcst, LoLevMoistAd], [
    0.0, 0.05, 0.95, 0.05, 0.15, 0.8, 0.15, 0.5, 0.35, 0.5, 0.4, 0.1, 0.0,
    0.12, 0.88, 0.1, 0.4, 0.5, 0.2, 0.6, 0.2, 0.8, 0.16, 0.04, 0.05, 0.15, 0.8, 0.25, 0.5, 0.25, 0.35, 0.5, 0.15, 0.9, 0.09, 0.01])
f_N34StarFcst = Prob(N34StarFcst, [ScenRel3_4, PlainsFcst], [
    0.94, 0.05, 0.01, 0.06, 0.89, 0.05, 0.01, 0.05, 0.94, 0.98, 0.02, 0.0, 0.04, 0.94, 0.02, 0.0, 0.03, 0.97,
    0.92, 0.06, 0.02, 0.01, 0.89, 0.1, 0.0, 0.01, 0.99, 0.92, 0.06, 0.02, 0.03, 0.92, 0.05, 0.01, 0.04, 0.95, 0.99, 0.01, 0.0, 0.09, 0.9, 0.01, 0.03, 0.12, 0.85])
f_SynForcng = Prob(SynForcng, [Scenario], [
    0.35, 0.25, 0.0, 0.35, 0.05, 0.06, 0.1, 0.06, 0.3, 0.48, 0.1, 0.27, 0.4, 0.08, 0.15, 0.35, 0.2, 0.1, 0.25, 0.1, 0.15, 0.15, 0.1, 0.15,
    0.45, 0.15, 0.1, 0.05, 0.15, 0.55, 0.15, 0.1, 0.1, 0.25, 0.4, 0.25, 0.25, 0.25, 0.15, 0.1, 0.25, 0.2, 0.15, 0.2, 0.2, 0.01, 0.05, 0.01, 0.05, 0.88, 0.2, 0.2, 0.35, 0.15, 0.1])
f_CompPlFcst = Prob(CompPlFcst, [AreaMeso_ALS, CldShadeOth, Boundaries, CldShadeConv], [
    0.4, 0.35, 0.25, 0.4, 0.35, 0.25, 0.45, 0.3, 0.25, 0.35, 0.35, 0.3, 0.35, 0.35, 0.3, 0.4, 0.35, 0.25, 0.3, 0.3, 0.4, 0.3, 0.3, 0.4, 0.3, 0.35, 0.35, 0.1, 0.35, 0.55, 0.25, 0.3, 0.45, 0.4, 0.3, 0.3, 0.05, 0.35, 0.6, 0.1, 0.35, 0.55, 0.25, 0.4, 0.35, 0.01, 0.25, 0.74, 0.05, 0.6, 0.35, 0.15, 0.35, 0.5, 0.05, 0.3, 0.65, 0.15, 0.35, 0.5, 0.35, 0.3, 0.35, 0.03, 0.25, 0.72, 0.05, 0.3, 0.65, 0.2, 0.4, 0.4, 0.01, 0.2, 0.79, 0.04, 0.27, 0.69, 0.13, 0.35, 0.52, 0.6, 0.25, 0.15, 0.65, 0.25, 0.1, 0.7, 0.22, 0.08, 0.5, 0.25, 0.25, 0.55, 0.25, 0.2, 0.65, 0.25, 0.1, 0.35, 0.25, 0.4, 0.4, 0.25, 0.35, 0.5, 0.25, 0.25, 0.4, 0.3, 0.3, 0.45, 0.3, 0.25, 0.55, 0.3, 0.15, 0.3, 0.35, 0.35, 0.35, 0.35, 0.3, 0.45, 0.35, 0.2, 0.15, 0.4, 0.45, 0.2, 0.4, 0.4, 0.35, 0.35, 0.3, 0.2, 0.5, 0.3, 0.25, 0.5, 0.25, 0.4, 0.45, 0.15, 0.15, 0.45, 0.4, 0.2, 0.5, 0.3, 0.3, 0.5, 0.2, 0.1, 0.35,
    0.55, 0.12, 0.43, 0.45, 0.2, 0.45, 0.35, 0.6, 0.35, 0.05, 0.65, 0.3, 0.05, 0.7, 0.27, 0.03, 0.55, 0.3, 0.15, 0.6, 0.3, 0.1, 0.65, 0.3, 0.05, 0.45, 0.3, 0.25, 0.5, 0.3, 0.2, 0.55, 0.35, 0.1, 0.45, 0.4, 0.15, 0.5, 0.4, 0.1, 0.6, 0.3, 0.1, 0.4, 0.4, 0.2, 0.45, 0.4, 0.15, 0.55, 0.3, 0.15, 0.3, 0.4, 0.3, 0.35, 0.4, 0.25, 0.45, 0.35, 0.2, 0.25, 0.45, 0.3, 0.3, 0.45, 0.25, 0.55, 0.33, 0.12, 0.2, 0.4, 0.4, 0.25, 0.5, 0.25, 0.5, 0.3, 0.2, 0.15, 0.4, 0.45, 0.2, 0.45, 0.35, 0.4, 0.35, 0.25, 0.7, 0.27, 0.03, 0.75, 0.23, 0.02, 0.85, 0.14, 0.01, 0.6, 0.35, 0.05, 0.65, 0.3, 0.05, 0.78, 0.18, 0.04, 0.5, 0.35, 0.15, 0.55, 0.35, 0.1, 0.7, 0.24, 0.06, 0.65, 0.3, 0.05, 0.7, 0.26, 0.04, 0.8, 0.17, 0.03, 0.6, 0.3, 0.1, 0.65, 0.3, 0.05, 0.75, 0.2, 0.05, 0.48, 0.32, 0.2, 0.55, 0.3, 0.15, 0.65, 0.28, 0.07, 0.6, 0.35, 0.05, 0.65, 0.32, 0.03, 0.75, 0.23, 0.02, 0.55, 0.33, 0.12, 0.6, 0.35, 0.05, 0.7, 0.25, 0.05, 0.45, 0.35, 0.2, 0.5, 0.4, 0.1, 0.6, 0.3, 0.1])
f_Date = Prob(Date, [], [
    0.254098, 0.131148, 0.106557, 0.213115, 0.07377, 0.221312])
f_WndHodograph = Prob(WndHodograph, [], [0.3, 0.25, 0.25, 0.2])
f_ScenRel3_4 = Prob(ScenRel3_4, [Scenario], [
    1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0,
    0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0])
f_R5Fcst = Prob(R5Fcst, [MountainFcst, N34StarFcst], [
    1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
    1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0])
f_LatestCIN = Prob(LatestCIN, [], [0.4, 0.4, 0.15, 0.05])
f_WindFieldMt = Prob(WindFieldMt, [Scenario], [
    0.8, 0.2, 0.35, 0.65, 0.75, 0.25, 0.7,
    0.3, 0.65, 0.35, 0.15, 0.85, 0.7, 0.3, 0.3, 0.7, 0.5, 0.5, 0.01, 0.99, 0.7, 0.3])
f_RaoContMoist = Prob(RaoContMoist, [], [0.15, 0.2, 0.4, 0.25])
f_AreaMeso_ALS = Prob(AreaMeso_ALS, [CombVerMo], [
    1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0])
f_SatContMoist = Prob(SatContMoist, [], [0.15, 0.2, 0.4, 0.25])
f_MorningBound = Prob(MorningBound, [], [0.5, 0.35, 0.15])
f_RHRatio = Prob(RHRatio, [Scenario], [
    0.05, 0.5, 0.45, 0.1, 0.5, 0.4, 0.4, 0.15, 0.45, 0.2, 0.45, 0.35, 0.8,
    0.05, 0.15, 0.0, 0.0, 1.0, 0.6, 0.0, 0.4, 0.0, 0.7, 0.3, 0.1, 0.7, 0.2, 0.4, 0.4, 0.2, 0.15, 0.45, 0.4])
f_WindAloft = Prob(WindAloft, [Scenario], [
    0.0, 0.95, 0.01, 0.04, 0.2, 0.3, 0.2, 0.3, 0.05, 0.09, 0.59, 0.27, 0.03, 0.32, 0.42, 0.23, 0.07, 0.66,
    0.02, 0.25, 0.5, 0.0, 0.0, 0.5, 0.25, 0.3, 0.25, 0.2, 0.2, 0.14, 0.43, 0.23, 0.2, 0.41, 0.1, 0.29, 0.96, 0.0, 0.0, 0.04, 0.03, 0.08, 0.33, 0.56])
f_LowLLapse = Prob(LowLLapse, [Scenario], [
    0.04, 0.25, 0.35, 0.36, 0.07, 0.31, 0.31, 0.31, 0.35, 0.47, 0.14, 0.04, 0.4, 0.4, 0.13, 0.07, 0.45, 0.35,
    0.15, 0.05, 0.01, 0.35, 0.45, 0.19, 0.78, 0.19, 0.03, 0.0, 0.0, 0.02, 0.33, 0.65, 0.22, 0.4, 0.3, 0.08, 0.13, 0.4, 0.35, 0.12, 0.09, 0.4, 0.33, 0.18])
f_AMDewptCalPl = Prob(AMDewptCalPl, [], [0.3, 0.25, 0.45])
f_SubjVertMo = Prob(SubjVertMo, [], [0.15, 0.15, 0.5, 0.2])
f_OutflowFrMt = Prob(OutflowFrMt, [InsInMt, WndHodograph], [
    1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.5, 0.4,
    0.1, 0.15, 0.4, 0.45, 0.35, 0.6, 0.05, 0.8, 0.19, 0.01, 0.05, 0.45, 0.5, 0.01, 0.15, 0.84, 0.1, 0.25, 0.65, 0.6, 0.3, 0.1])
f_AreaMoDryAir = Prob(AreaMoDryAir, [AreaMeso_ALS, CombMoisture], [
    0.99, 0.01, 0.0, 0.0, 0.7, 0.29, 0.01, 0.0, 0.2, 0.55, 0.24, 0.01, 0.0, 0.25, 0.55, 0.2, 0.8, 0.2, 0.0, 0.0, 0.35, 0.55, 0.1, 0.0, 0.01, 0.39,
    0.55, 0.05, 0.0, 0.02, 0.43, 0.55, 0.7, 0.29, 0.01, 0.0, 0.2, 0.6, 0.2, 0.0, 0.01, 0.09, 0.8, 0.1, 0.0, 0.0, 0.3, 0.7, 0.2, 0.74, 0.06, 0.0, 0.05, 0.4, 0.45, 0.1, 0.0, 0.05, 0.5, 0.45, 0.0, 0.0, 0.01, 0.99])
f_MorningCIN = Prob(MorningCIN, [], [0.15, 0.57, 0.2, 0.08])
f_TempDis = Prob(TempDis, [Scenario], [
    0.13, 0.15, 0.1, 0.62, 0.15, 0.15, 0.25, 0.45, 0.12, 0.1, 0.35, 0.43, 0.1, 0.15, 0.4, 0.35, 0.04, 0.04,
    0.82, 0.1, 0.05, 0.12, 0.75, 0.08, 0.03, 0.03, 0.84, 0.1, 0.05, 0.4, 0.5, 0.05, 0.8, 0.19, 0.0, 0.01, 0.1, 0.05, 0.4, 0.45, 0.2, 0.3, 0.3, 0.2])
f_InsSclInScen = Prob(InsSclInScen, [InsChange, AMInsWliScen], [
    1.0, 0.0, 0.0, 0.6, 0.4, 0.0, 0.25, 0.35,
    0.4, 0.9, 0.1, 0.0, 0.15, 0.7, 0.15, 0.0, 0.1, 0.9, 0.4, 0.35, 0.25, 0.0, 0.4, 0.6, 0.0, 0.0, 1.0])
f_WindFieldPln = Prob(WindFieldPln, [Scenario], [
    0.05, 0.6, 0.02, 0.1, 0.23, 0.0, 0.08, 0.6, 0.02, 0.1, 0.2, 0.0, 0.1, 0.0, 0.75, 0.0, 0.0, 0.15, 0.1, 0.15, 0.2, 0.05, 0.3, 0.2, 0.43, 0.1, 0.15, 0.06, 0.06,
    0.2, 0.6, 0.07, 0.01, 0.12, 0.2, 0.0, 0.25, 0.01, 0.3, 0.01, 0.03, 0.4, 0.04, 0.02, 0.04, 0.8, 0.1, 0.0, 0.2, 0.3, 0.05, 0.37, 0.07, 0.01, 0.6, 0.08, 0.07, 0.03, 0.2, 0.02, 0.1, 0.05, 0.1, 0.05, 0.2, 0.5])
f_CurPropConv = Prob(CurPropConv, [LatestCIN, LLIW], [
    0.7, 0.28, 0.02, 0.0, 0.1, 0.5, 0.3, 0.1, 0.01, 0.14, 0.35, 0.5, 0.0, 0.02, 0.18, 0.8, 0.9, 0.09, 0.01, 0.0, 0.65, 0.25, 0.09, 0.01, 0.25, 0.35, 0.3,
    0.1, 0.01, 0.15, 0.33, 0.51, 0.95, 0.05, 0.0, 0.0, 0.75, 0.23, 0.02, 0.0, 0.4, 0.4, 0.18, 0.02, 0.2, 0.3, 0.35, 0.15, 1.0, 0.0, 0.0, 0.0, 0.95, 0.05, 0.0, 0.0, 0.75, 0.2, 0.05, 0.0, 0.5, 0.35, 0.1, 0.05])
# The full weather network: every variable above paired with its factor.
bn_hailfinder = Belief_network(
    vars=[ScnRelPlFcst, AMInsWliScen, SfcWndShfDis, InsInMt, AMCINInScen, PlainsFcst, CombClouds, LIfr12ZDENSd, CombMoisture, AMInstabMt, CldShadeOth, N0_7muVerMo, LLIW, ScenRelAMIns, VISCloudCov, ScenRelAMCIN, MidLLapse, MvmtFeatures, CapInScen, CldShadeConv, IRCloudCover, CapChange, QGVertMotion, LoLevMoistAd, Scenario, MountainFcst, CombVerMo,
          Boundaries, MeanRH, Dewpoints, InsChange, N34StarFcst, SynForcng, CompPlFcst, Date, WndHodograph, ScenRel3_4, R5Fcst, LatestCIN, WindFieldMt, RaoContMoist, AreaMeso_ALS, SatContMoist, MorningBound, RHRatio, WindAloft, LowLLapse, AMDewptCalPl, SubjVertMo, OutflowFrMt, AreaMoDryAir, MorningCIN, TempDis, InsSclInScen, WindFieldPln, CurPropConv],
    factors=[f_ScnRelPlFcst, f_AMInsWliScen, f_SfcWndShfDis, f_InsInMt, f_AMCINInScen, f_PlainsFcst, f_CombClouds, f_LIfr12ZDENSd, f_CombMoisture, f_AMInstabMt, f_CldShadeOth, f_N0_7muVerMo, f_LLIW, f_ScenRelAMIns, f_VISCloudCov, f_ScenRelAMCIN, f_MidLLapse, f_MvmtFeatures, f_CapInScen, f_CldShadeConv, f_IRCloudCover, f_CapChange, f_QGVertMotion, f_LoLevMoistAd, f_Scenario, f_MountainFcst, f_CombVerMo,
             f_Boundaries, f_MeanRH, f_Dewpoints, f_InsChange, f_N34StarFcst, f_SynForcng, f_CompPlFcst, f_Date, f_WndHodograph, f_ScenRel3_4, f_R5Fcst, f_LatestCIN, f_WindFieldMt, f_RaoContMoist, f_AreaMeso_ALS, f_SatContMoist, f_MorningBound, f_RHRatio, f_WindAloft, f_LowLLapse, f_AMDewptCalPl, f_SubjVertMo, f_OutflowFrMt, f_AreaMoDryAir, f_MorningCIN, f_TempDis, f_InsSclInScen, f_WindFieldPln, f_CurPropConv],
    positions=[])
|
danieljaouen/ansible | refs/heads/devel | lib/ansible/modules/cloud/amazon/ec2_snapshot.py | 71 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Module release metadata (Ansible metadata_version 1.1 schema); presumably
# consumed by Ansible's doc/build tooling rather than at module runtime.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'core'}
# Module documentation (YAML rendered by ansible-doc).  Fix: the `state`
# option previously read "whether to add or create a snapshot", which is
# nonsensical -- its choices are present (create) / absent (delete).
DOCUMENTATION = '''
---
module: ec2_snapshot
short_description: creates a snapshot from an existing volume
description:
    - creates an EC2 snapshot from an existing EBS volume
version_added: "1.5"
options:
  volume_id:
    description:
      - volume from which to take the snapshot
    required: false
  description:
    description:
      - description to be applied to the snapshot
    required: false
  instance_id:
    description:
    - instance that has the required volume to snapshot mounted
    required: false
  device_name:
    description:
      - device name of a mounted volume to be snapshotted
    required: false
  snapshot_tags:
    description:
      - a hash/dictionary of tags to add to the snapshot
    required: false
    version_added: "1.6"
  wait:
    description:
      - wait for the snapshot to be ready
    type: bool
    required: false
    default: yes
    version_added: "1.5.1"
  wait_timeout:
    description:
      - how long before wait gives up, in seconds
      - specify 0 to wait forever
    required: false
    default: 0
    version_added: "1.5.1"
  state:
    description:
      - whether to create or delete a snapshot
    required: false
    default: present
    choices: ['absent', 'present']
    version_added: "1.9"
  snapshot_id:
    description:
      - snapshot id to remove
    required: false
    version_added: "1.9"
  last_snapshot_min_age:
    description:
      - If the volume's most recent snapshot has started less than `last_snapshot_min_age' minutes ago, a new snapshot will not be created.
    required: false
    default: 0
    version_added: "2.0"
author: "Will Thames (@willthames)"
extends_documentation_fragment:
    - aws
    - ec2
'''
EXAMPLES = '''
# Simple snapshot of volume using volume_id
- ec2_snapshot:
volume_id: vol-abcdef12
description: snapshot of /data from DB123 taken 2013/11/28 12:18:32
# Snapshot of volume mounted on device_name attached to instance_id
- ec2_snapshot:
instance_id: i-12345678
device_name: /dev/sdb1
description: snapshot of /data from DB123 taken 2013/11/28 12:18:32
# Snapshot of volume with tagging
- ec2_snapshot:
instance_id: i-12345678
device_name: /dev/sdb1
snapshot_tags:
frequency: hourly
source: /data
# Remove a snapshot
- local_action:
module: ec2_snapshot
snapshot_id: snap-abcd1234
state: absent
# Create a snapshot only if the most recent one is older than 1 hour
- local_action:
module: ec2_snapshot
volume_id: vol-abcdef12
last_snapshot_min_age: 60
'''
import time
import datetime
try:
import boto.exception
except ImportError:
pass # Taken care of by ec2.HAS_BOTO
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import HAS_BOTO, ec2_argument_spec, ec2_connect
# Find the most recent snapshot
def _get_snapshot_starttime(snap):
return datetime.datetime.strptime(snap.start_time, '%Y-%m-%dT%H:%M:%S.000Z')
def _get_most_recent_snapshot(snapshots, max_snapshot_age_secs=None, now=None):
"""
Gets the most recently created snapshot and optionally filters the result
if the snapshot is too old
:param snapshots: list of snapshots to search
:param max_snapshot_age_secs: filter the result if its older than this
:param now: simulate time -- used for unit testing
:return:
"""
if len(snapshots) == 0:
return None
if not now:
now = datetime.datetime.utcnow()
youngest_snapshot = max(snapshots, key=_get_snapshot_starttime)
# See if the snapshot is younger that the given max age
snapshot_start = datetime.datetime.strptime(youngest_snapshot.start_time, '%Y-%m-%dT%H:%M:%S.000Z')
snapshot_age = now - snapshot_start
if max_snapshot_age_secs is not None:
if snapshot_age.total_seconds() > max_snapshot_age_secs:
return None
return youngest_snapshot
def _create_with_wait(snapshot, wait_timeout_secs, sleep_func=time.sleep):
"""
Wait for the snapshot to be created
:param snapshot:
:param wait_timeout_secs: fail this step after this many seconds
:param sleep_func:
:return:
"""
time_waited = 0
snapshot.update()
while snapshot.status != 'completed':
sleep_func(3)
snapshot.update()
time_waited += 3
if wait_timeout_secs and time_waited > wait_timeout_secs:
return False
return True
def create_snapshot(module, ec2, state=None, description=None, wait=None,
                    wait_timeout=None, volume_id=None, instance_id=None,
                    snapshot_id=None, device_name=None, snapshot_tags=None,
                    last_snapshot_min_age=None):
    """Create or delete an EBS snapshot, then exit the Ansible module.

    Exactly one of volume_id / instance_id / snapshot_id must be given.
    Never returns normally: every path ends in module.exit_json() or
    module.fail_json().

    :param module: AnsibleModule instance (used for exit/fail reporting)
    :param ec2: boto EC2 connection
    :param state: 'present' to create, 'absent' to delete (needs snapshot_id)
    :param wait: if truthy, block until the snapshot completes
    :param wait_timeout: seconds before giving up the wait (0 = forever)
    :param last_snapshot_min_age: in minutes; if the newest existing snapshot
        is younger than this, reuse it instead of creating a new one
    """
    snapshot = None
    changed = False

    # Exactly one identifying parameter may be non-None.
    required = [volume_id, snapshot_id, instance_id]
    if required.count(None) != len(required) - 1:  # only 1 must be set
        module.fail_json(msg='One and only one of volume_id or instance_id or snapshot_id must be specified')
    if instance_id and not device_name or device_name and not instance_id:
        module.fail_json(msg='Instance ID and device name must both be specified')

    # Resolve instance_id + device_name into the attached volume's id.
    if instance_id:
        try:
            volumes = ec2.get_all_volumes(filters={'attachment.instance-id': instance_id, 'attachment.device': device_name})
        except boto.exception.BotoServerError as e:
            module.fail_json(msg="%s: %s" % (e.error_code, e.error_message))
        if not volumes:
            module.fail_json(msg="Could not find volume with name %s attached to instance %s" % (device_name, instance_id))
        volume_id = volumes[0].id

    if state == 'absent':
        if not snapshot_id:
            module.fail_json(msg='snapshot_id must be set when state is absent')
        try:
            ec2.delete_snapshot(snapshot_id)
        except boto.exception.BotoServerError as e:
            # exception is raised if snapshot does not exist
            if e.error_code == 'InvalidSnapshot.NotFound':
                # Already gone: deletion is idempotent, report no change.
                module.exit_json(changed=False)
            else:
                module.fail_json(msg="%s: %s" % (e.error_code, e.error_message))
        # successful delete
        module.exit_json(changed=True)

    # Optionally reuse a sufficiently-recent existing snapshot.
    if last_snapshot_min_age > 0:
        try:
            current_snapshots = ec2.get_all_snapshots(filters={'volume_id': volume_id})
        except boto.exception.BotoServerError as e:
            module.fail_json(msg="%s: %s" % (e.error_code, e.error_message))
        last_snapshot_min_age = last_snapshot_min_age * 60  # Convert to seconds
        snapshot = _get_most_recent_snapshot(current_snapshots,
                                             max_snapshot_age_secs=last_snapshot_min_age)
    try:
        # Create a new snapshot if we didn't find an existing one to use
        if snapshot is None:
            snapshot = ec2.create_snapshot(volume_id, description=description)
            changed = True
        if wait:
            if not _create_with_wait(snapshot, wait_timeout):
                module.fail_json(msg='Timed out while creating snapshot.')
        # Tags are (re)applied even when an existing snapshot is reused.
        if snapshot_tags:
            for k, v in snapshot_tags.items():
                snapshot.add_tag(k, v)
    except boto.exception.BotoServerError as e:
        module.fail_json(msg="%s: %s" % (e.error_code, e.error_message))

    module.exit_json(changed=changed,
                     snapshot_id=snapshot.id,
                     volume_id=snapshot.volume_id,
                     volume_size=snapshot.volume_size,
                     tags=snapshot.tags.copy())
def create_snapshot_ansible_module():
    """Build and return the AnsibleModule with this module's argument spec.

    Starts from the shared EC2 argument spec (region/credentials etc.) and
    adds the snapshot-specific options.  Cross-option validation (e.g. the
    one-of volume_id/instance_id/snapshot_id rule) is done later in
    create_snapshot(), not declaratively here.
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            volume_id=dict(),
            description=dict(),
            instance_id=dict(),
            snapshot_id=dict(),
            device_name=dict(),
            wait=dict(type='bool', default=True),
            wait_timeout=dict(type='int', default=0),
            last_snapshot_min_age=dict(type='int', default=0),
            snapshot_tags=dict(type='dict', default=dict()),
            state=dict(choices=['absent', 'present'], default='present'),
        )
    )
    module = AnsibleModule(argument_spec=argument_spec)
    return module
def main():
    """Module entry point: parse parameters, connect to EC2 and dispatch.

    create_snapshot() terminates the process via exit_json/fail_json, so
    nothing runs after the call below.
    """
    module = create_snapshot_ansible_module()

    # boto is imported at the top inside try/except; fail cleanly if absent.
    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    volume_id = module.params.get('volume_id')
    snapshot_id = module.params.get('snapshot_id')
    description = module.params.get('description')
    instance_id = module.params.get('instance_id')
    device_name = module.params.get('device_name')
    wait = module.params.get('wait')
    wait_timeout = module.params.get('wait_timeout')
    last_snapshot_min_age = module.params.get('last_snapshot_min_age')
    snapshot_tags = module.params.get('snapshot_tags')
    state = module.params.get('state')

    ec2 = ec2_connect(module)

    create_snapshot(
        module=module,
        state=state,
        description=description,
        wait=wait,
        wait_timeout=wait_timeout,
        ec2=ec2,
        volume_id=volume_id,
        instance_id=instance_id,
        snapshot_id=snapshot_id,
        device_name=device_name,
        snapshot_tags=snapshot_tags,
        last_snapshot_min_age=last_snapshot_min_age
    )


if __name__ == '__main__':
    main()
|
ashutrix03/inteygrate_flaskapp-master | refs/heads/master | build/lib/google/protobuf/internal/_parameterized.py | 87 | #! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Adds support for parameterized tests to Python's unittest TestCase class.
A parameterized test is a method in a test case that is invoked with different
argument tuples.
A simple example:
class AdditionExample(parameterized.ParameterizedTestCase):
@parameterized.Parameters(
(1, 2, 3),
(4, 5, 9),
(1, 1, 3))
def testAddition(self, op1, op2, result):
self.assertEqual(result, op1 + op2)
Each invocation is a separate test case and properly isolated just
like a normal test method, with its own setUp/tearDown cycle. In the
example above, there are three separate testcases, one of which will
fail due to an assertion error (1 + 1 != 3).
Parameters for individual test cases can be tuples (with positional parameters)
or dictionaries (with named parameters):
class AdditionExample(parameterized.ParameterizedTestCase):
@parameterized.Parameters(
{'op1': 1, 'op2': 2, 'result': 3},
{'op1': 4, 'op2': 5, 'result': 9},
)
def testAddition(self, op1, op2, result):
self.assertEqual(result, op1 + op2)
If a parameterized test fails, the error message will show the
original test name (which is modified internally) and the arguments
for the specific invocation, which are part of the string returned by
the shortDescription() method on test cases.
The id method of the test, used internally by the unittest framework,
is also modified to show the arguments. To make sure that test names
stay the same across several invocations, object representations like
>>> class Foo(object):
... pass
>>> repr(Foo())
'<__main__.Foo object at 0x23d8610>'
are turned into '<__main__.Foo>'. For even more descriptive names,
especially in test logs, you can use the NamedParameters decorator. In
this case, only tuples are supported, and the first parameters has to
be a string (or an object that returns an apt name when converted via
str()):
class NamedExample(parameterized.ParameterizedTestCase):
@parameterized.NamedParameters(
('Normal', 'aa', 'aaa', True),
('EmptyPrefix', '', 'abc', True),
('BothEmpty', '', '', True))
def testStartsWith(self, prefix, string, result):
self.assertEqual(result, string.startswith(prefix))
Named tests also have the benefit that they can be run individually
from the command line:
$ testmodule.py NamedExample.testStartsWithNormal
.
--------------------------------------------------------------------
Ran 1 test in 0.000s
OK
Parameterized Classes
=====================
If invocation arguments are shared across test methods in a single
ParameterizedTestCase class, instead of decorating all test methods
individually, the class itself can be decorated:
@parameterized.Parameters(
(1, 2, 3),
(4, 5, 9))
class ArithmeticTest(parameterized.ParameterizedTestCase):
def testAdd(self, arg1, arg2, result):
self.assertEqual(arg1 + arg2, result)
def testSubtract(self, arg1, arg2, result):
self.assertEqual(result - arg1, arg2)
Inputs from Iterables
=====================
If parameters should be shared across several test cases, or are dynamically
created from other sources, a single non-tuple iterable can be passed into
the decorator. This iterable will be used to obtain the test cases:
class AdditionExample(parameterized.ParameterizedTestCase):
@parameterized.Parameters(
(c.op1, c.op2, c.result) for c in testcases
)
def testAddition(self, op1, op2, result):
self.assertEqual(result, op1 + op2)
Single-Argument Test Methods
============================
If a test method takes only one argument, the single argument does not need to
be wrapped into a tuple:
class NegativeNumberExample(parameterized.ParameterizedTestCase):
@parameterized.Parameters(
-1, -3, -4, -5
)
def testIsNegative(self, arg):
self.assertTrue(IsNegative(arg))
"""
__author__ = 'tmarek@google.com (Torsten Marek)'
import collections
import functools
import re
import types
try:
import unittest2 as unittest
except ImportError:
import unittest
import uuid
import six
# Matches default object reprs like '<pkg.Foo object at 0x23d8610>' so the
# volatile memory address can be stripped from generated test names.
ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
# Unique separator placed between the original test name and the parameter
# index in generated method names; unique per process via uuid1.
_SEPARATOR = uuid.uuid1().hex
# Sentinel naming types: name tests after their first argument (_FIRST_ARG)
# or after the repr of their argument list (_ARGUMENT_REPR).
_FIRST_ARG = object()
_ARGUMENT_REPR = object()
def _CleanRepr(obj):
    """Return repr(obj) with the volatile '0x...' address portion removed."""
    raw_repr = repr(obj)
    return ADDR_RE.sub(r'<\1>', raw_repr)
# Helper function formerly from the unittest module, removed from it in
# Python 2.7.
def _StrClass(cls):
return '%s.%s' % (cls.__module__, cls.__name__)
def _NonStringIterable(obj):
    """True if obj is iterable but is not a string type."""
    # collections.Iterable lives in collections.abc since Python 3.3 and the
    # top-level alias was removed in Python 3.10; getattr keeps Python 2
    # (which has no collections.abc) working.
    iterable_type = getattr(collections, 'abc', collections).Iterable
    return (isinstance(obj, iterable_type) and not
            isinstance(obj, six.string_types))
def _FormatParameterList(testcase_params):
    """Render testcase parameters as 'k=v, ...' (dict) or 'a, b, ...' (tuple).

    A single non-iterable parameter is wrapped into a 1-tuple and recursed.
    """
    # collections.Mapping moved to collections.abc (alias removed in Python
    # 3.10); getattr keeps Python 2 working.
    mapping_type = getattr(collections, 'abc', collections).Mapping
    if isinstance(testcase_params, mapping_type):
        return ', '.join('%s=%s' % (argname, _CleanRepr(value))
                         for argname, value in testcase_params.items())
    elif _NonStringIterable(testcase_params):
        return ', '.join(map(_CleanRepr, testcase_params))
    else:
        return _FormatParameterList((testcase_params,))
class _ParameterizedTestIter(object):
  """Callable and iterable class for producing new test cases."""

  def __init__(self, test_method, testcases, naming_type):
    """Returns concrete test functions for a test and a list of parameters.

    The naming_type is used to determine the name of the concrete
    functions as reported by the unittest framework. If naming_type is
    _FIRST_ARG, the testcases must be tuples, and the first element must
    have a string representation that is a valid Python identifier.

    Args:
      test_method: The decorated test method.
      testcases: (list of tuple/dict) A list of parameter
                 tuples/dicts for individual test invocations.
      naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR.
    """
    self._test_method = test_method
    self.testcases = testcases
    self._naming_type = naming_type

  def __call__(self, *args, **kwargs):
    # The iterable itself must never be invoked as a test; that only happens
    # when the metaclass did not get a chance to expand it.
    raise RuntimeError('You appear to be running a parameterized test case '
                       'without having inherited from parameterized.'
                       'ParameterizedTestCase. This is bad because none of '
                       'your test cases are actually being run.')

  def __iter__(self):
    test_method = self._test_method
    naming_type = self._naming_type

    def MakeBoundParamTest(testcase_params):
      @functools.wraps(test_method)
      def BoundParamTest(self):
        # collections.Mapping moved to collections.abc (alias removed in
        # Python 3.10); getattr keeps Python 2 working.
        mapping_type = getattr(collections, 'abc', collections).Mapping
        if isinstance(testcase_params, mapping_type):
          test_method(self, **testcase_params)
        elif _NonStringIterable(testcase_params):
          test_method(self, *testcase_params)
        else:
          test_method(self, testcase_params)

      if naming_type is _FIRST_ARG:
        # Signal the metaclass that the name of the test function is unique
        # and descriptive.
        BoundParamTest.__x_use_name__ = True
        BoundParamTest.__name__ += str(testcase_params[0])
        testcase_params = testcase_params[1:]
      elif naming_type is _ARGUMENT_REPR:
        # __x_extra_id__ is used to pass naming information to the __new__
        # method of TestGeneratorMetaclass.
        # The metaclass will make sure to create a unique, but nondescriptive
        # name for this test.
        BoundParamTest.__x_extra_id__ = '(%s)' % (
            _FormatParameterList(testcase_params),)
      else:
        raise RuntimeError('%s is not a valid naming type.' % (naming_type,))

      BoundParamTest.__doc__ = '%s(%s)' % (
          BoundParamTest.__name__, _FormatParameterList(testcase_params))
      if test_method.__doc__:
        BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,)
      return BoundParamTest
    return (MakeBoundParamTest(c) for c in self.testcases)
def _IsSingletonList(testcases):
"""True iff testcases contains only a single non-tuple element."""
return len(testcases) == 1 and not isinstance(testcases[0], tuple)
def _ModifyClass(class_object, testcases, naming_type):
  """Replace each test method of class_object with its parameterized variants.

  Every attribute whose name starts with the unittest test prefix is removed
  from the class and re-added as one generated method per testcase, via
  _UpdateClassDictForParamTestCase.  Refuses to run twice on the same class.
  """
  assert not getattr(class_object, '_id_suffix', None), (
      'Cannot add parameters to %s,'
      ' which already has parameterized methods.' % (class_object,))
  class_object._id_suffix = id_suffix = {}
  # We change the size of __dict__ while we iterate over it,
  # which Python 3.x will complain about, so use copy().
  for name, obj in class_object.__dict__.copy().items():
    if (name.startswith(unittest.TestLoader.testMethodPrefix)
        and isinstance(obj, types.FunctionType)):
      delattr(class_object, name)
      methods = {}
      _UpdateClassDictForParamTestCase(
          methods, id_suffix, name,
          _ParameterizedTestIter(obj, testcases, naming_type))
      for name, meth in methods.items():
        setattr(class_object, name, meth)
def _ParameterDecorator(naming_type, testcases):
  """Implementation of the parameterization decorators.

  Args:
    naming_type: The naming type.
    testcases: Testcase parameters.

  Returns:
    A function for modifying the decorated object.
  """
  def _Apply(obj):
    if isinstance(obj, type):
      # collections.Sequence moved to collections.abc (alias removed in
      # Python 3.10); getattr keeps Python 2 working.
      sequence_type = getattr(collections, 'abc', collections).Sequence
      _ModifyClass(
          obj,
          list(testcases) if not isinstance(testcases, sequence_type)
          else testcases,
          naming_type)
      return obj
    else:
      return _ParameterizedTestIter(obj, testcases, naming_type)

  if _IsSingletonList(testcases):
    assert _NonStringIterable(testcases[0]), (
        'Single parameter argument must be a non-string iterable')
    testcases = testcases[0]

  return _Apply
def Parameters(*testcases):
  """A decorator for creating parameterized tests.

  See the module docstring for a usage example.

  Args:
    *testcases: Parameters for the decorated method, either a single
                iterable, or a list of tuples/dicts/objects (for tests
                with only one argument).

  Returns:
    A test generator to be handled by TestGeneratorMetaclass.
  """
  # Generated tests are named with a repr of their arguments as a suffix.
  return _ParameterDecorator(_ARGUMENT_REPR, testcases)
def NamedParameters(*testcases):
  """A decorator for creating parameterized tests.

  See the module docstring for a usage example. The first element of
  each parameter tuple should be a string and will be appended to the
  name of the test method.

  Args:
    *testcases: Parameters for the decorated method, either a single
                iterable, or a list of tuples.

  Returns:
    A test generator to be handled by TestGeneratorMetaclass.
  """
  # Generated tests take their name from each tuple's first element.
  return _ParameterDecorator(_FIRST_ARG, testcases)
class TestGeneratorMetaclass(type):
  """Metaclass for test cases with test generators.

  A test generator is an iterable in a testcase that produces callables. These
  callables must be single-argument methods. These methods are injected into
  the class namespace and the original iterable is removed. If the name of the
  iterable conforms to the test pattern, the injected methods will be picked
  up as tests by the unittest framework.

  In general, it is supposed to be used in conjunction with the
  Parameters decorator.
  """

  def __new__(mcs, class_name, bases, dct):
    dct['_id_suffix'] = id_suffix = {}
    # Iterate over a snapshot: the loop pops the generator attribute and
    # _UpdateClassDictForParamTestCase inserts new entries into dct, and
    # mutating a dict during items() iteration raises RuntimeError on
    # Python 3 (same workaround as _ModifyClass above).
    for name, obj in dct.copy().items():
      if (name.startswith(unittest.TestLoader.testMethodPrefix) and
          _NonStringIterable(obj)):
        iterator = iter(obj)
        dct.pop(name)
        _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator)

    return type.__new__(mcs, class_name, bases, dct)
def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator):
  """Adds individual test cases to a dictionary.

  Args:
    dct: The target dictionary.
    id_suffix: The dictionary for mapping names to test IDs.
    name: The original name of the test case.
    iterator: The iterator generating the individual test cases.
  """
  for idx, func in enumerate(iterator):
    assert callable(func), 'Test generators must yield callables, got %r' % (
        func,)
    if getattr(func, '__x_use_name__', False):
      # _FIRST_ARG naming: the generated function already carries a
      # descriptive, unique name.
      new_name = func.__name__
    else:
      # _ARGUMENT_REPR naming: synthesize a unique name from the original
      # name, the process-unique separator and the testcase index.
      new_name = '%s%s%d' % (name, _SEPARATOR, idx)
    assert new_name not in dct, (
        'Name of parameterized test case "%s" not unique' % (new_name,))
    dct[new_name] = func
    id_suffix[new_name] = getattr(func, '__x_extra_id__', '')
class ParameterizedTestCase(unittest.TestCase):
  """Base class for test cases using the Parameters decorator."""
  # NOTE: __metaclass__ only takes effect on Python 2; on Python 3 a
  # metaclass must be given in the class header -- confirm intended targets.
  __metaclass__ = TestGeneratorMetaclass

  def _OriginalName(self):
    # Strip the generated '<separator><index>' suffix added by
    # _UpdateClassDictForParamTestCase.
    return self._testMethodName.split(_SEPARATOR)[0]

  def __str__(self):
    return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__))

  def id(self):  # pylint: disable=invalid-name
    """Returns the descriptive ID of the test.

    This is used internally by the unittesting framework to get a name
    for the test to be used in reports.

    Returns:
      The test id.
    """
    return '%s.%s%s' % (_StrClass(self.__class__),
                        self._OriginalName(),
                        self._id_suffix.get(self._testMethodName, ''))
def CoopParameterizedTestCase(other_base_class):
  """Returns a new base class with a cooperative metaclass base.

  This enables the ParameterizedTestCase to be used in combination
  with other base classes that have custom metaclasses, such as
  mox.MoxTestBase.

  Only works with metaclasses that do not override type.__new__.

  Example:

    import google3
    import mox

    from google3.testing.pybase import parameterized

    class ExampleTest(parameterized.CoopParameterizedTestCase(mox.MoxTestBase)):
      ...

  Args:
    other_base_class: (class) A test case base class.

  Returns:
    A new class object.
  """
  # Combine both metaclasses so either side's __new__ hooks still run.
  metaclass = type(
      'CoopMetaclass',
      (other_base_class.__metaclass__,
       TestGeneratorMetaclass), {})
  return metaclass(
      'CoopParameterizedTestCase',
      (other_base_class, ParameterizedTestCase), {})
|
sarantapichos/faircoop-market | refs/heads/master | addons/crm/wizard/crm_phonecall_to_meeting.py | 381 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class crm_phonecall2meeting(osv.osv_memory):
    """ Phonecall to Meeting """
    _name = 'crm.phonecall2meeting'
    _description = 'Phonecall To Meeting'

    def action_cancel(self, cr, uid, ids, context=None):
        """
        Closes Phonecall to Meeting form
        @param self: The object pointer
        @param cr: the current row, from the database cursor,
        @param uid: the current user’s ID for security checks,
        @param ids: List of Phonecall to Meeting IDs
        @param context: A standard dictionary for contextual values
        """
        return {'type': 'ir.actions.act_window_close'}

    def action_make_meeting(self, cr, uid, ids, context=None):
        """ This opens Meeting's calendar view to schedule meeting on current Phonecall
            @return : Dictionary value for created Meeting view
        """
        res = {}
        # The wizard is launched on a phonecall: its id arrives via the context.
        phonecall_id = context and context.get('active_id', False) or False
        if phonecall_id:
            phonecall = self.pool.get('crm.phonecall').browse(cr, uid, phonecall_id, context)
            # Reuse the stock calendar action and pre-fill it from the call.
            res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'calendar', 'action_calendar_event', context)
            res['context'] = {
                'default_phonecall_id': phonecall.id,
                'default_partner_id': phonecall.partner_id and phonecall.partner_id.id or False,
                'default_user_id': uid,
                'default_email_from': phonecall.email_from,
                'default_state': 'open',
                'default_name': phonecall.name,
            }
        return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
frappe/frappe | refs/heads/develop | frappe/patches/v11_0/create_contact_for_user.py | 1 |
import frappe
from frappe.core.doctype.user.user import create_contact
import re
def execute():
	""" Create Contact for each User if not present """
	# Reload doctypes this patch touches so the schema is current.
	frappe.reload_doc('integrations', 'doctype', 'google_contacts')
	frappe.reload_doc('contacts', 'doctype', 'contact')
	frappe.reload_doc('core', 'doctype', 'dynamic_link')

	# Child tables exist only on newer Contact schemas; reload them if so.
	contact_meta = frappe.get_meta("Contact")
	if contact_meta.has_field("phone_nos") and contact_meta.has_field("email_ids"):
		frappe.reload_doc('contacts', 'doctype', 'contact_phone')
		frappe.reload_doc('contacts', 'doctype', 'contact_email')

	users = frappe.get_all('User', filters={"name": ('not in', 'Administrator, Guest')}, fields=["*"])
	for user in users:
		# Skip users that already have a contact (either schema generation).
		if frappe.db.exists("Contact", {"email_id": user.email}) or frappe.db.exists("Contact Email", {"email_id": user.email}):
			continue
		# Strip angle brackets, which are invalid in contact names.
		if user.first_name:
			user.first_name = re.sub("[<>]+", '', frappe.safe_decode(user.first_name))
		if user.last_name:
			user.last_name = re.sub("[<>]+", '', frappe.safe_decode(user.last_name))
		create_contact(user, ignore_links=True, ignore_mandatory=True)
|
naro/django-guardian | refs/heads/master | guardian/migrations/0005_auto__chg_field_groupobjectpermission_object_pk__chg_field_userobjectp.py | 32 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    # No-op schema migration: forwards/backwards change nothing; only the
    # frozen ORM snapshot below is recorded for South's history.

    def forwards(self, orm):
        pass

    def backwards(self, orm):
        pass

    # South's frozen model definitions at the time of this migration.
    # Auto-generated; do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'guardian.groupobjectpermission': {
            'Meta': {'unique_together': "(['group', 'permission', 'content_type', 'object_pk'],)", 'object_name': 'GroupObjectPermission'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_pk': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Permission']"})
        },
        'guardian.userobjectpermission': {
            'Meta': {'unique_together': "(['user', 'permission', 'content_type', 'object_pk'],)", 'object_name': 'UserObjectPermission'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_pk': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Permission']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        }
    }

    complete_apps = ['guardian']
|
TangHao1987/intellij-community | refs/heads/master | plugins/hg4idea/testData/bin/hgext/largefiles/lfutil.py | 93 | # Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
'''largefiles utility code: must not import other modules in this package.'''
import os
import platform
import shutil
import stat
from mercurial import dirstate, httpconnection, match as match_, util, scmutil
from mercurial.i18n import _
shortname = '.hglf'
shortnameslash = shortname + '/'
longname = 'largefiles'
# -- Private worker functions ------------------------------------------
def getminsize(ui, assumelfiles, opt, default=10):
    """Resolve the minimum size above which files are treated as largefiles.

    Precedence: explicit *opt*, then the 'largefiles.minsize' config value
    (only consulted when *assumelfiles* is set), else util.Abort.
    Units are presumably megabytes (default=10) -- TODO confirm against the
    extension docs.
    """
    lfsize = opt
    if not lfsize and assumelfiles:
        lfsize = ui.config(longname, 'minsize', default=default)
    if lfsize:
        try:
            lfsize = float(lfsize)
        except ValueError:
            raise util.Abort(_('largefiles: size must be number (not %s)\n')
                             % lfsize)
    if lfsize is None:
        raise util.Abort(_('minimum size for largefiles must be specified'))
    return lfsize
def link(src, dest):
    """Hardlink src to dest, falling back to an atomic copy when hardlinks
    fail (e.g. across filesystems).  The copy path preserves src's mode.

    Fix: the fallback previously opened src without ever closing it; the
    handle is now released deterministically via try/finally instead of
    relying on garbage collection (which keeps files open longer on
    non-refcounting interpreters and can hold locks on Windows).
    """
    util.makedirs(os.path.dirname(dest))
    try:
        util.oslink(src, dest)
    except OSError:
        # if hardlinks fail, fallback on atomic copy
        srcf = open(src, 'rb')
        try:
            dst = util.atomictempfile(dest)
            for chunk in util.filechunkiter(srcf):
                dst.write(chunk)
            dst.close()
        finally:
            srcf.close()
        os.chmod(dest, os.stat(src).st_mode)
def usercachepath(ui, hash):
    """Return the per-user cache path for the largefile with this hash,
    or None when no suitable cache directory can be determined.

    An explicit 'largefiles.usercache' config wins; otherwise a per-OS
    default is used (LOCALAPPDATA/APPDATA on Windows, ~/Library/Caches on
    macOS, XDG_CACHE_HOME or ~/.cache on other POSIX systems).
    """
    path = ui.configpath(longname, 'usercache', None)
    if path:
        path = os.path.join(path, hash)
    else:
        if os.name == 'nt':
            appdata = os.getenv('LOCALAPPDATA', os.getenv('APPDATA'))
            if appdata:
                path = os.path.join(appdata, longname, hash)
        elif platform.system() == 'Darwin':
            home = os.getenv('HOME')
            if home:
                path = os.path.join(home, 'Library', 'Caches',
                                    longname, hash)
        elif os.name == 'posix':
            path = os.getenv('XDG_CACHE_HOME')
            if path:
                path = os.path.join(path, longname, hash)
            else:
                home = os.getenv('HOME')
                if home:
                    path = os.path.join(home, '.cache', longname, hash)
        else:
            raise util.Abort(_('unknown operating system: %s\n') % os.name)
    # NOTE: may still be None if the relevant environment variables are unset.
    return path
def inusercache(ui, hash):
    """Report whether *hash* is present in the per-user largefile cache.

    Preserves the original short-circuit: when usercachepath() yields a
    falsy value (no cache dir), that value is returned unchanged.
    """
    path = usercachepath(ui, hash)
    if not path:
        return path
    return os.path.exists(path)
def findfile(repo, hash):
    """Return the store path of the largefile with this hash, or None.

    If the file is only in the per-user cache, it is first hardlinked/copied
    into the repo store so the returned path is always store-local.
    """
    if instore(repo, hash):
        repo.ui.note(_('found %s in store\n') % hash)
        return storepath(repo, hash)
    elif inusercache(repo.ui, hash):
        repo.ui.note(_('found %s in system cache\n') % hash)
        path = storepath(repo, hash)
        # Promote the user-cache copy into the repo store.
        link(usercachepath(repo.ui, hash), path)
        return path
    return None
class largefilesdirstate(dirstate.dirstate):
    """A dirstate that tracks largefiles instead of normal files.

    Each overridden method normalizes the filename via unixpath() (defined
    elsewhere in this module -- presumably converts to '/'-separated form;
    confirm there) before delegating to the base dirstate.
    """
    def __getitem__(self, key):
        return super(largefilesdirstate, self).__getitem__(unixpath(key))
    def normal(self, f):
        return super(largefilesdirstate, self).normal(unixpath(f))
    def remove(self, f):
        return super(largefilesdirstate, self).remove(unixpath(f))
    def add(self, f):
        return super(largefilesdirstate, self).add(unixpath(f))
    def drop(self, f):
        return super(largefilesdirstate, self).drop(unixpath(f))
    def forget(self, f):
        return super(largefilesdirstate, self).forget(unixpath(f))
    def normallookup(self, f):
        return super(largefilesdirstate, self).normallookup(unixpath(f))
    def _ignore(self):
        # Largefiles are never considered ignored.
        return False
def openlfdirstate(ui, repo, create=True):
    '''
    Return a dirstate object that tracks largefiles: i.e. its root is
    the repo root, but it is saved in .hg/largefiles/dirstate.
    '''
    lfstoredir = repo.join(longname)
    opener = scmutil.opener(lfstoredir)
    lfdirstate = largefilesdirstate(opener, ui, repo.root,
                                    repo.dirstate._validate)

    # If the largefiles dirstate does not exist, populate and create
    # it. This ensures that we create it on the first meaningful
    # largefiles operation in a new clone.
    if create and not os.path.exists(os.path.join(lfstoredir, 'dirstate')):
        util.makedirs(lfstoredir)
        matcher = getstandinmatcher(repo)
        # Seed the largefiles dirstate from the standins tracked by the
        # regular dirstate.
        for standin in repo.dirstate.walk(matcher, [], False, False):
            lfile = splitstandin(standin)
            lfdirstate.normallookup(lfile)
    return lfdirstate
def lfdirstatestatus(lfdirstate, repo, rev):
    """Compute status for largefiles, resolving 'unsure' entries by hash.

    Files whose dirstate entry is ambiguous are compared against the standin
    hash at *rev*: a mismatch means modified, a match means clean (and the
    dirstate entry is refreshed).  Returns the usual 7-tuple of lists.
    """
    match = match_.always(repo.root, repo.getcwd())
    s = lfdirstate.status(match, [], False, False, False)
    unsure, modified, added, removed, missing, unknown, ignored, clean = s
    for lfile in unsure:
        try:
            fctx = repo[rev][standin(lfile)]
        except LookupError:
            fctx = None
        if not fctx or fctx.data().strip() != hashfile(repo.wjoin(lfile)):
            modified.append(lfile)
        else:
            clean.append(lfile)
            lfdirstate.normal(lfile)
    return (modified, added, removed, missing, unknown, ignored, clean)
def listlfiles(repo, rev=None, matcher=None):
    '''return a list of largefiles in the working copy or the
    specified changeset'''
    if matcher is None:
        matcher = getstandinmatcher(repo)
    lfiles = []
    for f in repo[rev].walk(matcher):
        # When walking the working directory (rev is None), skip files
        # unknown to the dirstate.
        if rev is not None or repo.dirstate[f] != '?':
            lfiles.append(splitstandin(f))
    return lfiles
def instore(repo, hash):
    """Return True when the largefile with this hash is in the repo store."""
    path = storepath(repo, hash)
    return os.path.exists(path)
def storepath(repo, hash):
    """Return the repo-store path for the largefile with this hash."""
    relative = os.path.join(longname, hash)
    return repo.join(relative)
def copyfromcache(repo, hash, filename):
    '''Copy the specified largefile from the repo or system cache to
    filename in the repository.  Return True on success, or False when
    the file was found in neither cache (which should not happen: this
    is meant to be called only after ensuring that the needed largefile
    exists in the cache).'''
    cached = findfile(repo, hash)
    if cached is None:
        return False
    target = repo.wjoin(filename)
    util.makedirs(os.path.dirname(target))
    # The write may fail before the file is fully written, but we
    # don't use atomic writes in the working copy.
    shutil.copy(cached, target)
    return True
def copytostore(repo, rev, file, uploaded=False):
    """Copy the largefile behind *file*'s standin at *rev* into the repo
    store unless it is already present.  (*uploaded* is unused; kept for
    interface compatibility.)"""
    hash = readstandin(repo, file, rev)
    if not instore(repo, hash):
        copytostoreabsolute(repo, repo.wjoin(file), hash)
def copyalltostore(repo, node):
    '''Copy all largefiles in a given revision to the store'''
    ctx = repo[node]
    manifest = ctx.manifest()
    for filename in ctx.files():
        # Only standins that still exist in the manifest are copied.
        if not isstandin(filename) or filename not in manifest:
            continue
        copytostore(repo, ctx.node(), splitstandin(filename))
def copytostoreabsolute(repo, file, hash):
    """Copy the file at absolute path *file* into the repo store under
    *hash*, then hardlink it into the user cache.

    If the user cache already holds the hash, hardlink from there
    instead of copying.  While converting (repo._isconverting) the
    store is deliberately left untouched.
    """
    if inusercache(repo.ui, hash):
        link(usercachepath(repo.ui, hash), storepath(repo, hash))
    elif not getattr(repo, "_isconverting", False):
        util.makedirs(os.path.dirname(storepath(repo, hash)))
        dst = util.atomictempfile(storepath(repo, hash),
                                  createmode=repo.store.createmode)
        # Close the source handle even if a chunk write raises; the
        # original code leaked the descriptor on that path.
        src = open(file, 'rb')
        try:
            for chunk in util.filechunkiter(src):
                dst.write(chunk)
        finally:
            src.close()
        dst.close()
        linktousercache(repo, hash)
def linktousercache(repo, hash):
    """Hardlink the stored largefile *hash* into the user cache, when a
    user cache path is configured."""
    cachepath = usercachepath(repo.ui, hash)
    if cachepath:
        link(storepath(repo, hash), cachepath)
def getstandinmatcher(repo, pats=None, opts=None):
    '''Return a match object that applies pats to the standin directory'''
    # Use None defaults instead of mutable ones ([] / {}): a caller that
    # mutated the returned defaults would have corrupted later calls.
    if opts is None:
        opts = {}
    standindir = repo.wjoin(shortname)
    if pats:
        pats = [os.path.join(standindir, pat) for pat in pats]
    else:
        # no patterns: relative to repo root
        pats = [standindir]
    # no warnings about missing files or directories
    match = scmutil.match(repo[None], pats, opts)
    match.bad = lambda f, msg: None
    return match
def composestandinmatcher(repo, rmatcher):
    '''Return a matcher that accepts standins corresponding to the
    files accepted by rmatcher. Pass the list of files in the matcher
    as the paths specified by the user.'''
    smatcher = getstandinmatcher(repo, rmatcher.files())
    # Renamed from "isstandin": the original local shadowed the
    # module-level isstandin() function, inviting confusion.
    standinmatchfn = smatcher.matchfn

    def composedmatchfn(f):
        return standinmatchfn(f) and rmatcher.matchfn(splitstandin(f))

    smatcher.matchfn = composedmatchfn
    return smatcher
def standin(filename):
    '''Return the repo-relative path to the standin for the specified big
    file.'''
    # Notes:
    # 1) Some callers want an absolute path, but for instance addlargefiles
    #    needs it repo-relative so it can be passed to repo[None].add().  So
    #    leave it up to the caller to use repo.wjoin() to get an absolute path.
    # 2) Join with '/' because that's what dirstate always uses, even on
    #    Windows.  Change existing separator to '/' first in case we are
    #    passed filenames from an external source (like the command line).
    normalized = util.pconvert(filename)
    return shortnameslash + normalized
def isstandin(filename):
    '''Return true if filename is a big file standin. filename must be
    in Mercurial's internal form (slash-separated).'''
    prefix = shortnameslash
    return filename.startswith(prefix)
def splitstandin(filename):
    """Return the largefile name for a standin path, or None when
    *filename* is not a standin."""
    # Split on / because that's what dirstate always uses, even on
    # Windows; normalize any local separators first (the name may come
    # from an external source such as the command line).
    parts = util.pconvert(filename).split('/', 1)
    if len(parts) != 2 or parts[0] != shortname:
        return None
    return parts[1]
def updatestandin(repo, standin):
    """Rewrite *standin* from the current content of the working-copy
    largefile it stands in for; no-op when that file is absent."""
    lfile = repo.wjoin(splitstandin(standin))
    if not os.path.exists(lfile):
        return
    writestandin(repo, standin, hashfile(lfile), getexecutable(lfile))
def readstandin(repo, filename, node=None):
    '''read hex hash from standin for filename at given node, or working
    directory if no node is given'''
    fctx = repo[node][standin(filename)]
    return fctx.data().strip()
def writestandin(repo, standin, hash, executable):
    '''write hash to <repo.root>/<standin>'''
    # Conditional expression instead of the fragile "cond and a or b"
    # idiom (which silently breaks whenever "a" is falsy).
    flags = 'x' if executable else ''
    repo.wwrite(standin, hash + '\n', flags)
def copyandhash(instream, outfile):
    '''Read bytes from instream (iterable) and write them to outfile,
    computing the SHA-1 hash of the data along the way. Return the hash.'''
    hasher = util.sha1('')
    for chunk in instream:
        hasher.update(chunk)
        outfile.write(chunk)
    return hasher.hexdigest()
def hashrepofile(repo, file):
    """Return the SHA-1 hex digest of a working-copy file."""
    path = repo.wjoin(file)
    return hashfile(path)
def hashfile(file):
    """Return the SHA-1 hex digest of *file*, or '' when it is missing."""
    if not os.path.exists(file):
        return ''
    hasher = util.sha1('')
    fd = open(file, 'rb')
    try:
        # Hash in 128k chunks so large files never sit fully in memory.
        for data in util.filechunkiter(fd, 128 * 1024):
            hasher.update(data)
    finally:
        # The original leaked the descriptor when filechunkiter raised.
        fd.close()
    return hasher.hexdigest()
def getexecutable(filename):
    """Return a truthy value when *filename* carries the execute bit for
    user, group, and other alike; falsy otherwise."""
    mode = os.stat(filename).st_mode
    for bit in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
        if not mode & bit:
            return False
    return True
def urljoin(first, second, *arg):
    """Join two or more URL fragments so that exactly one '/' separates
    consecutive fragments.  Only a single leading '/' on the right-hand
    fragment is collapsed, matching the original semantics."""
    def _pair(left, right):
        if not left.endswith('/'):
            left = left + '/'
        if right.startswith('/'):
            right = right[1:]
        return left + right

    result = _pair(first, second)
    for piece in arg:
        result = _pair(result, piece)
    return result
def hexsha1(data):
    """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
    object data"""
    hasher = util.sha1()
    for block in util.filechunkiter(data):
        hasher.update(block)
    return hasher.hexdigest()
def httpsendfile(ui, filename):
    # Wrap *filename* (opened in binary mode) for use as an HTTP upload
    # body; delegates entirely to httpconnection.
    return httpconnection.httpsendfile(ui, filename, 'rb')
def unixpath(path):
    '''Return a version of path normalized for use with the lfdirstate.'''
    normalized = os.path.normpath(path)
    return util.pconvert(normalized)
def islfilesrepo(repo):
    """Return True when *repo* actually contains largefiles: either
    stored largefile revisions, or entries in the largefiles dirstate."""
    if 'largefiles' in repo.requirements:
        if util.any(shortnameslash in f[0] for f in repo.store.datafiles()):
            return True
    return util.any(openlfdirstate(repo.ui, repo, False))
class storeprotonotcapable(Exception):
    """Raised when no remote store implementation supports any of the
    requested store types."""
    def __init__(self, storetypes):
        # The store type names that were requested but are unsupported.
        self.storetypes = storetypes
def getstandinsstate(repo):
    """Return a list of (lfile, hash) pairs for every standin tracked in
    the working directory; hash is None when the standin is unreadable."""
    matcher = getstandinmatcher(repo)
    state = []
    for standinfile in repo.dirstate.walk(matcher, [], False, False):
        lfile = splitstandin(standinfile)
        try:
            hash = readstandin(repo, lfile)
        except IOError:
            hash = None
        state.append((lfile, hash))
    return state
def getlfilestoupdate(oldstandins, newstandins):
changedstandins = set(oldstandins).symmetric_difference(set(newstandins))
filelist = []
for f in changedstandins:
if f[0] not in filelist:
filelist.append(f[0])
return filelist
|
uwafsl/MissionPlanner | refs/heads/master | Lib/encodings/cp500.py | 93 | """ Python Character Mapping Codec cp500 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP500.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless CP500 (EBCDIC International) codec.  This file is
    # machine-generated by gencodec.py; the charmap tables are defined
    # at module level below.
    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding is context-free, so no state is carried
        # between calls; only the encoded bytes are returned.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Charmap decoding is context-free, so no state is carried
        # between calls; only the decoded text is returned.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Inherits encode() from Codec; nothing to override.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Inherits decode() from Codec; nothing to override.
    pass
### encodings module API
def getregentry():
    # Entry point used by the encodings package to register this codec
    # under the name 'cp500'.
    return codecs.CodecInfo(
        name='cp500',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x9c' # 0x04 -> CONTROL
u'\t' # 0x05 -> HORIZONTAL TABULATION
u'\x86' # 0x06 -> CONTROL
u'\x7f' # 0x07 -> DELETE
u'\x97' # 0x08 -> CONTROL
u'\x8d' # 0x09 -> CONTROL
u'\x8e' # 0x0A -> CONTROL
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x9d' # 0x14 -> CONTROL
u'\x85' # 0x15 -> CONTROL
u'\x08' # 0x16 -> BACKSPACE
u'\x87' # 0x17 -> CONTROL
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x92' # 0x1A -> CONTROL
u'\x8f' # 0x1B -> CONTROL
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u'\x80' # 0x20 -> CONTROL
u'\x81' # 0x21 -> CONTROL
u'\x82' # 0x22 -> CONTROL
u'\x83' # 0x23 -> CONTROL
u'\x84' # 0x24 -> CONTROL
u'\n' # 0x25 -> LINE FEED
u'\x17' # 0x26 -> END OF TRANSMISSION BLOCK
u'\x1b' # 0x27 -> ESCAPE
u'\x88' # 0x28 -> CONTROL
u'\x89' # 0x29 -> CONTROL
u'\x8a' # 0x2A -> CONTROL
u'\x8b' # 0x2B -> CONTROL
u'\x8c' # 0x2C -> CONTROL
u'\x05' # 0x2D -> ENQUIRY
u'\x06' # 0x2E -> ACKNOWLEDGE
u'\x07' # 0x2F -> BELL
u'\x90' # 0x30 -> CONTROL
u'\x91' # 0x31 -> CONTROL
u'\x16' # 0x32 -> SYNCHRONOUS IDLE
u'\x93' # 0x33 -> CONTROL
u'\x94' # 0x34 -> CONTROL
u'\x95' # 0x35 -> CONTROL
u'\x96' # 0x36 -> CONTROL
u'\x04' # 0x37 -> END OF TRANSMISSION
u'\x98' # 0x38 -> CONTROL
u'\x99' # 0x39 -> CONTROL
u'\x9a' # 0x3A -> CONTROL
u'\x9b' # 0x3B -> CONTROL
u'\x14' # 0x3C -> DEVICE CONTROL FOUR
u'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE
u'\x9e' # 0x3E -> CONTROL
u'\x1a' # 0x3F -> SUBSTITUTE
u' ' # 0x40 -> SPACE
u'\xa0' # 0x41 -> NO-BREAK SPACE
u'\xe2' # 0x42 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x43 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe0' # 0x44 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0x45 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe3' # 0x46 -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x47 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x48 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xf1' # 0x49 -> LATIN SMALL LETTER N WITH TILDE
u'[' # 0x4A -> LEFT SQUARE BRACKET
u'.' # 0x4B -> FULL STOP
u'<' # 0x4C -> LESS-THAN SIGN
u'(' # 0x4D -> LEFT PARENTHESIS
u'+' # 0x4E -> PLUS SIGN
u'!' # 0x4F -> EXCLAMATION MARK
u'&' # 0x50 -> AMPERSAND
u'\xe9' # 0x51 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0x52 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x53 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xe8' # 0x54 -> LATIN SMALL LETTER E WITH GRAVE
u'\xed' # 0x55 -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0x56 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x57 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xec' # 0x58 -> LATIN SMALL LETTER I WITH GRAVE
u'\xdf' # 0x59 -> LATIN SMALL LETTER SHARP S (GERMAN)
u']' # 0x5A -> RIGHT SQUARE BRACKET
u'$' # 0x5B -> DOLLAR SIGN
u'*' # 0x5C -> ASTERISK
u')' # 0x5D -> RIGHT PARENTHESIS
u';' # 0x5E -> SEMICOLON
u'^' # 0x5F -> CIRCUMFLEX ACCENT
u'-' # 0x60 -> HYPHEN-MINUS
u'/' # 0x61 -> SOLIDUS
u'\xc2' # 0x62 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc4' # 0x63 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc0' # 0x64 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0x65 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc3' # 0x66 -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc5' # 0x67 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc7' # 0x68 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xd1' # 0x69 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xa6' # 0x6A -> BROKEN BAR
u',' # 0x6B -> COMMA
u'%' # 0x6C -> PERCENT SIGN
u'_' # 0x6D -> LOW LINE
u'>' # 0x6E -> GREATER-THAN SIGN
u'?' # 0x6F -> QUESTION MARK
u'\xf8' # 0x70 -> LATIN SMALL LETTER O WITH STROKE
u'\xc9' # 0x71 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0x72 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0x73 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0x74 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0x75 -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0x76 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0x77 -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0x78 -> LATIN CAPITAL LETTER I WITH GRAVE
u'`' # 0x79 -> GRAVE ACCENT
u':' # 0x7A -> COLON
u'#' # 0x7B -> NUMBER SIGN
u'@' # 0x7C -> COMMERCIAL AT
u"'" # 0x7D -> APOSTROPHE
u'=' # 0x7E -> EQUALS SIGN
u'"' # 0x7F -> QUOTATION MARK
u'\xd8' # 0x80 -> LATIN CAPITAL LETTER O WITH STROKE
u'a' # 0x81 -> LATIN SMALL LETTER A
u'b' # 0x82 -> LATIN SMALL LETTER B
u'c' # 0x83 -> LATIN SMALL LETTER C
u'd' # 0x84 -> LATIN SMALL LETTER D
u'e' # 0x85 -> LATIN SMALL LETTER E
u'f' # 0x86 -> LATIN SMALL LETTER F
u'g' # 0x87 -> LATIN SMALL LETTER G
u'h' # 0x88 -> LATIN SMALL LETTER H
u'i' # 0x89 -> LATIN SMALL LETTER I
u'\xab' # 0x8A -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x8B -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xf0' # 0x8C -> LATIN SMALL LETTER ETH (ICELANDIC)
u'\xfd' # 0x8D -> LATIN SMALL LETTER Y WITH ACUTE
u'\xfe' # 0x8E -> LATIN SMALL LETTER THORN (ICELANDIC)
u'\xb1' # 0x8F -> PLUS-MINUS SIGN
u'\xb0' # 0x90 -> DEGREE SIGN
u'j' # 0x91 -> LATIN SMALL LETTER J
u'k' # 0x92 -> LATIN SMALL LETTER K
u'l' # 0x93 -> LATIN SMALL LETTER L
u'm' # 0x94 -> LATIN SMALL LETTER M
u'n' # 0x95 -> LATIN SMALL LETTER N
u'o' # 0x96 -> LATIN SMALL LETTER O
u'p' # 0x97 -> LATIN SMALL LETTER P
u'q' # 0x98 -> LATIN SMALL LETTER Q
u'r' # 0x99 -> LATIN SMALL LETTER R
u'\xaa' # 0x9A -> FEMININE ORDINAL INDICATOR
u'\xba' # 0x9B -> MASCULINE ORDINAL INDICATOR
u'\xe6' # 0x9C -> LATIN SMALL LIGATURE AE
u'\xb8' # 0x9D -> CEDILLA
u'\xc6' # 0x9E -> LATIN CAPITAL LIGATURE AE
u'\xa4' # 0x9F -> CURRENCY SIGN
u'\xb5' # 0xA0 -> MICRO SIGN
u'~' # 0xA1 -> TILDE
u's' # 0xA2 -> LATIN SMALL LETTER S
u't' # 0xA3 -> LATIN SMALL LETTER T
u'u' # 0xA4 -> LATIN SMALL LETTER U
u'v' # 0xA5 -> LATIN SMALL LETTER V
u'w' # 0xA6 -> LATIN SMALL LETTER W
u'x' # 0xA7 -> LATIN SMALL LETTER X
u'y' # 0xA8 -> LATIN SMALL LETTER Y
u'z' # 0xA9 -> LATIN SMALL LETTER Z
u'\xa1' # 0xAA -> INVERTED EXCLAMATION MARK
u'\xbf' # 0xAB -> INVERTED QUESTION MARK
u'\xd0' # 0xAC -> LATIN CAPITAL LETTER ETH (ICELANDIC)
u'\xdd' # 0xAD -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xde' # 0xAE -> LATIN CAPITAL LETTER THORN (ICELANDIC)
u'\xae' # 0xAF -> REGISTERED SIGN
u'\xa2' # 0xB0 -> CENT SIGN
u'\xa3' # 0xB1 -> POUND SIGN
u'\xa5' # 0xB2 -> YEN SIGN
u'\xb7' # 0xB3 -> MIDDLE DOT
u'\xa9' # 0xB4 -> COPYRIGHT SIGN
u'\xa7' # 0xB5 -> SECTION SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xbc' # 0xB7 -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xB8 -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xB9 -> VULGAR FRACTION THREE QUARTERS
u'\xac' # 0xBA -> NOT SIGN
u'|' # 0xBB -> VERTICAL LINE
u'\xaf' # 0xBC -> MACRON
u'\xa8' # 0xBD -> DIAERESIS
u'\xb4' # 0xBE -> ACUTE ACCENT
u'\xd7' # 0xBF -> MULTIPLICATION SIGN
u'{' # 0xC0 -> LEFT CURLY BRACKET
u'A' # 0xC1 -> LATIN CAPITAL LETTER A
u'B' # 0xC2 -> LATIN CAPITAL LETTER B
u'C' # 0xC3 -> LATIN CAPITAL LETTER C
u'D' # 0xC4 -> LATIN CAPITAL LETTER D
u'E' # 0xC5 -> LATIN CAPITAL LETTER E
u'F' # 0xC6 -> LATIN CAPITAL LETTER F
u'G' # 0xC7 -> LATIN CAPITAL LETTER G
u'H' # 0xC8 -> LATIN CAPITAL LETTER H
u'I' # 0xC9 -> LATIN CAPITAL LETTER I
u'\xad' # 0xCA -> SOFT HYPHEN
u'\xf4' # 0xCB -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0xCC -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf2' # 0xCD -> LATIN SMALL LETTER O WITH GRAVE
u'\xf3' # 0xCE -> LATIN SMALL LETTER O WITH ACUTE
u'\xf5' # 0xCF -> LATIN SMALL LETTER O WITH TILDE
u'}' # 0xD0 -> RIGHT CURLY BRACKET
u'J' # 0xD1 -> LATIN CAPITAL LETTER J
u'K' # 0xD2 -> LATIN CAPITAL LETTER K
u'L' # 0xD3 -> LATIN CAPITAL LETTER L
u'M' # 0xD4 -> LATIN CAPITAL LETTER M
u'N' # 0xD5 -> LATIN CAPITAL LETTER N
u'O' # 0xD6 -> LATIN CAPITAL LETTER O
u'P' # 0xD7 -> LATIN CAPITAL LETTER P
u'Q' # 0xD8 -> LATIN CAPITAL LETTER Q
u'R' # 0xD9 -> LATIN CAPITAL LETTER R
u'\xb9' # 0xDA -> SUPERSCRIPT ONE
u'\xfb' # 0xDB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xDC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xf9' # 0xDD -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xDE -> LATIN SMALL LETTER U WITH ACUTE
u'\xff' # 0xDF -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\\' # 0xE0 -> REVERSE SOLIDUS
u'\xf7' # 0xE1 -> DIVISION SIGN
u'S' # 0xE2 -> LATIN CAPITAL LETTER S
u'T' # 0xE3 -> LATIN CAPITAL LETTER T
u'U' # 0xE4 -> LATIN CAPITAL LETTER U
u'V' # 0xE5 -> LATIN CAPITAL LETTER V
u'W' # 0xE6 -> LATIN CAPITAL LETTER W
u'X' # 0xE7 -> LATIN CAPITAL LETTER X
u'Y' # 0xE8 -> LATIN CAPITAL LETTER Y
u'Z' # 0xE9 -> LATIN CAPITAL LETTER Z
u'\xb2' # 0xEA -> SUPERSCRIPT TWO
u'\xd4' # 0xEB -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd6' # 0xEC -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd2' # 0xED -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd5' # 0xEF -> LATIN CAPITAL LETTER O WITH TILDE
u'0' # 0xF0 -> DIGIT ZERO
u'1' # 0xF1 -> DIGIT ONE
u'2' # 0xF2 -> DIGIT TWO
u'3' # 0xF3 -> DIGIT THREE
u'4' # 0xF4 -> DIGIT FOUR
u'5' # 0xF5 -> DIGIT FIVE
u'6' # 0xF6 -> DIGIT SIX
u'7' # 0xF7 -> DIGIT SEVEN
u'8' # 0xF8 -> DIGIT EIGHT
u'9' # 0xF9 -> DIGIT NINE
u'\xb3' # 0xFA -> SUPERSCRIPT THREE
u'\xdb' # 0xFB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xFC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE
u'\x9f' # 0xFF -> CONTROL
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
|
Serg09/socorro | refs/heads/master | webapp-django/crashstats/symbols/tests/test_models.py | 14 | import os
from nose.tools import ok_
from django.contrib.auth.models import User
from crashstats.base.tests.testbase import DjangoTestCase
from crashstats.symbols import models
from .base import ZIP_FILE
class TestModels(DjangoTestCase):

    def test_create_symbols_upload(self):
        """A SymbolsUpload row can be created, and its repr() mentions
        the uploaded file's basename."""
        basename = os.path.basename(ZIP_FILE)
        uploader = User.objects.create(username='user')
        upload = models.SymbolsUpload.objects.create(
            user=uploader,
            filename=basename,
            size=12345,
            content='Content',
        )
        ok_(basename in repr(upload))
|
aesoll/astrogen | refs/heads/master | astrogen/astrogen.py | 2 | #!/usr/bin/env python
# Pipeline for solving the astrometry of image files
# astrogen.py
#
# Authors:
# Philipp v. Bieberstein (pbieberstein@email.arizona.edu)
# Matt Madrid (matthewmadrid@email.arizona.edu)
# David Sidi (dsidi@email.arizona.edu)
# Adam Soll (adamsoll@email.arizona.edu)
"""
Scalably solve astrometry for image files in FITS format to produce
configuration files for Astrometrica.
"""
import getpass
import os
import re
import subprocess
import tempfile
import logging
import shutil
import time
from irods.exception import CAT_UNKNOWN_COLLECTION, UserInputException
import makeflow_gen
import pdb
from glob import glob
from datetime import datetime
from zipfile import ZipFile
from config import Config
from astropy.io import fits
from irods.session import iRODSSession
from configuration_gen import ConfigFile
# Package root: the directory containing this module.
__pkg_root__ = os.path.dirname(__file__)
# Inputs (config file, staged FITS batches) live under resources/.
__resources_dir__ = os.path.join(__pkg_root__, os.pardir, 'resources')
# Generated artifacts (makeflow scripts etc.) are written under output/.
__output_dir__ = os.path.join(__pkg_root__, os.pardir, 'output')
# Local staging directory for the batch currently being downloaded.
__batch_dir__ = os.path.join(__resources_dir__, 'fits_files')
class Astrogen(object):
"""
Preprocesses image files in FITS format, filtering bad files.
Retrieves astronomy data in the form of FITS files from iPlant.
Solves astrometry of a local batch of files, generating (among other things)
a working configuration file for Astrometrica.
"""
    def __init__(self):
        # Load configuration, prompt interactively for iPlant
        # credentials, point tempfile at the batch staging directory,
        # and start run logging.
        # get params from config file
        config_path = \
            os.path.join(__resources_dir__, 'astrogen.cfg')
        with open(config_path) as f:
            cfg = Config(f)
            self.iplant_params = dict(cfg.iplant_login_details)
            self.max_batch_size = cfg.batch_details.max_batch_size
            self.path_to_solve_field = cfg.solve_field_details.path_to_solve_field
            self.path_to_netpbm = cfg.solve_field_details.path_to_netpbm
        # uname and pword are given at command line
        self.user = raw_input("Enter iPlant username: ")
        self.password = getpass.getpass("Enter iPlant password: ")
        # set up temporary local file directory for batches
        # NOTE(review): this duplicates __batch_dir__ but builds the path
        # without os.pardir — confirm both resolve to the same directory.
        tempfile.tempdir = os.path.join(__pkg_root__, 'resources', 'fits_files')
        # set up logging
        logging.basicConfig(filename='astrogen.log', level=logging.INFO)
        t = time.localtime()
        logging.info(
            "Astrogen run started {day}-{mo}-{year} at {hour}:{min}:{sec}".format(
                day=t.tm_mday, mo=t.tm_mon, year=t.tm_year, hour=t.tm_hour,
                min=t.tm_min, sec=t.tm_sec))
# PUBLIC ##################################################################
def get_astrometry(self):
"""
Gets the astrometry data for the FITS files in this iPlant directory.
Note: Nothing but .fits and .arch files are allowed.
"""
# get data objects from iPlant
cleaned_data_objects = self._get_data_objects()
current_batch_size = 0
for data_object in cleaned_data_objects:
if current_batch_size < self.max_batch_size:
self._add_to_local_batch(data_object)
#current_batch_size = os.path.getsize(__batch_dir__) / 1024. ** 2
current_batch_size = \
sum(
[os.path.getsize(f) for f in os.listdir(__batch_dir__)
if os.path.isfile(f)]
) / 1024. ** 2
else:
# call astronomy.net stuff on this batch
self._solve_batch_astrometry()
# clear this batch from directory
all_batched_fits_files = glob(os.path.join(__batch_dir__, '*'))
os.remove(all_batched_fits_files)
current_batch_size = 0
# PRIVATE #################################################################
def _unzipper(self, data_object):
"""
Checks if file can be unzip and if it can sends it to resources/fit_files
"""
with ZipFile(data_object, 'w') as myzip:
testZip = myzip.testzip()
if testZip == None: # if testZip is None then there are no bad files
path_to_unzipper_outputs = \
os.path.join(__resources_dir__, 'fits_files')
myzip.write(tempfile.NamedTemporaryFile())
else:
myzip.moveFileToDirectory("Unusable") #move to non working folder
ZipFile.close()
def _file_extension_validation(self, fits_directory):
"""
Parses filenames in a directory to determine which files are valid solve-field candidates
Files that do not meet criteria are removed
"""
valid_extensions = [
"fit", "fits", "FIT", "FITS", "fts"
]
for fits_file_candidate in os.listdir(fits_directory):
if fits_file_candidate.split(".")[1] not in valid_extensions:
os.remove(fits_directory + "/" + fits_file_candidate)
print("Removing invalid file \"" + fits_file_candidate + "\"...")
return None
def _solve_batch_astrometry(self):
"""
Generate the makeflow script to run astrometry on a batch of local
files.
Assumes only FITS files in the directory.
Assumes a working solve-field on your path.
"""
fits_filenames = os.listdir(__batch_dir__)
makeflow_gen.makeflow_gen(
fits_filenames,
self.path_to_solve_field,
self.path_to_netpbm
)
makeflow_path = os.path.join(__output_dir__, 'makeflows', 'output.mf')
self._run_makeflow(makeflow_path)
self._run_parameter_extraction()
self._move_makeflow_solutions()
@staticmethod
def _run_parameter_extraction():
"""Runs parameter extraction using stored output of solve-field in the
batch directory.
"""
path_to_solve_field_outputs = \
os.path.join(__resources_dir__, 'fits_files')
# where stdout was redirected in call to makeflow
all_stdout_files = os.path.join(path_to_solve_field_outputs, '*.out')
for output_filename in glob(all_stdout_files):
is_not_empty = os.path.getsize(output_filename)
if is_not_empty:
dir_name = os.path.dirname(output_filename)
fits_basename = os.path.basename(output_filename)
fits_filename = os.path.splitext(fits_basename)[0] + '.fit'
fits_path = os.path.join(dir_name, fits_filename)
ConfigFile().process(fits_path, output_filename)
    @staticmethod
    def _run_makeflow(makeflow_script_name):
        """Runs a makeflow.

        Side-effects by generating several files for each fits file in the
        batch directory (resources/fits_files).

        WARNING: Clears previous runs from the makeflows directory.

        :param makeflow_script_name: The absolute path of the makeflow script
            to run.
        """
        # self._clear_generated_files()
        # TODO factor the sections into methods
        makeflow_project_name = 'SONORAN'
        path_to_solve_field_outputs = \
            os.path.join(__resources_dir__, 'fits_files')
        ##
        # Get the shell, stand on head so that `module load` works
        #
        echo_out = subprocess.check_output('echo $SHELL', shell=True)
        shell = os.path.basename(echo_out.strip())
        # edge case: if shell is ksh93, use 'ksh'
        if shell.startswith('ksh'):
            shell = 'ksh'
        # Environment-modules init script sourced for this particular shell.
        module_init = '/usr/share/Modules/init/' + shell
        ##
        # build makeflow, pbs_submit_workers commands
        #
        # The makeflow runs from the outputs directory in WorkQueue (-T wq)
        # advertised mode (-a) under the shared project name, so the PBS
        # workers submitted below can find it.
        #TODO modify temp files locations
        makeflow_cmd = 'cd {outputs_dir} && ' \
                       'makeflow --wrapper \'. {shell_module}\' ' \
                       '-T wq ' \
                       '-a ' \
                       '-N {project_name} ' \
                       '{makeflow_script_name}'.\
            format(outputs_dir=path_to_solve_field_outputs,
                   shell_module=module_init,
                   project_name=makeflow_project_name,
                   makeflow_script_name=makeflow_script_name)
        # Submit 3 WorkQueue workers to PBS with fixed resource limits.
        pbs_submit_cmd = 'pbs_submit_workers ' \
                         '-d all ' \
                         '-N {project_name} ' \
                         '-p "-N {project_name} ' \
                         '-W group_list=nirav ' \
                         '-q standard ' \
                         '-l jobtype=serial ' \
                         '-l select=1:ncpus=3:mem=4gb ' \
                         '-l place=pack:shared ' \
                         '-l walltime=01:00:00 ' \
                         '-l cput=01:00:00" ' \
                         '3'.format(project_name=makeflow_project_name)
        ##
        # call commands
        #
        print ('Now calling makeflow and pbs_submit_workers (you may want to '
               'watch the resources/fits_files directory for .out files in a '
               'couple of minutes) ...')
        pbs_output_dst = os.path.join(__resources_dir__, 'pbs_output')  # TODO add date to fn
        makeflow_output_dst = os.path.join(__resources_dir__, 'makeflow_output')  # TODO add date
        # NOTE(review): neither Popen is wait()ed, so "batch complete" is
        # printed immediately while both commands still run in the
        # background — confirm this is intended.
        with open(pbs_output_dst, 'w') as f1, open(makeflow_output_dst, 'w') as f2:
            subprocess.Popen(pbs_submit_cmd, shell=True, stdout=f1)
            subprocess.Popen(makeflow_cmd, shell=True, stdout=f2)
        print ('... batch complete.')
        t = time.localtime()
        logging.info('finished a batch on {day}-{mo}-{year} at {hour}:{min}:{sec}'.format(
            day=t.tm_mday, mo=t.tm_mon, year=t.tm_year, hour=t.tm_hour,
            min=t.tm_min, sec=t.tm_sec))
def _move_makeflow_solutions(self):
"""Move makeflow solution files to their directory
Issuing shell commands like `imv` is not done because it is not
portable (even though it would be simpler).
"""
def mk_irods_path(leaf_dir):
return os.path.join(
self.iplant_params['iplant_write_path'],
leaf_dir
)
iplant_params = self.iplant_params
logging.info("Writing data to {} as {} ...".
format(iplant_params['host'], self.user))
sess = self._get_irods_session()
output_src = os.path.join(__resources_dir__, 'fits_files')
fits_file_paths = glob(os.path.join(output_src, '*.fit'))
cfg_file_paths = glob(os.path.join(output_src, '*.cfg'))
other_soln_file_paths = \
glob(os.path.join(output_src, '*.out')) + \
glob(os.path.join(output_src, '*.axy')) + \
glob(os.path.join(output_src, '*.xyls')) + \
glob(os.path.join(output_src, '*.match')) + \
glob(os.path.join(output_src, '*.new')) + \
glob(os.path.join(output_src, '*.rdls')) + \
glob(os.path.join(output_src, '*.solved'))
# list of lists created by combining globs
lists_of_file_paths = [fits_file_paths, cfg_file_paths, other_soln_file_paths]
irods_fits_output_dst = mk_irods_path('modified_fits')
irods_cfg_output_dst = mk_irods_path('astrometrica_config_files')
irods_other_soln_output_dst = mk_irods_path('other_solution_files')
output_dsts = [irods_fits_output_dst, irods_cfg_output_dst,
irods_other_soln_output_dst]
for soln_file_paths, output_dst in zip(lists_of_file_paths, output_dsts):
self._move_to_irods_store(sess, output_src, soln_file_paths, output_dst)
def _get_irods_session(self):
iplant_params = self.iplant_params
return iRODSSession(
host=iplant_params['host'],
port=iplant_params['port'],
user=self.user,
password=self.password,
zone=iplant_params['zone']
)
def _move_to_irods_store(self, sess, output_src, files_glob, output_irods_dst):
"""Move files to an irods store.
Note: overwrites files that are identically named.
:param output_irods_dst:
:param files_glob:
:param output_src:
:param sess:
:return:
"""
try:
sess.collections.create(output_irods_dst)
except: # TODO get exception name
pass
for filename in files_glob:
basename = os.path.basename(filename)
iplant_filepath = os.path.join(output_irods_dst, basename)
# create irods file to store the local file
try:
obj = sess.data_objects.create(iplant_filepath)
except UserInputException as e:
logging.info("File {} not moved. Exception details: {}".
format(filename, e))
continue
finally:
os.remove(filename)
# copy the local file
with obj.open('w+') as f, open(filename, 'r') as g:
f.write(g.read())
# TODO rm local file
    def _get_data_objects(self):
        """Get and clean data objects from an iRODS collection on iPlant."""
        iplant_params = self.iplant_params
        logging.info("Reading data from {} as {} ...".
                     format(iplant_params['host'], self.user))
        sess = self._get_irods_session()
        coll = sess.collections.get(iplant_params['iplant_filepath'])
        data_objects = coll.data_objects
        # NOTE(review): despite the docstring, no cleaning happens — the
        # .fits/.arch extension filter below is commented out, so every
        # data object in the collection is returned.  Confirm whether
        # the filter should be restored.
        # cleaned_data_objects = \
        #     filter(lambda x: x.name.lower().endswith('.fits') or
        #                      x.name.lower().endswith('.arch'),
        #            data_objects)
        return data_objects
@staticmethod
def _clear_generated_files():
"""Clears the files generated by the pipeline."""
erase_fits = raw_input("Erase all non FITS files from {} (y/n)?".format(__batch_dir__))
if erase_fits.lower() == 'y':
print "Erasing..."
try:
for filename in os.listdir(__batch_dir__):
if not filename.lower().endswith('*.fit'):
os.remove(filename)
except:
logging.error("Could not erase files from {}".format(__batch_dir__))
else:
print "Not erasing..."
makeflows_dir = os.path.join(__output_dir__, 'makeflows')
erase_makeflows = raw_input("Erase all files from {} (y/n)?".format(makeflows_dir))
if erase_makeflows.lower() == 'y':
print "Erasing..."
try:
map(os.remove, glob(os.path.join(makeflows_dir, '*')))
except:
logging.error("Could not erase files from {}".format(makeflows_dir))
else:
print "Not Erasing..."
@staticmethod
def _extract_datetime(datetime_str):
"""Get a datetime object from the date numbers in a FITS header."""
p = re.compile(
r"\'(\d\d\d\d)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)\.(\d\d\d\d\d\d)\'"
)
datetime_match = p.match(datetime_str)
datetime_numbers = tuple(int(x) for x in datetime_match.groups())
return datetime(*datetime_numbers)
@staticmethod
def _passes_muster(hdus):
"""Up or down vote on a list of hdus. Currently a no-op."""
# TODO useful for later, if we decide to filter
# header = hdus[0].header # first element is primary HDU, we don't use others
# dt = Preprocessor._extract_datetime(header['DATE-OBS'][0])
# image_type = header['VIMTYPE'][0][1:-1].strip()
# ao_loop_state = header['AOLOOPST'][0][1:-1].strip()
# shutter_state = header['VSHUTTER'][0][1:-1].strip()
return True
@staticmethod
def _add_to_local_batch(data_object):
"""Add a FITS file from iPlant datastore to a local batch.
"""
# TODO decorate for IO error catching
try:
name = data_object.name
# write to temp. local file
filepath = os.path.join(__batch_dir__, name)
with open(filepath, 'w') as f:
with data_object.open('r') as irods_f:
f.write(irods_f.read())
except IOError:
logging.info('File rejected: {}.'.format(data_object.name))
|
pwmarcz/django | refs/heads/master | django/db/backends/dummy/__init__.py | 12133432 | |
andela-ooladayo/django | refs/heads/master | tests/dates/__init__.py | 12133432 | |
jarshwah/django | refs/heads/master | django/views/decorators/__init__.py | 12133432 | |
Nexenta/cinder | refs/heads/master | cinder/hacking/__init__.py | 12133432 | |
prakharjain09/qds-sdk-py | refs/heads/master | setup.py | 2 | import os
import sys
from setuptools import setup
# Core runtime dependencies with minimum known-good versions.
INSTALL_REQUIRES = ['requests >=1.0.3', 'boto >=2.1.1', 'six >=1.2.0', 'urllib3 >= 1.0.2', 'inflection >= 0.3.1']

# argparse entered the standard library in Python 2.7; older
# interpreters need the PyPI backport.
if sys.version_info < (2, 7, 0):
    INSTALL_REQUIRES.append('argparse>=1.1')
def read(fname):
    """Return the text of *fname*, resolved relative to this setup.py.

    Fix: the original `open(...).read()` never closed the file handle;
    a context manager releases it deterministically.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
# Package metadata; long_description is read from README.rst at build time.
setup(
    name="qds_sdk",
    version="1.9.0",
    author="Qubole",
    author_email="dev@qubole.com",
    description=("Python SDK for coding to the Qubole Data Service API"),
    keywords="qubole sdk api",
    url="https://github.com/qubole/qds-sdk-py",
    packages=['qds_sdk'],
    scripts=['bin/qds.py'],
    # INSTALL_REQUIRES gains argparse on Python < 2.7 (see check above).
    install_requires=INSTALL_REQUIRES,
    long_description=read('README.rst'),
    classifiers=[
        "Environment :: Console",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4"
    ]
)
|
kk47/Python | refs/heads/master | django/td2.0/app/forms.py | 1 | # coding: utf-8
from django import forms
from models import *
#from django.contrib.localflavor.us.forms import USPhoneNumberField
from django.contrib.auth.models import User
from django.forms.fields import DateField, ChoiceField, MultipleChoiceField
from django.forms.widgets import RadioSelect, CheckboxSelectMultiple
from django.forms.extras.widgets import SelectDateWidget
class FormRoom(forms.ModelForm):
    # ModelForm exposing all fields of Room.
    # NOTE(review): no `fields`/`exclude` in Meta — required on Django >= 1.6;
    # confirm the targeted Django version.
    class Meta:
        model = Room
class FormSwitch(forms.ModelForm):
    # Switch form; `idroom` overrides the default widget with a plain Select
    # (label '机柜' = "cabinet/rack").
    idroom = forms.ModelChoiceField(queryset=Room.objects.all(),widget=forms.Select(),empty_label=None,label='机柜' )
    class Meta:
        model = Switch
class FormMac(forms.ModelForm):
    # MAC-address form; rack selector mirrors FormSwitch.
    idroom = forms.ModelChoiceField(queryset=Room.objects.all(),widget=forms.Select(),empty_label=None,label='机柜' )
    class Meta:
        model = Mac
class FormServer(forms.ModelForm):
    idroom = forms.ModelChoiceField(queryset=Room.objects.all(),widget=forms.Select(),empty_label=None,label='机柜' )
    # Required date/time fields; the error messages embed the expected
    # 0000-00-00 format hint.
    start_time = forms.DateTimeField(error_messages={'required':u'必填:时间格式0000-00-00'},label='开始时间')
    end_time = forms.DateTimeField(error_messages={'required':u'必填:时间格式0000-00-00'},label='截止时间')
    class Meta:
        model = Server
        # NOTE(review): 'is_avlie' looks like a typo for 'is_alive' but must
        # match the Server model field name as declared.
        exclude = ('is_avlie',);
class FormRepair(forms.ModelForm):
    # ModelForm exposing all fields of Repair.
    class Meta:
        model = Repair
|
lazywei/scikit-learn | refs/heads/master | examples/svm/plot_svm_nonlinear.py | 61 | """
==============
Non-linear SVM
==============
Perform binary classification using non-linear SVC
with RBF kernel. The target to predict is a XOR of the
inputs.
The color map illustrates the decision function learn by the SVC.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm
xx, yy = np.meshgrid(np.linspace(-3, 3, 500),
np.linspace(-3, 3, 500))
np.random.seed(0)
X = np.random.randn(300, 2)
Y = np.logical_xor(X[:, 0] > 0, X[:, 1] > 0)
# fit the model
clf = svm.NuSVC()
clf.fit(X, Y)
# plot the decision function for each datapoint on the grid
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
plt.imshow(Z, interpolation='nearest',
extent=(xx.min(), xx.max(), yy.min(), yy.max()), aspect='auto',
origin='lower', cmap=plt.cm.PuOr_r)
contours = plt.contour(xx, yy, Z, levels=[0], linewidths=2,
linetypes='--')
plt.scatter(X[:, 0], X[:, 1], s=30, c=Y, cmap=plt.cm.Paired)
plt.xticks(())
plt.yticks(())
plt.axis([-3, 3, -3, 3])
plt.show()
|
SurfasJones/icecream-info | refs/heads/master | icecream/lib/python2.7/site-packages/djangocms_text_ckeditor/__init__.py | 3 | __version__ = "2.1.4"
|
btabibian/scikit-learn | refs/heads/master | sklearn/gaussian_process/correlation_models.py | 72 | # -*- coding: utf-8 -*-
# Author: Vincent Dubourg <vincent.dubourg@gmail.com>
# (mostly translation, see implementation details)
# License: BSD 3 clause
"""
The built-in correlation models submodule for the gaussian_process module.
"""
import numpy as np
def absolute_exponential(theta, d):
    """
    Absolute exponential (Ornstein-Uhlenbeck) autocorrelation model::

                                          n
        theta, d --> r(theta, d) = exp(  sum  - theta_i * |d_i| )
                                        i = 1

    Parameters
    ----------
    theta : array_like
        Autocorrelation parameter(s); shape 1 (isotropic) or n (anisotropic).

    d : array_like
        Componentwise distances between locations x and x', shape
        (n_eval, n_features).

    Returns
    -------
    r : array_like
        Autocorrelation values, shape (n_eval, ).
    """
    params = np.asarray(theta, dtype=np.float64)
    dists = np.abs(np.asarray(d, dtype=np.float64))

    # A 1-D distance array is treated as a single feature column.
    n_features = dists.shape[1] if dists.ndim > 1 else 1

    if params.size == 1:
        # Isotropic: one length-scale shared by every feature.
        return np.exp(- params[0] * np.sum(dists, axis=1))
    if params.size != n_features:
        raise ValueError("Length of theta must be 1 or %s" % n_features)
    # Anisotropic: one length-scale per feature.
    return np.exp(- np.sum(params.reshape(1, n_features) * dists, axis=1))
def squared_exponential(theta, d):
    """
    Squared exponential (RBF) correlation model — infinitely differentiable,
    hence very smooth::

                                          n
        theta, d --> r(theta, d) = exp(  sum  - theta_i * (d_i)^2 )
                                        i = 1

    Parameters
    ----------
    theta : array_like
        Autocorrelation parameter(s); shape 1 (isotropic) or n (anisotropic).

    d : array_like
        Componentwise distances between locations x and x', shape
        (n_eval, n_features).

    Returns
    -------
    r : array_like
        Autocorrelation values, shape (n_eval, ).
    """
    params = np.asarray(theta, dtype=np.float64)
    dists = np.asarray(d, dtype=np.float64)

    # A 1-D distance array is treated as a single feature column.
    n_features = dists.shape[1] if dists.ndim > 1 else 1

    if params.size == 1:
        # Isotropic: a single length-scale for all features.
        return np.exp(-params[0] * np.sum(dists ** 2, axis=1))
    if params.size != n_features:
        raise ValueError("Length of theta must be 1 or %s" % n_features)
    # Anisotropic: per-feature length-scales.
    return np.exp(-np.sum(params.reshape(1, n_features) * dists ** 2, axis=1))
def generalized_exponential(theta, d):
    """
    Generalized exponential correlation model.
    (Useful when one does not know the smoothness of the function to be
    predicted.)::

                                          n
        theta, d --> r(theta, d) = exp(  sum  - theta_i * |d_i|^p )
                                        i = 1

    Parameters
    ----------
    theta : array_like
        An array with shape 1+1 (isotropic) or n+1 (anisotropic) giving the
        autocorrelation parameter(s) (theta, p). The last entry is the
        exponent p, shared by all features.

    d : array_like
        An array with shape (n_eval, n_features) giving the componentwise
        distances between locations x and x' at which the correlation model
        should be evaluated.

    Returns
    -------
    r : array_like
        An array with shape (n_eval, ) with the values of the autocorrelation
        model.

    Raises
    ------
    ValueError
        If the length of theta is neither 2 nor n_features + 1.
    """
    theta = np.asarray(theta, dtype=np.float64)
    d = np.asarray(d, dtype=np.float64)

    if d.ndim > 1:
        n_features = d.shape[1]
    else:
        n_features = 1

    lth = theta.size
    if n_features > 1 and lth == 2:
        # Isotropic (theta, p): broadcast theta over all features.
        theta = np.hstack([np.repeat(theta[0], n_features), theta[1]])
        # Bug fix: the expanded vector must also be reshaped to 2-D —
        # without this, the `theta[:, 0:-1]` indexing below raised
        # IndexError for the isotropic multi-feature case.
        theta = theta.reshape(1, n_features + 1)
    elif lth != n_features + 1:
        # ValueError for consistency with absolute_/squared_exponential
        # (was a bare Exception; ValueError is a subclass, so callers
        # catching Exception still work).
        raise ValueError("Length of theta must be 2 or %s" % (n_features + 1))
    else:
        theta = theta.reshape(1, lth)

    td = theta[:, 0:-1].reshape(1, n_features) * np.abs(d) ** theta[:, -1]
    r = np.exp(- np.sum(td, 1))
    return r
def pure_nugget(theta, d):
    """
    Spatial-independence correlation model (pure nugget): 1 exactly where the
    distance vector is all-zero, 0 elsewhere. Useful when one wants to solve
    an ordinary least squares problem!::

                                          n
        theta, d --> r(theta, d) = 1 if  sum |d_i| == 0
                                        i = 1
                                   0 otherwise

    Parameters
    ----------
    theta : array_like
        Unused; accepted for API compatibility with the other models.

    d : array_like
        Componentwise distances between locations x and x', shape
        (n_eval, n_features).

    Returns
    -------
    r : array_like
        Autocorrelation values, shape (n_eval, ).
    """
    theta = np.asarray(theta, dtype=np.float64)
    dists = np.asarray(d, dtype=np.float64)

    # Rows whose every component is exactly zero correlate perfectly.
    zero_rows = np.all(dists == 0., axis=1)
    return np.where(zero_rows, 1., 0.)
def cubic(theta, d):
    """
    Cubic correlation model::

        theta, d --> r(theta, d) =
          n
         prod max(0, 1 - 3(theta_j*d_ij)^2 + 2(theta_j*d_ij)^3) ,  i = 1,...,m
        j = 1

    Parameters
    ----------
    theta : array_like
        An array with shape 1 (isotropic) or n (anisotropic) giving the
        autocorrelation parameter(s).

    d : array_like
        An array with shape (n_eval, n_features) giving the componentwise
        distances between locations x and x' at which the correlation model
        should be evaluated.

    Returns
    -------
    r : array_like
        An array with shape (n_eval, ) with the values of the autocorrelation
        model.

    Raises
    ------
    ValueError
        If the length of theta is neither 1 nor n_features.
    """
    theta = np.asarray(theta, dtype=np.float64)
    d = np.asarray(d, dtype=np.float64)

    if d.ndim > 1:
        n_features = d.shape[1]
    else:
        n_features = 1

    lth = theta.size
    if lth == 1:
        td = np.abs(d) * theta
    elif lth != n_features:
        # ValueError for consistency with absolute_/squared_exponential
        # (was a bare Exception; ValueError subclasses Exception, so existing
        # handlers still catch it).
        raise ValueError("Length of theta must be 1 or %s" % n_features)
    else:
        td = np.abs(d) * theta.reshape(1, n_features)

    # Clamp: beyond |theta*d| = 1 the correlation has already decayed to 0.
    td[td > 1.] = 1.
    ss = 1. - td ** 2. * (3. - 2. * td)
    r = np.prod(ss, 1)
    return r
def linear(theta, d):
    """
    Linear correlation model::

        theta, d --> r(theta, d) =
              n
            prod max(0, 1 - theta_j*d_ij) ,  i = 1,...,m
            j = 1

    Parameters
    ----------
    theta : array_like
        An array with shape 1 (isotropic) or n (anisotropic) giving the
        autocorrelation parameter(s).

    d : array_like
        An array with shape (n_eval, n_features) giving the componentwise
        distances between locations x and x' at which the correlation model
        should be evaluated.

    Returns
    -------
    r : array_like
        An array with shape (n_eval, ) with the values of the autocorrelation
        model.

    Raises
    ------
    ValueError
        If the length of theta is neither 1 nor n_features.
    """
    theta = np.asarray(theta, dtype=np.float64)
    d = np.asarray(d, dtype=np.float64)

    if d.ndim > 1:
        n_features = d.shape[1]
    else:
        n_features = 1

    lth = theta.size
    if lth == 1:
        td = np.abs(d) * theta
    elif lth != n_features:
        # ValueError for consistency with the other correlation models
        # (was a bare Exception; backward compatible for callers catching
        # Exception).
        raise ValueError("Length of theta must be 1 or %s" % n_features)
    else:
        td = np.abs(d) * theta.reshape(1, n_features)

    # Clamp: correlation is zero once theta*d reaches 1.
    td[td > 1.] = 1.
    ss = 1. - td
    r = np.prod(ss, 1)
    return r
|
Acehaidrey/incubator-airflow | refs/heads/master | tests/providers/microsoft/azure/operators/test_azure_cosmos.py | 10 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import json
import unittest
import uuid
from unittest import mock
from airflow.models import Connection
from airflow.providers.microsoft.azure.operators.azure_cosmos import AzureCosmosInsertDocumentOperator
from airflow.utils import db
class TestAzureCosmosDbHook(unittest.TestCase):
    # Set up an environment to test with
    def setUp(self):
        # set up some test variables
        self.test_end_point = 'https://test_endpoint:443'
        self.test_master_key = 'magic_test_key'
        self.test_database_name = 'test_database_name'
        self.test_collection_name = 'test_collection_name'
        # Register a Connection in the metadata DB so the operator's hook can
        # resolve 'azure_cosmos_test_key_id'; database/collection names travel
        # in the JSON `extra` field.
        db.merge_conn(
            Connection(
                conn_id='azure_cosmos_test_key_id',
                conn_type='azure_cosmos',
                login=self.test_end_point,
                password=self.test_master_key,
                extra=json.dumps(
                    {'database_name': self.test_database_name, 'collection_name': self.test_collection_name}
                ),
            )
        )
    @mock.patch('airflow.providers.microsoft.azure.hooks.azure_cosmos.CosmosClient')
    def test_insert_document(self, cosmos_mock):
        # The operator must insert the document through CosmosClient.CreateItem
        # against the dbs/<db>/colls/<collection> path.
        test_id = str(uuid.uuid4())
        cosmos_mock.return_value.CreateItem.return_value = {'id': test_id}
        op = AzureCosmosInsertDocumentOperator(
            database_name=self.test_database_name,
            collection_name=self.test_collection_name,
            document={'id': test_id, 'data': 'sometestdata'},
            azure_cosmos_conn_id='azure_cosmos_test_key_id',
            task_id='azure_cosmos_sensor',
        )
        expected_calls = [
            mock.call().CreateItem(
                'dbs/' + self.test_database_name + '/colls/' + self.test_collection_name,
                {'data': 'sometestdata', 'id': test_id},
            )
        ]
        op.execute(None)
        # Client must have been constructed with the endpoint and master key
        # taken from the Connection registered in setUp().
        cosmos_mock.assert_any_call(self.test_end_point, {'masterKey': self.test_master_key})
        cosmos_mock.assert_has_calls(expected_calls)
|
vincentlooi/FCIS | refs/heads/master | lib/dataset/ds_utils.py | 21 | import numpy as np
def unique_boxes(boxes, scale=1.0):
    """Return sorted indices of the first occurrence of each unique box.

    Each (scaled, rounded) box row is folded into a single scalar via a
    positional weight vector, so duplicate rows collapse to the same value.
    """
    weights = np.array([1, 1e3, 1e6, 1e9])
    box_hashes = np.round(boxes * scale).dot(weights)
    _, first_idx = np.unique(box_hashes, return_index=True)
    return np.sort(first_idx)
def filter_small_boxes(boxes, min_size):
    """Return indices of boxes whose width AND height are >= min_size.

    Bug fix: the original compared ``w >= min_size`` but ``h > min_size``;
    the asymmetry was accidental (upstream py-faster-rcnn uses ``>=`` for
    both dimensions), so both now use ``>=``.
    """
    w = boxes[:, 2] - boxes[:, 0]
    h = boxes[:, 3] - boxes[:, 1]
    keep = np.where((w >= min_size) & (h >= min_size))[0]
    return keep
xaviercobain88/framework-python | refs/heads/master | openerp/addons/mrp/report/mrp_report.py | 56 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
class report_workcenter_load(osv.osv):
    # Read-only reporting model backed by the SQL view created in init().
    _name="report.workcenter.load"
    _description="Work Center Load"
    _auto = False        # no table created by the ORM; init() builds a view
    _log_access = False
    _columns = {
        'name': fields.char('Week', size=64, required=True),
        'workcenter_id': fields.many2one('mrp.workcenter', 'Work Center', required=True),
        'cycle': fields.float('Number of Cycles'),
        'hour': fields.float('Number of Hours'),
    }
    def init(self, cr):
        # (Re)create the view aggregating hours and cycles per work center
        # and per planned date. NOTE(review): the column is labelled 'Week'
        # but the view groups by day ('YYYY:mm:dd') — confirm intent.
        cr.execute("""
            create or replace view report_workcenter_load as (
                SELECT
                    min(wl.id) as id,
                    to_char(p.date_planned,'YYYY:mm:dd') as name,
                    SUM(wl.hour) AS hour,
                    SUM(wl.cycle) AS cycle,
                    wl.workcenter_id as workcenter_id
                FROM
                    mrp_production_workcenter_line wl
                    LEFT JOIN mrp_production p
                        ON p.id = wl.production_id
                GROUP BY
                    wl.workcenter_id,
                    to_char(p.date_planned,'YYYY:mm:dd')
            )""")
report_workcenter_load()
class report_mrp_inout(osv.osv):
    # SQL-view-backed report of stock value variation per ISO week.
    _name="report.mrp.inout"
    _description="Stock value variation"
    _auto = False        # backed by the view created in init(), not a table
    _log_access = False
    _rec_name = 'date'
    _columns = {
        'date': fields.char('Week', size=64, required=True),
        'value': fields.float('Stock value', required=True, digits=(16,2)),
    }
    def init(self, cr):
        # Sums standard_price * qty over pending moves, counted positively
        # when the source location is internal and negatively when the
        # destination is internal, grouped by ISO week ('YYYY:IW').
        cr.execute("""
            create or replace view report_mrp_inout as (
                select
                    min(sm.id) as id,
                    to_char(sm.date,'YYYY:IW') as date,
                    sum(case when (sl.usage='internal') then
                        pt.standard_price * sm.product_qty
                    else
                        0.0
                    end - case when (sl2.usage='internal') then
                        pt.standard_price * sm.product_qty
                    else
                        0.0
                    end) as value
                from
                    stock_move sm
                left join product_product pp
                    on (pp.id = sm.product_id)
                left join product_template pt
                    on (pt.id = pp.product_tmpl_id)
                left join stock_location sl
                    on ( sl.id = sm.location_id)
                left join stock_location sl2
                    on ( sl2.id = sm.location_dest_id)
                where
                    sm.state in ('waiting','confirmed','assigned')
                group by
                    to_char(sm.date,'YYYY:IW')
            )""")
report_mrp_inout()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Bachaco-ve/odoo | refs/heads/8.0 | addons/payment/tests/common.py | 392 | # -*- coding: utf-8 -*-
from openerp.tests import common
class PaymentAcquirerCommon(common.TransactionCase):
    # Shared fixture for payment-acquirer tests: resolves EUR, BE/FR records
    # and prepares a buyer partner both as a values dict and a created record.
    def setUp(self):
        super(PaymentAcquirerCommon, self).setUp()
        self.payment_acquirer = self.registry('payment.acquirer')
        self.payment_transaction = self.registry('payment.transaction')
        self.currency_euro_id = self.registry('res.currency').search(
            self.cr, self.uid, [('name', '=', 'EUR')], limit=1)[0]
        self.currency_euro = self.registry('res.currency').browse(
            self.cr, self.uid, self.currency_euro_id)
        self.country_belgium_id = self.registry('res.country').search(
            self.cr, self.uid, [('code', 'like', 'BE')], limit=1)[0]
        self.country_france_id = self.registry('res.country').search(
            self.cr, self.uid, [('code', 'like', 'FR')], limit=1)[0]
        # dict partner values
        self.buyer_values = {
            'name': 'Norbert Buyer',
            'lang': 'en_US',
            'email': 'norbert.buyer@example.com',
            'street': 'Huge Street',
            'street2': '2/543',
            'phone': '0032 12 34 56 78',
            'city': 'Sin City',
            'zip': '1000',
            'country_id': self.country_belgium_id,
            'country_name': 'Belgium',
        }
        # test partner
        self.buyer_id = self.registry('res.partner').create(
            self.cr, self.uid, {
                'name': 'Norbert Buyer',
                'lang': 'en_US',
                'email': 'norbert.buyer@example.com',
                'street': 'Huge Street',
                'street2': '2/543',
                'phone': '0032 12 34 56 78',
                'city': 'Sin City',
                'zip': '1000',
                'country_id': self.country_belgium_id,
            }
        )
|
3ev0/rdns-monitor | refs/heads/master | rdnsmonitor/work.py | 1 | import threading
import logging
import datetime
import socket
import dns.resolver
import dns.reversename
import dns.exception
from dns.exception import Timeout
from rdnsmonitor import handy
log = logging.getLogger(__name__)
class SERVFAIL(dns.exception.DNSException):
    # Raised when the upstream resolver answers with rcode SERVFAIL.
    pass
class CommException(dns.exception.DNSException):
    # Raised on transport-level problems talking to a nameserver.
    pass
class Worker():
    """Resolve reverse-DNS (PTR) jobs, rotating over a pool of nameservers.

    Subclasses must implement _fetchJob, _workJob, _finishJob and __repr__.
    """

    COMMERR_TRESH = 10  # abandon a nameserver after this many comm errors

    def __init__(self, nameservers=None, use_tcp=False):
        # Fix: avoid the shared mutable default argument ([]) and tolerate
        # None (LocalWorker passes nameservers=None by default, which used
        # to raise TypeError on the concatenation below).
        nameservers = list(nameservers) if nameservers else []
        self.current_job = None
        self.default_nameserver = dns.resolver.get_default_resolver().nameservers[0]
        self.nameservers = nameservers + [self.default_nameserver]
        self.cur_nameserver = None
        self.resolver = dns.resolver.Resolver()
        self.nameserver_stats = {nsname: {"good": True} for nsname in self.nameservers}
        self._switchDNS()
        self.timeout = 3
        self.jobstats = {"timeoutcnt": 0,
                         "resolvecnt": 0,
                         "servfailcnt": 0,  # fix: _resolveIP increments this key
                         "nxdcnt": 0,
                         "tot_duration": datetime.timedelta(0),
                         "errcnt": 0}
        self.use_tcp = use_tcp
        return

    def work(self):
        """Main loop: fetch jobs and work them until none are left."""
        self._fetchJob()
        while self.current_job:
            self._workJob()
            self._finishJob()
            self._fetchJob()
        return

    def _fetchJob(self):
        raise NotImplementedError

    def _finishJob(self):
        raise NotImplementedError

    def _workJob(self):
        raise NotImplementedError

    def _sendResults(self):
        return

    def _resolveIP(self, ipAddress):
        """Resolve one IP to its PTR name.

        Returns the resolved name, or one of the status strings
        "NXDOMAIN", "TIMEOUT", "ERROR", "SERVFAIL".
        """
        log.debug("Resolving %s...", ipAddress)
        addr = dns.reversename.from_address(ipAddress)
        try:
            start = datetime.datetime.now()
            data = self.resolver.query(addr, "PTR", tcp=self.use_tcp)[0].to_text()
            duration = datetime.datetime.now() - start
            self.nameserver_stats[self.cur_nameserver]["tot_duration"] += duration
            self.jobstats["tot_duration"] += duration
            self.jobstats["resolvecnt"] += 1
            self.nameserver_stats[self.cur_nameserver]["resolvecnt"] += 1
        except dns.resolver.NXDOMAIN:
            log.debug("%s -> NXDOMAIN", ipAddress)
            data = "NXDOMAIN"
            duration = datetime.datetime.now() - start
            self.nameserver_stats[self.cur_nameserver]["resolvecnt"] += 1
            self.nameserver_stats[self.cur_nameserver]["nxdcnt"] += 1
            self.jobstats["resolvecnt"] += 1
            self.jobstats["nxdcnt"] += 1
            self.nameserver_stats[self.cur_nameserver]["tot_duration"] += duration
            self.jobstats["tot_duration"] += duration
        except Timeout:
            # No answer within lifetime. This is often not the fault of the
            # open resolver.
            log.warning("Timeout occured @%s querying %s", self.cur_nameserver, addr)
            self.nameserver_stats[self.cur_nameserver]["timeoutcnt"] += 1
            self.jobstats["timeoutcnt"] += 1
            data = "TIMEOUT"
        except CommException:
            log.warning("CommuException occured @%s querying %s", self.cur_nameserver, addr)
            self.nameserver_stats[self.cur_nameserver]["errcnt"] += 1
            self.jobstats["errcnt"] += 1
            data = "ERROR"
            # Fix: was LocalWorker.COMMERR_TRESH — the base class must not
            # reference its subclass; self resolves the same constant.
            if self.nameserver_stats[self.cur_nameserver]["errcnt"] > self.COMMERR_TRESH:
                log.warning("Comm error count for %s exceeded threshold", self.cur_nameserver)
                self._switchDNS()
        except SERVFAIL:
            log.warning("SERVFAIL received @%s querying %s", self.cur_nameserver, addr)
            self.nameserver_stats[self.cur_nameserver]["servfailcnt"] += 1
            self.jobstats["servfailcnt"] += 1
            data = "SERVFAIL"
        except Exception as exc:
            log.error("Uncaught exception: %s", repr(exc))
            raise
        log.debug("Got %s", data)
        return data

    def query(self, qname, nameserver, tcp=False):
        """
        Rip from the dns.resolver.query so that I can differentiate between
        SERVFAIL responses (raised as SERVFAIL) and connection problems
        (raised as CommException).
        """
        source_port = 0
        source = None
        qname = dns.name.from_text(qname, None)
        rdtype = dns.rdatatype.from_text("PTR")
        # Fix: dns.rdataclass.IN is already the numeric class value; passing
        # it through from_text() (which expects a string) was an error.
        rdclass = dns.rdataclass.IN
        request = dns.message.make_query(qname, rdtype, rdclass)
        request.use_edns(self.resolver.edns, self.resolver.ednsflags, self.resolver.payload)
        if self.resolver.flags is not None:
            request.flags = self.resolver.flags
        response = None
        timeout = self.resolver.timeout
        try:
            if tcp:
                response = dns.query.tcp(request, nameserver,
                                         timeout, self.resolver.port,
                                         source=source,
                                         source_port=source_port)
            else:
                response = dns.query.udp(request, nameserver,
                                         timeout, self.resolver.port,
                                         source=source,
                                         source_port=source_port)
            if response.flags & dns.flags.TC:
                # Response truncated; retry with TCP.
                timeout = self.resolver.timeout
                response = dns.query.tcp(request, nameserver,
                                         timeout, self.resolver.port,
                                         source=source,
                                         source_port=source_port)
        except (socket.error, dns.query.UnexpectedSource, dns.exception.FormError, EOFError):
            # These all indicate a communication problem with this nameserver.
            raise CommException
        rcode = response.rcode()
        if rcode == dns.rcode.YXDOMAIN:
            raise dns.resolver.YXDOMAIN
        if rcode == dns.rcode.NXDOMAIN:
            raise dns.resolver.NXDOMAIN
        if rcode == dns.rcode.SERVFAIL:
            raise SERVFAIL
        answer = dns.resolver.Answer(qname, rdtype, rdclass, response, True)
        return answer

    def _switchDNS(self):
        """Mark the current nameserver bad and switch to the next good one."""
        if self.cur_nameserver:
            log.info("Abandoning nameserver %s, stats: %s", self.cur_nameserver, repr(self.nameserver_stats[self.cur_nameserver]))
            # Fix: the attribute was misspelled "cur_namserver", which raised
            # AttributeError the first time a nameserver had to be abandoned.
            self.nameserver_stats[self.cur_nameserver]["good"] = False
        available = [ns for ns in self.nameserver_stats if self.nameserver_stats[ns]["good"]]
        if not available:
            log.warning("No more available nameservers")
            raise Exception("No more available nameservers")
        self.cur_nameserver = available[0]
        self.nameserver_stats[self.cur_nameserver].update({"timeoutcnt": 0,
                                                           "resolvecnt": 0,
                                                           "servfailcnt": 0,  # fix: incremented in _resolveIP
                                                           "nxdcnt": 0,
                                                           "tot_duration": datetime.timedelta(0),
                                                           "errcnt": 0})
        log.info("Switched to nameserver %s", self.cur_nameserver)
        return True

    def __repr__(self):
        raise NotImplementedError()
class LocalWorker(Worker, threading.Thread):
    # Worker running in a local thread, talking to an in-process C2 server.
    workers = []  # class-level registry of every LocalWorker created
    SMAX_RESULTBATCH = 1024  # flush results to the server in batches this big
    def __init__(self, c2server, nameservers=None, use_tcp=False, **kwargs):
        threading.Thread.__init__(self, daemon=False, **kwargs)
        Worker.__init__(self, nameservers=nameservers, use_tcp=use_tcp)
        LocalWorker.workers.append(self)
        self._c2server = c2server
        return
    def run(self):
        # Thread entry point: delegate to Worker.work().
        log.info("Worker started: %s",repr(self))
        self.work()
    def _fetchJob(self):
        # Pull the next job from the C2 server and reset per-job counters.
        log.info("fetching new job...")
        self.current_job = self._c2server.retrieveNewJob()
        self.jobstats = {"timeoutcnt":0,
                         "resolvecnt":0,
                         "servfailcnt":0,
                         "nxdcnt":0,
                         "tot_duration":datetime.timedelta(0),
                         "errcnt":0}
        log.info("Got new job: %s", repr(self.current_job))
        return self.current_job
    def _workJob(self):
        # Resolve every IP in the job's [ipfrom, ipto) range, flushing
        # results in batches of SMAX_RESULTBATCH.
        self.current_job.started = datetime.datetime.now()
        log.info("Working %s", repr(self.current_job))
        results = []
        for i in range(self.current_job.ipfrom, self.current_job.ipto):
            ipaddr = handy.intToIp(i)
            res = self._resolveIP(ipaddr)
            results.append((i, res))
            if len(results) >= LocalWorker.SMAX_RESULTBATCH:
                self._sendResults(results)
                results = []
        self._sendResults(results)
        log.info("Work done!")
        return True
    def _sendResults(self, results):
        log.info("Sending %d results to server... jobstats:%s", len(results), repr(self.jobstats))
        self._c2server.storeResults(results)
        log.info("Results sent!")
        return True
    def _finishJob(self):
        # Stamp completion metadata onto the job and hand it back to the C2.
        self.current_job.completed = datetime.datetime.now()
        self.current_job.nameserver = self.cur_nameserver
        self.current_job.error_count = self.jobstats["errcnt"] + self.jobstats["timeoutcnt"] + self.jobstats["servfailcnt"]
        self.current_job.nxdomain_count = self.jobstats["nxdcnt"]
        log.info("Sending finished job %s to server...", self.current_job)
        self._c2server.finishJob(self.current_job)
        self.current_job = None
        log.info("job sent!")
    def __repr__(self):
        return "<LocalWorker(nameserver={}, name={}, use_tcp={:b})".format(self.cur_nameserver, self.name, self.use_tcp)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.