from __future__ import absolute_import
import functools
from datetime import datetime, timedelta
import mock
import pytest
import pytz
from django.core import mail
from sentry.app import tsdb
from sentry.models import Project, UserOption
from sentry.tasks.reports import (
DISABLED_ORGANIZATIONS_USER_OPTION_KEY, Report, Skipped, change,
clean_series, colorize, deliver_organization_user_report,
get_calendar_range, get_percentile, has_valid_aggregates, index_to_month,
merge_mappings, merge_sequences, merge_series, month_to_index,
prepare_reports, safe_add, user_subscribed_to_organization_reports
)
from sentry.testutils.cases import TestCase
from sentry.utils.dates import to_datetime, to_timestamp
@pytest.yield_fixture(scope="module")
def interval():
stop = datetime(2016, 9, 12, tzinfo=pytz.utc)
yield stop - timedelta(days=7), stop
def test_change():
assert change(1, 0) is None
assert change(10, 5) == 1.00 # 100% increase
assert change(50, 100) == -0.50 # 50% decrease
assert change(None, 100) == -1.00 # 100% decrease
assert change(50, None) is None
def test_safe_add():
assert safe_add(1, 1) == 2
assert safe_add(None, 1) == 1
assert safe_add(1, None) == 1
assert safe_add(None, None) is None
def test_merge_mappings():
assert merge_mappings(
{'a': 1, 'b': 2, 'c': 3},
{'a': 0, 'b': 1, 'c': 2},
) == {'a': 1, 'b': 3, 'c': 5}
def test_merge_mappings_custom_operator():
assert merge_mappings(
{
'a': {'x': 1, 'y': 1},
'b': {'x': 2, 'y': 2},
},
{
'a': {'x': 1, 'y': 1},
'b': {'x': 2, 'y': 2},
},
lambda left, right: merge_mappings(left, right),
) == {
'a': {'x': 2, 'y': 2},
'b': {'x': 4, 'y': 4},
}
def test_merge_mapping_different_keys():
with pytest.raises(AssertionError):
merge_mappings({'a': 1}, {'b': 2})
def test_merge_sequences():
assert merge_sequences(
range(0, 4),
range(0, 4),
) == [i * 2 for i in xrange(0, 4)]
def test_merge_sequences_custom_operator():
assert merge_sequences(
[{chr(65 + i): i} for i in xrange(0, 26)],
[{chr(65 + i): i} for i in xrange(0, 26)],
merge_mappings,
) == [{chr(65 + i): i * 2} for i in xrange(0, 26)]
def test_merge_series():
assert merge_series(
[(i, i) for i in xrange(0, 10)],
[(i, i) for i in xrange(0, 10)],
) == [(i, i * 2) for i in xrange(0, 10)]
def test_merge_series_custom_operator():
assert merge_series(
[(i, {chr(65 + i): i}) for i in xrange(0, 26)],
[(i, {chr(65 + i): i}) for i in xrange(0, 26)],
merge_mappings,
) == [(i, {chr(65 + i): i * 2}) for i in xrange(0, 26)]
def test_merge_series_offset_timestamps():
with pytest.raises(AssertionError):
merge_series(
[(i, i) for i in xrange(0, 10)],
[(i + 1, i) for i in xrange(0, 10)],
)
def test_merge_series_different_lengths():
with pytest.raises(AssertionError):
merge_series(
[(i, i) for i in xrange(0, 1)],
[(i, i) for i in xrange(0, 10)],
)
with pytest.raises(AssertionError):
merge_series(
[(i, i) for i in xrange(0, 10)],
[(i, i) for i in xrange(0, 1)],
)
def test_clean_series():
rollup = 60
n = 5
start = to_datetime(rollup * 0)
stop = to_datetime(rollup * n)
series = [(rollup * i, i) for i in xrange(0, n)]
assert clean_series(
start,
stop,
rollup,
series,
) == series
def test_clean_series_trims_extra():
rollup = 60
n = 5
start = to_datetime(rollup * 0)
stop = to_datetime(rollup * n)
series = [(rollup * i, i) for i in xrange(0, n + 1)]
assert clean_series(
start,
stop,
rollup,
series,
) == series[:n]
def test_clean_series_rejects_offset_timestamp():
rollup = 60
n = 5
start = to_datetime(rollup * 0)
stop = to_datetime(rollup * n)
series = [(rollup * (i * 1.1), i) for i in xrange(0, n)]
with pytest.raises(AssertionError):
clean_series(
start,
stop,
rollup,
series,
)
def test_has_valid_aggregates(interval):
project = None # parameter is unused
def make_report(aggregates):
return Report(None, aggregates, None, None, None)
assert has_valid_aggregates(
interval,
(project, make_report([None] * 4)),
) is False
assert has_valid_aggregates(
interval,
(project, make_report([0] * 4)),
) is False
assert has_valid_aggregates(
interval,
(project, make_report([1, 0, 0, 0])),
) is True
def test_percentiles():
    values = [3, 6, 7, 8, 8, 9, 10, 13, 15, 16, 20]
    assert get_percentile(values, 0.25) == 7
    assert get_percentile(values, 0.50) == 9
    assert get_percentile(values, 0.75) == 15
    assert get_percentile(values, 1.00) == 20
def test_colorize():
colors = ['green', 'yellow', 'red']
values = [2, 5, 1, 3, 4, 0]
legend, results = colorize(colors, values)
assert results == [
(2, 'yellow'),
(5, 'red'),
(1, 'green'),
(3, 'yellow'),
(4, 'red'),
(0, 'green'),
]
def test_month_indexing():
assert index_to_month(month_to_index(1986, 10)) == (1986, 10)
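# get_calendar_range returns an inclusive (start, stop) pair of month indexes;
# in the test below, the three months preceding February 2016 are November
# 2015 through January 2016.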
def test_calendar_range():
assert get_calendar_range(
(None, datetime(2016, 2, 1, tzinfo=pytz.utc)),
months=3,
) == (
month_to_index(2015, 11),
month_to_index(2016, 1),
)
class ReportTestCase(TestCase):
def test_integration(self):
Project.objects.all().delete()
now = datetime(2016, 9, 12, tzinfo=pytz.utc)
project = self.create_project(
organization=self.organization,
team=self.team,
date_added=now - timedelta(days=90),
)
tsdb.incr(
tsdb.models.project,
project.id,
now - timedelta(days=1),
)
member_set = set(project.team.member_set.all())
with self.tasks(), \
mock.patch.object(tsdb, 'get_earliest_timestamp') as get_earliest_timestamp:
# Ensure ``get_earliest_timestamp`` is relative to the fixed
# "current" timestamp -- this prevents filtering out data points
# that would be considered expired relative to the *actual* current
# timestamp.
get_earliest_timestamp.return_value = to_timestamp(now - timedelta(days=60))
prepare_reports(timestamp=to_timestamp(now))
assert len(mail.outbox) == len(member_set) == 1
message = mail.outbox[0]
assert self.organization.name in message.subject
def test_deliver_organization_user_report_respects_settings(self):
user = self.user
organization = self.organization
set_option_value = functools.partial(
UserOption.objects.set_value,
user,
DISABLED_ORGANIZATIONS_USER_OPTION_KEY,
)
deliver_report = functools.partial(
deliver_organization_user_report,
0,
60 * 60 * 24 * 7,
organization.id,
user.id,
)
set_option_value([])
assert deliver_report() is not Skipped.NotSubscribed
set_option_value([organization.id])
assert deliver_report() is Skipped.NotSubscribed
def test_user_subscribed_to_organization_reports(self):
user = self.user
organization = self.organization
set_option_value = functools.partial(
UserOption.objects.set_value,
user,
DISABLED_ORGANIZATIONS_USER_OPTION_KEY,
)
set_option_value([])
assert user_subscribed_to_organization_reports(user, organization) is True
set_option_value([-1])
assert user_subscribed_to_organization_reports(user, organization) is True
set_option_value([organization.id])
assert user_subscribed_to_organization_reports(user, organization) is False
# Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import mock
from oslo_config import cfg
from oslo_utils import timeutils
from six.moves.urllib import parse as urlparse
from magnum.api.controllers.v1 import x509keypair as api_x509keypair
from magnum.common import utils
from magnum.conductor import api as rpcapi
from magnum import objects
from magnum.tests import base
from magnum.tests.unit.api import base as api_base
from magnum.tests.unit.api import utils as apiutils
from magnum.tests.unit.objects import utils as obj_utils
class TestX509KeyPairObject(base.TestCase):
def test_x509keypair_init(self):
x509keypair_dict = apiutils.x509keypair_post_data(bay_uuid=None)
x509keypair = api_x509keypair.X509KeyPair(**x509keypair_dict)
self.assertEqual('certificate', x509keypair.certificate)
class TestListX509KeyPair(api_base.FunctionalTest):
def setUp(self):
super(TestListX509KeyPair, self).setUp()
self.bay = obj_utils.create_test_bay(self.context)
def test_empty(self):
response = self.get_json('/x509keypairs')
self.assertEqual([], response['x509keypairs'])
def test_one(self):
x509keypair = obj_utils.create_test_x509keypair(self.context)
response = self.get_json('/x509keypairs')
self.assertEqual(x509keypair.uuid, response['x509keypairs'][0]["uuid"])
self.assertIn('name', response['x509keypairs'][0])
self.assertIn('bay_uuid', response['x509keypairs'][0])
self.assertIn('certificate', response['x509keypairs'][0])
self.assertIn('private_key', response['x509keypairs'][0])
def test_get_one(self):
x509keypair = obj_utils.create_test_x509keypair(self.context)
response = self.get_json('/x509keypairs/%s' % x509keypair['uuid'])
self.assertEqual(x509keypair.uuid, response['uuid'])
self.assertIn('name', response)
self.assertIn('bay_uuid', response)
self.assertIn('certificate', response)
self.assertIn('private_key', response)
def test_get_one_by_name(self):
x509keypair = obj_utils.create_test_x509keypair(self.context)
response = self.get_json('/x509keypairs/%s' % x509keypair['name'])
self.assertEqual(x509keypair.uuid, response['uuid'])
self.assertIn('name', response)
self.assertIn('bay_uuid', response)
self.assertIn('certificate', response)
self.assertIn('private_key', response)
def test_get_one_by_name_not_found(self):
response = self.get_json(
'/x509keypairs/not_found',
expect_errors=True)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
def test_get_one_by_name_multiple_x509keypair(self):
obj_utils.create_test_x509keypair(self.context,
name='test_x509keypair',
uuid=utils.generate_uuid())
obj_utils.create_test_x509keypair(self.context,
name='test_x509keypair',
uuid=utils.generate_uuid())
response = self.get_json('/x509keypairs/test_x509keypair',
expect_errors=True)
self.assertEqual(409, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
def test_detail(self):
x509keypair = obj_utils.create_test_x509keypair(self.context)
response = self.get_json('/x509keypairs/detail')
self.assertEqual(x509keypair.uuid, response['x509keypairs'][0]["uuid"])
self.assertIn('name', response['x509keypairs'][0])
self.assertIn('bay_uuid', response['x509keypairs'][0])
self.assertIn('certificate', response['x509keypairs'][0])
self.assertIn('private_key', response['x509keypairs'][0])
def test_detail_against_single(self):
x509keypair = obj_utils.create_test_x509keypair(self.context)
response = self.get_json(
'/x509keypairs/%s/detail' % x509keypair['uuid'],
expect_errors=True)
self.assertEqual(404, response.status_int)
def test_many(self):
keypair_list = []
for id_ in range(5):
x509keypair = obj_utils.create_test_x509keypair(
self.context, id=id_,
uuid=utils.generate_uuid())
keypair_list.append(x509keypair.uuid)
response = self.get_json('/x509keypairs')
self.assertEqual(len(keypair_list), len(response['x509keypairs']))
uuids = [b['uuid'] for b in response['x509keypairs']]
self.assertEqual(sorted(keypair_list), sorted(uuids))
def test_links(self):
uuid = utils.generate_uuid()
obj_utils.create_test_x509keypair(self.context, id=1, uuid=uuid)
response = self.get_json('/x509keypairs/%s' % uuid)
self.assertIn('links', response.keys())
self.assertEqual(2, len(response['links']))
self.assertIn(uuid, response['links'][0]['href'])
for l in response['links']:
bookmark = l['rel'] == 'bookmark'
self.assertTrue(self.validate_link(l['href'], bookmark=bookmark))
def test_collection_links(self):
for id_ in range(5):
obj_utils.create_test_x509keypair(self.context, id=id_,
uuid=utils.generate_uuid())
response = self.get_json('/x509keypairs/?limit=3')
self.assertEqual(3, len(response['x509keypairs']))
next_marker = response['x509keypairs'][-1]['uuid']
self.assertIn(next_marker, response['next'])
def test_collection_links_default_limit(self):
cfg.CONF.set_override('max_limit', 3, 'api')
for id_ in range(5):
obj_utils.create_test_x509keypair(self.context, id=id_,
uuid=utils.generate_uuid())
response = self.get_json('/x509keypairs')
self.assertEqual(3, len(response['x509keypairs']))
next_marker = response['x509keypairs'][-1]['uuid']
self.assertIn(next_marker, response['next'])
class TestPost(api_base.FunctionalTest):
def setUp(self):
super(TestPost, self).setUp()
self.bay = obj_utils.create_test_bay(self.context)
p = mock.patch.object(rpcapi.API, 'x509keypair_create')
self.mock_x509keypair_create = p.start()
self.mock_x509keypair_create.side_effect = \
self._simulate_rpc_x509keypair_create
self.addCleanup(p.stop)
def _simulate_rpc_x509keypair_create(self, x509keypair):
x509keypair.create()
return x509keypair
@mock.patch('oslo_utils.timeutils.utcnow')
def test_create_x509keypair(self, mock_utcnow):
cdict = apiutils.x509keypair_post_data()
test_time = datetime.datetime(2000, 1, 1, 0, 0)
mock_utcnow.return_value = test_time
response = self.post_json('/x509keypairs', cdict)
self.assertEqual('application/json', response.content_type)
self.assertEqual(201, response.status_int)
# Check location header
self.assertIsNotNone(response.location)
expected_location = '/v1/x509keypairs/%s' % cdict['uuid']
self.assertEqual(expected_location,
urlparse.urlparse(response.location).path)
self.assertEqual(cdict['uuid'], response.json['uuid'])
        self.assertNotIn('updated_at', response.json.keys())
return_created_at = timeutils.parse_isotime(
response.json['created_at']).replace(tzinfo=None)
self.assertEqual(test_time, return_created_at)
def test_create_x509keypair_set_project_id_and_user_id(self):
cdict = apiutils.x509keypair_post_data()
def _simulate_keypair_create(x509keypair):
self.assertEqual(self.context.project_id, x509keypair.project_id)
self.assertEqual(self.context.user_id, x509keypair.user_id)
x509keypair.create()
return x509keypair
self.mock_x509keypair_create.side_effect = _simulate_keypair_create
self.post_json('/x509keypairs', cdict)
def test_create_x509keypair_doesnt_contain_id(self):
with mock.patch.object(self.dbapi, 'create_x509keypair',
wraps=self.dbapi.create_x509keypair) as cc_mock:
cdict = apiutils.x509keypair_post_data(
name='x509keypair_example_A')
response = self.post_json('/x509keypairs', cdict)
self.assertEqual(cdict['name'], response.json['name'])
cc_mock.assert_called_once_with(mock.ANY)
# Check that 'id' is not in first arg of positional args
self.assertNotIn('id', cc_mock.call_args[0][0])
def test_create_x509keypair_generate_uuid(self):
cdict = apiutils.x509keypair_post_data()
del cdict['uuid']
response = self.post_json('/x509keypairs', cdict)
self.assertEqual('application/json', response.content_type)
self.assertEqual(201, response.status_int)
self.assertEqual(cdict['name'], response.json['name'])
self.assertTrue(utils.is_uuid_like(response.json['uuid']))
def test_create_x509keypair_no_bay_uuid(self):
cdict = apiutils.x509keypair_post_data()
del cdict['bay_uuid']
response = self.post_json('/x509keypairs', cdict, expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_int)
def test_create_x509keypair_with_non_existent_bay_uuid(self):
cdict = apiutils.x509keypair_post_data(bay_uuid=utils.generate_uuid())
response = self.post_json('/x509keypairs', cdict, expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_int)
self.assertTrue(response.json['errors'])
def test_create_x509keypair_with_bay_name(self):
cdict = apiutils.x509keypair_post_data(bay_uuid=self.bay.name)
response = self.post_json('/x509keypairs', cdict, expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(201, response.status_int)
class TestDelete(api_base.FunctionalTest):
def setUp(self):
super(TestDelete, self).setUp()
self.bay = obj_utils.create_test_bay(self.context)
self.x509keypair = obj_utils.create_test_x509keypair(self.context)
p = mock.patch.object(rpcapi.API, 'x509keypair_delete')
self.mock_x509keypair_delete = p.start()
self.mock_x509keypair_delete.side_effect = \
self._simulate_rpc_x509keypair_delete
self.addCleanup(p.stop)
def _simulate_rpc_x509keypair_delete(self, x509keypair_uuid):
x509keypair = objects.X509KeyPair.get_by_uuid(self.context,
x509keypair_uuid)
x509keypair.destroy()
def test_delete_x509keypair(self):
self.delete('/x509keypairs/%s' % self.x509keypair.uuid)
response = self.get_json('/x509keypairs/%s' % self.x509keypair.uuid,
expect_errors=True)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
def test_delete_x509keypair_not_found(self):
uuid = utils.generate_uuid()
response = self.delete('/x509keypairs/%s' % uuid, expect_errors=True)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
def test_delete_x509keypair_with_name_not_found(self):
response = self.delete('/x509keypairs/not_found', expect_errors=True)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
def test_delete_x509keypair_with_name(self):
response = self.delete('/x509keypairs/%s' % self.x509keypair.name,
expect_errors=True)
self.assertEqual(204, response.status_int)
def test_delete_multiple_x509keypair_by_name(self):
obj_utils.create_test_x509keypair(self.context,
name='test_x509keypair',
uuid=utils.generate_uuid())
obj_utils.create_test_x509keypair(self.context,
name='test_x509keypair',
uuid=utils.generate_uuid())
response = self.delete('/x509keypairs/test_x509keypair',
expect_errors=True)
self.assertEqual(409, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
#!/usr/bin/env python
#
# Copyright (c) 2013 Intel Corporation. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import distutils.dir_util
import optparse
import os
import shutil
import sys
LIBRARY_PROJECT_NAME = 'xwalk_core_library'
XWALK_CORE_SHELL_APK = 'xwalk_core_shell_apk'
def AddGeneratorOptions(option_parser):
option_parser.add_option('-s', dest='source',
help='Source directory of project root.',
type='string')
option_parser.add_option('-t', dest='target',
help='Product out target directory.',
type='string')
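# Hypothetical invocation sketch (the script name and paths below are
# placeholders, not taken from the actual build system):
#   python generate_core_library.py -s /path/to/chromium/src -t /path/to/src/out/Release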
def CleanLibraryProject(out_directory):
out_project_path = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src')
if os.path.exists(out_project_path):
shutil.rmtree(out_project_path)
def CopyProjectFiles(project_source, out_directory):
print 'Copying library project files...'
# Copy AndroidManifest.xml from template.
source_file = os.path.join(project_source, 'xwalk', 'build', 'android',
'xwalkcore_library_template',
'AndroidManifest.xml')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME,
'AndroidManifest.xml')
shutil.copyfile(source_file, target_file)
# Copy Eclipse project properties from template.
source_file = os.path.join(project_source, 'xwalk', 'build', 'android',
'xwalkcore_library_template',
'project.properties')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME,
'project.properties')
shutil.copyfile(source_file, target_file)
# Copy Ant build file.
source_file = os.path.join(project_source, 'xwalk', 'build', 'android',
'xwalkcore_library_template',
'build.xml')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'build.xml')
shutil.copyfile(source_file, target_file)
# Copy Ant properties file.
source_file = os.path.join(project_source, 'xwalk', 'build', 'android',
'xwalkcore_library_template',
'ant.properties')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'ant.properties')
shutil.copyfile(source_file, target_file)
def CopyChromiumJavaSources(project_source, out_directory):
print 'Copying Java sources...'
target_package_directory = os.path.join(out_directory, LIBRARY_PROJECT_NAME,
'src', 'org', 'chromium')
if not os.path.exists(target_package_directory):
os.makedirs(target_package_directory)
source_path = os.path.join(project_source, 'base', 'android', 'java', 'src',
'org', 'chromium', 'base')
target_path = os.path.join(target_package_directory, 'base')
shutil.copytree(source_path, target_path)
source_path = os.path.join(project_source, 'content', 'public', 'android',
'java', 'src', 'org', 'chromium', 'content')
target_path = os.path.join(target_package_directory, 'content')
shutil.copytree(source_path, target_path)
source_path = os.path.join(project_source, 'media', 'base', 'android', 'java',
'src', 'org', 'chromium', 'media')
target_path = os.path.join(target_package_directory, 'media')
shutil.copytree(source_path, target_path)
source_path = os.path.join(project_source, 'net', 'android', 'java', 'src',
'org', 'chromium', 'net')
target_path = os.path.join(target_package_directory, 'net')
shutil.copytree(source_path, target_path)
source_path = os.path.join(project_source, 'ui', 'android', 'java', 'src',
'org', 'chromium', 'ui')
target_path = os.path.join(target_package_directory, 'ui')
shutil.copytree(source_path, target_path)
source_path = os.path.join(project_source, 'components',
'navigation_interception', 'android', 'java',
'src', 'org', 'chromium', 'components',
'navigation_interception',)
target_path = os.path.join(target_package_directory, 'components',
'navigation_interception')
shutil.copytree(source_path, target_path)
source_path = os.path.join(project_source, 'components',
'web_contents_delegate_android', 'android', 'java',
'src', 'org', 'chromium', 'components',
'web_contents_delegate_android')
target_path = os.path.join(target_package_directory, 'components',
'web_contents_delegate_android')
shutil.copytree(source_path, target_path)
source_file = os.path.join(project_source, 'content', 'public', 'android',
'java', 'resource_map', 'org', 'chromium',
'content', 'R.java')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src', 'org',
'chromium', 'content', 'R.java')
shutil.copyfile(source_file, target_file)
source_file = os.path.join(project_source, 'ui', 'android', 'java',
'resource_map', 'org', 'chromium', 'ui', 'R.java')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src', 'org',
'chromium', 'ui', 'R.java')
shutil.copyfile(source_file, target_file)
def CopyGeneratedSources(out_directory):
print 'Copying generated source files...'
source_file = os.path.join(out_directory, XWALK_CORE_SHELL_APK,
'native_libraries_java',
'NativeLibraries.java')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src', 'org',
'chromium', 'content',
'app', 'NativeLibraries.java')
shutil.copyfile(source_file, target_file)
source_file = os.path.join(out_directory, 'gen', 'templates', 'org',
'chromium', 'content', 'common',
'ResultCodes.java')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src', 'org',
'chromium', 'content', 'common',
'ResultCodes.java')
shutil.copyfile(source_file, target_file)
source_file = os.path.join(out_directory, 'gen', 'templates',
'org', 'chromium', 'net', 'NetError.java')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src', 'org',
'chromium', 'net', 'NetError.java')
shutil.copyfile(source_file, target_file)
source_file = os.path.join(out_directory, 'gen', 'templates',
'org', 'chromium', 'content', 'browser',
'PageTransitionTypes.java')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src', 'org',
'chromium', 'content', 'browser',
'PageTransitionTypes.java')
shutil.copyfile(source_file, target_file)
source_file = os.path.join(out_directory, 'gen', 'templates',
'org', 'chromium', 'content', 'browser',
'SpeechRecognitionError.java')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src', 'org',
'chromium', 'content', 'browser',
'SpeechRecognitionError.java')
shutil.copyfile(source_file, target_file)
source_file = os.path.join(out_directory, 'gen', 'templates',
'org', 'chromium', 'net', 'PrivateKeyType.java')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src', 'org',
'chromium', 'net', 'PrivateKeyType.java')
shutil.copyfile(source_file, target_file)
source_file = os.path.join(out_directory, 'gen', 'templates',
'org', 'chromium', 'net',
'CertVerifyResultAndroid.java')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src', 'org',
'chromium', 'net', 'CertVerifyResultAndroid.java')
shutil.copyfile(source_file, target_file)
source_file = os.path.join(out_directory, 'gen', 'templates',
'org', 'chromium', 'net',
'CertificateMimeType.java')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src', 'org',
'chromium', 'net', 'CertificateMimeType.java')
shutil.copyfile(source_file, target_file)
source_file = os.path.join(out_directory, 'gen', 'templates',
'org', 'chromium', 'base',
'ActivityState.java')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src', 'org',
'chromium', 'base', 'ActivityState.java')
shutil.copyfile(source_file, target_file)
source_file = os.path.join(out_directory, 'gen', 'templates',
'org', 'chromium', 'base',
'MemoryPressureLevelList.java')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src', 'org',
'chromium', 'base', 'MemoryPressureLevelList.java')
shutil.copyfile(source_file, target_file)
def CopyXwalkJavaSource(project_source, out_directory):
print 'Copying XWalk Java sources...'
target_package_directory = os.path.join(out_directory, LIBRARY_PROJECT_NAME,
'src', 'org', 'xwalk')
if not os.path.exists(target_package_directory):
os.mkdir(target_package_directory)
source_path = os.path.join(project_source, 'xwalk', 'runtime', 'android',
'java', 'src', 'org', 'xwalk', 'core')
target_path = os.path.join(target_package_directory, 'core')
shutil.copytree(source_path, target_path)
source_path = os.path.join(project_source, 'xwalk', 'extensions', 'android',
'java', 'src', 'org', 'xwalk', 'core', 'extensions')
target_path = os.path.join(target_package_directory, 'core', 'extensions')
shutil.copytree(source_path, target_path)
def CopyBinaries(out_directory):
print 'Copying binaries...'
asset_directory = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'assets')
if not os.path.exists(asset_directory):
os.mkdir(asset_directory)
source_file = os.path.join(out_directory,
'xwalk.pak')
target_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'assets',
'xwalk.pak')
shutil.copyfile(source_file, target_file)
# Copy jar files to libs.
libs_directory = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'libs')
if not os.path.exists(libs_directory):
os.mkdir(libs_directory)
source_file = os.path.join(out_directory, 'lib.java', 'eyesfree_java.jar')
target_file = os.path.join(libs_directory, 'eyesfree_java.jar')
shutil.copyfile(source_file, target_file)
source_file = os.path.join(out_directory, 'lib.java', 'guava_javalib.jar')
target_file = os.path.join(libs_directory, 'guava_javalib.jar')
shutil.copyfile(source_file, target_file)
source_file = os.path.join(out_directory, 'lib.java', 'jsr_305_javalib.jar')
target_file = os.path.join(libs_directory, 'jsr_305_javalib.jar')
shutil.copyfile(source_file, target_file)
source_dir = os.path.join(out_directory, XWALK_CORE_SHELL_APK, 'libs')
target_dir = libs_directory
distutils.dir_util.copy_tree(source_dir, target_dir)
def CopyResources(out_directory):
print 'Copying resources...'
res_directory = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'res')
if os.path.exists(res_directory):
shutil.rmtree(res_directory)
source_path = os.path.join(out_directory, 'gen', 'content_java', 'res_grit')
distutils.dir_util.copy_tree(source_path, res_directory)
source_path = os.path.join(out_directory, 'gen', 'xwalk_core_java',
'res_grit')
distutils.dir_util.copy_tree(source_path, res_directory)
source_path = os.path.join(out_directory, 'gen', 'ui_java', 'res_grit')
distutils.dir_util.copy_tree(source_path, res_directory)
def PostCopyLibraryProject(out_directory):
print 'Post Copy Library Project...'
common_aidl_file = os.path.join(out_directory, LIBRARY_PROJECT_NAME, 'src',
'org', 'chromium', 'content', 'common',
'common.aidl')
if os.path.exists(common_aidl_file):
os.remove(common_aidl_file)
def main(argv):
print 'Generating XWalkCore Library Project...'
option_parser = optparse.OptionParser()
AddGeneratorOptions(option_parser)
options, _ = option_parser.parse_args(argv)
if not os.path.exists(options.source):
print 'Source project does not exist, please provide correct directory.'
sys.exit(1)
out_directory = options.target
# Clean directory for project first.
CleanLibraryProject(out_directory)
out_project_directory = os.path.join(out_directory, LIBRARY_PROJECT_NAME)
if not os.path.exists(out_project_directory):
os.mkdir(out_project_directory)
# Copy Eclipse project files of library project.
CopyProjectFiles(options.source, out_directory)
# Copy Java sources of chromium.
CopyChromiumJavaSources(options.source, out_directory)
CopyXwalkJavaSource(options.source, out_directory)
CopyGeneratedSources(out_directory)
  # Copy binaries and resources.
CopyBinaries(out_directory)
CopyResources(out_directory)
# Post copy library project.
PostCopyLibraryProject(out_directory)
print 'Your Android library project has been created at %s' % (
out_project_directory)
if __name__ == '__main__':
sys.exit(main(sys.argv))
"""Losonczy Lab data archive utilities.
These utilities can be used to archive and restore files to a remote host.
Initially designed to work with Amazon Web Services S3.
Working with the AWS S3 bucket requires two packages:
>>> pip install --user boto3 awscli
To get started, you need to setup an account with AWS IAM that grants you
access to the lab S3 bucket and load your authentication keys.
See:
https://boto3.readthedocs.io/en/latest/guide/quickstart.html#configuration
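Once installed, load your access keys into the default profile (a minimal
sketch; the actual keys and region come from your IAM account, not from this
module):
>>> aws configure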
To archive a file:
>>> archive('/data/user/mouse1/TSeries1/TSeries1.h5')
To restore a file:
>>> restore('/data/user/mouse1/TSeries1/TSeries1.h5.archive')
"""
import os
import boto3
import time
import json
import hashlib
import base64
import binascii
S3_BUCKET = 'losonczylab.data.archive'
# The number of days a restored file will remain before being deleted.
# Files restored from Glacier storage are kept in both Glacier and a copy in
# Reduced Redundancy storage, so for this many days we pay double for the data
# storage.
GLACIER_RETENTION_PERIOD = 7
# Load the S3 service modules; creating an S3 resource makes boto3.s3.transfer
# importable for the TransferConfig below.
boto3.resource('s3')
TRANSFER_CONFIG = boto3.s3.transfer.TransferConfig(
multipart_threshold=8388608,
max_concurrency=10,
multipart_chunksize=8388608,
num_download_attempts=5,
max_io_queue=100,
io_chunksize=262144)
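# These values match boto3's documented TransferConfig defaults; etag() below
# reuses the same multipart threshold and chunk size so that a locally
# computed ETag can be compared against the one S3 reports after upload.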
def archive(local_path, host='aws', **kwargs):
"""Archive a local file to a remote host.
Parameters
----------
local_path : str
Path to the local file to upload.
host : {'aws'}
Remote host/service to upload file to.
kwargs : optional
Additional host-specific arguments.
"""
print("[{}] Beginning upload: {}".format(
convert_time(time.time()), local_path))
if host == 'aws':
remote_path = aws_archive(local_path, **kwargs)
else:
raise TypeError("[{}] Unrecognized host: {}".format(
convert_time(time.time()), host))
if remote_path:
os.remove(local_path)
print("[{}] Upload successful: {}".format(
convert_time(time.time()), local_path))
else:
print("[{}] Upload failed: {}".format(
convert_time(time.time()), local_path))
def restore(placeholder_file, restore_in_place=False, **kwargs):
"""Restore a file from a remote host.
Parameters
----------
placeholder_file : str
Path to the placeholder file (.archive) that contains information on
the remote location of the file to restore.
restore_in_place : bool
If True, ignore the stored local file path in the placeholder file and
restore the file in the same location as the placeholder file.
kwargs : optional
Additional host-specific keyword arguments.
"""
data = parse_placeholder(placeholder_file)
    local_restore_path = placeholder_file[:-len('.archive')] if \
        restore_in_place else data['local_path']
print("[{}] Beginning restore: {}".format(
convert_time(time.time()), local_restore_path))
if data.get('host') == 'aws':
local_path = aws_restore(
remote_path=data['remote_path'],
local_path=local_restore_path,
bucket=data['aws_bucket'],
checksum_md5=data['checksum_md5'],
orig_stat=data['stat'],
**kwargs)
else:
raise TypeError("[{}] Unrecognized host: {}".format(
convert_time(time.time()), data.get('host')))
if local_path:
if 'Glacier restore' in local_path:
print("[{}] {}".format(convert_time(time.time()), local_path))
else:
os.remove(placeholder_file)
print("[{}] File restored: {}".format(
convert_time(time.time()), local_path))
else:
print("[{}] Restore failed: {}".format(
convert_time(time.time()), placeholder_file))
def aws_archive(local_path, bucket=S3_BUCKET, storage_class='STANDARD',
transfer_config=TRANSFER_CONFIG):
"""Upload a file to S3 and write the placeholder file.
Parameters
----------
local_path : str
Path to file, will be mirrored in S3.
bucket : str
Name of S3 bucket to upload file to.
storage_class : {'STANDARD', 'STANDARD_IA', 'REDUCED_REDUNDANCY'}
Initial storage class of file. Lifecycle rules on the bucket might
change this.
transfer_config : boto3.s3.transfer.TransferConfig, optional
Transfer configuration objects which describes the parameters used
by the transfer manager.
"""
local_path = os.path.abspath(local_path)
remote_path = local_path.lstrip('/')
checksum_md5 = md5(local_path)
extra_args = {'StorageClass': storage_class,
'Metadata': {
'checksum_md5': checksum_md5,
'local_path': local_path,
'timestamp': convert_time(time.time())
}
}
aws_upload(
local_path,
remote_path,
bucket,
ExtraArgs=extra_args,
Config=transfer_config)
if aws_verify(local_path, remote_path, bucket, transfer_config):
write_placeholder(
local_path, remote_path, host='aws', aws_bucket=bucket,
checksum_md5=checksum_md5)
return remote_path
else:
aws_delete(remote_path, bucket)
return None
def aws_restore(remote_path, local_path, bucket=S3_BUCKET, checksum_md5=None,
transfer_config=TRANSFER_CONFIG, orig_stat=None):
"""Restore a file from AWS S3 to the local machine.
Parameters
----------
remote_path : str
Remote path to file.
local_path : str
Local path to file.
bucket : str
Name of S3 bucket.
checksum_md5 : str, optional
transfer_config : boto3.s3.transfer.TransferConfig, optional
Transfer configuration objects which describes the parameters used
by the transfer manager.
orig_stat : dict, optional
Dictionary of file stat properties. If passed in, will attempt to
restore some of the values. Should at least include: 'mode',
'uid', and 'gid'.
"""
file_query = aws_query(remote_path, bucket)
if file_query.get('StorageClass', 'STANDARD') != 'GLACIER' or \
file_query.get('Restore', '').startswith(
'ongoing-request="false"'):
aws_download(
remote_path=remote_path,
local_path=local_path,
bucket=bucket,
Config=transfer_config)
if checksum_md5 is None or checksum_md5 == md5(local_path):
if orig_stat:
restore_file_stat(local_path, orig_stat)
aws_delete(remote_path, bucket)
return local_path
else:
return None
else:
if file_query.get('Restore') is None:
aws_glacier_restore(remote_path, bucket=bucket)
return "Glacier restore initiated: {}".format(local_path)
elif file_query.get('Restore') == 'ongoing-request="true"':
return "Glacier restore in progress: {}".format(local_path)
return None
def aws_glacier_restore(remote_path, bucket=S3_BUCKET,
retention_period=GLACIER_RETENTION_PERIOD):
"""Initiate a restore of a file storage in Glacier storage.
See: https://boto3.readthedocs.io/en/latest/reference/services/s3.html#id26
Parameters
----------
remote_path : str
Remote path to file.
bucket : str
Name of S3 bucket.
retention_period : int
        The number of days a restored file copy should exist before being
deleted. The original copy in Glacier storage is never deleted by this
action.
"""
s3 = boto3.client('s3')
s3.restore_object(
Bucket=bucket, Key=remote_path,
RestoreRequest={'Days': retention_period})
def aws_verify(local_path, remote_path, bucket=S3_BUCKET,
transfer_config=TRANSFER_CONFIG):
"""Compare a locally calculated eTag with the remote eTag.
Parameters
----------
local_path : str
Local path to file.
remote_path : str
Remote path to file.
bucket : str
Name of S3 bucket.
transfer_config : boto3.s3.transfer.TransferConfig, optional
Transfer configuration objects which describes the parameters used
by the transfer manager.
"""
local_etag = etag(
local_path,
upload_max_size=transfer_config.multipart_threshold,
upload_part_size=transfer_config.multipart_chunksize)
file_query = aws_query(remote_path, bucket)
return file_query.get('ETag', '').strip('"') == local_etag
def aws_upload(local_path, remote_path, bucket=S3_BUCKET, **kwargs):
"""Upload a file to AWS S3.
Parameters
----------
local_path : str
Local path to file.
remote_path : str
Remote path to file.
bucket : str
Name of S3 bucket.
kwargs : optional
Additional arguments to pass directly to boto3.s3.upload_file.
"""
s3 = boto3.client('s3')
s3.upload_file(
Filename=local_path, Bucket=bucket, Key=remote_path, **kwargs)
def aws_download(remote_path, local_path, bucket=S3_BUCKET, **kwargs):
"""Download a file from AWS S3.
Parameters
----------
remote_path : str
Remote path to file.
local_path : str
Local path to file.
bucket : str
Name of S3 bucket.
kwargs : optional
Additional arguments to pass directly to boto3.s3.download_file.
"""
s3 = boto3.client('s3')
s3.download_file(
Bucket=bucket, Key=remote_path, Filename=local_path, **kwargs)
def aws_delete(remote_path, bucket=S3_BUCKET):
"""Delete a file from AWS S3.
Parameters
----------
remote_path : str
Remote path to file.
bucket : str
Name of S3 bucket.
"""
s3 = boto3.client('s3')
s3.delete_object(Bucket=bucket, Key=remote_path)
def aws_query(remote_path, bucket=S3_BUCKET):
"""Return the metadata associated with a remote file in AWS S3.
Parameters
----------
remote_path : str
Remote path to file.
bucket : str
Name of S3 bucket.
"""
s3 = boto3.client('s3')
return s3.head_object(Bucket=bucket, Key=remote_path)
def write_placeholder(
local_path, remote_path, host, checksum_md5=None,
**additional_metadata):
"""Write placeholder file that references remote location.
The placeholder file should contain all the information to locate the
remote file and also verify that a file downloaded from the remote
location matches the original file.
Parameters
----------
local_path : str
Local path to file.
remote_path : str
Remote path to file.
host : str
Remote host/service where file was uploaded.
checksum_md5 : str, optional
MD5 checksum of the local file. If None, it will be calculated.
additional_metadata : optional
Additional host-specific information to store in the file.
"""
placeholder_path = local_path + '.archive'
if os.path.exists(placeholder_path):
raise ValueError('File already exists: {}'.format(placeholder_path))
st_mode, st_ino, st_dev, st_nlink, st_uid, st_gid, st_size, st_atime, \
st_mtime, st_ctime = os.stat(local_path)
if checksum_md5 is None:
checksum_md5 = md5(local_path)
data = {
'stat': {
'mode': st_mode,
'ino': st_ino,
'dev': st_dev,
'nlink': st_nlink,
'uid': st_uid,
'gid': st_gid,
'size': st_size,
'atime': convert_time(st_atime),
'mtime': convert_time(st_mtime),
'ctime': convert_time(st_ctime)},
'timestamp': convert_time(time.time()),
'host': host,
'local_path': local_path,
'remote_path': remote_path,
'checksum_md5': checksum_md5,
}
data.update(additional_metadata)
json.dump(data, open(placeholder_path, 'w'), sort_keys=True, indent=4,
separators=(',', ': '))
def parse_placeholder(placeholder_path):
"""Returned the parsed contents of a placeholder file."""
return json.load(open(placeholder_path, 'r'))
def restore_file_stat(local_path, stat):
"""Attempt to restore the file properties to original values.
Parameters
----------
local_path : str
Local path to file.
stat : dict
Dictionary of file stat properties. Should at least include: 'mode',
'uid', and 'gid'.
"""
try:
os.chmod(local_path, stat['mode'])
except OSError:
pass
try:
os.chown(local_path, stat['uid'], stat['gid'])
except OSError:
pass
def md5(file_path):
"""Iteratively calculate the MD5 hash of a file.
Should be equivalent to the shell command:
>>> openssl md5 -binary file_path | base64
Parameters
----------
file_path : str
Path to file.
"""
hash_md5 = hashlib.md5()
with open(file_path, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return base64.b64encode(hash_md5.digest())
def etag(file_path, upload_max_size=TRANSFER_CONFIG.multipart_threshold,
upload_part_size=TRANSFER_CONFIG.multipart_chunksize):
"""Calculate the same eTag that AWS will calculate after upload.
The algorithm is different for multi-part uploads, so it depends on the
size of the file.
This is not officially supported by Amazon, so it could change in the
future.
Modified from:
http://stackoverflow.com/questions/6591047/etag-definition-changed-in-amazon-s3
Parameters
----------
file_path : str
Path to file.
upload_max_size : int
Max size of a file (in bytes) that will be uploaded as a single chunk.
upload_part_size : int
        Size (in bytes) of each chunk of a multi-part upload.
"""
filesize = os.path.getsize(file_path)
file_hash = hashlib.md5()
if filesize > upload_max_size:
block_count = 0
md5string = ""
with open(file_path, "rb") as f:
for block in iter(lambda: f.read(upload_part_size), ""):
file_hash = hashlib.md5()
file_hash.update(block)
md5string = md5string + binascii.unhexlify(
file_hash.hexdigest())
block_count += 1
file_hash = hashlib.md5()
file_hash.update(md5string)
return file_hash.hexdigest() + "-" + str(block_count)
else:
with open(file_path, "rb") as f:
for block in iter(lambda: f.read(upload_part_size), ""):
file_hash.update(block)
return file_hash.hexdigest()
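# Worked example (sizes are illustrative, not measured): with the 8 MiB
# threshold and 8 MiB chunk size from TRANSFER_CONFIG, a 20 MiB file is
# uploaded in three parts, so S3 reports an ETag of the form
# '<md5 of the three concatenated binary part digests>-3', which the multipart
# branch above reproduces and aws_verify() compares against.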
def convert_time(time_in_seconds):
return time.strftime(
'%Y-%m-%d-%Hh%Mm%Ss', time.localtime(time_in_seconds))
# Copyright 2014 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from oslo.config import cfg
import six
from neutron.api import extensions
from neutron.api.v2 import attributes as attr
from neutron.api.v2 import base
from neutron.api.v2 import resource_helper
from neutron.common import exceptions as nexception
from neutron import manager
from neutron.plugins.common import constants
from neutron.services import service_base
# TODO(dougw) - stop hard-coding these constants when this extension moves
# to the neutron-lbaas repo
#from neutron.services.loadbalancer import constants as lb_const
LB_METHOD_ROUND_ROBIN = 'ROUND_ROBIN'
LB_METHOD_LEAST_CONNECTIONS = 'LEAST_CONNECTIONS'
LB_METHOD_SOURCE_IP = 'SOURCE_IP'
SUPPORTED_LB_ALGORITHMS = (LB_METHOD_LEAST_CONNECTIONS, LB_METHOD_ROUND_ROBIN,
LB_METHOD_SOURCE_IP)
PROTOCOL_TCP = 'TCP'
PROTOCOL_HTTP = 'HTTP'
PROTOCOL_HTTPS = 'HTTPS'
SUPPORTED_PROTOCOLS = (PROTOCOL_TCP, PROTOCOL_HTTPS, PROTOCOL_HTTP)
HEALTH_MONITOR_PING = 'PING'
HEALTH_MONITOR_TCP = 'TCP'
HEALTH_MONITOR_HTTP = 'HTTP'
HEALTH_MONITOR_HTTPS = 'HTTPS'
SUPPORTED_HEALTH_MONITOR_TYPES = (HEALTH_MONITOR_HTTP, HEALTH_MONITOR_HTTPS,
HEALTH_MONITOR_PING, HEALTH_MONITOR_TCP)
SESSION_PERSISTENCE_SOURCE_IP = 'SOURCE_IP'
SESSION_PERSISTENCE_HTTP_COOKIE = 'HTTP_COOKIE'
SESSION_PERSISTENCE_APP_COOKIE = 'APP_COOKIE'
SUPPORTED_SP_TYPES = (SESSION_PERSISTENCE_SOURCE_IP,
SESSION_PERSISTENCE_HTTP_COOKIE,
SESSION_PERSISTENCE_APP_COOKIE)
# Loadbalancer Exceptions
# This exception exists only as a workaround for when both the v1 and v2 LBaaS
# extensions and plugins are enabled
class RequiredAttributeNotSpecified(nexception.BadRequest):
message = _("Required attribute %(attr_name)s not specified")
class EntityNotFound(nexception.NotFound):
message = _("%(name)s %(id)s could not be found")
class DelayOrTimeoutInvalid(nexception.BadRequest):
message = _("Delay must be greater than or equal to timeout")
class EntityInUse(nexception.InUse):
message = _("%(entity_using)s %(id)s is using this %(entity_in_use)s")
class LoadBalancerListenerProtocolPortExists(nexception.Conflict):
message = _("Load Balancer %(lb_id)s already has a listener with "
"protocol_port of %(protocol_port)s")
class ListenerPoolProtocolMismatch(nexception.Conflict):
message = _("Listener protocol %(listener_proto)s and pool protocol "
"%(pool_proto)s are not compatible.")
class AttributeIDImmutable(nexception.NeutronException):
message = _("Cannot change %(attribute)s if one already exists")
class StateInvalid(nexception.NeutronException):
message = _("Invalid state %(state)s of loadbalancer resource %(id)s")
class MemberNotFoundForPool(nexception.NotFound):
message = _("Member %(member_id)s could not be found in pool %(pool_id)s")
class MemberExists(nexception.Conflict):
message = _("Member with address %(address)s and protocol_port %(port)s "
"already present in pool %(pool)s")
class MemberAddressTypeSubnetTypeMismatch(nexception.NeutronException):
message = _("Member with address %(address)s and subnet %(subnet_id) "
" have mismatched IP versions")
class DriverError(nexception.NeutronException):
message = _("An error happened in the driver")
class LBConfigurationUnsupported(nexception.NeutronException):
message = _("Load balancer %(load_balancer_id)s configuration is not"
"supported by driver %(driver_name)s")
RESOURCE_ATTRIBUTE_MAP = {
'loadbalancers': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True},
'name': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'default': '',
'is_visible': True},
'tenant_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True},
'description': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'vip_subnet_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True},
'vip_address': {'allow_post': True, 'allow_put': False,
'default': attr.ATTR_NOT_SPECIFIED,
'validate': {'type:ip_address_or_none': None},
'is_visible': True},
'admin_state_up': {'allow_post': True, 'allow_put': True,
'default': True,
'convert_to': attr.convert_to_boolean,
'is_visible': True},
'status': {'allow_post': False, 'allow_put': False,
'is_visible': True}
},
'listeners': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True},
'tenant_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True},
'name': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'default': '',
'is_visible': True},
'description': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'loadbalancer_id': {'allow_post': True, 'allow_put': True,
'validate': {'type:uuid_or_none': None},
'default': attr.ATTR_NOT_SPECIFIED,
'is_visible': True},
'default_pool_id': {'allow_post': True, 'allow_put': True,
'validate': {'type:uuid_or_none': None},
'default': attr.ATTR_NOT_SPECIFIED,
'is_visible': True},
'connection_limit': {'allow_post': True, 'allow_put': True,
'default': -1,
'convert_to': attr.convert_to_int,
'is_visible': True},
'protocol': {'allow_post': True, 'allow_put': False,
'validate': {'type:values': SUPPORTED_PROTOCOLS},
'is_visible': True},
'protocol_port': {'allow_post': True, 'allow_put': False,
'validate': {'type:range': [0, 65535]},
'convert_to': attr.convert_to_int,
'is_visible': True},
'admin_state_up': {'allow_post': True, 'allow_put': True,
'default': True,
'convert_to': attr.convert_to_boolean,
'is_visible': True},
'status': {'allow_post': False, 'allow_put': False,
'is_visible': True}
},
'pools': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True},
'tenant_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True},
'name': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'description': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'healthmonitor_id': {'allow_post': True, 'allow_put': True,
'validate': {'type:string_or_none': None},
'is_visible': True,
'default': attr.ATTR_NOT_SPECIFIED},
'protocol': {'allow_post': True, 'allow_put': False,
'validate': {'type:values': SUPPORTED_PROTOCOLS},
'is_visible': True},
'lb_algorithm': {'allow_post': True, 'allow_put': True,
'validate': {
'type:values': SUPPORTED_LB_ALGORITHMS},
                         # TODO(brandon-logan) remove when old API is removed
                         # because this is a required attribute
'default': attr.ATTR_NOT_SPECIFIED,
'is_visible': True},
'session_persistence': {
'allow_post': True, 'allow_put': True,
'convert_to': attr.convert_none_to_empty_dict,
'default': {},
'validate': {
'type:dict_or_empty': {
'type': {
'type:values': SUPPORTED_SP_TYPES,
'required': True},
'cookie_name': {'type:string': None,
'required': False}}},
'is_visible': True},
'members': {'allow_post': False, 'allow_put': False,
'is_visible': True},
'admin_state_up': {'allow_post': True, 'allow_put': True,
'default': True,
'convert_to': attr.convert_to_boolean,
'is_visible': True},
'status': {'allow_post': False, 'allow_put': False,
'is_visible': True}
},
'healthmonitors': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True},
'tenant_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True},
'type': {'allow_post': True, 'allow_put': False,
'validate': {
'type:values': SUPPORTED_HEALTH_MONITOR_TYPES},
'is_visible': True},
'delay': {'allow_post': True, 'allow_put': True,
'validate': {'type:non_negative': None},
'convert_to': attr.convert_to_int,
'is_visible': True},
'timeout': {'allow_post': True, 'allow_put': True,
'validate': {'type:non_negative': None},
'convert_to': attr.convert_to_int,
'is_visible': True},
'max_retries': {'allow_post': True, 'allow_put': True,
'validate': {'type:range': [1, 10]},
'convert_to': attr.convert_to_int,
'is_visible': True},
'http_method': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'default': 'GET',
'is_visible': True},
'url_path': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'default': '/',
'is_visible': True},
'expected_codes': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:regex': r'^(\d{3}(\s*,\s*\d{3})*)$|^(\d{3}-\d{3})$'
},
'default': '200',
'is_visible': True
},
'admin_state_up': {'allow_post': True, 'allow_put': True,
'default': True,
'convert_to': attr.convert_to_boolean,
'is_visible': True},
'status': {'allow_post': False, 'allow_put': False,
'is_visible': True}
}
}
SUB_RESOURCE_ATTRIBUTE_MAP = {
'members': {
'parent': {'collection_name': 'pools',
'member_name': 'pool'},
'parameters': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True},
'tenant_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True},
'address': {'allow_post': True, 'allow_put': False,
'validate': {'type:ip_address': None},
'is_visible': True},
'protocol_port': {'allow_post': True, 'allow_put': False,
'validate': {'type:range': [0, 65535]},
'convert_to': attr.convert_to_int,
'is_visible': True},
'weight': {'allow_post': True, 'allow_put': True,
'default': 1,
'validate': {'type:range': [0, 256]},
'convert_to': attr.convert_to_int,
'is_visible': True},
'admin_state_up': {'allow_post': True, 'allow_put': True,
'default': True,
'convert_to': attr.convert_to_boolean,
'is_visible': True},
'status': {'allow_post': False, 'allow_put': False,
'is_visible': True},
'subnet_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True},
}
}
}
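# With the 'parent' mapping above, members are exposed as a sub-resource of
# pools (i.e. .../pools/<pool_id>/members); the URL prefix itself comes from
# constants.COMMON_PREFIXES[constants.LOADBALANCERV2] in get_resources() below.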
lbaasv2_quota_opts = [
cfg.IntOpt('quota_loadbalancer',
default=10,
help=_('Number of LoadBalancers allowed per tenant. '
'A negative value means unlimited.')),
cfg.IntOpt('quota_listener',
default=-1,
help=_('Number of Loadbalancer Listeners allowed per tenant. '
'A negative value means unlimited.')),
cfg.IntOpt('quota_pool',
default=10,
help=_('Number of pools allowed per tenant. '
'A negative value means unlimited.')),
cfg.IntOpt('quota_member',
default=-1,
help=_('Number of pool members allowed per tenant. '
'A negative value means unlimited.')),
cfg.IntOpt('quota_healthmonitor',
default=-1,
help=_('Number of health monitors allowed per tenant. '
'A negative value means unlimited.'))
]
cfg.CONF.register_opts(lbaasv2_quota_opts, 'QUOTAS')
class Loadbalancerv2(extensions.ExtensionDescriptor):
@classmethod
def get_name(cls):
return "LoadBalancing service v2"
@classmethod
def get_alias(cls):
return "lbaasv2"
@classmethod
def get_description(cls):
return "Extension for LoadBalancing service v2"
@classmethod
def get_namespace(cls):
return "http://wiki.openstack.org/neutron/LBaaS/API_2.0"
@classmethod
def get_updated(cls):
return "2014-06-18T10:00:00-00:00"
@classmethod
def get_resources(cls):
plural_mappings = resource_helper.build_plural_mappings(
{}, RESOURCE_ATTRIBUTE_MAP)
action_map = {'loadbalancer': {'stats': 'GET'}}
plural_mappings['members'] = 'member'
attr.PLURALS.update(plural_mappings)
resources = resource_helper.build_resource_info(
plural_mappings,
RESOURCE_ATTRIBUTE_MAP,
constants.LOADBALANCERV2,
action_map=action_map,
register_quota=True)
plugin = manager.NeutronManager.get_service_plugins()[
constants.LOADBALANCERV2]
for collection_name in SUB_RESOURCE_ATTRIBUTE_MAP:
# Special handling needed for sub-resources with 'y' ending
# (e.g. proxies -> proxy)
resource_name = collection_name[:-1]
parent = SUB_RESOURCE_ATTRIBUTE_MAP[collection_name].get('parent')
params = SUB_RESOURCE_ATTRIBUTE_MAP[collection_name].get(
'parameters')
controller = base.create_resource(collection_name, resource_name,
plugin, params,
allow_bulk=True,
parent=parent,
allow_pagination=True,
allow_sorting=True)
resource = extensions.ResourceExtension(
collection_name,
controller, parent,
path_prefix=constants.COMMON_PREFIXES[
constants.LOADBALANCERV2],
attr_map=params)
resources.append(resource)
return resources
@classmethod
def get_plugin_interface(cls):
return LoadBalancerPluginBaseV2
def update_attributes_map(self, attributes, extension_attrs_map=None):
super(Loadbalancerv2, self).update_attributes_map(
attributes, extension_attrs_map=RESOURCE_ATTRIBUTE_MAP)
def get_extended_resources(self, version):
if version == "2.0":
return RESOURCE_ATTRIBUTE_MAP
else:
return {}
@six.add_metaclass(abc.ABCMeta)
class LoadBalancerPluginBaseV2(service_base.ServicePluginBase):
def get_plugin_name(self):
return constants.LOADBALANCERV2
def get_plugin_type(self):
return constants.LOADBALANCERV2
def get_plugin_description(self):
return 'LoadBalancer service plugin v2'
@abc.abstractmethod
def get_loadbalancers(self, context, filters=None, fields=None):
pass
@abc.abstractmethod
def get_loadbalancer(self, context, id, fields=None):
pass
@abc.abstractmethod
def create_loadbalancer(self, context, loadbalancer):
pass
@abc.abstractmethod
def update_loadbalancer(self, context, id, loadbalancer):
pass
@abc.abstractmethod
def delete_loadbalancer(self, context, id):
pass
@abc.abstractmethod
def create_listener(self, context, listener):
pass
@abc.abstractmethod
def get_listener(self, context, id, fields=None):
pass
@abc.abstractmethod
def get_listeners(self, context, filters=None, fields=None):
pass
@abc.abstractmethod
def update_listener(self, context, id, listener):
pass
@abc.abstractmethod
def delete_listener(self, context, id):
pass
@abc.abstractmethod
def get_pools(self, context, filters=None, fields=None):
pass
@abc.abstractmethod
def get_pool(self, context, id, fields=None):
pass
@abc.abstractmethod
def create_pool(self, context, pool):
pass
@abc.abstractmethod
def update_pool(self, context, id, pool):
pass
@abc.abstractmethod
def delete_pool(self, context, id):
pass
@abc.abstractmethod
def stats(self, context, loadbalancer_id):
pass
@abc.abstractmethod
def get_pool_members(self, context, pool_id,
filters=None,
fields=None):
pass
@abc.abstractmethod
def get_pool_member(self, context, id, pool_id,
fields=None):
pass
@abc.abstractmethod
def create_pool_member(self, context, member,
pool_id):
pass
@abc.abstractmethod
def update_pool_member(self, context, member, id,
pool_id):
pass
@abc.abstractmethod
def delete_pool_member(self, context, id, pool_id):
pass
@abc.abstractmethod
def get_healthmonitors(self, context, filters=None, fields=None):
pass
@abc.abstractmethod
def get_healthmonitor(self, context, id, fields=None):
pass
@abc.abstractmethod
def create_healthmonitor(self, context, healthmonitor):
pass
@abc.abstractmethod
def update_healthmonitor(self, context, id, healthmonitor):
pass
@abc.abstractmethod
def delete_healthmonitor(self, context, id):
pass
@abc.abstractmethod
def get_members(self, context, filters=None, fields=None):
pass
@abc.abstractmethod
def get_member(self, context, id, fields=None):
pass
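# A minimal, hypothetical sketch (not part of this module) of how a concrete service
# plugin consumes this interface: subclass LoadBalancerPluginBaseV2 and implement
# every @abc.abstractmethod declared above, e.g.
#
#     class NoopLoadBalancerPluginV2(LoadBalancerPluginBaseV2):
#         def get_loadbalancers(self, context, filters=None, fields=None):
#             return []
#         # ... likewise for the remaining abstract methods.
#
# The Loadbalancerv2 extension then finds the plugin via
# manager.NeutronManager.get_service_plugins()[constants.LOADBALANCERV2].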
|
|
import os
from multiprocessing import Process, cpu_count, Queue
from multiprocessing.queues import Empty
from time import sleep
from warnings import warn
from .CubeTextureClass import CubeTexture
from ..events_processing.eventClasses import Event, EventTypes
from .TextureManagerServer import serve, TexturesManagerServer
# from ThreadedSystemClass import ThreadedSystem
# from ParallelServiceClass import messageType
from ..Logging import logLevelsEnum
from .._baseManager import BaseManager
class textureLoadedEvent(Event):
def __init__(self, textureID):
super(textureLoadedEvent, self).__init__(EventTypes.Custom)
self.name = TexturesManager.textureLoaded
self.textureID = textureID
class TexturesManager(BaseManager):
textureLoaded = 'textureLoaded'
def __init__(self):
# ThreadedSystem.__init__(self)
super(TexturesManager, self).__init__()
self._textureCache = {}
self._cubeTexCache = {}
self._context = None
self._window = None
self._defaultTexture = None
self._defaultNormalMap = None
self._engine = None
def initialize(self, engine):
self._engine = engine
self.remotequeue = Queue()
self.localqueue = Queue()
# self.parallelProcess = Process(target=serve, args=[self.remotequeue, self.localqueue])
# self.parallelProcess.start()
dt = os.path.join(self._engine.path.defaults.textures, "default.png")
dtnm = os.path.join(self._engine.path.defaults.textures, "default_nm.png")
try:
self.loadTexture(dt, "default", serial=True, raiseOnError=True)
self._defaultTexture = self._textureCache.get('default')
self.loadTexture(dtnm, "_defaultNM", serial=True, raiseOnError=True)
self._defaultNormalMap = self._textureCache.get('_defaultNM')
except Exception:
raise
def run(self):
# if SDL_GL_MakeCurrent(self._window, self._context):
# raise RuntimeError(SDL_GetError())
while self._running:
sleep(1.5)
self.checkQueue()
def getDefaultNormalMap(self):
return self._defaultNormalMap
def checkQueue(self):
# return
try:
if not self.localqueue._closed:
# remoteID, ttype, taskID, args = self.localqueue.get(False, 1)
remoteID, ttype, taskID, args = self.localqueue.get_nowait()
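# NOTE: messageType comes from the ParallelServiceClass import commented out above;
# with the worker process disabled nothing is ever posted to this queue, but
# re-enabling it requires restoring that import or this branch raises NameError.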
if ttype == messageType.ready:
self._fillTexture(args)
elif ttype == messageType.exception:
raise RuntimeError(*args)
except Empty:
pass
def createEmpty2DTexture(self, ID, width, height):
self._engine.log('Using untested createEmpty2DTexture', logLevelsEnum.warning)
if self.exists(ID):
raise RuntimeError('the ID is already in use.')
tex = self._engine.backend.create2DTexture(ID, -1, None, width, height)
self._textureCache[ID] = tex
def update2DTexture(self, ID, data, fromTuple, toTuple):
self._engine.backend.update2DTexture(ID, data, fromTuple, toTuple)
def _fillTexture(self, args): # todo: move to base backend
pix, w, h, ID, mipmapsNumber, repeat = args
try:
tex = self._engine.backend.create2DTexture(ID, mipmapsNumber, pix, w, h, repeat)
self._textureCache[ID] = tex
except Exception as ex:
self._engine.log('Error loading texture \'{0}\':\n\t{1}\n'
'Using default texture.'.format(ID, str(ex)))
if self._defaultTexture is None:
raise
self._textureCache[ID] = self._defaultTexture
self._engine.postEvent(textureLoadedEvent(ID))
def loadTexture(self, filePath, ID, mipmapsNumber=10, serial=True, raiseOnError=False, repeat=True, force=False):
"""
Load texture 'filename' as 'ID'. If 'serial', loading will be done secuentially, so
this function won't return until the load is finished.
If 'raiseOnError' is True, any error will raise an Exception. Otherwise, the default
texture will be returned.
@param raiseOnError:
@type raiseOnError:bool
@type ID: str
@type filePath: str
@rtype : None
@param filePath:
@param ID:
"""
warn('forcing serial texture load')
serial = True
filePath = os.path.abspath(filePath)
tex = self._textureCache.get(ID)
getdefault = False
if tex is None or force:
if not os.path.exists(filePath):
defaultTexturesDir = os.path.join(self._engine.path, 'defaults', 'textures')
filename = os.path.basename(filePath)
defaultedpath = os.path.join(defaultTexturesDir, filename)
if not os.path.exists(defaultedpath):
getdefault = True
if raiseOnError:
raise RuntimeError('File not Found', filePath)
else:
self._engine.log('Error loading texture \'{0}\'\n'
'\tUsing default texture.'.format(filePath), logLevelsEnum.error)
else:
self._engine.log('Error loading texture \'{0}\'\n'
'\tUsing texture found at: {1}'.format(filePath, defaultedpath), logLevelsEnum.error)
filePath = defaultedpath
if not getdefault:
if serial:
pix, w, h = TexturesManagerServer.getPILpixels(filePath)
self._fillTexture([pix, w, h, ID, mipmapsNumber, repeat])
else:
self.remotequeue.put_nowait(('loadTexture', [filePath, ID, mipmapsNumber, repeat]))
else:
if self._defaultTexture is None:
raise AttributeError("Fatal error: Default texture not defined.")
else:
tex = self._defaultTexture
self._textureCache[ID] = tex
def loadCubeTexture(self, folderPath, ID):
"""
@type ID: str
@type folderPath: str
@rtype : None
@param ID:
"""
# TODO: turn into parallel
cube = self._cubeTexCache.get(ID)
if not cube:
if not os.path.exists(folderPath):
# folderPath = self._engine.io.findPath(folderPath)
# if not folderPath:
self._engine.log('Error loading cube texture {0}:\n{1}'.format(folderPath, 'Folder not found.'), logLevelsEnum.error)
cube = CubeTexture(self._engine, ID)
cube.loadFromFolder(folderPath, TexturesManagerServer.getPILpixels)
return cube
def exists(self, texID):
return texID in self._textureCache
def existsCube(self, cubeID):
return cubeID in self._cubeTexCache
def getTexture(self, ID):
return self._textureCache.get(ID, self._defaultTexture)
def getCubeTexture(self, ID):
return self._cubeTexCache.get(ID)
def _addTextureIDs(self, IDs):
for a, b in IDs:
if a not in self._textureCache.keys():
self._textureCache[a] = b
else:
raise AttributeError('Texture ID \'{}\' already exists.'.format(a))
def getDefaultTexture(self):
return self._defaultTexture
def terminate(self):
try:
pass
# self.remotequeue.put_nowait(('close', []))
# self.remotequeue.close()
except BrokenPipeError:
pass
# self.parallelProcess.terminate()
# TODO: Move following code to backend
# glBindTexture(GL_TEXTURE_2D, 0)
# for t in self._textureCache.values():
#
# glDeleteTextures(1, np.array([t], np.int32))
#
# for t in self._cubeTexCache.values():
# glDeleteTextures(1, np.array([t], np.int32))
#
# # SDL_GL_DeleteContext(self._context)
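# A short usage sketch (assuming an initialized engine that exposes this manager,
# here as `engine.textures`; the paths and IDs are illustrative only):
#
#     engine.textures.loadTexture('/assets/brick.png', 'brick', serial=True)
#     tex = engine.textures.getTexture('brick')   # falls back to the default texture
#     sky = engine.textures.loadCubeTexture('/assets/sky', 'sky')
#
# When a texture finishes loading, a textureLoadedEvent carrying the texture ID is
# posted via engine.postEvent, so other systems can react through the event queue.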
|
|
# Copyright 2015 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traitsui.api import UItem, Item, HGroup, VGroup, Group, EnumEditor, spring, View
from pychron.core.pychron_traits import BorderVGroup, BorderHGroup
from pychron.envisage.icon_button_editor import icon_button_editor
from pychron.options.options import (
SubOptions,
AppearanceSubOptions,
GroupSubOptions,
MainOptions,
TitleSubOptions,
)
from pychron.processing.j_error_mixin import J_ERROR_GROUP
from pychron.pychron_constants import (
MAIN,
APPEARANCE,
SCHAEN2020_3,
SCHAEN2020_3youngest,
)
class DisplaySubOptions(TitleSubOptions):
def traits_view(self):
errbar_grp = VGroup(
HGroup(
Item("x_end_caps", label="X End Caps"),
Item("y_end_caps", label="Y End Caps"),
),
HGroup(
Item("error_bar_line_width", label="Line Width"),
Item("error_bar_nsigma", label="NSigma"),
),
show_border=True,
label="Error Bars",
)
an_grp = VGroup(
Item("analysis_number_sorting", label="Analysis# Order"),
Item(
"global_analysis_number_sorting",
label="Global Sort",
tooltip="Applicable only when " "using Aux Grouping",
),
HGroup(
Item("include_group_legend", label="Include Group Legend"),
UItem(
"group_legend_label_attribute", enabled_when="include_group_legend"
),
),
HGroup(
Item("use_cmap_analysis_number", label="Use Color Mapping"),
UItem("cmap_analysis_number", enabled_when="use_cmap_analysis_number"),
),
Item("use_latest_overlay"),
show_border=True,
label="Analysis #",
)
label_grp = VGroup(
HGroup(
Item("label_box"),
Item(
"analysis_label_display",
label="Label Format",
width=100,
style="readonly",
),
spring,
icon_button_editor(
"edit_label_format_button", "cog", tooltip="Open Label maker"
),
),
VGroup(
Item(
"label_all_peaks",
label="Label Peaks",
tooltip="Label each peak with its calculated age",
),
HGroup(
Item("peak_label_bgcolor_enabled", label="Background"),
UItem(
"peak_label_bgcolor", enabled_when="peak_label_bgcolor_enabled"
),
),
HGroup(
Item(
"peak_label_border",
label="Border Width",
tooltip="Border width in pixels, user 0 to disable",
),
Item("peak_label_border_color", label="Border"),
enabled_when="peak_label_border",
),
Item("peak_label_sigfigs", label="SigFigs"),
show_border=True,
label="Peaks",
),
show_border=True,
label="Label",
)
inset_grp = VGroup(
HGroup(
Item("display_inset", label="Use"),
Item("inset_location", label="Location"),
Item("inset_width", label="Width"),
Item("inset_height", label="Height"),
),
show_border=True,
label="Inset",
)
mean_label = HGroup(
Item(
"mean_label_display",
label="Mean Label Format",
width=100,
style="readonly",
),
spring,
icon_button_editor(
"edit_mean_format_button", "cog", tooltip="Open Mean Label maker"
),
)
submean = HGroup(
VGroup(Item("display_group_marker", label="Group Marker")),
VGroup(
Item(
"display_mean",
label="Value",
),
Item(
"display_percent_error",
label="%Error",
),
),
VGroup(
Item(
"display_mean_mswd",
label="MSWD",
),
Item("display_mean_n", label="N"),
Item("display_mswd_pvalue", label="P-Value"),
),
VGroup(
Item("mean_sig_figs", label="Mean SigFigs"),
Item("mswd_sig_figs", label="MSWD SigFigs"),
),
enabled_when="display_mean_indicator",
)
mean_grp = VGroup(
Item("display_mean_indicator", label="Indicator"),
submean,
mean_label,
show_border=True,
label="Mean",
)
info_grp = HGroup(
Item("show_info", label="Show"),
Item("show_mean_info", label="Mean", enabled_when="show_info"),
Item("show_error_type_info", label="Error Type", enabled_when="show_info"),
show_border=True,
label="Info",
)
display_grp = VGroup(
mean_grp,
an_grp,
inset_grp,
self._get_title_group(),
label_grp,
info_grp,
errbar_grp,
scrollable=True,
show_border=True,
label="Display",
)
return self._make_view(display_grp)
class CalculationSubOptions(SubOptions):
def traits_view(self):
calcgrp = BorderVGroup(
Item(
"probability_curve_kind", width=-150, label="Probability Curve Method"
),
Item("mean_calculation_kind", width=-150, label="Mean Calculation Method"),
BorderVGroup(
Item("shapiro_wilk_alpha", label="Shapiro-Wilk alpha"),
HGroup(
Item("skew_min", label="Skew Min."),
Item("skew_max", label="Skew Max"),
),
visible_when='mean_calculation_kind =="{}" '
'or mean_calculation_kind=="{}"'.format(
SCHAEN2020_3, SCHAEN2020_3youngest
),
label="Normality Options",
),
Item("error_calc_method", width=-150, label="Error Calculation Method"),
Item("nsigma", label="Age Error NSigma"),
BorderVGroup(
J_ERROR_GROUP,
BorderHGroup(
Item("include_irradiation_error"), Item("include_decay_error")
),
label="Uncertainty",
),
label="Calculations",
)
return self._make_view(calcgrp)
class IdeogramSubOptions(SubOptions):
def traits_view(self):
xgrp = VGroup(
Item("index_attr", editor=EnumEditor(name="index_attrs"), label="X Value"),
HGroup(
Item("age_normalize", label="Normalize Age"),
UItem("age_normalize_value"),
),
Item(
"reverse_x_axis",
label="Reverse",
tooltip="Display decreasing left to right",
),
HGroup(
UItem("use_static_limits"),
Item("xlow", label="Min.", enabled_when="object.use_static_limits"),
Item("xhigh", label="Max.", enabled_when="object.use_static_limits"),
show_border=True,
label="Static Limits",
),
HGroup(
UItem("use_asymptotic_limits"),
# Item('asymptotic_width', label='% Width',
# tooltip='Width of asymptotic section that is less than the Asymptotic %'),
Item(
"asymptotic_height_percent",
tooltip="Percent of Max probability",
label="% Height",
),
# icon_button_editor('refresh_asymptotic_button', 'refresh',
# enabled_when='object.use_asymptotic_limits',
# tooltip='Refresh plot with defined asymptotic limits'),
enabled_when="not object.use_centered_range and not object.use_static_limits",
show_border=True,
label="Asymptotic Limits",
),
HGroup(
UItem("use_centered_range"),
UItem("centered_range", enabled_when="object.use_centered_range"),
label="Center on fixed range",
show_border=True,
enabled_when="not object.use_static_limits",
),
HGroup(
UItem("use_xpad"),
Item("xpad", label="Pad", enabled_when="use_xpad"),
Item(
"xpad_as_percent",
tooltip="Treat Pad as a percent of the nominal width, otherwise Pad is in Ma. "
"e.g if width=10 Ma, Pad=0.5 "
"the final width will be 10 + (10*0.5)*2 = 20 Ma.",
enabled_when="use_xpad",
label="%",
),
label="X Pad",
show_border=True,
),
show_border=True,
label="X",
)
tgrp = BorderVGroup(
Item(
"omit_by_tag",
label="Omit Tags",
tooltip='If selected, only analyses tagged as "OK" are included in the calculations',
),
label="Tags",
)
rtgrp = BorderVGroup(
Item(
"show_results_table",
label="Show Summary",
tooltip="Display a summary table below the ideogram",
),
Item("show_ttest_table", label="Show T-test"),
Item("show_rvalues", label="Show R Values"),
label="Aux. Tables",
)
cgrp = BorderVGroup(Item("show_correlation_ellipses"), label="Correlation")
return self._make_view(VGroup(xgrp, tgrp, rtgrp, cgrp))
class IdeogramAppearance(AppearanceSubOptions):
def traits_view(self):
mi = BorderVGroup(
HGroup(UItem("mean_indicator_fontname"), UItem("mean_indicator_fontsize")),
Item("display_mean_location", label="Location"),
label="Mean Indicator",
)
ee = BorderHGroup(
UItem("error_info_fontname"),
UItem("error_info_fontsize"),
label="Error Info",
)
ll = BorderHGroup(
UItem("label_fontname"), UItem("label_fontsize"), label="Labels"
)
fgrp = BorderVGroup(
BorderHGroup(UItem("fontname"), label="Change All"),
HGroup(mi, ee),
ll,
HGroup(self._get_xfont_group(), self._get_yfont_group()),
label="Fonts",
)
subgroup = BorderVGroup(Item("show_subgroup_indicators"), label="Subgroup")
g = VGroup(
subgroup,
self._get_nominal_group(),
self._get_layout_group(),
self._get_padding_group(),
fgrp,
)
return self._make_view(g)
class IdeogramMainOptions(MainOptions):
def _get_edit_view(self):
tooltip = """'Omit analyses based on the provided criteria. For example x>10 will omit any analysis
greater than 10. The value of x depends on the Auxiliary plot e.g. x is age for Analysis Number or K/Ca for KCa.
x is simply a placeholder and can be replaced by any letter or word except for a few exceptions
(i.e and, or, is, on, if, not...). To filter based on error or %error use "error" and "percent_error". Multiple predicates may be combined
with "and", "or". Valid comparators are "<,<=,>,>=,==,!=". "==" means "equals" and "!=" means "not equal".
Additional examples
1. x<10
2. age<10 or age>100
3. age<10 or error>1
4. x<=10 or percent_error>50
5. xyz<=10 and error>=0.1"""
sigma_tooltip = """Omit analyses greater than N sigma from the arithmetic mean"""
fgrp = BorderVGroup(
HGroup(
Item("filter_str", tooltip=tooltip, label="Filter"),
UItem("filter_str_tag"),
),
HGroup(
Item("sigma_filter_n", label="Sigma Filter N", tooltip=sigma_tooltip),
UItem("sigma_filter_tag"),
),
label="Filtering",
)
v = View(
BorderVGroup(
self._get_name_grp(),
self._get_yticks_grp(),
self._get_ylimits_group(),
self._get_marker_group(),
fgrp,
)
)
return v
# ===============================================================
# ===============================================================
VIEWS = {
MAIN.lower(): IdeogramMainOptions,
"ideogram": IdeogramSubOptions,
APPEARANCE.lower(): IdeogramAppearance,
"calculations": CalculationSubOptions,
"display": DisplaySubOptions,
"groups": GroupSubOptions,
}
# ===============================================================
# ===============================================================
# ============= EOF =============================================
|
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Participant.order'
db.add_column(u'public_project_participant', 'order',
self.gf('django.db.models.fields.IntegerField')(default=500, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Participant.order'
db.delete_column(u'public_project_participant', 'order')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'public_project.activitylog': {
'Meta': {'ordering': "['-date']", 'object_name': 'ActivityLog'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '2'})
},
u'public_project.comment': {
'Meta': {'ordering': "['-date_added']", 'object_name': 'Comment'},
'activation_hash': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '250'}),
'feedback_allowed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'published_by': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'public_project.commentrelation': {
'Meta': {'object_name': 'CommentRelation'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.Comment']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'page': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'public_project.document': {
'Meta': {'ordering': "['-date_added']", 'object_name': 'Document'},
'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date': ('django.db.models.fields.DateField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'document': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'events': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_documents'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'participants': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_documents'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Participant']"}),
'pdf_images_generated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'project_parts': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_documents'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.ProjectPart']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'public_project.event': {
'Meta': {'ordering': "['-date']", 'object_name': 'Event'},
'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date': ('django.db.models.fields.DateField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'event_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'important': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'participants': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_events'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Participant']"}),
'project_parts': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_events'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.ProjectPart']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'public_project.image': {
'Meta': {'ordering': "['title']", 'object_name': 'Image'},
'attribution_html': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'public_project.membership': {
'Meta': {'object_name': 'Membership'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'from_participant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'from_memberships'", 'to': u"orm['public_project.Participant']"}),
'function': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'to_participant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'to_memberships'", 'to': u"orm['public_project.Participant']"})
},
u'public_project.page': {
'Meta': {'ordering': "['number']", 'object_name': 'Page'},
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.Document']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.IntegerField', [], {})
},
u'public_project.participant': {
'Meta': {'ordering': "['order', 'name']", 'object_name': 'Participant'},
'belongs_to': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['public_project.Participant']", 'through': u"orm['public_project.Membership']", 'symmetrical': 'False'}),
'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '100', 'null': 'True', 'blank': 'True'})
},
u'public_project.projectgoal': {
'Meta': {'ordering': "['order']", 'object_name': 'ProjectGoal'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'performance_figure': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'project_goal_group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.ProjectGoalGroup']"})
},
u'public_project.projectgoalgroup': {
'Meta': {'object_name': 'ProjectGoalGroup'},
'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'project_part': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.ProjectPart']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'public_project.projectpart': {
'Meta': {'ordering': "['order', 'name']", 'object_name': 'ProjectPart'},
'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_project_parts': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['public_project.ProjectPart']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'public_project.question': {
'Meta': {'ordering': "['title']", 'object_name': 'Question'},
'answer': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'answered': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'documents': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_documents'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Document']"}),
'events': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_questions'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Event']"}),
'explanations': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'participants': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_questions'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Participant']"}),
'project_parts': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_questions'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.ProjectPart']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'public_project.researchrequest': {
'Meta': {'ordering': "['-date_added']", 'object_name': 'ResearchRequest'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nr': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'open': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'public_project.researchrequestrelation': {
'Meta': {'object_name': 'ResearchRequestRelation'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'page': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'research_request': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.ResearchRequest']"})
},
u'public_project.searchtag': {
'Meta': {'ordering': "['order']", 'object_name': 'SearchTag'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'public_project.searchtagcacheentry': {
'Meta': {'ordering': "['-num_results']", 'object_name': 'SearchTagCacheEntry'},
'document': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.Document']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'num_results': ('django.db.models.fields.IntegerField', [], {}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.SearchTag']"})
},
u'public_project.sitecategory': {
'Meta': {'object_name': 'SiteCategory'},
'category': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'documents': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_site_categories'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Document']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'intro_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'public_project.siteconfig': {
'Meta': {'object_name': 'SiteConfig'},
'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'contact_html': ('django.db.models.fields.TextField', [], {'default': "u'This text will be shown on the contact page.'"}),
'desc_about': ('django.db.models.fields.TextField', [], {'default': "u'About text'"}),
'footer_html': ('django.db.models.fields.TextField', [], {'default': "u'This text will be shown in the footer of the site.'"}),
'header_image': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.Image']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'intro_text': ('django.db.models.fields.TextField', [], {'default': "u'This is a project watch website.'"}),
'navi_link_color': ('django.db.models.fields.CharField', [], {'default': "'#FFFFFF'", 'max_length': '7'}),
'short_title': ('django.db.models.fields.CharField', [], {'default': "u'ProjectWatch'", 'max_length': '250'}),
'sub_title': ('django.db.models.fields.CharField', [], {'default': "u'Project Website Subtitle'", 'max_length': '250'}),
'sub_title_color': ('django.db.models.fields.CharField', [], {'default': "'#444444'", 'max_length': '7'}),
'title': ('django.db.models.fields.CharField', [], {'default': "u'ProjectWatch'", 'max_length': '250'}),
'title_color': ('django.db.models.fields.CharField', [], {'default': "'#990000'", 'max_length': '7'})
},
u'public_project.userprofile': {
'Meta': {'object_name': 'UserProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'receive_new_comment_emails': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'public_project.websource': {
'Meta': {'ordering': "['order']", 'object_name': 'WebSource'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
}
}
complete_apps = ['public_project']
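# With South installed, this migration is applied with the standard management command
# (a sketch; the exact rollback target depends on this app's migration history):
#
#     python manage.py migrate public_project              # runs forwards()
#     python manage.py migrate public_project <previous>   # runs backwards() down to an earlier migration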
|
|
"""Compute Linearly constrained minimum variance (LCMV) beamformer."""
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Roman Goj <roman.goj@gmail.com>
# Britta Westner <britta.wstnr@gmail.com>
#
# License: BSD (3-clause)
import numpy as np
from ..rank import compute_rank
from ..io.meas_info import _simplify_info
from ..io.pick import pick_channels_cov, pick_info
from ..forward import _subject_from_forward
from ..minimum_norm.inverse import combine_xyz, _check_reference, _check_depth
from ..source_estimate import _make_stc, _get_src_type
from ..utils import logger, verbose, _check_channels_spatial_filter
from ..utils import _check_one_ch_type, _check_info_inv
from ._compute_beamformer import (
_check_proj_match, _prepare_beamformer_input, _compute_power,
_compute_beamformer, _check_src_type, Beamformer)
@verbose
def make_lcmv(info, forward, data_cov, reg=0.05, noise_cov=None, label=None,
pick_ori=None, rank='info',
weight_norm='unit-noise-gain-invariant',
reduce_rank=False, depth=None, inversion='matrix', verbose=None):
"""Compute LCMV spatial filter.
Parameters
----------
info : instance of Info
The measurement info to specify the channels to include.
Bad channels in info['bads'] are not used.
forward : instance of Forward
Forward operator.
data_cov : instance of Covariance
The data covariance.
reg : float
The regularization for the whitened data covariance.
noise_cov : instance of Covariance
The noise covariance. If provided, whitening will be done. Providing a
noise covariance is mandatory if you mix sensor types, e.g.
gradiometers with magnetometers or EEG with MEG.
label : instance of Label
Restricts the LCMV solution to a given label.
%(bf_pick_ori)s
- ``'vector'``
Keeps the currents for each direction separate
%(rank_info)s
%(weight_norm)s
Defaults to ``'unit-noise-gain-invariant'``.
%(reduce_rank)s
%(depth)s
.. versionadded:: 0.18
%(bf_inversion)s
.. versionadded:: 0.21
%(verbose)s
Returns
-------
filters : instance of Beamformer
Dictionary containing filter weights from LCMV beamformer.
Contains the following keys:
'kind' : str
The type of beamformer, in this case 'LCMV'.
'weights' : array
The filter weights of the beamformer.
'data_cov' : instance of Covariance
The data covariance matrix used to compute the beamformer.
'noise_cov' : instance of Covariance | None
The noise covariance matrix used to compute the beamformer.
'whitener' : None | ndarray, shape (n_channels, n_channels)
Whitening matrix, provided if whitening was applied to the
covariance matrix and leadfield during computation of the
beamformer weights.
'weight_norm' : str | None
Type of weight normalization used to compute the filter
weights.
'pick_ori' : None | 'max-power' | 'normal' | 'vector'
The orientation in which the beamformer filters were computed.
'ch_names' : list of str
Channels used to compute the beamformer.
'is_ssp' : bool
If True, projections were applied prior to filter computation.
'vertices' : list
Vertices for which the filter weights were computed.
'is_free_ori' : bool
If True, the filter was computed with free source orientation.
'n_sources' : int
Number of source locations for which the filter weights were
computed.
'src_type' : str
Type of source space.
'source_nn' : ndarray, shape (n_sources, 3)
For each source location, the surface normal.
'proj' : ndarray, shape (n_channels, n_channels)
Projections used to compute the beamformer.
'subject' : str
The subject ID.
'rank' : int
The rank of the data covariance matrix used to compute the
beamformer weights.
'max_power_ori' : ndarray, shape (n_sources, 3) | None
When pick_ori='max-power', this field contains the estimated
direction of maximum power at each source location.
'inversion' : 'single' | 'matrix'
Whether the spatial filters were computed for each dipole
separately or jointly for all dipoles at each vertex using a
matrix inversion.
Notes
-----
The original reference is :footcite:`VanVeenEtAl1997`.
To obtain the Sekihara unit-noise-gain vector beamformer, you should use
``weight_norm='unit-noise-gain', pick_ori='vector'`` followed by
:meth:`vec_stc.project('pca', src) <mne.VectorSourceEstimate.project>`.
.. versionchanged:: 0.21
The computations were extensively reworked, and the default for
``weight_norm`` was set to ``'unit-noise-gain-invariant'``.
References
----------
.. footbibliography::
"""
# check number of sensor types present in the data and ensure a noise cov
info = _simplify_info(info)
noise_cov, _, allow_mismatch = _check_one_ch_type(
'lcmv', info, forward, data_cov, noise_cov)
# XXX we need this extra picking step (can't just rely on minimum norm's)
# because there can be a mismatch. Should probably add an extra arg to
# _prepare_beamformer_input at some point (later).
picks = _check_info_inv(info, forward, data_cov, noise_cov)
info = pick_info(info, picks)
data_rank = compute_rank(data_cov, rank=rank, info=info)
noise_rank = compute_rank(noise_cov, rank=rank, info=info)
for key in data_rank:
if (key not in noise_rank or data_rank[key] != noise_rank[key]) and \
not allow_mismatch:
raise ValueError('%s data rank (%s) did not match the noise '
'rank (%s)'
% (key, data_rank[key],
noise_rank.get(key, None)))
del noise_rank
rank = data_rank
logger.info('Making LCMV beamformer with rank %s' % (rank,))
del data_rank
depth = _check_depth(depth, 'depth_sparse')
if inversion == 'single':
depth['combine_xyz'] = False
is_free_ori, info, proj, vertno, G, whitener, nn, orient_std = \
_prepare_beamformer_input(
info, forward, label, pick_ori, noise_cov=noise_cov, rank=rank,
pca=False, **depth)
ch_names = list(info['ch_names'])
data_cov = pick_channels_cov(data_cov, include=ch_names)
Cm = data_cov._get_square()
if 'estimator' in data_cov:
del data_cov['estimator']
# Whiten the data covariance
Cm = np.dot(whitener, np.dot(Cm, whitener.T))
# Restore positive semi-definiteness: any negative eigenvalues are errant
# (e.g. due to massive scaling differences), so take their absolute value
s, u = np.linalg.eigh(Cm)
Cm = np.dot(u * np.abs(s), u.T.conj())
rank_int = sum(rank.values())
del rank
# compute spatial filter
n_orient = 3 if is_free_ori else 1
W, max_power_ori = _compute_beamformer(
G, Cm, reg, n_orient, weight_norm, pick_ori, reduce_rank, rank_int,
inversion=inversion, nn=nn, orient_std=orient_std)
# get src type to store with filters for _make_stc
src_type = _get_src_type(forward['src'], vertno)
# get subject to store with filters
subject_from = _subject_from_forward(forward)
# Is the computed beamformer a scalar or vector beamformer?
is_free_ori = is_free_ori if pick_ori in [None, 'vector'] else False
is_ssp = bool(info['projs'])
filters = Beamformer(
kind='LCMV', weights=W, data_cov=data_cov, noise_cov=noise_cov,
whitener=whitener, weight_norm=weight_norm, pick_ori=pick_ori,
ch_names=ch_names, proj=proj, is_ssp=is_ssp, vertices=vertno,
is_free_ori=is_free_ori, n_sources=forward['nsource'],
src_type=src_type, source_nn=forward['source_nn'].copy(),
subject=subject_from, rank=rank_int, max_power_ori=max_power_ori,
inversion=inversion)
return filters
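# A typical end-to-end usage sketch (assuming `evoked`, `forward`, `data_cov` and
# `noise_cov` have already been computed with MNE; the parameter values are illustrative):
#
#     filters = make_lcmv(evoked.info, forward, data_cov, reg=0.05,
#                         noise_cov=noise_cov, pick_ori='max-power')
#     stc = apply_lcmv(evoked, filters)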
def _proj_whiten_data(M, proj, filters):
if filters['is_ssp']:
# check whether data and filter projs match
_check_proj_match(proj, filters)
if filters['whitener'] is None:
M = np.dot(filters['proj'], M)
if filters['whitener'] is not None:
M = np.dot(filters['whitener'], M)
return M
def _apply_lcmv(data, filters, info, tmin, max_ori_out):
"""Apply LCMV spatial filter to data for source reconstruction."""
if max_ori_out != 'signed':
raise ValueError('max_ori_out must be "signed", got %s'
% (max_ori_out,))
if isinstance(data, np.ndarray) and data.ndim == 2:
data = [data]
return_single = True
else:
return_single = False
W = filters['weights']
for i, M in enumerate(data):
if len(M) != len(filters['ch_names']):
raise ValueError('data and picks must have the same length')
if not return_single:
logger.info("Processing epoch : %d" % (i + 1))
M = _proj_whiten_data(M, info['projs'], filters)
# project to source space using beamformer weights
vector = False
if filters['is_free_ori']:
sol = np.dot(W, M)
if filters['pick_ori'] == 'vector':
vector = True
else:
logger.info('combining the current components...')
sol = combine_xyz(sol)
else:
# Linear inverse: do computation here or delayed
if (M.shape[0] < W.shape[0] and
filters['pick_ori'] != 'max-power'):
sol = (W, M)
else:
sol = np.dot(W, M)
if filters['pick_ori'] == 'max-power' and max_ori_out == 'abs':
sol = np.abs(sol)
tstep = 1.0 / info['sfreq']
# compatibility with 0.16, add src_type as None if not present:
filters, warn_text = _check_src_type(filters)
yield _make_stc(sol, vertices=filters['vertices'], tmin=tmin,
tstep=tstep, subject=filters['subject'],
vector=vector, source_nn=filters['source_nn'],
src_type=filters['src_type'], warn_text=warn_text)
logger.info('[done]')
@verbose
def apply_lcmv(evoked, filters, max_ori_out='signed', verbose=None):
"""Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights.
Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights
on evoked data.
Parameters
----------
evoked : Evoked
Evoked data to invert.
filters : instance of Beamformer
LCMV spatial filter (beamformer weights).
Filter weights returned from :func:`make_lcmv`.
max_ori_out : 'signed'
Specify in case of pick_ori='max-power'.
%(verbose)s
Returns
-------
stc : SourceEstimate | VolSourceEstimate | VectorSourceEstimate
Source time courses.
See Also
--------
make_lcmv, apply_lcmv_raw, apply_lcmv_epochs, apply_lcmv_cov
Notes
-----
.. versionadded:: 0.18
"""
_check_reference(evoked)
info = evoked.info
data = evoked.data
tmin = evoked.times[0]
sel = _check_channels_spatial_filter(evoked.ch_names, filters)
data = data[sel]
stc = _apply_lcmv(data=data, filters=filters, info=info,
tmin=tmin, max_ori_out=max_ori_out)
return next(stc)
@verbose
def apply_lcmv_epochs(epochs, filters, max_ori_out='signed',
return_generator=False, verbose=None):
"""Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights.
Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights
on single trial data.
Parameters
----------
epochs : Epochs
Single trial epochs.
filters : instance of Beamformer
LCMV spatial filter (beamformer weights)
Filter weights returned from :func:`make_lcmv`.
max_ori_out : 'signed'
Specify in case of pick_ori='max-power'.
return_generator : bool
Return a generator object instead of a list. This allows iterating
over the stcs without having to keep them all in memory.
%(verbose)s
Returns
-------
stc: list | generator of (SourceEstimate | VolSourceEstimate)
The source estimates for all epochs.
See Also
--------
make_lcmv, apply_lcmv_raw, apply_lcmv, apply_lcmv_cov
"""
_check_reference(epochs)
info = epochs.info
tmin = epochs.times[0]
sel = _check_channels_spatial_filter(epochs.ch_names, filters)
data = epochs.get_data()[:, sel, :]
stcs = _apply_lcmv(data=data, filters=filters, info=info,
tmin=tmin, max_ori_out=max_ori_out)
if not return_generator:
stcs = [s for s in stcs]
return stcs
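# Usage sketch: with return_generator=True the source estimates are produced lazily,
# avoiding holding every epoch's STC in memory at once (`epochs` and `filters` are
# assumed to exist; `process` is a placeholder for user code):
#
#     for stc in apply_lcmv_epochs(epochs, filters, return_generator=True):
#         process(stc)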
@verbose
def apply_lcmv_raw(raw, filters, start=None, stop=None, max_ori_out='signed',
verbose=None):
"""Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights.
Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights
on raw data.
Parameters
----------
raw : mne.io.Raw
Raw data to invert.
filters : instance of Beamformer
LCMV spatial filter (beamformer weights).
Filter weights returned from :func:`make_lcmv`.
start : int
Index of the first time sample (an index, not a time in seconds).
stop : int
Index of the first time sample not to include (an index, not a time in seconds).
max_ori_out : 'signed'
Specify in case of pick_ori='max-power'.
%(verbose)s
Returns
-------
stc : SourceEstimate | VolSourceEstimate
Source time courses.
See Also
--------
make_lcmv, apply_lcmv_epochs, apply_lcmv, apply_lcmv_cov
"""
_check_reference(raw)
info = raw.info
sel = _check_channels_spatial_filter(raw.ch_names, filters)
data, times = raw[sel, start:stop]
tmin = times[0]
stc = _apply_lcmv(data=data, filters=filters, info=info,
tmin=tmin, max_ori_out=max_ori_out)
return next(stc)
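# Usage sketch: start/stop are sample indices into the raw data, not times in seconds
# (`raw` and `filters` are assumed to exist):
#
#     stc = apply_lcmv_raw(raw, filters, start=0, stop=int(raw.info['sfreq']))  # ~first second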
@verbose
def apply_lcmv_cov(data_cov, filters, verbose=None):
"""Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights.
Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights
to a data covariance matrix to estimate source power.
Parameters
----------
data_cov : instance of Covariance
Data covariance matrix.
filters : instance of Beamformer
LCMV spatial filter (beamformer weights).
Filter weights returned from :func:`make_lcmv`.
%(verbose)s
Returns
-------
stc : SourceEstimate | VolSourceEstimate
Source power.
See Also
--------
make_lcmv, apply_lcmv, apply_lcmv_epochs, apply_lcmv_raw
"""
sel = _check_channels_spatial_filter(data_cov.ch_names, filters)
sel_names = [data_cov.ch_names[ii] for ii in sel]
data_cov = pick_channels_cov(data_cov, sel_names)
n_orient = filters['weights'].shape[0] // filters['n_sources']
# Need to project and whiten along both dimensions
data = _proj_whiten_data(data_cov['data'].T, data_cov['projs'], filters)
data = _proj_whiten_data(data.T, data_cov['projs'], filters)
del data_cov
source_power = _compute_power(data, filters['weights'], n_orient)
# compatibility with 0.16, add src_type as None if not present:
filters, warn_text = _check_src_type(filters)
return _make_stc(source_power, vertices=filters['vertices'],
                 src_type=filters['src_type'], tmin=0., tstep=1.,
                 subject=filters['subject'],
                 source_nn=filters['source_nn'], warn_text=warn_text)
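# Usage sketch: apply_lcmv_cov turns a data covariance directly into a source-power
# estimate using the same spatial filter (`data_cov` and `filters` are assumed to exist):
#
#     stc_power = apply_lcmv_cov(data_cov, filters)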
|
|
# -*- coding: utf-8 -*-
from datetime import timedelta
from cms import constants
from cms.utils.conf import get_cms_setting
from django.core.exceptions import PermissionDenied
from cms.exceptions import NoHomeFound, PublicIsUnmodifiable
from cms.models.managers import PageManager, PagePermissionsPermissionManager
from cms.models.metaclasses import PageMetaClass
from cms.models.placeholdermodel import Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.publisher.errors import MpttPublisherCantPublish
from cms.utils import i18n, page as page_utils
from cms.utils.copy_plugins import copy_plugins_to
from cms.utils.helpers import reversion_register
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.utils.translation import get_language, ugettext_lazy as _
from menus.menu_pool import menu_pool
from mptt.models import MPTTModel
from os.path import join
class Page(MPTTModel):
"""
A simple hierarchical page model
"""
__metaclass__ = PageMetaClass
LIMIT_VISIBILITY_IN_MENU_CHOICES = (
(1, _('for logged in users only')),
(2, _('for anonymous users only')),
)
PUBLISHER_STATE_DEFAULT = 0
PUBLISHER_STATE_DIRTY = 1
PUBLISHER_STATE_DELETE = 2
# Page was marked published, but some of page parents are not.
PUBLISHER_STATE_PENDING = 4
template_choices = [(x, _(y)) for x, y in get_cms_setting('TEMPLATES')]
created_by = models.CharField(_("created by"), max_length=70, editable=False)
changed_by = models.CharField(_("changed by"), max_length=70, editable=False)
parent = models.ForeignKey('self', null=True, blank=True, related_name='children', db_index=True)
creation_date = models.DateTimeField(auto_now_add=True)
changed_date = models.DateTimeField(auto_now=True)
publication_date = models.DateTimeField(_("publication date"), null=True, blank=True, help_text=_(
'When the page should go live. Status must be "Published" for page to go live.'), db_index=True)
publication_end_date = models.DateTimeField(_("publication end date"), null=True, blank=True,
help_text=_('When to expire the page. Leave empty to never expire.'),
db_index=True)
in_navigation = models.BooleanField(_("in navigation"), default=True, db_index=True)
soft_root = models.BooleanField(_("soft root"), db_index=True, default=False,
help_text=_("All ancestors will not be displayed in the navigation"))
reverse_id = models.CharField(_("id"), max_length=40, db_index=True, blank=True, null=True, help_text=_(
"An unique identifier that is used with the page_url templatetag for linking to this page"))
navigation_extenders = models.CharField(_("attached menu"), max_length=80, db_index=True, blank=True, null=True)
published = models.BooleanField(_("is published"), blank=True)
template = models.CharField(_("template"), max_length=100, choices=template_choices,
help_text=_('The template used to render the content.'))
site = models.ForeignKey(Site, help_text=_('The site the page is accessible at.'), verbose_name=_("site"))
login_required = models.BooleanField(_("login required"), default=False)
limit_visibility_in_menu = models.SmallIntegerField(_("menu visibility"), default=None, null=True, blank=True,
choices=LIMIT_VISIBILITY_IN_MENU_CHOICES, db_index=True,
help_text=_("limit when this page is visible in the menu"))
level = models.PositiveIntegerField(db_index=True, editable=False)
lft = models.PositiveIntegerField(db_index=True, editable=False)
rght = models.PositiveIntegerField(db_index=True, editable=False)
tree_id = models.PositiveIntegerField(db_index=True, editable=False)
# Placeholders (plugins)
placeholders = models.ManyToManyField(Placeholder, editable=False)
# Publisher fields
publisher_is_draft = models.BooleanField(default=True, editable=False, db_index=True)
# This is misnamed - the one-to-one relation is populated on both ends
publisher_public = models.OneToOneField('self', related_name='publisher_draft', null=True, editable=False)
publisher_state = models.SmallIntegerField(default=0, editable=False, db_index=True)
# Managers
objects = PageManager()
permissions = PagePermissionsPermissionManager()
class Meta:
permissions = (
('view_page', 'Can view page'),
('publish_page', 'Can publish page'),
)
verbose_name = _('page')
verbose_name_plural = _('pages')
ordering = ('tree_id', 'lft')
app_label = 'cms'
class PublisherMeta:
exclude_fields_append = ['id', 'publisher_is_draft', 'publisher_public',
'publisher_state', 'moderator_state',
'placeholders', 'lft', 'rght', 'tree_id',
'parent']
def __unicode__(self):
title = self.get_menu_title(fallback=True)
if title is None:
title = u""
return unicode(title)
def __repr__(self):
# This is needed to solve the infinite recursion when
# adding new pages.
return object.__repr__(self)
def is_dirty(self):
return self.publisher_state == self.PUBLISHER_STATE_DIRTY
def get_absolute_url(self, language=None, fallback=True):
if self.is_home():
return reverse('pages-root')
path = self.get_path(language, fallback) or self.get_slug(language, fallback)
return reverse('pages-details-by-slug', kwargs={"slug": path})
def move_page(self, target, position='first-child'):
"""
Called from the admin interface when a page is moved. Should be used by
all code that changes a page's position. Acts like an interface to mptt,
but after the move is done the page_moved signal is fired.
Note for issue #1166: url conflicts are handled by the updated
check_title_slugs; overwrite_url on the moved page doesn't need any check
as it remains the same regardless of the page's position in the tree.
"""
# do not mark the page as dirty after page moves
self._publisher_keep_state = True
# make sure move_page does not break when using INHERIT template
# and moving to a top level position
if (position in ('left', 'right')
and not target.parent
and self.template == constants.TEMPLATE_INHERITANCE_MAGIC):
self.template = self.get_template()
self.move_to(target, position)
# fire signal
import cms.signals as cms_signals
cms_signals.page_moved.send(sender=Page, instance=self)
self.save() # always save the page after move, because of publisher
# check the slugs
page_utils.check_title_slugs(self)
if self.publisher_public_id:
# Ensure we have up to date mptt properties
public_page = Page.objects.get(pk=self.publisher_public_id)
# Ensure that the page is in the right position and save it
public_page = self._publisher_save_public(public_page)
cms_signals.page_moved.send(sender=Page, instance=public_page)
public_page.save()
page_utils.check_title_slugs(public_page)
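# Usage sketch (hypothetical page objects): moving a draft page under a new parent
# re-saves it, re-checks the title slugs and fires the page_moved signal as described above:
#
#     page.move_page(new_parent, position='first-child')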
def _copy_titles(self, target):
"""
Copy all the titles to a new page (which must have a pk).
:param target: The page where the new titles should be stored
"""
old_titles = dict(target.title_set.values_list('language', 'pk'))
for title in self.title_set.all():
# If an old title exists, overwrite. Otherwise create new
title.pk = old_titles.pop(title.language, None)
title.page = target
title.save()
if old_titles:
from titlemodels import Title
Title.objects.filter(id__in=old_titles.values()).delete()
def _copy_contents(self, target):
"""
Copy all the plugins to a new page.
:param target: The page where the new content should be stored
"""
# TODO: Make this into a "graceful" copy instead of deleting and overwriting
# copy the placeholders (and plugins on those placeholders!)
CMSPlugin.objects.filter(placeholder__page=target).delete()
for ph in self.placeholders.all():
plugins = ph.get_plugins_list()
try:
ph = target.placeholders.get(slot=ph.slot)
except Placeholder.DoesNotExist:
ph.pk = None # make a new instance
ph.save()
target.placeholders.add(ph)
# update the page copy
except Placeholder.MultipleObjectsReturned:
pass
# print "TARGET:", target.__class__, target.id
# print "PLACEHOLDER:", ph
# print "SLOT:", ph.slot
# for plach in target.placeholders.filter(slot=ph.slot):
# print
# print "PLACH:", plach.id, plach, repr(plach), dir(plach)
# raise
if plugins:
copy_plugins_to(plugins, ph)
def _copy_attributes(self, target):
"""
Copy all page data to the target. This excludes parent and other values
that are specific to an exact instance.
:param target: The Page to copy the attributes to
"""
target.publication_date = self.publication_date
target.publication_end_date = self.publication_end_date
target.in_navigation = self.in_navigation
target.login_required = self.login_required
target.limit_visibility_in_menu = self.limit_visibility_in_menu
target.soft_root = self.soft_root
target.reverse_id = self.reverse_id
target.navigation_extenders = self.navigation_extenders
target.template = self.template
target.site_id = self.site_id
def copy_page(self, target, site, position='first-child',
copy_permissions=True):
"""
Copy a page [ and all its descendants ] to a new location.
Doesn't check for add-page permissions anymore; this is done in PageAdmin.
Note: public_copy was added to enable the creation of a copy of the public
page during the publish operation, as it sets publisher_is_draft=False.
Note for issue #1166: when copying pages there is no need to check for
conflicting URLs as pages are copied unpublished.
"""
from cms.utils.moderator import update_moderation_message
page_copy = None
pages = [self] + list(self.get_descendants().order_by('-rght'))
site_reverse_ids = Page.objects.filter(site=site, reverse_id__isnull=False).values_list('reverse_id', flat=True)
if target:
target.old_pk = -1
if position == "first-child":
tree = [target]
elif target.parent_id:
tree = [target.parent]
else:
tree = []
else:
tree = []
if tree:
tree[0].old_pk = tree[0].pk
first = True
# loop over all affected pages (self is included in descendants)
for page in pages:
titles = list(page.title_set.all())
# get all current placeholders (->plugins)
placeholders = list(page.placeholders.all())
origin_id = page.id
# create a copy of this page by setting pk = None (=new instance)
page.old_pk = page.pk
page.pk = None
page.level = None
page.rght = None
page.lft = None
page.tree_id = None
page.published = False
page.publisher_public_id = None
# only set reverse_id on standard copy
if page.reverse_id in site_reverse_ids:
page.reverse_id = None
if first:
first = False
if tree:
page.parent = tree[0]
else:
page.parent = None
page.insert_at(target, position)
else:
count = 1
found = False
for prnt in tree:
if prnt.old_pk == page.parent_id:
page.parent = prnt
tree = tree[0:count]
found = True
break
count += 1
if not found:
page.parent = None
tree.append(page)
page.site = site
page.save()
# copy permissions if necessary
if get_cms_setting('PERMISSION') and copy_permissions:
from cms.models.permissionmodels import PagePermission
for permission in PagePermission.objects.filter(page__id=origin_id):
permission.pk = None
permission.page = page
permission.save()
update_moderation_message(page, unicode(_('Page was copied.')))
# copy titles of this page
for title in titles:
title.pk = None # setting pk = None creates a new instance
title.page = page
# create slug-copy for standard copy
title.slug = page_utils.get_available_slug(title)
title.save()
# copy the placeholders (and plugins on those placeholders!)
for ph in placeholders:
plugins = ph.get_plugins_list()
try:
ph = page.placeholders.get(slot=ph.slot)
except Placeholder.DoesNotExist:
ph.pk = None # make a new instance
ph.save()
page.placeholders.add(ph)
# update the page copy
page_copy = page
if plugins:
copy_plugins_to(plugins, ph)
# invalidate the menu for this site
menu_pool.clear(site_id=site.pk)
return page_copy # return the page_copy or None
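# Illustrative usage sketch (not part of the original module; ``draft_page``,
# ``parent_page`` and ``site`` are assumed to exist):
#
#     new_copy = draft_page.copy_page(parent_page, site, position='first-child')
#     # new_copy is returned unpublished; its titles receive fresh slugs via
#     # page_utils.get_available_slug() and the menu cache for ``site`` is cleared.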
def save(self, no_signals=False, commit=True, **kwargs):
"""
Args:
commit: True if model should be really saved
"""
# delete template cache
if hasattr(self, '_template_cache'):
delattr(self, '_template_cache')
created = not bool(self.pk)
# Published pages should always have a publication date
# if the page is published we set the publish date if not set yet.
if self.publication_date is None and self.published:
self.publication_date = timezone.now() - timedelta(seconds=5)
if self.reverse_id == "":
self.reverse_id = None
from cms.utils.permissions import _thread_locals
user = getattr(_thread_locals, "user", None)
if user:
self.changed_by = user.username
else:
self.changed_by = "script"
if created:
self.created_by = self.changed_by
if commit:
if no_signals: # ugly hack because of mptt
self.save_base(cls=self.__class__, **kwargs)
else:
super(Page, self).save(**kwargs)
def save_base(self, *args, **kwargs):
"""Overridden save_base. If an instance is draft, and was changed, mark
it as dirty.
Dirty flag is used for changed nodes identification when publish method
takes place. After current changes are published, state is set back to
PUBLISHER_STATE_DEFAULT (in publish method).
"""
keep_state = getattr(self, '_publisher_keep_state', None)
if self.publisher_is_draft and not keep_state:
self.publisher_state = self.PUBLISHER_STATE_DIRTY
if keep_state:
delattr(self, '_publisher_keep_state')
ret = super(Page, self).save_base(*args, **kwargs)
return ret
def publish(self):
"""Overrides Publisher method, because there may be some descendants, which
are waiting for parent to publish, so publish them if possible.
:returns: True if page was successfully published.
"""
# Publish can only be called on draft pages
if not self.publisher_is_draft:
raise PublicIsUnmodifiable('The public instance cannot be published. Use draft.')
# publish, but only if all parents are published!!
published = None
if not self.pk:
self.save()
if not self.parent_id:
self.clear_home_pk_cache()
if self._publisher_can_publish():
if self.publisher_public_id:
# Ensure we have up to date mptt properties
public_page = Page.objects.get(pk=self.publisher_public_id)
else:
public_page = Page(created_by=self.created_by)
self._copy_attributes(public_page)
# we need to relate this new public copy to its draft page (self)
public_page.publisher_public = self
public_page.publisher_is_draft = False
# Ensure that the page is in the right position and save it
public_page = self._publisher_save_public(public_page)
public_page.published = (public_page.parent_id is None or public_page.parent.published)
public_page.save()
# The target page now has a pk, so can be used as a target
self._copy_titles(public_page)
self._copy_contents(public_page)
# invalidate the menu for this site
menu_pool.clear(site_id=self.site_id)
# taken from Publisher - copy_page needs to call self._publisher_save_public(copy) for mptt insertion
# insert_at() may have called the _create_tree_space() method, in which
# case tree_id can change, so we must refresh tree_id from the db before
# saving
if getattr(self, 'tree_id', None):
me = self._default_manager.get(pk=self.pk)
self.tree_id = me.tree_id
self.publisher_public = public_page
published = True
else:
# Nothing left to do
pass
if self.publisher_public and self.publisher_public.published:
self.publisher_state = Page.PUBLISHER_STATE_DEFAULT
else:
self.publisher_state = Page.PUBLISHER_STATE_PENDING
self.published = True
self._publisher_keep_state = True
self.save()
# If we are publishing, this page might have become a "home" which
# would change the path
if self.is_home():
for title in self.title_set.all():
if title.path != '':
title.save()
# clean moderation log
self.pagemoderatorstate_set.all().delete()
if not published:
# was not published, escape
return
# Check if there are some children which are waiting for parents to
# become published.
publish_set = self.get_descendants().filter(published=True).select_related('publisher_public')
for page in publish_set:
if page.publisher_public:
# ensure that parent exists
if page.publisher_public.parent:
if page.publisher_public.parent.published:
if not page.publisher_public.published:
page.publisher_public.published = True
page.publisher_public.save()
if page.publisher_state == Page.PUBLISHER_STATE_PENDING:
page.publisher_state = Page.PUBLISHER_STATE_DEFAULT
page._publisher_keep_state = True
page.save()
elif page.publisher_state == Page.PUBLISHER_STATE_PENDING:
page.publish()
# fire signal after publishing is done
import cms.signals as cms_signals
cms_signals.post_publish.send(sender=Page, instance=self)
return published
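# Hedged usage sketch (request and page_id are assumed objects): publishing a draft
# creates or updates its public counterpart and may cascade to pending descendants:
#
#     draft = Page.objects.drafts().get(pk=page_id)
#     if draft.has_publish_permission(request):
#         result = draft.publish()  # True on success, None if a parent is unpublished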
def unpublish(self):
"""
Removes this page from the public site
:returns: True if this page was successfully unpublished
"""
# Publish can only be called on draft pages
if not self.publisher_is_draft:
raise PublicIsUnmodifiable('The public instance cannot be unpublished. Use draft.')
# First, make sure we are in the correct state
self.published = False
self.save()
public_page = self.get_public_object()
if public_page:
public_page.published = False
public_page.save()
# Go through all children of our public instance
descendants = public_page.get_descendants()
for child in descendants:
child.published = False
child.save()
draft = child.publisher_public
if (draft and draft.published and
draft.publisher_state == Page.PUBLISHER_STATE_DEFAULT):
draft.publisher_state = Page.PUBLISHER_STATE_PENDING
draft._publisher_keep_state = True
draft.save()
return True
def revert(self):
"""Revert the draft version to the same state as the public version
"""
# Revert can only be called on draft pages
if not self.publisher_is_draft:
raise PublicIsUnmodifiable('The public instance cannot be reverted. Use draft.')
if not self.publisher_public:
# TODO: Issue an error
return
public = self.publisher_public
public._copy_titles(self)
if self.parent != (self.publisher_public.parent_id and
self.publisher_public.parent.publisher_draft):
# We don't send the signals here
self.move_to(public.parent.publisher_draft)
public._copy_contents(self)
public._copy_attributes(self)
self.published = True
self.publisher_state = self.PUBLISHER_STATE_DEFAULT
self._publisher_keep_state = True
self.save()
# clean moderation log
self.pagemoderatorstate_set.all().delete()
def delete(self):
"""Mark public instance for deletion and delete draft.
"""
placeholders = self.placeholders.all()
for ph in placeholders:
plugin = CMSPlugin.objects.filter(placeholder=ph)
plugin.delete()
ph.delete()
if self.publisher_public_id:
# mark the public instance for deletion
self.publisher_public.publisher_state = self.PUBLISHER_STATE_DELETE
self.publisher_public.save()
super(Page, self).delete()
def delete_with_public(self):
"""
Assuming this page and all its descendants have been marked for
deletion, recursively deletes the entire set of pages including the
public instance.
"""
descendants = list(self.get_descendants().order_by('level'))
descendants.reverse()
descendants.append(self)
# Get all pages that are children of any public page that would be deleted
public_children = Page.objects.public().filter(
parent__publisher_public__in=descendants)
public_pages = Page.objects.public().filter(publisher_public__in=descendants)
if set(public_children).difference(public_pages):
raise PermissionDenied('There are pages that would be orphaned. '
'Publish their move requests first.')
for page in descendants:
placeholders = list(page.placeholders.all())
if page.publisher_public_id:
placeholders = placeholders + list(page.publisher_public.placeholders.all())
plugins = CMSPlugin.objects.filter(placeholder__in=placeholders)
plugins.delete()
for ph in placeholders:
ph.delete()
if page.publisher_public_id:
page.publisher_public.delete()
super(Page, page).delete()
def get_draft_object(self):
if not self.publisher_is_draft:
return self.publisher_draft
return self
def get_public_object(self):
if not self.publisher_is_draft:
return self
return self.publisher_public
def get_languages(self):
"""
get the list of all existing languages for this page
"""
from cms.models.titlemodels import Title
from django.conf import settings
#if len(settings.CMS_LANGUAGES) == 1:
# # If this is a mono-lingular site, then save one database hit by
# # inferring that all pages must be in this language.
# # CMS_LANGUAGES will then be defined like:
# # CMS_LANGUAGES = (('nb', 'Norwegian'),)
# # so get the language code part of the first language tuple.
# self.all_languages = settings.CMS_LANGUAGES[0][0]
#elif not hasattr(self, "all_languages"):
if not hasattr(self, "all_languages"):
self.all_languages = Title.objects.filter(page=self).values_list("language", flat=True).distinct()
self.all_languages = list(self.all_languages)
self.all_languages.sort()
self.all_languages = map(str, self.all_languages)
return self.all_languages
def get_cached_ancestors(self, ascending=True):
if ascending:
if not hasattr(self, "ancestors_ascending"):
self.ancestors_ascending = list(self.get_ancestors(ascending))
return self.ancestors_ascending
else:
if not hasattr(self, "ancestors_descending"):
self.ancestors_descending = list(self.get_ancestors(ascending))
return self.ancestors_descending
# ## Title object access
def get_title_obj(self, language=None, fallback=True, version_id=None, force_reload=False):
"""Helper function for accessing wanted / current title.
If wanted title doesn't exists, EmptyTitle instance will be returned.
If fallback=False is used, titlemodels.Title.DoesNotExist will be raised
when a language does not exist.
"""
language = self._get_title_cache(language, fallback, version_id, force_reload)
if language in self.title_cache:
return self.title_cache[language]
from cms.models.titlemodels import EmptyTitle
return EmptyTitle()
def get_title_obj_attribute(self, attrname, language=None, fallback=True, version_id=None, force_reload=False):
"""Helper function for getting attribute or None from wanted/current title.
"""
try:
attribute = getattr(self.get_title_obj(
language, fallback, version_id, force_reload), attrname)
return attribute
except AttributeError:
return None
def get_path(self, language=None, fallback=True, version_id=None, force_reload=False):
"""
get the path of the page depending on the given language
"""
return self.get_title_obj_attribute("path", language, fallback, version_id, force_reload)
def get_slug(self, language=None, fallback=True, version_id=None, force_reload=False):
"""
get the slug of the page depending on the given language
"""
return self.get_title_obj_attribute("slug", language, fallback, version_id, force_reload)
def get_title(self, language=None, fallback=True, version_id=None, force_reload=False):
"""
get the title of the page depending on the given language
"""
return self.get_title_obj_attribute("title", language, fallback, version_id, force_reload)
def get_menu_title(self, language=None, fallback=True, version_id=None, force_reload=False):
"""
get the menu title of the page depending on the given language
"""
menu_title = self.get_title_obj_attribute("menu_title", language, fallback, version_id, force_reload)
if not menu_title:
return self.get_title(language, True, version_id, force_reload)
return menu_title
def get_page_title(self, language=None, fallback=True, version_id=None, force_reload=False):
"""
get the page title of the page depending on the given language
"""
page_title = self.get_title_obj_attribute("page_title", language, fallback, version_id, force_reload)
if not page_title:
return self.get_title(language, True, version_id, force_reload)
return page_title
def get_meta_description(self, language=None, fallback=True, version_id=None, force_reload=False):
"""
get content for the description meta tag for the page depending on the given language
"""
return self.get_title_obj_attribute("meta_description", language, fallback, version_id, force_reload)
def get_meta_keywords(self, language=None, fallback=True, version_id=None, force_reload=False):
"""
get content for the keywords meta tag for the page depending on the given language
"""
return self.get_title_obj_attribute("meta_keywords", language, fallback, version_id, force_reload)
def get_application_urls(self, language=None, fallback=True, version_id=None, force_reload=False):
"""
get application urls conf for application hook
"""
return self.get_title_obj_attribute("application_urls", language, fallback, version_id, force_reload)
def get_redirect(self, language=None, fallback=True, version_id=None, force_reload=False):
"""
get redirect
"""
return self.get_title_obj_attribute("redirect", language, fallback, version_id, force_reload)
def _get_title_cache(self, language, fallback, version_id, force_reload):
if not language:
language = get_language()
load = False
if not hasattr(self, "title_cache") or force_reload:
load = True
self.title_cache = {}
elif not language in self.title_cache:
if fallback:
fallback_langs = i18n.get_fallback_languages(language)
for lang in fallback_langs:
if lang in self.title_cache:
return lang
load = True
if load:
from cms.models.titlemodels import Title
if version_id:
from reversion.models import Version
version = get_object_or_404(Version, pk=version_id)
revs = [related_version.object_version for related_version in version.revision.version_set.all()]
for rev in revs:
obj = rev.object
if obj.__class__ == Title:
self.title_cache[obj.language] = obj
else:
title = Title.objects.get_title(self, language, language_fallback=fallback)
if title:
self.title_cache[title.language] = title
language = title.language
return language
def get_template(self):
"""
get the template of this page if defined, otherwise the template of the
closest ancestor that defines one, falling back to DEFAULT_PAGE_TEMPLATE
"""
if hasattr(self, '_template_cache'):
return self._template_cache
template = None
if self.template:
if self.template != constants.TEMPLATE_INHERITANCE_MAGIC:
template = self.template
else:
try:
template = self.get_ancestors(ascending=True).exclude(
template=constants.TEMPLATE_INHERITANCE_MAGIC).values_list('template', flat=True)[0]
except IndexError:
pass
if not template:
template = get_cms_setting('TEMPLATES')[0][0]
self._template_cache = template
return template
def get_template_name(self):
"""
get the textual name (2nd parameter in get_cms_setting('TEMPLATES'))
of the template of this page or of the nearest ancestor.
Failing that, return the name of the default template.
"""
template = self.get_template()
for t in get_cms_setting('TEMPLATES'):
if t[0] == template:
return t[1]
return _("default")
def has_view_permission(self, request):
from cms.models.permissionmodels import PagePermission, GlobalPagePermission
from cms.utils.plugins import current_site
if not self.publisher_is_draft:
return self.publisher_draft.has_view_permission(request)
# does any restriction exist?
# inherited and direct
is_restricted = PagePermission.objects.for_page(page=self).filter(can_view=True).exists()
if request.user.is_authenticated():
site = current_site(request)
global_perms_q = Q(can_view=True) & Q(
Q(sites__in=[site]) | Q(sites__isnull=True)
)
global_view_perms = GlobalPagePermission.objects.with_user(
request.user).filter(global_perms_q).exists()
# a global permission was given to the request's user
if global_view_perms:
return True
elif not is_restricted:
if ((get_cms_setting('PUBLIC_FOR') == 'all') or
(get_cms_setting('PUBLIC_FOR') == 'staff' and
request.user.is_staff)):
return True
# a restricted page and an authenticated user
elif is_restricted:
opts = self._meta
codename = '%s.view_%s' % (opts.app_label, opts.object_name.lower())
user_perm = request.user.has_perm(codename)
generic_perm = self.has_generic_permission(request, "view")
return (user_perm or generic_perm)
else:
#anonymous user
if is_restricted or not get_cms_setting('PUBLIC_FOR') == 'all':
# anonymous user: the page is restricted or global access is not permitted
return False
else:
# anonymous user, no restriction saved in database
return True
# Authenticated user
# Django wide auth perms "can_view" or cms auth perms "can_view"
opts = self._meta
codename = '%s.view_%s' % (opts.app_label, opts.object_name.lower())
return (request.user.has_perm(codename) or
self.has_generic_permission(request, "view"))
def has_change_permission(self, request):
opts = self._meta
if request.user.is_superuser:
return True
return request.user.has_perm(opts.app_label + '.' + opts.get_change_permission()) and \
self.has_generic_permission(request, "change")
def has_delete_permission(self, request):
opts = self._meta
if request.user.is_superuser:
return True
return request.user.has_perm(opts.app_label + '.' + opts.get_delete_permission()) and \
self.has_generic_permission(request, "delete")
def has_publish_permission(self, request):
if request.user.is_superuser:
return True
opts = self._meta
return request.user.has_perm(opts.app_label + '.' + "publish_page") and \
self.has_generic_permission(request, "publish")
has_moderate_permission = has_publish_permission
def has_advanced_settings_permission(self, request):
return self.has_generic_permission(request, "advanced_settings")
def has_change_permissions_permission(self, request):
"""
Does the user have the ability to change permissions for the current page?
"""
return self.has_generic_permission(request, "change_permissions")
def has_add_permission(self, request):
"""
Does the user have the ability to add a page under the current page?
"""
return self.has_generic_permission(request, "add")
def has_move_page_permission(self, request):
"""Has user ability to move current page?
"""
return self.has_generic_permission(request, "move_page")
def has_generic_permission(self, request, perm_type):
"""
Return true if the current user has permission on the page.
Return the string 'All' if the user has all rights.
"""
att_name = "permission_%s_cache" % perm_type
if not hasattr(self, "permission_user_cache") or not hasattr(self, att_name) \
or request.user.pk != self.permission_user_cache.pk:
from cms.utils.permissions import has_generic_permission
self.permission_user_cache = request.user
setattr(self, att_name, has_generic_permission(
self.id, request.user, perm_type, self.site_id))
if getattr(self, att_name):
self.permission_edit_cache = True
return getattr(self, att_name)
def is_home(self):
if self.parent_id:
return False
else:
try:
return self.home_pk_cache == self.pk
except NoHomeFound:
pass
return False
def get_home_pk_cache(self):
attr = "%s_home_pk_cache_%s" % (self.publisher_is_draft and "draft" or "public", self.site_id)
if getattr(self, attr, None) is None:
setattr(self, attr, self.get_object_queryset().get_home(self.site).pk)
return getattr(self, attr)
def set_home_pk_cache(self, value):
attr = "%s_home_pk_cache_%s" % (self.publisher_is_draft and "draft" or "public", self.site_id)
setattr(self, attr, value)
home_pk_cache = property(get_home_pk_cache, set_home_pk_cache)
def clear_home_pk_cache(self):
self.home_pk_cache = None
def get_media_path(self, filename):
"""
Returns path (relative to MEDIA_ROOT/MEDIA_URL) to directory for storing page-scope files.
This allows multiple pages to contain files with identical names without namespace issues.
Plugins such as Picture can use this method to initialise the 'upload_to' parameter for
File-based fields. For example:
image = models.ImageField(_("image"), upload_to=CMSPlugin.get_media_path)
where CMSPlugin.get_media_path calls self.page.get_media_path
This location can be customised using the CMS_PAGE_MEDIA_PATH setting
"""
return join(get_cms_setting('PAGE_MEDIA_PATH'), "%d" % self.id, filename)
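# Illustrative example (assumed page id and setting): with PAGE_MEDIA_PATH left at
# something like "cms_page_media/", a page with id 42 would store "photo.jpg" under
# "cms_page_media/42/photo.jpg" (the exact prefix depends on the setting).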
def last_page_states(self):
"""Returns last five page states, if they exist, optimized, calls sql
query only if some states available
"""
result = getattr(self, '_moderator_state_cache', None)
if result is None:
result = list(self.pagemoderatorstate_set.all().order_by('created'))
self._moderator_state_cache = result
return result[:5]
def delete_requested(self):
""" Checks whether there are any delete requests for this page.
Uses the same cache as last_page_states to minimize DB requests
"""
from cms.models import PageModeratorState
result = getattr(self, '_moderator_state_cache', None)
if result is None:
return self.pagemoderatorstate_set.get_delete_actions().exists()
for state in result:
if state.action == PageModeratorState.ACTION_DELETE:
return True
return False
def is_public_published(self):
"""Returns true if public model is published.
"""
if hasattr(self, '_public_published_cache'):
# if it was cached in change list, return cached value
return self._public_published_cache
# If we have a public version it will be published as well.
# If it isn't published, it should be deleted.
return self.published and self.publisher_public_id and self.publisher_public.published
def reload(self):
"""
Reload a page from the database
"""
return Page.objects.get(pk=self.pk)
def get_object_queryset(self):
"""Returns smart queryset depending on object type - draft / public
"""
qs = self.__class__.objects
return self.publisher_is_draft and qs.drafts() or qs.public().published()
def _publisher_can_publish(self):
"""Is parent of this object already published?
"""
if self.parent_id:
try:
return bool(self.parent.publisher_public_id)
except AttributeError:
raise MpttPublisherCantPublish
return True
def get_next_filtered_sibling(self, **filters):
"""Very similar to original mptt method, but adds support for filters.
Returns this model instance's next sibling in the tree, or
``None`` if it doesn't have a next sibling.
"""
opts = self._mptt_meta
if self.is_root_node():
filters.update({
'%s__isnull' % opts.parent_attr: True,
'%s__gt' % opts.tree_id_attr: getattr(self, opts.tree_id_attr),
})
else:
filters.update({
opts.parent_attr: getattr(self, '%s_id' % opts.parent_attr),
'%s__gt' % opts.left_attr: getattr(self, opts.right_attr),
})
# publisher stuff
filters.update({
'publisher_is_draft': self.publisher_is_draft
})
# multisite
filters.update({
'site__id': self.site_id
})
sibling = None
try:
sibling = self._tree_manager.filter(**filters)[0]
except IndexError:
pass
return sibling
def get_previous_filtered_sibling(self, **filters):
"""Very similar to original mptt method, but adds support for filters.
Returns this model instance's previous sibling in the tree, or
``None`` if it doesn't have a previous sibling.
"""
opts = self._mptt_meta
if self.is_root_node():
filters.update({
'%s__isnull' % opts.parent_attr: True,
'%s__lt' % opts.tree_id_attr: getattr(self, opts.tree_id_attr),
})
order_by = '-%s' % opts.tree_id_attr
else:
filters.update({
opts.parent_attr: getattr(self, '%s_id' % opts.parent_attr),
'%s__lt' % opts.right_attr: getattr(self, opts.left_attr),
})
order_by = '-%s' % opts.right_attr
# publisher stuff
filters.update({
'publisher_is_draft': self.publisher_is_draft
})
# multisite
filters.update({
'site__id': self.site_id
})
sibling = None
try:
sibling = self._tree_manager.filter(**filters).order_by(order_by)[0]
except IndexError:
pass
return sibling
def _publisher_save_public(self, obj):
"""Mptt specific stuff before the object can be saved, overrides original
publisher method.
Args:
obj - public variant of `self` to be saved.
"""
public_parent = self.parent.publisher_public if self.parent_id else None
filters = dict(publisher_public__isnull=False)
if public_parent:
filters['publisher_public__parent__in'] = [public_parent]
else:
filters['publisher_public__parent__isnull'] = True
prev_sibling = self.get_previous_filtered_sibling(**filters)
public_prev_sib = prev_sibling.publisher_public if prev_sibling else None
if not self.publisher_public_id: # first time published
# is there anybody on left side?
if public_prev_sib:
obj.insert_at(public_prev_sib, position='right', save=False)
else:
if public_parent:
obj.insert_at(public_parent, position='first-child', save=False)
else:
# check if object was moved / structural tree change
prev_public_sibling = obj.get_previous_filtered_sibling()
if self.level != obj.level or \
public_parent != obj.parent or \
public_prev_sib != prev_public_sibling:
if public_prev_sib:
obj.move_to(public_prev_sib, position="right")
elif public_parent:
# move as a first child to parent
obj.move_to(public_parent, position='first-child')
else:
# it is a move from the right side or just save
next_sibling = self.get_next_filtered_sibling(**filters)
if next_sibling and next_sibling.publisher_public_id:
obj.move_to(next_sibling.publisher_public, position="left")
return obj
def rescan_placeholders(self):
"""
Rescan and if necessary create placeholders in the current template.
"""
# inline import to prevent circular imports
from cms.utils.plugins import get_placeholders
placeholders = get_placeholders(self.get_template())
found = {}
for placeholder in self.placeholders.all():
if placeholder.slot in placeholders:
found[placeholder.slot] = placeholder
for placeholder_name in placeholders:
if not placeholder_name in found:
placeholder = Placeholder.objects.create(slot=placeholder_name)
self.placeholders.add(placeholder)
found[placeholder_name] = placeholder
def _reversion():
exclude_fields = ['publisher_is_draft', 'publisher_public', 'publisher_state']
reversion_register(
Page,
follow=["title_set", "placeholders", "pagepermission_set"],
exclude_fields=exclude_fields
)
_reversion()
|
|
"""Common IO api utilities"""
import bz2
from collections import abc
import dataclasses
import gzip
from io import BufferedIOBase, BytesIO, RawIOBase, StringIO, TextIOWrapper
import mmap
import os
from typing import IO, Any, AnyStr, Dict, List, Mapping, Optional, Tuple, Union, cast
from urllib.parse import (
urljoin,
urlparse as parse_url,
uses_netloc,
uses_params,
uses_relative,
)
import warnings
import zipfile
from pandas._typing import (
Buffer,
CompressionDict,
CompressionOptions,
FileOrBuffer,
FilePathOrBuffer,
StorageOptions,
)
from pandas.compat import get_lzma_file, import_lzma
from pandas.compat._optional import import_optional_dependency
from pandas.core.dtypes.common import is_file_like
lzma = import_lzma()
_VALID_URLS = set(uses_relative + uses_netloc + uses_params)
_VALID_URLS.discard("")
@dataclasses.dataclass
class IOArgs:
"""
Return value of io/common.py:_get_filepath_or_buffer.
Note (copied & pasted from io/parsers):
filepath_or_buffer can be Union[FilePathOrBuffer, s3fs.S3File, gcsfs.GCSFile]
though mypy handling of conditional imports is difficult.
See https://github.com/python/mypy/issues/1297
"""
filepath_or_buffer: FileOrBuffer
encoding: str
mode: str
compression: CompressionDict
should_close: bool = False
@dataclasses.dataclass
class IOHandles:
"""
Return value of io/common.py:get_handle
Can be used as a context manager.
This is used to easily close created buffers and to handle corner cases when
TextIOWrapper is inserted.
handle: The file handle to be used.
created_handles: All file handles that are created by get_handle
is_wrapped: Whether a TextIOWrapper needs to be detached.
"""
handle: Buffer
compression: CompressionDict
created_handles: List[Buffer] = dataclasses.field(default_factory=list)
is_wrapped: bool = False
is_mmap: bool = False
def close(self) -> None:
"""
Close all created buffers.
Note: If a TextIOWrapper was inserted, it is flushed and detached to
avoid closing the potentially user-created buffer.
"""
if self.is_wrapped:
assert isinstance(self.handle, TextIOWrapper)
self.handle.flush()
self.handle.detach()
self.created_handles.remove(self.handle)
try:
for handle in self.created_handles:
handle.close()
except (OSError, ValueError):
pass
self.created_handles = []
self.is_wrapped = False
def __enter__(self) -> "IOHandles":
return self
def __exit__(self, *args: Any) -> None:
self.close()
def is_url(url) -> bool:
"""
Check to see if a URL has a valid protocol.
Parameters
----------
url : str or unicode
Returns
-------
isurl : bool
If `url` has a valid protocol return True otherwise False.
"""
if not isinstance(url, str):
return False
return parse_url(url).scheme in _VALID_URLS
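# Illustrative examples (not from the original source):
#
#     is_url("https://example.com/data.csv")  # -> True
#     is_url("relative/path/data.csv")        # -> False (no recognised scheme)
#     is_url(123)                             # -> False (not a string)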
def _expand_user(filepath_or_buffer: FileOrBuffer[AnyStr]) -> FileOrBuffer[AnyStr]:
"""
Return the argument with an initial component of ~ or ~user
replaced by that user's home directory.
Parameters
----------
filepath_or_buffer : object to be converted if possible
Returns
-------
expanded_filepath_or_buffer : an expanded filepath or the
input if not expandable
"""
if isinstance(filepath_or_buffer, str):
return os.path.expanduser(filepath_or_buffer)
return filepath_or_buffer
def validate_header_arg(header) -> None:
if isinstance(header, bool):
raise TypeError(
"Passing a bool to header is invalid. Use header=None for no header or "
"header=int or list-like of ints to specify "
"the row(s) making up the column names"
)
def stringify_path(
filepath_or_buffer: FilePathOrBuffer[AnyStr],
convert_file_like: bool = False,
) -> FileOrBuffer[AnyStr]:
"""
Attempt to convert a path-like object to a string.
Parameters
----------
filepath_or_buffer : object to be converted
Returns
-------
str_filepath_or_buffer : maybe a string version of the object
Notes
-----
Objects supporting the fspath protocol (python 3.6+) are coerced
according to their __fspath__ method.
Any other object is passed through unchanged, which includes bytes,
strings, buffers, or anything else that's not even path-like.
"""
if not convert_file_like and is_file_like(filepath_or_buffer):
# GH 38125: some fsspec objects implement os.PathLike but have already opened a
# file. This prevents opening the file a second time. infer_compression calls
# this function with convert_file_like=True to infer the compression.
return cast(FileOrBuffer[AnyStr], filepath_or_buffer)
if isinstance(filepath_or_buffer, os.PathLike):
filepath_or_buffer = filepath_or_buffer.__fspath__()
return _expand_user(filepath_or_buffer)
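# Illustrative examples (file names are assumed):
#
#     import pathlib
#     stringify_path(pathlib.Path("~/data.csv"))  # -> "/home/<user>/data.csv"
#     stringify_path(open("data.csv"))            # returned unchanged (file-like object)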
def urlopen(*args, **kwargs):
"""
Lazy-import wrapper for stdlib urlopen, as that imports a big chunk of
the stdlib.
"""
import urllib.request
return urllib.request.urlopen(*args, **kwargs)
def is_fsspec_url(url: FilePathOrBuffer) -> bool:
"""
Returns true if the given URL looks like
something fsspec can handle
"""
return (
isinstance(url, str)
and "://" in url
and not url.startswith(("http://", "https://"))
)
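# Illustrative examples (URLs are assumed):
#
#     is_fsspec_url("s3://bucket/key.parquet")  # -> True
#     is_fsspec_url("https://host/file.csv")    # -> False (handled via urllib instead)
#     is_fsspec_url("local/file.csv")           # -> False (no "://")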
def _get_filepath_or_buffer(
filepath_or_buffer: FilePathOrBuffer,
encoding: str = "utf-8",
compression: CompressionOptions = None,
mode: str = "r",
storage_options: StorageOptions = None,
) -> IOArgs:
"""
If the filepath_or_buffer is a url, translate and return the buffer.
Otherwise passthrough.
Parameters
----------
filepath_or_buffer : a url, filepath (str, py.path.local or pathlib.Path),
or buffer
compression : {{'gzip', 'bz2', 'zip', 'xz', None}}, optional
encoding : the encoding to use to decode bytes, default is 'utf-8'
mode : str, optional
storage_options : dict, optional
Extra options that make sense for a particular storage connection, e.g.
host, port, username, password, etc., if using a URL that will
be parsed by ``fsspec``, e.g., starting "s3://", "gcs://". An error
will be raised if providing this argument with a local path or
a file-like buffer. See the fsspec and backend storage implementation
docs for the set of allowed keys and values
.. versionadded:: 1.2.0
.. versionchanged:: 1.2.0
Returns the dataclass IOArgs.
"""
filepath_or_buffer = stringify_path(filepath_or_buffer)
# handle compression dict
compression_method, compression = get_compression_method(compression)
compression_method = infer_compression(filepath_or_buffer, compression_method)
# GH21227 internal compression is not used for non-binary handles.
if compression_method and hasattr(filepath_or_buffer, "write") and "b" not in mode:
warnings.warn(
"compression has no effect when passing a non-binary object as input.",
RuntimeWarning,
stacklevel=2,
)
compression_method = None
compression = dict(compression, method=compression_method)
# uniform encoding names
if encoding is not None:
encoding = encoding.replace("_", "-").lower()
# bz2 and xz do not write the byte order mark for utf-16 and utf-32
# print a warning when writing such files
if (
"w" in mode
and compression_method in ["bz2", "xz"]
and encoding in ["utf-16", "utf-32"]
):
warnings.warn(
f"{compression} will not write the byte order mark for {encoding}",
UnicodeWarning,
)
# Use binary mode when converting path-like objects to file-like objects (fsspec)
# except when text mode is explicitly requested. The original mode is returned if
# fsspec is not used.
fsspec_mode = mode
if "t" not in fsspec_mode and "b" not in fsspec_mode:
fsspec_mode += "b"
if isinstance(filepath_or_buffer, str) and is_url(filepath_or_buffer):
# TODO: fsspec can also handle HTTP via requests, but leaving this
# unchanged. using fsspec appears to break the ability to infer if the
# server responded with gzipped data
storage_options = storage_options or {}
# waiting until now for importing to match intended lazy logic of
# urlopen function defined elsewhere in this module
import urllib.request
# assuming storage_options is to be interpreted as headers
req_info = urllib.request.Request(filepath_or_buffer, headers=storage_options)
req = urlopen(req_info)
content_encoding = req.headers.get("Content-Encoding", None)
if content_encoding == "gzip":
# Override compression based on Content-Encoding header
compression = {"method": "gzip"}
reader = BytesIO(req.read())
req.close()
return IOArgs(
filepath_or_buffer=reader,
encoding=encoding,
compression=compression,
should_close=True,
mode=fsspec_mode,
)
if is_fsspec_url(filepath_or_buffer):
assert isinstance(
filepath_or_buffer, str
) # just to appease mypy for this branch
# two special-case s3-like protocols; these have special meaning in Hadoop,
# but are equivalent to just "s3" from fsspec's point of view
# cc #11071
if filepath_or_buffer.startswith("s3a://"):
filepath_or_buffer = filepath_or_buffer.replace("s3a://", "s3://")
if filepath_or_buffer.startswith("s3n://"):
filepath_or_buffer = filepath_or_buffer.replace("s3n://", "s3://")
fsspec = import_optional_dependency("fsspec")
# If botocore is installed we fallback to reading with anon=True
# to allow reads from public buckets
err_types_to_retry_with_anon: List[Any] = []
try:
import_optional_dependency("botocore")
from botocore.exceptions import ClientError, NoCredentialsError
err_types_to_retry_with_anon = [
ClientError,
NoCredentialsError,
PermissionError,
]
except ImportError:
pass
try:
file_obj = fsspec.open(
filepath_or_buffer, mode=fsspec_mode, **(storage_options or {})
).open()
# GH 34626 Reads from Public Buckets without Credentials needs anon=True
except tuple(err_types_to_retry_with_anon):
if storage_options is None:
storage_options = {"anon": True}
else:
# don't mutate user input.
storage_options = dict(storage_options)
storage_options["anon"] = True
file_obj = fsspec.open(
filepath_or_buffer, mode=fsspec_mode, **(storage_options or {})
).open()
return IOArgs(
filepath_or_buffer=file_obj,
encoding=encoding,
compression=compression,
should_close=True,
mode=fsspec_mode,
)
elif storage_options:
raise ValueError(
"storage_options passed with file object or non-fsspec file path"
)
if isinstance(filepath_or_buffer, (str, bytes, mmap.mmap)):
return IOArgs(
filepath_or_buffer=_expand_user(filepath_or_buffer),
encoding=encoding,
compression=compression,
should_close=False,
mode=mode,
)
if not is_file_like(filepath_or_buffer):
msg = f"Invalid file path or buffer object type: {type(filepath_or_buffer)}"
raise ValueError(msg)
return IOArgs(
filepath_or_buffer=filepath_or_buffer,
encoding=encoding,
compression=compression,
should_close=False,
mode=mode,
)
def file_path_to_url(path: str) -> str:
"""
converts an absolute native path to a FILE URL.
Parameters
----------
path : a path in native format
Returns
-------
a valid FILE URL
"""
# lazify expensive import (~30ms)
from urllib.request import pathname2url
return urljoin("file:", pathname2url(path))
_compression_to_extension = {"gzip": ".gz", "bz2": ".bz2", "zip": ".zip", "xz": ".xz"}
def get_compression_method(
compression: CompressionOptions,
) -> Tuple[Optional[str], CompressionDict]:
"""
Simplifies a compression argument to a compression method string and
a mapping containing additional arguments.
Parameters
----------
compression : str or mapping
If string, specifies the compression method. If mapping, value at key
'method' specifies compression method.
Returns
-------
tuple of ({compression method}: Optional[str],
          {compression arguments}: Dict[str, Any])
Raises
------
ValueError on mapping missing 'method' key
"""
compression_method: Optional[str]
if isinstance(compression, Mapping):
compression_args = dict(compression)
try:
compression_method = compression_args.pop("method")
except KeyError as err:
raise ValueError("If mapping, compression must have key 'method'") from err
else:
compression_args = {}
compression_method = compression
return compression_method, compression_args
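# Illustrative examples (not from the original source):
#
#     get_compression_method("gzip")
#     # -> ("gzip", {})
#     get_compression_method({"method": "zip", "archive_name": "out.csv"})
#     # -> ("zip", {"archive_name": "out.csv"})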
def infer_compression(
filepath_or_buffer: FilePathOrBuffer, compression: Optional[str]
) -> Optional[str]:
"""
Get the compression method for filepath_or_buffer. If compression='infer',
the inferred compression method is returned. Otherwise, the input
compression method is returned unchanged, unless it's invalid, in which
case an error is raised.
Parameters
----------
filepath_or_buffer : str or file handle
File path or object.
compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}
If 'infer' and `filepath_or_buffer` is path-like, then detect
compression from the following extensions: '.gz', '.bz2', '.zip',
or '.xz' (otherwise no compression).
Returns
-------
string or None
Raises
------
ValueError on invalid compression specified.
"""
if compression is None:
return None
# Infer compression
if compression == "infer":
# Convert all path types (e.g. pathlib.Path) to strings
filepath_or_buffer = stringify_path(filepath_or_buffer, convert_file_like=True)
if not isinstance(filepath_or_buffer, str):
# Cannot infer compression of a buffer, assume no compression
return None
# Infer compression from the filename/URL extension
for compression, extension in _compression_to_extension.items():
if filepath_or_buffer.lower().endswith(extension):
return compression
return None
# Compression has been specified. Check that it's valid
if compression in _compression_to_extension:
return compression
msg = f"Unrecognized compression type: {compression}"
valid = ["infer", None] + sorted(_compression_to_extension)
msg += f"\nValid compression types are {valid}"
raise ValueError(msg)
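# Illustrative examples (file names are assumed):
#
#     infer_compression("data.csv.gz", "infer")  # -> "gzip"
#     infer_compression("data.csv", "infer")     # -> None
#     infer_compression("data.csv", "zip")       # -> "zip" (explicit value passes through)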
def get_handle(
path_or_buf: FilePathOrBuffer,
mode: str,
encoding: Optional[str] = None,
compression: CompressionOptions = None,
memory_map: bool = False,
is_text: bool = True,
errors: Optional[str] = None,
storage_options: StorageOptions = None,
) -> IOHandles:
"""
Get file handle for given path/buffer and mode.
Parameters
----------
path_or_buf : str or file handle
File path or object.
mode : str
Mode to open path_or_buf with.
encoding : str or None
Encoding to use.
compression : str or dict, default None
If string, specifies compression mode. If dict, value at key 'method'
specifies compression mode. Compression mode must be one of {'infer',
'gzip', 'bz2', 'zip', 'xz', None}. If compression mode is 'infer'
and `filepath_or_buffer` is path-like, then detect compression from
the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise
no compression). If dict and compression mode is one of
{'zip', 'gzip', 'bz2'}, or inferred as one of the above,
other entries passed as additional compression options.
.. versionchanged:: 1.0.0
May now be a dict with key 'method' as compression mode
and other keys as compression options if compression
mode is 'zip'.
.. versionchanged:: 1.1.0
Passing compression options as keys in dict is now
supported for compression modes 'gzip' and 'bz2' as well as 'zip'.
memory_map : boolean, default False
See parsers._parser_params for more information.
is_text : boolean, default True
Whether the type of the content passed to the file/buffer is string or
bytes. This is not the same as `"b" not in mode`. If a string content is
passed to a binary file/buffer, a wrapper is inserted.
errors : str, default 'strict'
Specifies how encoding and decoding errors are to be handled.
See the errors argument for :func:`open` for a full list
of options.
storage_options: StorageOptions = None
Passed to _get_filepath_or_buffer
.. versionchanged:: 1.2.0
Returns the dataclass IOHandles
"""
# Windows does not default to utf-8. Set to utf-8 for a consistent behavior
if encoding is None:
encoding = "utf-8"
# read_csv does not know whether the buffer is opened in binary/text mode
if _is_binary_mode(path_or_buf, mode) and "b" not in mode:
mode += "b"
# open URLs
ioargs = _get_filepath_or_buffer(
path_or_buf,
encoding=encoding,
compression=compression,
mode=mode,
storage_options=storage_options,
)
handle = ioargs.filepath_or_buffer
handles: List[Buffer]
# memory mapping needs to be the first step
handle, memory_map, handles = _maybe_memory_map(
handle, memory_map, ioargs.encoding, ioargs.mode, errors
)
is_path = isinstance(handle, str)
compression_args = dict(ioargs.compression)
compression = compression_args.pop("method")
if compression:
# compression libraries do not like an explicit text-mode
ioargs.mode = ioargs.mode.replace("t", "")
# GZ Compression
if compression == "gzip":
if is_path:
assert isinstance(handle, str)
handle = gzip.GzipFile(
filename=handle,
mode=ioargs.mode,
**compression_args,
)
else:
handle = gzip.GzipFile(
fileobj=handle, # type: ignore[arg-type]
mode=ioargs.mode,
**compression_args,
)
# BZ Compression
elif compression == "bz2":
handle = bz2.BZ2File(
handle, # type: ignore[arg-type]
mode=ioargs.mode,
**compression_args,
)
# ZIP Compression
elif compression == "zip":
handle = _BytesZipFile(handle, ioargs.mode, **compression_args)
if handle.mode == "r":
handles.append(handle)
zip_names = handle.namelist()
if len(zip_names) == 1:
handle = handle.open(zip_names.pop())
elif len(zip_names) == 0:
raise ValueError(f"Zero files found in ZIP file {path_or_buf}")
else:
raise ValueError(
"Multiple files found in ZIP file. "
f"Only one file per ZIP: {zip_names}"
)
# XZ Compression
elif compression == "xz":
handle = get_lzma_file(lzma)(handle, ioargs.mode)
# Unrecognized Compression
else:
msg = f"Unrecognized compression type: {compression}"
raise ValueError(msg)
assert not isinstance(handle, str)
handles.append(handle)
elif isinstance(handle, str):
# Check whether the filename is to be opened in binary mode.
# Binary mode does not support 'encoding' and 'newline'.
if ioargs.encoding and "b" not in ioargs.mode:
# Encoding
handle = open(
handle,
ioargs.mode,
encoding=ioargs.encoding,
errors=errors,
newline="",
)
else:
# Binary mode
handle = open(handle, ioargs.mode)
handles.append(handle)
# Convert BytesIO or file objects passed with an encoding
is_wrapped = False
if is_text and (compression or _is_binary_mode(handle, ioargs.mode)):
handle = TextIOWrapper(
handle, # type: ignore[arg-type]
encoding=ioargs.encoding,
errors=errors,
newline="",
)
handles.append(handle)
# only marked as wrapped when the caller provided a handle
is_wrapped = not (
isinstance(ioargs.filepath_or_buffer, str) or ioargs.should_close
)
handles.reverse() # close the most recently added buffer first
if ioargs.should_close:
assert not isinstance(ioargs.filepath_or_buffer, str)
handles.append(ioargs.filepath_or_buffer)
assert not isinstance(handle, str)
return IOHandles(
handle=handle,
created_handles=handles,
is_wrapped=is_wrapped,
is_mmap=memory_map,
compression=ioargs.compression,
)
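# Hedged usage sketch (file name is assumed): IOHandles acts as a context manager and
# closes only the buffers that get_handle itself created:
#
#     with get_handle("data.csv.gz", "r", compression="infer") as handles:
#         text = handles.handle.read()  # decompressed, decoded text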
# error: Definition of "__exit__" in base class "ZipFile" is incompatible with
# definition in base class "BytesIO" [misc]
# error: Definition of "__enter__" in base class "ZipFile" is incompatible with
# definition in base class "BytesIO" [misc]
# error: Definition of "__enter__" in base class "ZipFile" is incompatible with
# definition in base class "BinaryIO" [misc]
# error: Definition of "__enter__" in base class "ZipFile" is incompatible with
# definition in base class "IO" [misc]
# error: Definition of "read" in base class "ZipFile" is incompatible with
# definition in base class "BytesIO" [misc]
# error: Definition of "read" in base class "ZipFile" is incompatible with
# definition in base class "IO" [misc]
class _BytesZipFile(zipfile.ZipFile, BytesIO): # type: ignore[misc]
"""
Wrapper for the standard library class ZipFile that allows the returned
file-like handle to accept byte strings via its `write` method.
BytesIO provides the attributes of a file-like object, and ZipFile.writestr
writes byte strings into a member of the archive.
"""
# GH 17778
def __init__(
self,
file: FilePathOrBuffer,
mode: str,
archive_name: Optional[str] = None,
**kwargs,
):
mode = mode.replace("b", "")
self.archive_name = archive_name
self.multiple_write_buffer: Optional[Union[StringIO, BytesIO]] = None
kwargs_zip: Dict[str, Any] = {"compression": zipfile.ZIP_DEFLATED}
kwargs_zip.update(kwargs)
super().__init__(file, mode, **kwargs_zip) # type: ignore[arg-type]
def write(self, data):
# buffer multiple write calls, write on flush
if self.multiple_write_buffer is None:
self.multiple_write_buffer = (
BytesIO() if isinstance(data, bytes) else StringIO()
)
self.multiple_write_buffer.write(data)
def flush(self) -> None:
# write to actual handle and close write buffer
if self.multiple_write_buffer is None or self.multiple_write_buffer.closed:
return
# ZipFile needs a non-empty string
archive_name = self.archive_name or self.filename or "zip"
with self.multiple_write_buffer:
super().writestr(archive_name, self.multiple_write_buffer.getvalue())
def close(self):
self.flush()
super().close()
@property
def closed(self):
return self.fp is None
class _MMapWrapper(abc.Iterator):
"""
Wrapper for Python's mmap class so that it can be properly read in
by Python's csv.reader class.
Parameters
----------
f : file object
File object to be mapped onto memory. Must support the 'fileno'
method or have an equivalent attribute
"""
def __init__(self, f: IO):
self.attributes = {}
for attribute in ("seekable", "readable", "writeable"):
if not hasattr(f, attribute):
continue
self.attributes[attribute] = getattr(f, attribute)()
self.mmap = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
def __getattr__(self, name: str):
if name in self.attributes:
return lambda: self.attributes[name]
return getattr(self.mmap, name)
def __iter__(self) -> "_MMapWrapper":
return self
def __next__(self) -> str:
newbytes = self.mmap.readline()
# readline returns bytes, not str, but Python's CSV reader
# expects str, so convert the output to str before continuing
newline = newbytes.decode("utf-8")
# mmap doesn't raise if reading past the allocated
# data but instead returns an empty string, so raise
# if that is returned
if newline == "":
raise StopIteration
return newline
def _maybe_memory_map(
handle: FileOrBuffer,
memory_map: bool,
encoding: str,
mode: str,
errors: Optional[str],
) -> Tuple[FileOrBuffer, bool, List[Buffer]]:
"""Try to memory map file/buffer."""
handles: List[Buffer] = []
memory_map &= hasattr(handle, "fileno") or isinstance(handle, str)
if not memory_map:
return handle, memory_map, handles
# need to open the file first
if isinstance(handle, str):
if encoding and "b" not in mode:
# Encoding
handle = open(handle, mode, encoding=encoding, errors=errors, newline="")
else:
# Binary mode
handle = open(handle, mode)
handles.append(handle)
try:
wrapped = cast(mmap.mmap, _MMapWrapper(handle)) # type: ignore[arg-type]
handle.close()
handles.remove(handle)
handles.append(wrapped)
handle = wrapped
except Exception:
# we catch any errors that may have occurred
# because that is consistent with the lower-level
# functionality of the C engine (pd.read_csv), so
# leave the file handler as is then
memory_map = False
return handle, memory_map, handles
def file_exists(filepath_or_buffer: FilePathOrBuffer) -> bool:
"""Test whether file exists."""
exists = False
filepath_or_buffer = stringify_path(filepath_or_buffer)
if not isinstance(filepath_or_buffer, str):
return exists
try:
exists = os.path.exists(filepath_or_buffer)
# gh-5874: if the filepath is too long will raise here
except (TypeError, ValueError):
pass
return exists
def _is_binary_mode(handle: FilePathOrBuffer, mode: str) -> bool:
"""Whether the handle is opened in binary mode"""
# classes that expect bytes
binary_classes = [BufferedIOBase, RawIOBase]
return isinstance(handle, tuple(binary_classes)) or "b" in getattr(
handle, "mode", mode
)
|
|
"""The basic dict based notebook format.
The Python representation of a notebook is a nested structure of
dictionary subclasses that support attribute access
(ipython_genutils.ipstruct.Struct). The functions in this module are merely
helpers to build the structs in the right form.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import pprint
import uuid
from ipython_genutils.ipstruct import Struct
from ipython_genutils.py3compat import cast_unicode
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
# Change this when incrementing the nbformat version
nbformat = 3
nbformat_minor = 0
nbformat_schema = {
(3, 0): 'nbformat.v3.schema.json'
}
class NotebookNode(Struct):
pass
def from_dict(d):
if isinstance(d, dict):
newd = NotebookNode()
for k,v in d.items():
newd[k] = from_dict(v)
return newd
elif isinstance(d, (tuple, list)):
return [from_dict(i) for i in d]
else:
return d
def new_output(output_type, output_text=None, output_png=None,
output_html=None, output_svg=None, output_latex=None, output_json=None,
output_javascript=None, output_jpeg=None, prompt_number=None,
ename=None, evalue=None, traceback=None, stream=None, metadata=None):
"""Create a new output, to go in the ``cell.outputs`` list of a code cell.
"""
output = NotebookNode()
output.output_type = str(output_type)
if metadata is None:
metadata = {}
if not isinstance(metadata, dict):
raise TypeError("metadata must be dict")
if output_type in {u'pyout', 'display_data'}:
output.metadata = metadata
if output_type != 'pyerr':
if output_text is not None:
output.text = cast_unicode(output_text)
if output_png is not None:
output.png = cast_unicode(output_png)
if output_jpeg is not None:
output.jpeg = cast_unicode(output_jpeg)
if output_html is not None:
output.html = cast_unicode(output_html)
if output_svg is not None:
output.svg = cast_unicode(output_svg)
if output_latex is not None:
output.latex = cast_unicode(output_latex)
if output_json is not None:
output.json = cast_unicode(output_json)
if output_javascript is not None:
output.javascript = cast_unicode(output_javascript)
if output_type == u'pyout':
if prompt_number is not None:
output.prompt_number = int(prompt_number)
if output_type == u'pyerr':
if ename is not None:
output.ename = cast_unicode(ename)
if evalue is not None:
output.evalue = cast_unicode(evalue)
if traceback is not None:
output.traceback = [cast_unicode(frame) for frame in list(traceback)]
if output_type == u'stream':
output.stream = 'stdout' if stream is None else cast_unicode(stream)
return output
def new_code_cell(input=None, prompt_number=None, outputs=None,
language=u'python', collapsed=False, metadata=None):
"""Create a new code cell with input and output"""
cell = NotebookNode()
cell.cell_type = u'code'
if language is not None:
cell.language = cast_unicode(language)
if input is not None:
cell.input = cast_unicode(input)
if prompt_number is not None:
cell.prompt_number = int(prompt_number)
if outputs is None:
cell.outputs = []
else:
cell.outputs = outputs
if collapsed is not None:
cell.collapsed = bool(collapsed)
cell.metadata = NotebookNode(metadata or {})
return cell
def new_text_cell(cell_type, source=None, rendered=None, metadata=None):
"""Create a new text cell."""
cell = NotebookNode()
# VERSIONHACK: plaintext -> raw
# handle never-released plaintext name for raw cells
if cell_type == 'plaintext':
cell_type = 'raw'
if source is not None:
cell.source = cast_unicode(source)
cell.metadata = NotebookNode(metadata or {})
cell.cell_type = cell_type
return cell
def new_heading_cell(source=None, level=1, rendered=None, metadata=None):
"""Create a new section cell with a given integer level."""
cell = NotebookNode()
cell.cell_type = u'heading'
if source is not None:
cell.source = cast_unicode(source)
cell.level = int(level)
cell.metadata = NotebookNode(metadata or {})
return cell
def new_worksheet(name=None, cells=None, metadata=None):
"""Create a worksheet by name with with a list of cells."""
ws = NotebookNode()
if cells is None:
ws.cells = []
else:
ws.cells = list(cells)
ws.metadata = NotebookNode(metadata or {})
return ws
def new_notebook(name=None, metadata=None, worksheets=None):
"""Create a notebook by name, id and a list of worksheets."""
nb = NotebookNode()
nb.nbformat = nbformat
nb.nbformat_minor = nbformat_minor
if worksheets is None:
nb.worksheets = []
else:
nb.worksheets = list(worksheets)
if metadata is None:
nb.metadata = new_metadata()
else:
nb.metadata = NotebookNode(metadata)
if name is not None:
nb.metadata.name = cast_unicode(name)
return nb
def new_metadata(name=None, authors=None, license=None, created=None,
modified=None, gistid=None):
"""Create a new metadata node."""
metadata = NotebookNode()
if name is not None:
metadata.name = cast_unicode(name)
if authors is not None:
metadata.authors = list(authors)
if created is not None:
metadata.created = cast_unicode(created)
if modified is not None:
metadata.modified = cast_unicode(modified)
if license is not None:
metadata.license = cast_unicode(license)
if gistid is not None:
metadata.gistid = cast_unicode(gistid)
return metadata
def new_author(name=None, email=None, affiliation=None, url=None):
"""Create a new author."""
author = NotebookNode()
if name is not None:
author.name = cast_unicode(name)
if email is not None:
author.email = cast_unicode(email)
if affiliation is not None:
author.affiliation = cast_unicode(affiliation)
if url is not None:
author.url = cast_unicode(url)
return author
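# A minimal usage sketch (illustrative only; it exercises the helpers defined
# above and is not part of the original nbformat v3 API surface):
if __name__ == '__main__':
    example_cell = new_code_cell(
        input=u"print('hello')",
        prompt_number=1,
        outputs=[new_output(u'stream', output_text=u'hello\n')])
    example_ws = new_worksheet(cells=[example_cell])
    example_nb = new_notebook(name=u'example', worksheets=[example_ws])
    assert example_nb.nbformat == nbformat
    assert example_nb.worksheets[0].cells[0].input == u"print('hello')"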
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core import urlresolvers
from django.http import Http404 # noqa
from django.template.defaultfilters import title # noqa
from django.utils.http import urlencode # noqa
from django.utils.translation import ugettext_lazy as _
from horizon import messages
from horizon import tables
from horizon.utils import filters
from heatclient import exc
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.stacks import mappings
class LaunchStack(tables.LinkAction):
name = "launch"
verbose_name = _("Launch Stack")
url = "horizon:project:stacks:select_template"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("orchestration", "cloudformation:CreateStack"),)
class ChangeStackTemplate(tables.LinkAction):
name = "edit"
verbose_name = _("Change Stack Template")
url = "horizon:project:stacks:change_template"
classes = ("ajax-modal",)
icon = "pencil"
def get_link_url(self, stack):
return urlresolvers.reverse(self.url, args=[stack.id])
class DeleteStack(tables.DeleteAction):
data_type_singular = _("Stack")
data_type_plural = _("Stacks")
policy_rules = (("orchestration", "cloudformation:DeleteStack"),)
def delete(self, request, stack_id):
api.heat.stack_delete(request, stack_id)
def allowed(self, request, stack):
if stack is not None:
return stack.stack_status != 'DELETE_COMPLETE'
return True
class StacksUpdateRow(tables.Row):
ajax = True
def can_be_selected(self, datum):
return datum.stack_status != 'DELETE_COMPLETE'
def get_data(self, request, stack_id):
try:
return api.heat.stack_get(request, stack_id)
except exc.HTTPNotFound:
# returning 404 to the ajax call removes the
# row from the table on the ui
raise Http404
except Exception as e:
messages.error(request, e)
class StacksTable(tables.DataTable):
STATUS_CHOICES = (
("Complete", True),
("Failed", False),
)
name = tables.Column("stack_name",
verbose_name=_("Stack Name"),
link="horizon:project:stacks:detail",)
created = tables.Column("creation_time",
verbose_name=_("Created"),
filters=(filters.parse_isotime,
filters.timesince_or_never))
updated = tables.Column("updated_time",
verbose_name=_("Updated"),
filters=(filters.parse_isotime,
filters.timesince_or_never))
status = tables.Column("status",
filters=(title, filters.replace_underscores),
verbose_name=_("Status"),
status=True,
status_choices=STATUS_CHOICES)
def get_object_display(self, stack):
return stack.stack_name
class Meta:
name = "stacks"
verbose_name = _("Stacks")
pagination_param = 'stack_marker'
status_columns = ["status", ]
row_class = StacksUpdateRow
table_actions = (LaunchStack, DeleteStack,)
row_actions = (DeleteStack,
ChangeStackTemplate)
def get_resource_url(obj):
return urlresolvers.reverse('horizon:project:stacks:resource',
args=(obj.stack_id, obj.resource_name))
class EventsTable(tables.DataTable):
logical_resource = tables.Column('resource_name',
verbose_name=_("Stack Resource"),
link=get_resource_url)
physical_resource = tables.Column('physical_resource_id',
verbose_name=_("Resource"),
link=mappings.resource_to_url)
timestamp = tables.Column('event_time',
verbose_name=_("Time Since Event"),
filters=(filters.parse_isotime,
filters.timesince_or_never))
status = tables.Column("resource_status",
filters=(title, filters.replace_underscores),
verbose_name=_("Status"),)
statusreason = tables.Column("resource_status_reason",
verbose_name=_("Status Reason"),)
class Meta:
name = "events"
verbose_name = _("Stack Events")
class ResourcesUpdateRow(tables.Row):
ajax = True
def get_data(self, request, resource_name):
try:
stack = self.table.stack
stack_identifier = '%s/%s' % (stack.stack_name, stack.id)
return api.heat.resource_get(
request, stack_identifier, resource_name)
except exc.HTTPNotFound:
# returning 404 to the ajax call removes the
# row from the table on the ui
raise Http404
except Exception as e:
messages.error(request, e)
class ResourcesTable(tables.DataTable):
STATUS_CHOICES = (
("Create Complete", True),
("Create Failed", False),
)
logical_resource = tables.Column('resource_name',
verbose_name=_("Stack Resource"),
link=get_resource_url)
physical_resource = tables.Column('physical_resource_id',
verbose_name=_("Resource"),
link=mappings.resource_to_url)
resource_type = tables.Column("resource_type",
verbose_name=_("Stack Resource Type"),)
updated_time = tables.Column('updated_time',
verbose_name=_("Date Updated"),
filters=(filters.parse_isotime,
filters.timesince_or_never))
status = tables.Column("resource_status",
filters=(title, filters.replace_underscores),
verbose_name=_("Status"),
status=True,
status_choices=STATUS_CHOICES)
statusreason = tables.Column("resource_status_reason",
verbose_name=_("Status Reason"),)
def __init__(self, request, data=None,
needs_form_wrapper=None, **kwargs):
super(ResourcesTable, self).__init__(
request, data, needs_form_wrapper, **kwargs)
self.stack = kwargs['stack']
def get_object_id(self, datum):
return datum.resource_name
class Meta:
name = "resources"
verbose_name = _("Stack Resources")
status_columns = ["status", ]
row_class = ResourcesUpdateRow
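# A minimal usage sketch (hypothetical view class, not part of this module):
# wiring StacksTable into a Horizon index view. The call signature and return
# shape of api.heat.stacks_list vary between releases, so treat the body of
# get_data() as an assumption.
class ExampleStacksIndexView(tables.DataTableView):
    table_class = StacksTable
    template_name = 'project/stacks/index.html'

    def get_data(self):
        try:
            stacks = api.heat.stacks_list(self.request)
        except Exception as e:
            stacks = []
            messages.error(self.request, e)
        return stacks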
|
|
# Bojan Nikolic <b.nikolic@mrao.cam.ac.uk>
"""Simulate data observed by an interferometer
USEFUL NOTES
---------------------------------------------------------------------------------
From http://casa.nrao.edu/Memos/CoordConvention.pdf :
UVW is a right-handed coordinate system, with W pointing towards the
source, and a baseline convention of :math:`ant2 - ant1` where
:math:`index(ant1) < index(ant2)`. Consider an XYZ Celestial
coordinate system centered at the location of the interferometer, with
:math:`X` towards the East, :math:`Z` towards the NCP and :math:`Y` to
complete a right-handed system. The UVW coordinate system is then
defined by the hour-angle and declination of the phase-reference
direction such that
1. when the direction of observation is the NCP (`ha=0,dec=90`),
the UVW coordinates are aligned with XYZ,
2. V, W and the NCP are always on a Great circle,
3. when W is on the local meridian, U points East
4. when the direction of observation is at zero declination, an
hour-angle of -6 hours makes W point due East.
The :math:`(l,m,n)` coordinates are parallel to :math:`(u,v,w)` such
that :math:`l` increases with Right-Ascension (or increasing longitude
coordinate), :math:`m` increases with Declination, and :math:`n` is
towards the source. With this convention, images will have Right
Ascension increasing from Right to Left, and Declination increasing
from Bottom to Top.
Changes
---------------------------------------------------------------------------------
[150323 - AMS] Correction to rot(), introduced -1 factor in v calculation.
Change to bsl(), to agree with casa convention.
"""
import numpy
from astropy.coordinates import SkyCoord, CartesianRepresentation
from astropy import units
# ---------------------------------------------------------------------------------
def xyz_at_latitude(local_xyz, lat):
"""
Rotate local XYZ coordinates into celestial XYZ coordinates. These
coordinate systems are very similar, with X pointing towards the
geographical east in both cases. However, before the rotation Z
points towards the zenith, whereas afterwards it will point towards
celestial north (parallel to the earth axis).
:param lat: target latitude (radians or astropy quantity)
:param local_xyz: Array of local XYZ coordinates
:returns: Celestial XYZ coordinates
"""
x, y, z = numpy.hsplit(local_xyz, 3)
lat2 = numpy.pi / 2 - lat
y2 = -z * numpy.sin(lat2) + y * numpy.cos(lat2)
z2 = z * numpy.cos(lat2) + y * numpy.sin(lat2)
return numpy.hstack([x, y2, z2])
# ---------------------------------------------------------------------------------
def xyz_to_uvw(xyz, ha, dec):
"""
Rotate :math:`(x,y,z)` positions in earth coordinates to
:math:`(u,v,w)` coordinates relative to astronomical source
position :math:`(ha, dec)`. Can be used for both antenna positions
as well as for baselines.
Hour angle and declination can be given as single values or arrays
of the same length. Angles can be given as radians or astropy
quantities with a valid conversion.
:param xyz: :math:`(x,y,z)` co-ordinates of antennas in array
:param ha: hour angle of phase tracking centre (:math:`ha = ra - lst`)
:param dec: declination of phase tracking centre.
"""
x, y, z = numpy.hsplit(xyz, 3)
# Two rotations:
# 1. by 'ha' along the z axis
# 2. by '90-dec' along the u axis
u = x * numpy.cos(ha) - y * numpy.sin(ha)
v0 = x * numpy.sin(ha) + y * numpy.cos(ha)
w = z * numpy.sin(dec) - v0 * numpy.cos(dec)
v = z * numpy.cos(dec) + v0 * numpy.sin(dec)
return numpy.hstack([u, v, w])
# ---------------------------------------------------------------------------------
def uvw_to_xyz(uvw, ha, dec):
"""
Rotate :math:`(x,y,z)` positions relative to a sky position at
:math:`(ha, dec)` to earth coordinates. Can be used for both
antenna positions as well as for baselines.
Hour angle and declination can be given as single values or arrays
of the same length. Angles can be given as radians or astropy
quantities with a valid conversion.
:param uvw: :math:`(u,v,w)` co-ordinates of antennas in array
:param ha: hour angle of phase tracking centre (:math:`ha = ra - lst`)
:param dec: declination of phase tracking centre
"""
u, v, w = numpy.hsplit(uvw, 3)
# Two rotations:
# 1. by 'dec-90' along the u axis
# 2. by '-ha' along the z axis
v0 = v * numpy.sin(dec) - w * numpy.cos(dec)
z = v * numpy.cos(dec) + w * numpy.sin(dec)
x = u * numpy.cos(ha) + v0 * numpy.sin(ha)
y = -u * numpy.sin(ha) + v0 * numpy.cos(ha)
return numpy.hstack([x, y, z])
# ---------------------------------------------------------------------------------
def baselines(ants_uvw):
"""
Compute baselines in uvw co-ordinate system from
uvw co-ordinate system station positions
:param ants_uvw: `(u,v,w)` co-ordinates of antennas in array
"""
res = []
for i in range(ants_uvw.shape[0]):
for j in range(i + 1, ants_uvw.shape[0]):
res.append(ants_uvw[j] - ants_uvw[i])
basel_uvw = numpy.array(res)
return basel_uvw
# ---------------------------------------------------------------------------------
def baseline_ids(ant_count, ha_count=1):
"""
Returns baseline antenna pairs in the order `baselines` and `xyz_to_baselines` generates them.
    :param ant_count: number of antennas in the array
    :param ha_count: number of hour-angle samples (the pair lists are tiled once per sample)
"""
ant1 = []
ant2 = []
for a1 in range(ant_count):
for a2 in range(a1 + 1, ant_count):
ant1.append(a1)
ant2.append(a2)
return numpy.tile(ant1, ha_count), numpy.tile(ant2, ha_count)
# ---------------------------------------------------------------------------------
def xyz_to_baselines(ants_xyz, ha_range, dec):
"""
Calculate baselines in :math:`(u,v,w)` co-ordinate system
for a range of hour angles (i.e. non-snapshot observation)
to create a uvw sampling distribution
:param ants_xyz: :math:`(x,y,z)` co-ordinates of antennas in array
:param ha_range: list of hour angle values for astronomical source as function of time
:param dec: declination of astronomical source [constant, not :math:`f(t)`]
"""
dist_uvw = numpy.concatenate([baselines(xyz_to_uvw(ants_xyz, hax, dec)) for hax in ha_range])
return dist_uvw
# ---------------------------------------------------------------------------------
def skycoord_to_lmn(pos: SkyCoord, phasecentre: SkyCoord):
"""
Convert astropy sky coordinates into the l,m,n coordinate system
relative to a phase centre.
    The l,m,n system is right-handed, with
* its origin on the sky sphere
* m,n and the celestial north on the same plane
* l,m a tangential plane of the sky sphere
Note that this means that l increases east-wards
"""
# Determine relative sky position
todc = pos.transform_to(phasecentre.skyoffset_frame())
dc = todc.represent_as(CartesianRepresentation)
# Do coordinate transformation - astropy's relative coordinates do
# not quite follow imaging conventions
return dc.y.value, dc.z.value, dc.x.value-1
# ---------------------------------------------------------------------------------
def simulate_point(dist_uvw, l, m):
"""
Simulate visibilities for unit amplitude point source at
direction cosines (l,m) relative to the phase centre.
This includes phase tracking to the centre of the field (hence the minus 1
in the exponent.)
Note that point source is delta function, therefore the
FT relationship becomes an exponential, evaluated at
(uvw.lmn)
:param dist_uvw: :math:`(u,v,w)` distribution of projected baselines (in wavelengths)
:param l: horizontal direction cosine relative to phase tracking centre
    :param m: orthogonal direction cosine relative to phase tracking centre
"""
# vector direction to source
s = numpy.array([l, m, numpy.sqrt(1 - l ** 2 - m ** 2) - 1.0])
# complex valued Visibility data
return numpy.exp(-2j * numpy.pi * numpy.dot(dist_uvw, s))
# ---------------------------------------------------------------------------------
def visibility_rotate(uvw, vis, pc, new_pc):
"""
Rotate phase centre visibilities to the given new phase centre.
:param uvw: :math:`(u,v,w)` distribution of projected baselines (in wavelengths)
:param vis: Input visibilities
    :param pc: Original phase centre (astropy SkyCoord)
    :param new_pc: New phase centre (astropy SkyCoord)
    :returns: pair (uvw, vis) with rotated baselines and rephased visibilities
"""
# Rotate UVW. We do not bother to calculate a proper hour angle
# here, as the reference point would just cancel out between
# uvw_to_xyz and xyz_to_uvw. Note that RA progresses eastwards
# whereas HA progresses westwards, that's why we need to negate.
xyz = uvw_to_xyz(uvw, -pc.ra.to(units.rad).value, pc.dec.to(units.rad).value)
uvw_rotated = xyz_to_uvw(xyz, -new_pc.ra.to(units.rad).value, new_pc.dec.to(units.rad).value)
# Determine phasor
l_p,m_p,n_p = skycoord_to_lmn(pc, new_pc)
phasor = simulate_point(uvw_rotated, l_p, m_p)
return uvw_rotated, vis * phasor
# ---------------------------------------------------------------------------------
def visibility_shift(uvw, vis, dl, dm):
"""
Shift visibilities by the given image-space distance. This is
based on simple FFT laws. It will require kernels to be suitably
shifted as well to work correctly.
:param uvw: :math:`(u,v,w)` distribution of projected baselines (in wavelengths)
:param vis: Input visibilities
:param dl: Horizontal shift distance as directional cosine
:param dm: Vertical shift distance as directional cosine
:returns: New visibilities
"""
s = numpy.array([dl, dm])
return vis * numpy.exp(-2j * numpy.pi * numpy.dot(uvw[:,0:2], s))
# ---------------------------------------------------------------------------------
def uvw_transform(uvw, T):
"""
Transforms UVW baseline coordinates such that the image is
transformed with the given matrix. Will require kernels to be
suitably transformed to work correctly.
Reference: Sault, R. J., L. Staveley-Smith, and W. N. Brouw. "An
approach to interferometric mosaicing." Astronomy and Astrophysics
Supplement Series 120 (1996): 375-384.
:param uvw: :math:`(u,v,w)` distribution of projected baselines (in wavelengths)
:param T: 2x2 matrix for image transformation
:returns: New baseline coordinates
"""
# Calculate transformation matrix (see Sault)
Tt = numpy.linalg.inv(numpy.transpose(T))
# Apply to uv coordinates
uv1 = numpy.dot(uvw[:,0:2], Tt)
# Restack with original w values
return numpy.hstack([uv1, uvw[:,2:3]])
# ---------------------------------------------------------------------------------
def combine_vis(viss, weights, axis=0):
"""Do a weighted linear combination of two visibility sets
:param viss: Input visibility sets
:param weights: Weights for visibility set
:returns: pair (vis, weight) with new weighted visibilities
"""
viss = numpy.array(viss)
weights = numpy.array(weights)
# Do a weighted sum of visibilities and update error terms
vis = numpy.sum(viss * weights, axis=axis)
weight = numpy.sqrt(numpy.sum(weights**2, axis=axis))
# Weight might be zero
non_zero = weight > 0.0
vis[non_zero] /= numpy.sum(weights, axis=axis)[non_zero]
vis[numpy.logical_not(non_zero)] = 0
return vis, weight
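# ---------------------------------------------------------------------------------
# A minimal usage sketch (the antenna layout, latitude and observing parameters
# below are illustrative assumptions, not part of this module):
if __name__ == '__main__':
    # Three antennas on a local east/north grid (metres), rotated to celestial XYZ
    ants_local = numpy.array([[0.0, 0.0, 0.0],
                              [100.0, 0.0, 0.0],
                              [0.0, 100.0, 0.0]])
    ants_xyz = xyz_at_latitude(ants_local, numpy.radians(52.0))
    # uvw sampling over ten hour angles for a source at +45 degrees declination,
    # converted to wavelengths at 21 cm
    ha_range = numpy.linspace(-0.5, 0.5, 10)
    dist_uvw = xyz_to_baselines(ants_xyz, ha_range, numpy.radians(45.0)) / 0.21
    # Visibilities of a unit point source slightly offset from the phase centre
    vis = simulate_point(dist_uvw, 0.001, 0.002)
    print(vis.shape)  # (hour angles * baselines,) == (30,)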
|
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2017-2022 F4PGA Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
""" Output utilities for FASM.
merge_features - Combines multiple FASM SetFasmFeature into one.
merge_and_sort - Groups and sorts FASM lines, useful for non-canonical output.
"""
import enum
from fasm import SetFasmFeature, FasmLine, ValueFormat
def is_only_comment(line):
""" Returns True if line is only a comment. """
return not line.set_feature and not line.annotations and line.comment
def is_only_annotation(line):
""" Returns True if line is only an annotations. """
return not line.set_feature and line.annotations and not line.comment
def is_blank_line(line):
""" Returns True if line is blank. """
return not line.set_feature and not line.annotations and not line.comment
def merge_features(features):
""" Combines features with varying addresses but same feature.
A[0] = 1
A[1] = 1
becomes
A[1:0] = 2'b11
and
A[5] = 1
    A[7] = 1
    becomes
    A[7:0] = 8'b10100000
"""
# Ensure all features are for the same feature
assert len(set(feature.feature for feature in features)) == 1
set_bits = set()
cleared_bits = set()
for feature in features:
start = 0
end = 0
value = 1
if feature.start is None:
assert feature.end is None
else:
start = feature.start
if feature.end is not None:
end = feature.end
else:
end = start
if feature.value is not None:
value = feature.value
for bit in range(start, end + 1):
bit_is_set = ((value >> (bit - start)) & 1) != 0
if bit_is_set:
assert bit not in cleared_bits
set_bits.add(bit)
else:
assert bit not in set_bits
cleared_bits.add(bit)
max_bit = max(set_bits | cleared_bits)
final_value = 0
for bit in set_bits:
final_value |= (1 << bit)
return SetFasmFeature(
feature=features[0].feature,
start=0,
end=max_bit,
value=final_value,
value_format=ValueFormat.VERILOG_BINARY)
class MergeModel(object):
""" Groups and merges features.
Grouping logic:
- Consecutive comments will be grouped.
    - Comment groups will attach to the next non-comment entry.
    - Consecutive annotations will be grouped.
    - Empty lines will be discarded.
    - Features will be grouped by their first feature part.
    - Set-feature lines for the same feature with different addresses will be
    merged.
    If a feature has a comment in its group, it is not eligible for address
merging.
"""
class State(enum.Enum):
""" State of grouper. """
NoGroup = 1
InCommentGroup = 2
InAnnotationGroup = 3
def __init__(self):
self.state = MergeModel.State.NoGroup
self.groups = []
self.current_group = None
def start_comment_group(self, line):
""" Start a new group of comments.
Requires that input line is a comment and not already in a comment
group.
"""
assert self.state != MergeModel.State.InCommentGroup
assert is_only_comment(line)
if self.current_group is not None:
self.groups.append(self.current_group)
self.state = MergeModel.State.InCommentGroup
self.current_group = [line]
def add_to_comment_group(self, line):
assert self.state == MergeModel.State.InCommentGroup
if is_only_comment(line):
self.current_group.append(line)
elif is_only_annotation(line):
self.current_group.append(line)
self.state = MergeModel.State.InAnnotationGroup
else:
if not is_blank_line(line):
self.current_group.append(line)
self.groups.append(self.current_group)
self.state = MergeModel.State.NoGroup
def start_annotation_group(self, line):
assert self.state != MergeModel.State.InAnnotationGroup
assert is_only_annotation(line)
if self.current_group is not None:
self.groups.append(self.current_group)
self.state = MergeModel.State.InAnnotationGroup
self.current_group = [line]
def add_to_annotation_group(self, line):
assert self.state == MergeModel.State.InAnnotationGroup
if is_only_comment(line):
self.start_comment_group(line)
elif is_only_annotation(line):
self.current_group.append(line)
self.state = MergeModel.State.InAnnotationGroup
else:
self.groups.append(self.current_group)
self.current_group = None
self.state = MergeModel.State.NoGroup
self.add_to_model(line)
def add_to_model(self, line):
""" Add a line to the MergeModel.
Will be grouped per MergeModel rules.
This method is stateful, so order of insert matters, per grouping
rules.
"""
if self.state == MergeModel.State.NoGroup:
if is_only_comment(line):
self.start_comment_group(line)
elif is_only_annotation(line):
self.start_annotation_group(line)
else:
if not is_blank_line(line):
self.groups.append([line])
elif self.state == MergeModel.State.InCommentGroup:
self.add_to_comment_group(line)
elif self.state == MergeModel.State.InAnnotationGroup:
self.add_to_annotation_group(line)
else:
assert False
def merge_addresses(self):
""" Merges address features when possible.
Call after all lines have been added to the model.
"""
for group in self.groups:
for line in group:
assert not is_blank_line(line)
def find_eligable_feature(group):
if len(group) > 1:
return None
if group[0].annotations:
return None
if group[0].comment:
return None
return group[0].set_feature
eligable_address_features = {}
non_eligable_groups = []
non_eligable_features = set()
for group in self.groups:
feature = find_eligable_feature(group)
if feature is None:
non_eligable_groups.append(group)
for line in group:
if line.set_feature is not None:
non_eligable_features.add(line.set_feature.feature)
else:
if feature.feature not in eligable_address_features:
eligable_address_features[feature.feature] = []
eligable_address_features[feature.feature].append(feature)
self.groups = non_eligable_groups
for feature_group in eligable_address_features.values():
if feature_group[0].feature in non_eligable_features:
for feature in feature_group:
self.groups.append(
[
FasmLine(
set_feature=feature,
annotations=None,
comment=None)
])
else:
if len(feature_group) > 1:
self.groups.append(
[
FasmLine(
set_feature=merge_features(feature_group),
annotations=None,
comment=None)
])
else:
for feature in feature_group:
self.groups.append(
[
FasmLine(
set_feature=feature,
annotations=None,
comment=None)
])
def output_sorted_lines(self, zero_function=None, sort_key=None):
""" Yields sorted FasmLine's.
        zero_function - Function that takes a feature string, and returns True
                        if that feature has no bits set. This allows tiles with
                        only zero features to be dropped.
        sort_key - Function that takes a string argument and returns a key
                   for the first feature part, e.g. ('A', 2, 1) for A_X2Y1.
"""
feature_groups = {}
non_feature_groups = []
for group in self.groups:
is_feature_group = False
for line in group:
if line.set_feature:
group_id = line.set_feature.feature.split('.')[0]
if group_id not in feature_groups:
feature_groups[group_id] = []
feature_groups[group_id].append(group)
is_feature_group = True
break
if not is_feature_group:
non_feature_groups.append(group)
output_groups = []
def feature_group_key(group):
for line in group:
if line.set_feature:
assert line.set_feature.feature is not None
return line.set_feature.feature
assert False
if sort_key is None:
group_ids = sorted(feature_groups.keys())
else:
group_ids = sorted(feature_groups.keys(), key=sort_key)
for group_id in group_ids:
flattened_group = []
for group in sorted(feature_groups[group_id],
key=feature_group_key):
flattened_group.extend(group)
if zero_function is not None:
if all(zero_function(line.set_feature.feature)
for line in flattened_group
if line.set_feature):
continue
output_groups.append(flattened_group)
output_groups.extend(non_feature_groups)
for idx in range(len(output_groups)):
for line in output_groups[idx]:
yield line
if idx != len(output_groups) - 1:
yield FasmLine(
set_feature=None, annotations=None, comment=None)
def merge_and_sort(model, zero_function=None, sort_key=None):
""" Given a model, groups and sorts entries.
    zero_function - Function that takes a feature string, and returns True
                    if that feature has no bits set. This allows tiles with only
zero features to be dropped.
sort_key - Function that takes a string argument and returns a key
for the first feature part. Example:
A_X2Y1, A_X2Y100, A_X2Y2
could be sorted as
A_X2Y1, A_X2Y2, A_X2Y100
    if the key function returns ('A', 2, 1) for A_X2Y1.
Yields FasmLine's.
Grouping logic:
- Consecutive comments will be grouped.
    - Comment groups will attach to the next non-comment entry.
    - Consecutive annotations will be grouped.
    - Empty lines will be discarded.
    - Features will be grouped by their first feature part.
    - Set-feature lines for the same feature with different addresses will be
merged.
Sorting logic:
- Features will appear before raw annotations.
"""
merged_model = MergeModel()
for line in model:
merged_model.add_to_model(line)
# Add the last processed annotation or comment blocks to the model
if merged_model.state != MergeModel.State.NoGroup:
if merged_model.current_group is not None:
merged_model.groups.append(merged_model.current_group)
merged_model.merge_addresses()
return merged_model.output_sorted_lines(
zero_function=zero_function, sort_key=sort_key)
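# A minimal usage sketch (assumes the top-level ``fasm`` package's
# ``parse_fasm_string`` and ``fasm_tuple_to_string`` helpers; the feature names
# below are illustrative):
if __name__ == '__main__':
    import fasm
    source = '\n'.join([
        'TILE_A.FEATURE[0] = 1',
        'TILE_A.FEATURE[1] = 1',
        'TILE_B.OTHER_FEATURE',
    ]) + '\n'
    merged = merge_and_sort(fasm.parse_fasm_string(source))
    # The two TILE_A.FEATURE lines should come out merged as a single
    # TILE_A.FEATURE[1:0] = 2'b11 entry, grouped separately from TILE_B.
    print(fasm.fasm_tuple_to_string(merged))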
|
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import state
class undefined_subtlv(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/ipv6-reachability/prefixes/prefixes/undefined-subtlvs/undefined-subtlv. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Sub-TLVs that are not defined in the model or not recognised by
system.
"""
__slots__ = ("_path_helper", "_extmethods", "__type", "__state")
_yang_name = "undefined-subtlv"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__type = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=False,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"ipv6-reachability",
"prefixes",
"prefixes",
"undefined-subtlvs",
"undefined-subtlv",
]
def _get_type(self):
"""
Getter method for type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv6_reachability/prefixes/prefixes/undefined_subtlvs/undefined_subtlv/type (leafref)
YANG Description: A reference to a subTLV
"""
return self.__type
def _set_type(self, v, load=False):
"""
Setter method for type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv6_reachability/prefixes/prefixes/undefined_subtlvs/undefined_subtlv/type (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_type() directly.
YANG Description: A reference to a subTLV
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError(
"Cannot set keys directly when" + " within an instantiated list"
)
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
is_leaf=True,
yang_name="type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """type must be of a type compatible with leafref""",
"defined-type": "leafref",
"generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)""",
}
)
self.__type = t
if hasattr(self, "_set"):
self._set()
def _unset_type(self):
self.__type = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=False,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv6_reachability/prefixes/prefixes/undefined_subtlvs/undefined_subtlv/state (container)
YANG Description: State parameters of the undefined sub-TLV.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv6_reachability/prefixes/prefixes/undefined_subtlvs/undefined_subtlv/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of the undefined sub-TLV.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
type = __builtin__.property(_get_type)
state = __builtin__.property(_get_state)
_pyangbind_elements = OrderedDict([("type", type), ("state", state)])
from . import state
class undefined_subtlv(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/ipv6-reachability/prefixes/prefixes/undefined-subtlvs/undefined-subtlv. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Sub-TLVs that are not defined in the model or not recognised by
system.
"""
__slots__ = ("_path_helper", "_extmethods", "__type", "__state")
_yang_name = "undefined-subtlv"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__type = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=False,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"ipv6-reachability",
"prefixes",
"prefixes",
"undefined-subtlvs",
"undefined-subtlv",
]
def _get_type(self):
"""
Getter method for type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv6_reachability/prefixes/prefixes/undefined_subtlvs/undefined_subtlv/type (leafref)
YANG Description: A reference to a subTLV
"""
return self.__type
def _set_type(self, v, load=False):
"""
Setter method for type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv6_reachability/prefixes/prefixes/undefined_subtlvs/undefined_subtlv/type (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_type() directly.
YANG Description: A reference to a subTLV
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError(
"Cannot set keys directly when" + " within an instantiated list"
)
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
is_leaf=True,
yang_name="type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """type must be of a type compatible with leafref""",
"defined-type": "leafref",
"generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)""",
}
)
self.__type = t
if hasattr(self, "_set"):
self._set()
def _unset_type(self):
self.__type = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=False,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv6_reachability/prefixes/prefixes/undefined_subtlvs/undefined_subtlv/state (container)
YANG Description: State parameters of the undefined sub-TLV.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv6_reachability/prefixes/prefixes/undefined_subtlvs/undefined_subtlv/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of the undefined sub-TLV.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
type = __builtin__.property(_get_type)
state = __builtin__.property(_get_state)
_pyangbind_elements = OrderedDict([("type", type), ("state", state)])
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Estimator for Dynamic RNNs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
from tensorflow.contrib import framework as contrib_framework
from tensorflow.contrib import layers
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import rnn
from tensorflow.python.ops import rnn_cell
from tensorflow.python.training import momentum as momentum_opt
from tensorflow.python.training import optimizer as opt
# TODO(jamieas): move `_padding_mask` to array_ops.
def _padding_mask(sequence_lengths, padded_length):
"""Creates a mask used for calculating losses with padded input.
Args:
sequence_lengths: a `Tensor` of shape `[batch_size]` containing the unpadded
length of each sequence.
padded_length: a scalar `Tensor` indicating the length of the sequences
after padding
Returns:
A boolean `Tensor` M of shape `[batch_size, padded_length]` where
    `M[i, j] == True` when `sequence_lengths[i] > j`.
"""
range_tensor = math_ops.range(padded_length)
return math_ops.less(array_ops.expand_dims(range_tensor, 0),
array_ops.expand_dims(sequence_lengths, 1))
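# Example (illustrative): for sequence_lengths = [1, 3] and padded_length = 3,
# _padding_mask returns
#   [[ True, False, False],
#    [ True,  True,  True]]
# so only the first step of the first sequence contributes to the loss.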
def _mask_activations_and_targets(activations, targets, sequence_lengths):
"""Remove entries outside `sequence_lengths` and returned flattened results.
Args:
activations: output of the RNN, shape `[batch_size, padded_length, k]`.
targets: target values, shape `[batch_size, padded_length]`.
sequence_lengths: a `Tensor` of shape `[batch_size]` with the unpadded
length of each sequence. If `None`, then each sequence is unpadded.
Returns:
activations_masked: `logit` values with those beyond `sequence_lengths`
removed for each batch. Batches are then concatenated. Shape
`[tf.sum(sequence_lengths), k]` if `sequence_lengths` is not `None` and
shape `[batch_size * padded_length, k]` otherwise.
targets_masked: target values after removing unneeded entries. Shape
`[tf.sum(sequence_lengths)]` if `sequence_lengths` is not `None` and shape
`[batch_size * padded_length]` otherwise.
"""
with ops.name_scope('mask_activations_and_targets',
values=[activations, targets, sequence_lengths]):
targets_shape = array_ops.shape(targets)
batch_size = targets_shape[0]
padded_length = targets_shape[1]
if sequence_lengths is None:
flattened_dimension = padded_length * batch_size
activations_masked = array_ops.reshape(activations,
[flattened_dimension, -1])
targets_masked = array_ops.reshape(targets, [flattened_dimension])
else:
mask = _padding_mask(sequence_lengths, padded_length)
activations_masked = array_ops.boolean_mask(activations, mask)
targets_masked = array_ops.boolean_mask(targets, mask)
return activations_masked, targets_masked
def _select_last_activations(activations, sequence_lengths):
"""Selects the nth set of activations for each n in `sequence_length`.
Reuturns a `Tensor` of shape `[batch_size, k]`. If `sequence_length` is not
`None`, then `output[i, :] = activations[i, sequence_length[i], :]`. If
`sequence_length` is `None`, then `output[i, :] = activations[i, -1, :]`.
Args:
activations: a `Tensor` with shape `[batch_size, padded_length, k]`.
sequence_lengths: a `Tensor` with shape `[batch_size]` or `None`.
Returns:
A `Tensor` of shape `[batch_size, k]`.
"""
with ops.name_scope('select_last_activations',
values=[activations, sequence_lengths]):
activations_shape = array_ops.shape(activations)
batch_size = activations_shape[0]
padded_length = activations_shape[1]
num_label_columns = activations_shape[2]
if sequence_lengths is None:
sequence_lengths = padded_length
reshaped_activations = array_ops.reshape(activations,
[-1, num_label_columns])
indices = math_ops.range(batch_size) * padded_length + sequence_lengths - 1
last_activations = array_ops.gather(reshaped_activations, indices)
last_activations.set_shape(
[activations.get_shape()[0], activations.get_shape()[2]])
return last_activations
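# Example (illustrative): with activations of shape [2, 3, k] and
# sequence_lengths = [1, 3], the gather indices are [0*3 + 1 - 1, 1*3 + 3 - 1]
# = [0, 5], so activations[0, 0, :] and activations[1, 2, :] are selected.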
@six.add_metaclass(abc.ABCMeta)
class _DynamicRNNEstimator(estimator.BaseEstimator):
"""Estimator that uses a dynamic RNN for sequences."""
def __init__(self,
cell,
target_column,
optimizer,
model_dir=None,
config=None,
gradient_clipping_norm=None,
inputs_key='inputs',
sequence_length_key='sequence_length',
initial_state_key='initial_state',
dtype=None,
parallel_iterations=None,
swap_memory=False,
name=None):
"""Initialize `DynamicRNNEstimator`.
Args:
cell: an initialized `RNNCell` to be used in the RNN.
target_column: an initialized `TargetColumn`, used to calculate loss and
metrics.
optimizer: an initialized `tensorflow.Optimizer`.
model_dir: The directory in which to save and restore the model graph,
parameters, etc.
config: A `RunConfig` instance.
gradient_clipping_norm: parameter used for gradient clipping. If `None`,
then no clipping is performed.
inputs_key: the key for input values in the features dict passed to
`fit()`.
sequence_length_key: the key for the sequence length tensor in the
features dict passed to `fit()`.
      initial_state_key: the key for the initial state values in the features
        dict passed to `fit()`.
      dtype: Parameter passed to `dynamic_rnn`. The dtype of the state and
        output returned by `RNNCell`.
      parallel_iterations: Parameter passed to `dynamic_rnn`. The number of
        iterations to run in parallel.
      swap_memory: Parameter passed to `dynamic_rnn`. Transparently swap the
        tensors produced in forward inference but needed for back prop from GPU
        to CPU.
name: Optional name for the `Estimator`.
"""
super(_DynamicRNNEstimator, self).__init__(
model_dir=model_dir, config=config)
self._cell = cell
self._target_column = target_column
self._optimizer = optimizer
self._gradient_clipping_norm = gradient_clipping_norm
self._inputs_key = inputs_key
self._sequence_length_key = sequence_length_key
self._initial_state_key = initial_state_key
self._dtype = dtype or dtypes.float32
self._parallel_iterations = parallel_iterations
self._swap_memory = swap_memory
self._name = name or 'DynamicRnnEstimator'
def _construct_rnn(self, features):
"""Apply an RNN to `features`.
The `features` dict must contain `self._inputs_key`, and the corresponding
input should be a `Tensor` of shape `[batch_size, padded_length, k]`
where `k` is the dimension of the input for each element of a sequence.
`activations` has shape `[batch_size, sequence_length, n]` where `n` is
`self._target_column.num_label_columns`. In the case of a multiclass
classifier, `n` is the number of classes.
`final_state` has shape determined by `self._cell` and its dtype must match
`self._dtype`.
Args:
features: a `dict` containing the input for the RNN and (optionally) an
initial state and information about sequence lengths.
Returns:
activations: the output of the RNN, projected to the appropriate number of
dimensions.
final_state: the final state output by the RNN.
Raises:
KeyError: if `features` does not contain `self._inputs_key`.
"""
with ops.name_scope('RNN'):
inputs = features.get(self._inputs_key)
if inputs is None:
raise KeyError('features must contain the key {}'.format(
self._inputs_key))
if inputs.dtype != self._dtype:
inputs = math_ops.cast(inputs, self._dtype)
initial_state = features.get(self._initial_state_key)
rnn_outputs, final_state = rnn.dynamic_rnn(
cell=self._cell,
inputs=inputs,
initial_state=initial_state,
dtype=self._dtype,
parallel_iterations=self._parallel_iterations,
swap_memory=self._swap_memory,
time_major=False)
activations = layers.fully_connected(
inputs=rnn_outputs,
num_outputs=self._target_column.num_label_columns,
activation_fn=None,
trainable=False)
return activations, final_state
@abc.abstractmethod
def _activations_to_loss(self, features, activations, targets):
"""Map `activations` and `targets` to a loss `Tensor`.
`activations` has shape `[batch_size, padded_length,
self._target_column.num_label_columns]`. It is the output of
`_construct_rnn`.
`targets` is a `Tensor` of shape `[batch_size, padded_length]`. The type
of `targets` depends on what type of `TargetColumn` is being used.
Args:
features: a `dict` containing the input and (optionally) sequence length
information and initial state. This is the same `features` passed to
`_construct_rnn`.
activations: a `Tensor` of activations representing the output of the RNN.
targets: a `Tensor` of target values.
Returns:
loss: A scalar `Tensor` representing the aggregated loss for the batch.
"""
raise NotImplementedError()
@abc.abstractmethod
def _activations_to_predictions(self, features, activations):
"""Map `activations` to predictions.
`activations` has shape [batch_size, time, num_labels]. `TargetColumn`s
require shape [n, num_labels]. `activations` is flattened before being
converted to labels. Afterwards, its shape is reconstituted.
Args:
features: a `dict` containing the input and (optionally) sequence length
information and initial state.
activations: logit values returned by `_construct_rnn`.
Returns:
A set of predictions. The type of prediction is dependent on
`_target_column`.
"""
raise NotImplementedError()
def _process_gradients(self, gradients_vars):
"""Process gradients (e.g. clipping) before applying them to weights."""
with ops.name_scope('process_gradients'):
gradients, variables = zip(*gradients_vars)
if self._gradient_clipping_norm is not None:
gradients, _ = clip_ops.clip_by_global_norm(
gradients, self._gradient_clipping_norm)
return zip(gradients, variables)
def _loss_to_train_op(self, loss):
"""Map `loss` to a training op."""
with ops.name_scope('loss_to_train_op'):
trainable_variables = ops.get_default_graph().get_collection(
ops.GraphKeys.TRAINABLE_VARIABLES)
global_step = contrib_framework.get_global_step()
gradients = self._optimizer.compute_gradients(
loss=loss, var_list=trainable_variables)
processed_gradients = self._process_gradients(gradients)
return self._optimizer.apply_gradients(
processed_gradients, global_step=global_step)
@abc.abstractmethod
def _activations_to_eval_ops(self, features, activations, targets, metrics):
"""Map `activations` to eval operations.
`activations` has shape [batch_size, time, num_labels]. `TargetColumn`s
require shape [n, num_labels]. `activations` is flattened before being
converted to labels. Afterwards, its shape is reconstituted.
Args:
features: a `dict` containing the input and (optionally) sequence length
information and initial state.
activations: logit values returned by `_construct_rnn`.
targets: a `Tensor` of target values.
metrics: a list of `Metric`s to evaluate. Possibly `None`.
Returns:
A dict of named eval ops.
"""
raise NotImplementedError()
def _get_train_ops(self, features, targets):
with ops.name_scope(self._name):
if isinstance(features, ops.Tensor):
features = {self._inputs_key: features}
activations, _ = self._construct_rnn(features)
loss = self._activations_to_loss(features, activations, targets)
train_op = self._loss_to_train_op(loss)
return train_op, loss
def _get_eval_ops(self, features, targets, metrics):
with ops.name_scope(self._name):
if isinstance(features, ops.Tensor):
features = {self._inputs_key: features}
activations, _ = self._construct_rnn(features)
return self._activations_to_eval_ops(features, activations, targets,
metrics)
def _get_predict_ops(self, features):
with ops.name_scope(self._name):
if isinstance(features, ops.Tensor):
features = {self._inputs_key: features}
activations, state = self._construct_rnn(features)
predictions = self._activations_to_predictions(features, activations)
return {'predictions': predictions, 'state': state}
class _MultiValueRNNEstimator(_DynamicRNNEstimator):
"""An `Estimator` that maps sequences of inputs to sequences of outputs."""
def _activations_to_loss(self, features, activations, targets):
sequence_length = features.get(self._sequence_length_key)
# Mask the activations and targets past `sequence_length`. Note that the
# `Tensor`s returned by `_mask_activations_and_targets` are flattened.
with ops.name_scope('activations_to_loss'):
activations_masked, targets_masked = _mask_activations_and_targets(
activations, targets, sequence_length)
return self._target_column.loss(activations_masked, targets_masked,
features)
def _activations_to_predictions(self, unused_features, activations):
with ops.name_scope('activations_to_predictions'):
activations_shape = array_ops.shape(activations)
flattened_activations = array_ops.reshape(activations,
[-1, activations_shape[2]])
predictions = self._target_column.activations_to_predictions(
flattened_activations, proba=False)
reshaped_predictions = array_ops.reshape(
predictions, [activations_shape[0], activations_shape[1], -1])
return array_ops.squeeze(reshaped_predictions, [2])
def _activations_to_eval_ops(self, features, activations, targets, metrics):
with ops.name_scope('activations_to_eval_ops'):
activations_masked, targets_masked = _mask_activations_and_targets(
activations, targets, features.get(self._sequence_length_key))
return self._target_column.get_eval_ops(features=features,
logits=activations_masked,
targets=targets_masked,
metrics=metrics)
class _SingleValueRNNEstimator(_DynamicRNNEstimator):
"""An `Estimator` that maps sequences of inputs to single outputs."""
def _activations_to_loss(self, features, activations, targets):
with ops.name_scope('activations_to_loss'):
sequence_lengths = features.get(self._sequence_length_key)
last_activations = _select_last_activations(activations, sequence_lengths)
return self._target_column.loss(last_activations, targets, features)
def _activations_to_predictions(self, features, activations):
with ops.name_scope('activations_to_predictions'):
sequence_lengths = features.get(self._sequence_length_key)
last_activations = _select_last_activations(activations, sequence_lengths)
return self._target_column.activations_to_predictions(
last_activations, proba=False)
def _activations_to_eval_ops(self, features, activations, targets, metrics):
with ops.name_scope('activations_to_eval_ops'):
sequence_lengths = features.get(self._sequence_length_key)
last_activations = _select_last_activations(activations, sequence_lengths)
return self._target_column.get_eval_ops(features=features,
logits=last_activations,
targets=targets,
metrics=metrics)
def _get_optimizer(optimizer_type, learning_rate, momentum):
"""Constructs and returns an `Optimizer`.
Args:
optimizer_type: either a string identifying the `Optimizer` type, or a
subclass of `Optimizer`.
learning_rate: the learning rate used to initialize the `Optimizer`.
momentum: used only when `optimizer_type` is 'Momentum'.
Returns:
An initialized `Optimizer`.
Raises:
ValueError: `optimizer_type` is an invalid optimizer name.
TypeError: `optimizer_type` is not a string or a subclass of `Optimizer`.
"""
if isinstance(optimizer_type, str):
optimizer_type = layers.OPTIMIZER_CLS_NAMES.get(optimizer_type)
if optimizer_type is None:
raise ValueError('optimizer must be one of {}; got "{}".'.format(
list(layers.OPTIMIZER_CLS_NAMES.keys()), optimizer_type))
if not issubclass(optimizer_type, opt.Optimizer):
raise TypeError(
'optimizer_type must be a subclass of Optimizer or one of {}'.format(
            list(layers.OPTIMIZER_CLS_NAMES.keys())))
if optimizer_type == momentum_opt.MomentumOptimizer:
return optimizer_type(learning_rate, momentum)
return optimizer_type(learning_rate)
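# Illustrative sketch of `_get_optimizer` usage; the names 'SGD' and 'Momentum'
# are assumed to be registered in layers.OPTIMIZER_CLS_NAMES:
#   sgd = _get_optimizer('SGD', learning_rate=0.1, momentum=None)
#   mom = _get_optimizer('Momentum', learning_rate=0.1, momentum=0.9)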
_CELL_TYPES = {'basic_rnn': rnn_cell.BasicRNNCell,
'lstm': rnn_cell.LSTMCell,
'gru': rnn_cell.GRUCell,}
def _get_rnn_cell(cell_type, num_units, num_layers):
"""Constructs and return an `RNNCell`.
Args:
cell_type: either a string identifying the `RNNCell` type, or a subclass of
`RNNCell`.
num_units: the number of units in the `RNNCell`.
num_layers: the number of layers in the RNN.
Returns:
An initialized `RNNCell`.
Raises:
ValueError: `cell_type` is an invalid `RNNCell` name.
TypeError: `cell_type` is not a string or a subclass of `RNNCell`.
"""
if isinstance(cell_type, str):
cell_type = _CELL_TYPES.get(cell_type)
if cell_type is None:
raise ValueError('The supported cell types are {}; got {}'.format(
list(_CELL_TYPES.keys()), cell_type))
if not issubclass(cell_type, rnn_cell.RNNCell):
raise TypeError(
'cell_type must be a subclass of RNNCell or one of {}.'.format(
list(_CELL_TYPES.keys())))
cell = cell_type(num_units=num_units)
if num_layers > 1:
cell = rnn_cell.MultiRNNCell(
[cell] * num_layers, state_is_tuple=True)
return cell
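# Illustrative sketch of `_get_rnn_cell` usage: a single layer returns the bare
# cell, while `num_layers > 1` wraps copies of it in a `MultiRNNCell`.
#   lstm = _get_rnn_cell('lstm', num_units=32, num_layers=1)   # LSTMCell
#   stack = _get_rnn_cell('gru', num_units=32, num_layers=2)   # MultiRNNCell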
def multi_value_rnn_regressor(num_units,
cell_type='basic_rnn',
cell_dtype=dtypes.float32,
num_rnn_layers=1,
optimizer_type='SGD',
learning_rate=0.1,
momentum=None,
gradient_clipping_norm=10.0,
model_dir=None,
config=None):
"""Creates a RNN `Estimator` that predicts sequences of values.
Args:
num_units: the size of the RNN cells.
    cell_type: subclass of `RNNCell` or one of 'basic_rnn', 'lstm' or 'gru'.
cell_dtype: the dtype of the state and output for the given `cell_type`.
num_rnn_layers: number of RNN layers.
optimizer_type: the type of optimizer to use. Either a subclass of
`Optimizer` or a string.
learning_rate: learning rate.
momentum: momentum value. Only used if `optimizer_type` is 'Momentum'.
gradient_clipping_norm: parameter used for gradient clipping. If `None`,
then no clipping is performed.
    model_dir: the directory in which to save and restore the model graph,
      parameters, etc.
config: A `RunConfig` instance.
Returns:
An initialized instance of `_MultiValueRNNEstimator`.
"""
optimizer = _get_optimizer(optimizer_type, learning_rate, momentum)
cell = _get_rnn_cell(cell_type, num_units, num_rnn_layers)
target_column = layers.regression_target()
return _MultiValueRNNEstimator(cell,
target_column,
optimizer,
model_dir,
config,
gradient_clipping_norm,
dtype=cell_dtype)
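# Illustrative usage sketch; `x` and `y` are hypothetical stand-ins for a dict
# of padded input sequences (optionally including a sequence-length feature)
# and per-step regression targets:
#   estimator = multi_value_rnn_regressor(num_units=16, cell_type='lstm')
#   estimator.fit(x=x, y=y, steps=100)
#   predictions = estimator.predict(x=x)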
def multi_value_rnn_classifier(num_classes,
num_units,
cell_type='basic_rnn',
cell_dtype=dtypes.float32,
num_rnn_layers=1,
optimizer_type='SGD',
learning_rate=0.1,
momentum=None,
gradient_clipping_norm=10.0,
model_dir=None,
config=None):
"""Creates a RNN `Estimator` that predicts sequences of labels.
Args:
num_classes: the number of classes for categorization.
num_units: the size of the RNN cells.
    cell_type: subclass of `RNNCell` or one of 'basic_rnn', 'lstm' or 'gru'.
cell_dtype: the dtype of the state and output for the given `cell_type`.
num_rnn_layers: number of RNN layers.
optimizer_type: the type of optimizer to use. Either a subclass of
`Optimizer` or a string.
learning_rate: learning rate.
momentum: momentum value. Only used if `optimizer_type` is 'Momentum'.
gradient_clipping_norm: parameter used for gradient clipping. If `None`,
then no clipping is performed.
    model_dir: the directory in which to save and restore the model graph,
      parameters, etc.
config: A `RunConfig` instance.
Returns:
An initialized instance of `_MultiValueRNNEstimator`.
"""
optimizer = _get_optimizer(optimizer_type, learning_rate, momentum)
cell = _get_rnn_cell(cell_type, num_units, num_rnn_layers)
target_column = layers.multi_class_target(n_classes=num_classes)
return _MultiValueRNNEstimator(cell,
target_column,
optimizer,
model_dir,
config,
gradient_clipping_norm,
dtype=cell_dtype)
def single_value_rnn_regressor(num_units,
cell_type='basic_rnn',
cell_dtype=dtypes.float32,
num_rnn_layers=1,
optimizer_type='SGD',
learning_rate=0.1,
momentum=None,
gradient_clipping_norm=10.0,
model_dir=None,
config=None):
"""Create a RNN `Estimator` that predicts single values.
Args:
num_units: the size of the RNN cells.
    cell_type: subclass of `RNNCell` or one of 'basic_rnn', 'lstm' or 'gru'.
cell_dtype: the dtype of the state and output for the given `cell_type`.
num_rnn_layers: number of RNN layers.
optimizer_type: the type of optimizer to use. Either a subclass of
`Optimizer` or a string.
learning_rate: learning rate.
momentum: momentum value. Only used if `optimizer_type` is 'Momentum'.
gradient_clipping_norm: parameter used for gradient clipping. If `None`,
then no clipping is performed.
    model_dir: the directory in which to save and restore the model graph,
      parameters, etc.
config: A `RunConfig` instance.
Returns:
    An initialized instance of `_SingleValueRNNEstimator`.
"""
optimizer = _get_optimizer(optimizer_type, learning_rate, momentum)
cell = _get_rnn_cell(cell_type, num_units, num_rnn_layers)
target_column = layers.regression_target()
return _SingleValueRNNEstimator(cell,
target_column,
optimizer,
model_dir,
config,
gradient_clipping_norm,
dtype=cell_dtype)
def single_value_rnn_classifier(num_classes,
num_units,
cell_type='basic_rnn',
cell_dtype=dtypes.float32,
num_rnn_layers=1,
optimizer_type='SGD',
learning_rate=0.1,
momentum=None,
gradient_clipping_norm=10.0,
model_dir=None,
config=None):
"""Creates a RNN `Estimator` that predicts single labels.
Args:
num_classes: the number of classes for categorization.
num_units: the size of the RNN cells.
    cell_type: subclass of `RNNCell` or one of 'basic_rnn', 'lstm' or 'gru'.
cell_dtype: the dtype of the state and output for the given `cell_type`.
num_rnn_layers: number of RNN layers.
optimizer_type: the type of optimizer to use. Either a subclass of
`Optimizer` or a string.
learning_rate: learning rate.
momentum: momentum value. Only used if `optimizer_type` is 'Momentum'.
gradient_clipping_norm: parameter used for gradient clipping. If `None`,
then no clipping is performed.
    model_dir: the directory in which to save and restore the model graph,
      parameters, etc.
config: A `RunConfig` instance.
Returns:
    An initialized instance of `_SingleValueRNNEstimator`.
"""
optimizer = _get_optimizer(optimizer_type, learning_rate, momentum)
cell = _get_rnn_cell(cell_type, num_units, num_rnn_layers)
target_column = layers.multi_class_target(n_classes=num_classes)
return _SingleValueRNNEstimator(cell,
target_column,
optimizer,
model_dir,
config,
gradient_clipping_norm,
dtype=cell_dtype)
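# Illustrative usage sketch; `x` and `labels` are hypothetical names, with one
# class label per input sequence rather than per time step:
#   estimator = single_value_rnn_classifier(num_classes=3, num_units=16,
#                                           cell_type='gru')
#   estimator.fit(x=x, y=labels, steps=100)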
|
|
# Copyright (c) 2015 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import binascii
import testtools
from kmip.core import enums
from kmip.pie import objects
class TestPrivateKey(testtools.TestCase):
"""
Test suite for PrivateKey.
"""
def setUp(self):
super(TestPrivateKey, self).setUp()
# Key values taken from Sections 8.2 and 13.4 of the KMIP 1.1
# testing documentation.
self.bytes_1024 = (
b'\x30\x82\x02\x76\x02\x01\x00\x30\x0D\x06\x09\x2A\x86\x48\x86\xF7'
b'\x0D\x01\x01\x01\x05\x00\x04\x82\x02\x60\x30\x82\x02\x5C\x02\x01'
b'\x00\x02\x81\x81\x00\x93\x04\x51\xC9\xEC\xD9\x4F\x5B\xB9\xDA\x17'
b'\xDD\x09\x38\x1B\xD2\x3B\xE4\x3E\xCA\x8C\x75\x39\xF3\x01\xFC\x8A'
b'\x8C\xD5\xD5\x27\x4C\x3E\x76\x99\xDB\xDC\x71\x1C\x97\xA7\xAA\x91'
b'\xE2\xC5\x0A\x82\xBD\x0B\x10\x34\xF0\xDF\x49\x3D\xEC\x16\x36\x24'
b'\x27\xE5\x8A\xCC\xE7\xF6\xCE\x0F\x9B\xCC\x61\x7B\xBD\x8C\x90\xD0'
b'\x09\x4A\x27\x03\xBA\x0D\x09\xEB\x19\xD1\x00\x5F\x2F\xB2\x65\x52'
b'\x6A\xAC\x75\xAF\x32\xF8\xBC\x78\x2C\xDE\xD2\xA5\x7F\x81\x1E\x03'
b'\xEA\xF6\x7A\x94\x4D\xE5\xE7\x84\x13\xDC\xA8\xF2\x32\xD0\x74\xE6'
b'\xDC\xEA\x4C\xEC\x9F\x02\x03\x01\x00\x01\x02\x81\x80\x0B\x6A\x7D'
b'\x73\x61\x99\xEA\x48\xA4\x20\xE4\x53\x7C\xA0\xC7\xC0\x46\x78\x4D'
b'\xCB\xEA\xA6\x3B\xAE\xBC\x0B\xC1\x32\x78\x74\x49\xCD\xE8\xD7\xCA'
b'\xD0\xC0\xC8\x63\xC0\xFE\xFB\x06\xC3\x06\x2B\xEF\xC5\x00\x33\xEC'
b'\xF8\x7B\x4E\x33\xA9\xBE\x7B\xCB\xC8\xF1\x51\x1A\xE2\x15\xE8\x0D'
b'\xEB\x5D\x8A\xF2\xBD\x31\x31\x9D\x78\x21\x19\x66\x40\x93\x5A\x0C'
b'\xD6\x7C\x94\x59\x95\x79\xF2\x10\x0D\x65\xE0\x38\x83\x1F\xDA\xFB'
b'\x0D\xBE\x2B\xBD\xAC\x00\xA6\x96\xE6\x7E\x75\x63\x50\xE1\xC9\x9A'
b'\xCE\x11\xA3\x6D\xAB\xAC\x3E\xD3\xE7\x30\x96\x00\x59\x02\x41\x00'
b'\xDD\xF6\x72\xFB\xCC\x5B\xDA\x3D\x73\xAF\xFC\x4E\x79\x1E\x0C\x03'
b'\x39\x02\x24\x40\x5D\x69\xCC\xAA\xBC\x74\x9F\xAA\x0D\xCD\x4C\x25'
b'\x83\xC7\x1D\xDE\x89\x41\xA7\xB9\xAA\x03\x0F\x52\xEF\x14\x51\x46'
b'\x6C\x07\x4D\x4D\x33\x8F\xE6\x77\x89\x2A\xCD\x9E\x10\xFD\x35\xBD'
b'\x02\x41\x00\xA9\x8F\xBC\x3E\xD6\xB4\xC6\xF8\x60\xF9\x71\x65\xAC'
b'\x2F\x7B\xB6\xF2\xE2\xCB\x19\x2A\x9A\xBD\x49\x79\x5B\xE5\xBC\xF3'
b'\x7D\x8E\xE6\x9A\x6E\x16\x9C\x24\xE5\xC3\x2E\x4E\x7F\xA3\x32\x65'
b'\x46\x14\x07\xF9\x52\xBA\x49\xE2\x04\x81\x8A\x2F\x78\x5F\x11\x3F'
b'\x92\x2B\x8B\x02\x40\x25\x3F\x94\x70\x39\x0D\x39\x04\x93\x03\x77'
b'\x7D\xDB\xC9\x75\x0E\x9D\x64\x84\x9C\xE0\x90\x3E\xAE\x70\x4D\xC9'
b'\xF5\x89\xB7\x68\x0D\xEB\x9D\x60\x9F\xD5\xBC\xD4\xDE\xCD\x6F\x12'
b'\x05\x42\xE5\xCF\xF5\xD7\x6F\x2A\x43\xC8\x61\x5F\xB5\xB3\xA9\x21'
b'\x34\x63\x79\x7A\xA9\x02\x41\x00\xA1\xDD\xF0\x23\xC0\xCD\x94\xC0'
b'\x19\xBB\x26\xD0\x9B\x9E\x3C\xA8\xFA\x97\x1C\xB1\x6A\xA5\x8B\x9B'
b'\xAF\x79\xD6\x08\x1A\x1D\xBB\xA4\x52\xBA\x53\x65\x3E\x28\x04\xBA'
b'\x98\xFF\x69\xE8\xBB\x1B\x3A\x16\x1E\xA2\x25\xEA\x50\x14\x63\x21'
b'\x6A\x8D\xAB\x9B\x88\xA7\x5E\x5F\x02\x40\x61\x78\x64\x6E\x11\x2C'
b'\xF7\x9D\x92\x1A\x8A\x84\x3F\x17\xF6\xE7\xFF\x97\x4F\x68\x81\x22'
b'\x36\x5B\xF6\x69\x0C\xDF\xC9\x96\xE1\x89\x09\x52\xEB\x38\x20\xDD'
b'\x18\x90\xEC\x1C\x86\x19\xE8\x7A\x2B\xD3\x8F\x9D\x03\xB3\x7F\xAC'
b'\x74\x2E\xFB\x74\x8C\x78\x85\x94\x2C\x39')
self.bytes_2048 = (
b'\x30\x82\x04\xA5\x02\x01\x00\x02\x82\x01\x01\x00\xAB\x7F\x16\x1C'
b'\x00\x42\x49\x6C\xCD\x6C\x6D\x4D\xAD\xB9\x19\x97\x34\x35\x35\x77'
b'\x76\x00\x3A\xCF\x54\xB7\xAF\x1E\x44\x0A\xFB\x80\xB6\x4A\x87\x55'
b'\xF8\x00\x2C\xFE\xBA\x6B\x18\x45\x40\xA2\xD6\x60\x86\xD7\x46\x48'
b'\x34\x6D\x75\xB8\xD7\x18\x12\xB2\x05\x38\x7C\x0F\x65\x83\xBC\x4D'
b'\x7D\xC7\xEC\x11\x4F\x3B\x17\x6B\x79\x57\xC4\x22\xE7\xD0\x3F\xC6'
b'\x26\x7F\xA2\xA6\xF8\x9B\x9B\xEE\x9E\x60\xA1\xD7\xC2\xD8\x33\xE5'
b'\xA5\xF4\xBB\x0B\x14\x34\xF4\xE7\x95\xA4\x11\x00\xF8\xAA\x21\x49'
b'\x00\xDF\x8B\x65\x08\x9F\x98\x13\x5B\x1C\x67\xB7\x01\x67\x5A\xBD'
b'\xBC\x7D\x57\x21\xAA\xC9\xD1\x4A\x7F\x08\x1F\xCE\xC8\x0B\x64\xE8'
b'\xA0\xEC\xC8\x29\x53\x53\xC7\x95\x32\x8A\xBF\x70\xE1\xB4\x2E\x7B'
b'\xB8\xB7\xF4\xE8\xAC\x8C\x81\x0C\xDB\x66\xE3\xD2\x11\x26\xEB\xA8'
b'\xDA\x7D\x0C\xA3\x41\x42\xCB\x76\xF9\x1F\x01\x3D\xA8\x09\xE9\xC1'
b'\xB7\xAE\x64\xC5\x41\x30\xFB\xC2\x1D\x80\xE9\xC2\xCB\x06\xC5\xC8'
b'\xD7\xCC\xE8\x94\x6A\x9A\xC9\x9B\x1C\x28\x15\xC3\x61\x2A\x29\xA8'
b'\x2D\x73\xA1\xF9\x93\x74\xFE\x30\xE5\x49\x51\x66\x2A\x6E\xDA\x29'
b'\xC6\xFC\x41\x13\x35\xD5\xDC\x74\x26\xB0\xF6\x05\x02\x03\x01\x00'
b'\x01\x02\x82\x01\x00\x3B\x12\x45\x5D\x53\xC1\x81\x65\x16\xC5\x18'
b'\x49\x3F\x63\x98\xAA\xFA\x72\xB1\x7D\xFA\x89\x4D\xB8\x88\xA7\xD4'
b'\x8C\x0A\x47\xF6\x25\x79\xA4\xE6\x44\xF8\x6D\xA7\x11\xFE\xC8\x50'
b'\xCD\xD9\xDB\xBD\x17\xF6\x9A\x44\x3D\x2E\xC1\xDD\x60\xD3\xC6\x18'
b'\xFA\x74\xCD\xE5\xFD\xAF\xAB\xD6\xBA\xA2\x6E\xB0\xA3\xAD\xB4\xDE'
b'\xF6\x48\x0F\xB1\x21\x8C\xD3\xB0\x83\xE2\x52\xE8\x85\xB6\xF0\x72'
b'\x9F\x98\xB2\x14\x4D\x2B\x72\x29\x3E\x1B\x11\xD7\x33\x93\xBC\x41'
b'\xF7\x5B\x15\xEE\x3D\x75\x69\xB4\x99\x5E\xD1\xA1\x44\x25\xDA\x43'
b'\x19\xB7\xB2\x6B\x0E\x8F\xEF\x17\xC3\x75\x42\xAE\x5C\x6D\x58\x49'
b'\xF8\x72\x09\x56\x7F\x39\x25\xA4\x7B\x01\x6D\x56\x48\x59\x71\x7B'
b'\xC5\x7F\xCB\x45\x22\xD0\xAA\x49\xCE\x81\x6E\x5B\xE7\xB3\x08\x81'
b'\x93\x23\x6E\xC9\xEF\xFF\x14\x08\x58\x04\x5B\x73\xC5\xD7\x9B\xAF'
b'\x38\xF7\xC6\x7F\x04\xC5\xDC\xF0\xE3\x80\x6A\xD9\x82\xD1\x25\x90'
b'\x58\xC3\x47\x3E\x84\x71\x79\xA8\x78\xF2\xC6\xB3\xBD\x96\x8F\xB9'
b'\x9E\xA4\x6E\x91\x85\x89\x2F\x36\x76\xE7\x89\x65\xC2\xAE\xD4\x87'
b'\x7B\xA3\x91\x7D\xF0\x7C\x5E\x92\x74\x74\xF1\x9E\x76\x4B\xA6\x1D'
b'\xC3\x8D\x63\xBF\x29\x02\x81\x81\x00\xD5\xC6\x9C\x8C\x3C\xDC\x24'
b'\x64\x74\x4A\x79\x37\x13\xDA\xFB\x9F\x1D\xBC\x79\x9F\xF9\x64\x23'
b'\xFE\xCD\x3C\xBA\x79\x42\x86\xBC\xE9\x20\xF4\xB5\xC1\x83\xF9\x9E'
b'\xE9\x02\x8D\xB6\x21\x2C\x62\x77\xC4\xC8\x29\x7F\xCF\xBC\xE7\xF7'
b'\xC2\x4C\xA4\xC5\x1F\xC7\x18\x2F\xB8\xF4\x01\x9F\xB1\xD5\x65\x96'
b'\x74\xC5\xCB\xE6\xD5\xFA\x99\x20\x51\x34\x17\x60\xCD\x00\x73\x57'
b'\x29\xA0\x70\xA9\xE5\x4D\x34\x2B\xEB\xA8\xEF\x47\xEE\x82\xD3\xA0'
b'\x1B\x04\xCE\xC4\xA0\x0D\x4D\xDB\x41\xE3\x51\x16\xFC\x22\x1E\x85'
b'\x4B\x43\xA6\x96\xC0\xE6\x41\x9B\x1B\x02\x81\x81\x00\xCD\x5E\xA7'
b'\x70\x27\x89\x06\x4B\x67\x35\x40\xCB\xFF\x09\x35\x6A\xD8\x0B\xC3'
b'\xD5\x92\x81\x2E\xBA\x47\x61\x0B\x9F\xAC\x6A\xEC\xEF\xE2\x2A\xCA'
b'\xE4\x38\x45\x9C\xDA\x74\xE5\x96\x53\xD8\x8C\x04\x18\x9D\x34\x39'
b'\x9B\xF5\xB1\x4B\x92\x0E\x34\xEF\x38\xA7\xD0\x9F\xE6\x95\x93\x39'
b'\x6E\x8F\xE7\x35\xE6\xF0\xA6\xAE\x49\x90\x40\x10\x41\xD8\xA4\x06'
b'\xB6\xFD\x86\xA1\x16\x1E\x45\xF9\x5A\x3E\xAA\x5C\x10\x12\xE6\x66'
b'\x2E\x44\xF1\x5F\x33\x5A\xC9\x71\xE1\x76\x6B\x2B\xB9\xC9\x85\x10'
b'\x99\x74\x14\x1B\x44\xD3\x7E\x1E\x31\x98\x20\xA5\x5F\x02\x81\x81'
b'\x00\xB2\x87\x12\x37\xBF\x9F\xAD\x38\xC3\x31\x6A\xB7\x87\x7A\x6A'
b'\x86\x80\x63\xE5\x42\xA7\x18\x6D\x43\x1E\x8D\x27\xC1\x9A\xC0\x41'
b'\x45\x84\x03\x39\x42\xE9\xFF\x6E\x29\x73\xBB\x7B\x2D\x8B\x0E\x94'
b'\xAD\x1E\xE8\x21\x58\x10\x8F\xBC\x86\x64\x51\x7A\x5A\x46\x7F\xB9'
b'\x63\x01\x4B\xD5\xDC\xC2\xB4\xFB\x08\x7C\x23\x03\x9D\x11\x92\x0D'
b'\xBE\x22\xFD\x9F\x16\xB4\xD8\x9E\x23\x22\x5C\xD4\x55\xAD\xBA\xF3'
b'\x2E\xF4\x3F\x18\x58\x64\xA3\x6D\x63\x03\x09\xD6\x85\x3F\x77\x14'
b'\xB3\x9A\xAE\x1E\xBE\xE3\x93\x8F\x87\xC2\x70\x7E\x17\x8C\x73\x9F'
b'\x9F\x02\x81\x81\x00\x96\x90\xBE\xD1\x4B\x2A\xFA\xA2\x6D\x98\x6D'
b'\x59\x22\x31\xEE\x27\xD7\x1D\x49\x06\x5B\xD2\xBA\x1F\x78\x15\x7E'
b'\x20\x22\x98\x81\xFD\x9D\x23\x22\x7D\x0F\x84\x79\xEA\xEF\xA9\x22'
b'\xFD\x75\xD5\xB1\x6B\x1A\x56\x1F\xA6\x68\x0B\x04\x0C\xA0\xBD\xCE'
b'\x65\x0B\x23\xB9\x17\xA4\xB1\xBB\x79\x83\xA7\x4F\xAD\x70\xE1\xC3'
b'\x05\xCB\xEC\x2B\xFF\x1A\x85\xA7\x26\xA1\xD9\x02\x60\xE4\xF1\x08'
b'\x4F\x51\x82\x34\xDC\xD3\xFE\x77\x0B\x95\x20\x21\x5B\xD5\x43\xBB'
b'\x6A\x41\x17\x71\x87\x54\x67\x6A\x34\x17\x16\x66\xA7\x9F\x26\xE7'
b'\x9C\x14\x9C\x5A\xA1\x02\x81\x81\x00\xA0\xC9\x85\xA0\xA0\xA7\x91'
b'\xA6\x59\xF9\x97\x31\x13\x4C\x44\xF3\x7B\x2E\x52\x0A\x2C\xEA\x35'
b'\x80\x0A\xD2\x72\x41\xED\x36\x0D\xFD\xE6\xE8\xCA\x61\x4F\x12\x04'
b'\x7F\xD0\x8B\x76\xAC\x4D\x13\xC0\x56\xA0\x69\x9E\x2F\x98\xA1\xCA'
b'\xC9\x10\x11\x29\x4D\x71\x20\x8F\x4A\xBA\xB3\x3B\xA8\x7A\xA0\x51'
b'\x7F\x41\x5B\xAC\xA8\x8D\x6B\xAC\x00\x60\x88\xFA\x60\x1D\x34\x94'
b'\x17\xE1\xF0\xC9\xB2\x3A\xFF\xA4\xD4\x96\x61\x8D\xBC\x02\x49\x86'
b'\xED\x69\x0B\xBB\x7B\x02\x57\x68\xFF\x9D\xF8\xAC\x15\x41\x6F\x48'
b'\x9F\x81\x29\xC3\x23\x41\xA8\xB4\x4F')
def tearDown(self):
super(TestPrivateKey, self).tearDown()
def test_init(self):
"""
Test that a PrivateKey object can be instantiated.
"""
key = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
self.assertEqual(
key.cryptographic_algorithm, enums.CryptographicAlgorithm.RSA)
self.assertEqual(key.cryptographic_length, 1024)
self.assertEqual(key.value, self.bytes_1024)
self.assertEqual(key.key_format_type, enums.KeyFormatType.PKCS_8)
self.assertEqual(key.cryptographic_usage_masks, list())
self.assertEqual(key.names, ['Private Key'])
def test_init_with_args(self):
"""
Test that a PrivateKey object can be instantiated with all arguments.
"""
key = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA,
1024,
self.bytes_1024,
enums.KeyFormatType.PKCS_8,
masks=[enums.CryptographicUsageMask.ENCRYPT,
enums.CryptographicUsageMask.DECRYPT],
name='Test Private Key')
self.assertEqual(key.cryptographic_algorithm,
enums.CryptographicAlgorithm.RSA)
self.assertEqual(key.cryptographic_length, 1024)
self.assertEqual(key.value, self.bytes_1024)
self.assertEqual(key.key_format_type, enums.KeyFormatType.PKCS_8)
self.assertEqual(key.cryptographic_usage_masks,
[enums.CryptographicUsageMask.ENCRYPT,
enums.CryptographicUsageMask.DECRYPT])
self.assertEqual(key.names, ['Test Private Key'])
def test_get_object_type(self):
"""
Test that the object type can be retrieved from the PrivateKey.
"""
expected = enums.ObjectType.PRIVATE_KEY
key = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
observed = key.object_type
self.assertEqual(expected, observed)
def test_validate_on_invalid_algorithm(self):
"""
Test that a TypeError is raised when an invalid algorithm value is
used to construct a PrivateKey.
"""
args = ('invalid', 1024, self.bytes_1024, enums.KeyFormatType.PKCS_8)
self.assertRaises(TypeError, objects.PrivateKey, *args)
def test_validate_on_invalid_length(self):
"""
Test that a TypeError is raised when an invalid length value is used
to construct a PrivateKey.
"""
args = (enums.CryptographicAlgorithm.RSA, 'invalid', self.bytes_1024,
enums.KeyFormatType.PKCS_8)
self.assertRaises(TypeError, objects.PrivateKey, *args)
def test_validate_on_invalid_value(self):
"""
Test that a TypeError is raised when an invalid value is used to
construct a PrivateKey.
"""
args = (enums.CryptographicAlgorithm.RSA, 1024, 0,
enums.KeyFormatType.PKCS_8)
self.assertRaises(TypeError, objects.PrivateKey, *args)
def test_validate_on_invalid_format_type(self):
"""
        Test that a TypeError is raised when an invalid format type is used
        to construct a PrivateKey.
"""
args = (enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
'invalid')
self.assertRaises(TypeError, objects.PrivateKey, *args)
def test_validate_on_invalid_format_type_value(self):
"""
Test that a ValueError is raised when an invalid format type is used to
construct a PrivateKey.
"""
args = (enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.OPAQUE)
self.assertRaises(ValueError, objects.PrivateKey, *args)
def test_validate_on_invalid_masks(self):
"""
Test that a TypeError is raised when an invalid masks value is used to
construct a PrivateKey.
"""
args = (enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
kwargs = {'masks': 'invalid'}
self.assertRaises(TypeError, objects.PrivateKey, *args, **kwargs)
def test_validate_on_invalid_mask(self):
"""
Test that a TypeError is raised when an invalid mask value is used to
construct a PrivateKey.
"""
args = (enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
kwargs = {'masks': ['invalid']}
self.assertRaises(TypeError, objects.PrivateKey, *args, **kwargs)
def test_validate_on_invalid_name(self):
"""
Test that a TypeError is raised when an invalid name value is used to
construct a PrivateKey.
"""
args = (enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
kwargs = {'name': 0}
self.assertRaises(TypeError, objects.PrivateKey, *args, **kwargs)
def test_repr(self):
"""
Test that repr can be applied to a PrivateKey.
"""
key = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
args = "algorithm={0}, length={1}, value={2}, format_type={3}".format(
enums.CryptographicAlgorithm.RSA, 1024,
binascii.hexlify(self.bytes_1024), enums.KeyFormatType.PKCS_8)
expected = "PrivateKey({0})".format(args)
observed = repr(key)
self.assertEqual(expected, observed)
def test_str(self):
"""
Test that str can be applied to a PrivateKey.
"""
key = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
expected = str(binascii.hexlify(self.bytes_1024))
observed = str(key)
self.assertEqual(expected, observed)
def test_equal_on_equal(self):
"""
Test that the equality operator returns True when comparing two
PrivateKey objects with the same data.
"""
a = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
b = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
self.assertTrue(a == b)
self.assertTrue(b == a)
def test_equal_on_not_equal_algorithm(self):
"""
Test that the equality operator returns False when comparing two
PrivateKey objects with different data.
"""
a = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
b = objects.PrivateKey(
enums.CryptographicAlgorithm.AES, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
self.assertFalse(a == b)
self.assertFalse(b == a)
def test_equal_on_not_equal_length(self):
"""
Test that the equality operator returns False when comparing two
PrivateKey objects with different data.
"""
a = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
b = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 2048, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
self.assertFalse(a == b)
self.assertFalse(b == a)
def test_equal_on_not_equal_value(self):
"""
Test that the equality operator returns False when comparing two
PrivateKey objects with different data.
"""
a = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
b = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_2048,
enums.KeyFormatType.PKCS_8)
self.assertFalse(a == b)
self.assertFalse(b == a)
def test_equal_on_not_equal_format_type(self):
"""
Test that the equality operator returns False when comparing two
PrivateKey objects with different data.
"""
a = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
b = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_1)
self.assertFalse(a == b)
self.assertFalse(b == a)
def test_equal_on_type_mismatch(self):
"""
Test that the equality operator returns False when comparing a
PrivateKey object to a non-PrivateKey object.
"""
a = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
b = "invalid"
self.assertFalse(a == b)
self.assertFalse(b == a)
def test_not_equal_on_equal(self):
"""
Test that the inequality operator returns False when comparing
two PrivateKey objects with the same internal data.
"""
a = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 2048, self.bytes_2048,
enums.KeyFormatType.PKCS_1)
b = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 2048, self.bytes_2048,
enums.KeyFormatType.PKCS_1)
self.assertFalse(a != b)
self.assertFalse(b != a)
def test_not_equal_on_not_equal_algorithm(self):
"""
        Test that the inequality operator returns True when comparing two
PrivateKey objects with different data.
"""
a = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 2048, self.bytes_2048,
enums.KeyFormatType.PKCS_1)
b = objects.PrivateKey(
enums.CryptographicAlgorithm.AES, 2048, self.bytes_2048,
enums.KeyFormatType.PKCS_1)
self.assertTrue(a != b)
self.assertTrue(b != a)
def test_not_equal_on_not_equal_length(self):
"""
        Test that the inequality operator returns True when comparing two
PrivateKey objects with different data.
"""
a = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 2048, self.bytes_2048,
enums.KeyFormatType.PKCS_8)
b = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 1024, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
self.assertTrue(a != b)
self.assertTrue(b != a)
def test_not_equal_on_not_equal_value(self):
"""
        Test that the inequality operator returns True when comparing two
PrivateKey objects with different data.
"""
a = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 2048, self.bytes_2048,
enums.KeyFormatType.PKCS_8)
b = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 2048, self.bytes_1024,
enums.KeyFormatType.PKCS_8)
self.assertTrue(a != b)
self.assertTrue(b != a)
def test_not_equal_on_not_equal_format_type(self):
"""
        Test that the inequality operator returns True when comparing two
PrivateKey objects with different data.
"""
a = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 2048, self.bytes_2048,
enums.KeyFormatType.PKCS_8)
b = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 2048, self.bytes_2048,
enums.KeyFormatType.PKCS_1)
self.assertTrue(a != b)
self.assertTrue(b != a)
def test_not_equal_on_type_mismatch(self):
"""
        Test that the inequality operator returns True when comparing a
PrivateKey object to a non-PrivateKey object.
"""
a = objects.PrivateKey(
enums.CryptographicAlgorithm.RSA, 2048, self.bytes_2048,
enums.KeyFormatType.PKCS_1)
b = "invalid"
self.assertTrue(a != b)
self.assertTrue(b != a)
|
|
# Copyright (c) 2013 VMware, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import sys
# NOTE(arosen): these modules are mocked out because cloudfoundryclient
# isn't in the openstack global requirements.
import mock
sys.modules['cloudfoundryclient.v2.client'] = mock.Mock()
sys.modules['cloudfoundryclient.v2'] = mock.Mock()
sys.modules['cloudfoundryclient'] = mock.Mock()
from congress.datasources import cloudfoundryv2_driver
from congress.tests import base
from congress.tests import helper
ORG1_GUID = '5187136c-ef7d-47e6-9e6b-ac7780bab3db'
ORG_DATA = (
{"total_results": 1,
"next_url": 'null',
"total_pages": 1,
"prev_url": 'null',
"resources": [{
"entity":
{"status": "active",
"spaces_url": "/v2/organizations/" + ORG1_GUID + "/spaces",
"private_domains_url":
"/v2/organizations/" + ORG1_GUID + "/private_domains",
"name": "foo.com",
"domains_url":
"/v2/organizations/" + ORG1_GUID + "/domains",
"billing_enabled": 'true',
"quota_definition_guid":
"b72b1acb-ff4f-468d-99c0-05cd91012b62",
"app_events_url":
"/v2/organizations/" + ORG1_GUID + "/app_events",
"space_quota_definitions_url":
"/v2/organizations/" + ORG1_GUID + "/space_quota_definitions",
"quota_definition_url":
"/v2/quota_definitions/b72b1acb-ff4f-468d-99c0-05cd91012b62",
"auditors_url":
"/v2/organizations/" + ORG1_GUID + "/auditors",
"managers_url":
"/v2/organizations/" + ORG1_GUID + "/managers",
"users_url":
"/v2/organizations/" + ORG1_GUID + "/users",
"billing_managers_url":
"/v2/organizations/" + ORG1_GUID + "/billing_managers"
},
"metadata":
{"url":
"/v2/organizations/5187136c-ef7d-47e6-9e6b-ac7780bab3db",
"created_at": "2015-01-21T02:17:28+00:00",
"guid": "5187136c-ef7d-47e6-9e6b-ac7780bab3db",
"updated_at": "2015-01-21T02:17:28+00:00"
}
}
]
}
)
SPACE1_GUID = "8da5477d-340e-4bb4-808a-54d9f72017d1"
SPACE2_GUID = "79479021-1e77-473a-8c63-28de9d2ca697"
ORG1_SPACES_DATA = (
{"total_results": 2,
"next_url": "null",
"total_pages": 1,
"prev_url": "null",
"resources": [{
"entity":
{"developers_url": "/v2/spaces/" + SPACE1_GUID + "/developers",
"service_instances_url":
"/v2/spaces/" + SPACE1_GUID + "/service_instances",
"events_url": "/v2/spaces/" + SPACE1_GUID + "/events",
"name": "development",
"domains_url": "/v2/spaces/" + SPACE1_GUID + "/domains",
"app_events_url": "/v2/spaces/" + SPACE1_GUID + "/app_events",
"routes_url": "/v2/spaces/" + SPACE1_GUID + "/routes",
"organization_guid": "5187136c-ef7d-47e6-9e6b-ac7780bab3db",
"space_quota_definition_guid": "null",
"apps_url": "/v2/spaces/" + SPACE1_GUID + "/apps",
"auditors_url": "/v2/spaces/" + SPACE1_GUID + "/auditors",
"managers_url": "/v2/spaces/" + SPACE1_GUID + "/managers",
"organization_url":
"/v2/organizations/5187136c-ef7d-47e6-9e6b-ac7780bab3db",
"security_groups_url":
"/v2/spaces/" + SPACE1_GUID + "/security_groups"
},
"metadata":
{"url": "/v2/spaces/" + SPACE1_GUID,
"created_at": "2015-01-21T02:17:28+00:00",
"guid": SPACE1_GUID,
"updated_at": "null"
}
},
{"entity":
{"developers_url": "/v2/spaces/" + SPACE2_GUID + "/developers",
"service_instances_url":
"/v2/spaces/" + SPACE2_GUID + "/service_instances",
"events_url": "/v2/spaces/" + SPACE2_GUID + "/events",
"name": "test2",
"domains_url": "/v2/spaces/" + SPACE2_GUID + "/domains",
"app_events_url": "/v2/spaces/" + SPACE2_GUID + "/app_events",
"routes_url": "/v2/spaces/" + SPACE2_GUID + "/routes",
"organization_guid": "5187136c-ef7d-47e6-9e6b-ac7780bab3db",
"space_quota_definition_guid": "null",
"apps_url": "/v2/spaces/" + SPACE2_GUID + "/apps",
"auditors_url": "/v2/spaces/" + SPACE2_GUID + "/auditors",
"managers_url": "/v2/spaces/" + SPACE2_GUID + "/managers",
"organization_url":
"/v2/organizations/5187136c-ef7d-47e6-9e6b-ac7780bab3db",
"security_groups_url":
"/v2/spaces/" + SPACE2_GUID + "/security_groups"
},
"metadata":
{"url": "/v2/spaces/" + SPACE2_GUID,
"created_at": "2015-01-22T19:02:32+00:00",
"guid": SPACE2_GUID,
"updated_at": "null"
}
}
]
}
)
APP1_GUID = "c3bd7fc1-73b4-4cc7-a6c8-9976c30edad5"
APP2_GUID = "f7039cca-95ac-49a6-b116-e32a53ddda69"
APPS_IN_SPACE1 = (
{"total_results": 2,
"next_url": "null",
"total_pages": 1,
"prev_url": "null",
"resources": [{
"entity":
{"version": "fec00ce7-a980-49e1-abec-beed5516618f",
"staging_failed_reason": "null",
"instances": 1,
"routes_url": "/v2/apps" + APP1_GUID + "routes",
"space_url": "/v2/spaces/8da5477d-340e-4bb4-808a-54d9f72017d1",
"docker_image": "null",
"console": "false",
"package_state": "STAGED",
"state": "STARTED",
"production": "false",
"detected_buildpack": "Ruby",
"memory": 256,
"package_updated_at": "2015-01-21T21:00:40+00:00",
"staging_task_id": "71f75ad3cad64884a92c4e7738eaae16",
"buildpack": "null",
"stack_url": "/v2/stacks/50688ae5-9bfc-4bf6-a4bf-caadb21a32c6",
"events_url": "/v2/apps" + APP1_GUID + "events",
"service_bindings_url":
"/v2/apps" + APP1_GUID + "service_bindings",
"detected_start_command":
"bundle exec rake db:migrate && bundle exec rails s -p $PORT",
"disk_quota": 1024,
"stack_guid": "50688ae5-9bfc-4bf6-a4bf-caadb21a32c6",
"space_guid": "8da5477d-340e-4bb4-808a-54d9f72017d1",
"name": "rails_sample_app",
"health_check_type": "port",
"command":
"bundle exec rake db:migrate && bundle exec rails s -p $PORT",
"debug": "null",
"environment_json": "null",
"health_check_timeout": "null"
},
"metadata":
{"url": "/v2/apps/c3bd7fc1-73b4-4cc7-a6c8-9976c30edad5",
"created_at": "2015-01-21T21:01:19+00:00",
"guid": "c3bd7fc1-73b4-4cc7-a6c8-9976c30edad5",
"updated_at": "2015-01-21T21:01:19+00:00"
}
},
{"entity":
{"version": "a1b52559-32f3-4765-9fd3-6e35293fb6d0",
"staging_failed_reason": "null",
"instances": 1,
"routes_url": "/v2/apps" + APP2_GUID + "routes",
"space_url": "/v2/spaces/8da5477d-340e-4bb4-808a-54d9f72017d1",
"docker_image": "null",
"console": "false",
"package_state": "PENDING",
"state": "STOPPED",
"production": "false",
"detected_buildpack": "null",
"memory": 1024,
"package_updated_at": "null",
"staging_task_id": "null",
"buildpack": "null",
"stack_url": "/v2/stacks/50688ae5-9bfc-4bf6-a4bf-caadb21a32c6",
"events_url": "/v2/apps" + APP2_GUID + "events",
"service_bindings_url":
"/v2/apps" + APP2_GUID + "service_bindings",
"detected_start_command": "",
"disk_quota": 1024,
"stack_guid": "50688ae5-9bfc-4bf6-a4bf-caadb21a32c6",
"space_guid": "8da5477d-340e-4bb4-808a-54d9f72017d1",
"name": "help",
"health_check_type": "port",
"command": "null",
"debug": "null",
"environment_json": "null",
"health_check_timeout": "null"
},
"metadata":
{"url": "/v2/apps/f7039cca-95ac-49a6-b116-e32a53ddda69",
"created_at": "2015-01-21T18:48:34+00:00",
"guid": "f7039cca-95ac-49a6-b116-e32a53ddda69",
"updated_at": "null"
}
}
]
}
)
APPS_IN_SPACE2 = {"total_results": 0,
"next_url": "null",
"total_pages": 1,
"prev_url": "null",
"resources": []}
SERVICES_IN_SPACE1 = {
"guid": "8da5477d-340e-4bb4-808a-54d9f72017d1",
"name": "development",
"services": [{
"bound_app_count": 0,
"guid": "88f61682-d78e-410f-88ee-1e0eabbbc7da",
"last_operation": None,
"name": "rails-postgres",
"service_plan": {
"guid": "fbcec3af-3e8d-4ee7-adfe-3f12a137ed66",
"name": "turtle",
"service": {
"guid": "34dbc753-34ed-4cf1-9a87-a224dfca569b",
"label": "elephantsql",
"provider": None,
"version": None
}
}
}]
}
EXPECTED_STATE = {
'organizations': set([
('5187136c-ef7d-47e6-9e6b-ac7780bab3db', 'foo.com',
'2015-01-21T02:17:28+00:00', '2015-01-21T02:17:28+00:00')]),
'spaces': set([
('8da5477d-340e-4bb4-808a-54d9f72017d1', 'development',
'2015-01-21T02:17:28+00:00', 'null'),
('79479021-1e77-473a-8c63-28de9d2ca697', 'test2',
'2015-01-22T19:02:32+00:00', 'null')]),
'apps': set([
('8da5477d-340e-4bb4-808a-54d9f72017d1',
'c3bd7fc1-73b4-4cc7-a6c8-9976c30edad5', 'null',
'bundle exec rake db:migrate && bundle exec rails s -p $PORT',
'false', 'null', 'Ruby',
'bundle exec rake db:migrate && bundle exec rails s -p $PORT',
1024, 'null', 'null', 'null', 1,
256, 'rails_sample_app', 'STAGED', '2015-01-21T21:00:40+00:00',
'false', 'null', '71f75ad3cad64884a92c4e7738eaae16', 'STARTED',
'fec00ce7-a980-49e1-abec-beed5516618f', '2015-01-21T21:01:19+00:00',
'2015-01-21T21:01:19+00:00'),
('8da5477d-340e-4bb4-808a-54d9f72017d1',
'f7039cca-95ac-49a6-b116-e32a53ddda69', 'null', 'null', 'false',
'null', 'null', '', 1024, 'null', 'null', 'null', 1, 1024,
'help', 'PENDING', 'null', 'false', 'null', 'null', 'STOPPED',
'a1b52559-32f3-4765-9fd3-6e35293fb6d0',
'2015-01-21T18:48:34+00:00', 'null')]),
'service_bindings': set([]),
'services': set([
('88f61682-d78e-410f-88ee-1e0eabbbc7da',
'8da5477d-340e-4bb4-808a-54d9f72017d1', 'rails-postgres',
0, 'None', 'turtle')]),
}
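# EXPECTED_STATE mirrors the mocked payloads above: each tuple in
# 'organizations', 'spaces', 'apps' and 'services' is the flattened row the
# driver is expected to derive from ORG_DATA, ORG1_SPACES_DATA, APPS_IN_SPACE*
# and SERVICES_IN_SPACE1.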
class TestCloudFoundryV2Driver(base.TestCase):
def setUp(self):
super(TestCloudFoundryV2Driver, self).setUp()
args = helper.datasource_openstack_args()
args['poll_time'] = 0
args['client'] = mock.MagicMock()
self.driver = cloudfoundryv2_driver.CloudFoundryV2Driver(args=args)
def test_update_from_datasource(self):
def _side_effect_get_org_spaces(org):
if org == ORG1_GUID:
return ORG1_SPACES_DATA
raise ValueError("This should occur...")
def _side_effect_get_apps_in_space(space):
if space == SPACE1_GUID:
return APPS_IN_SPACE1
elif space == SPACE2_GUID:
return APPS_IN_SPACE2
else:
raise ValueError("This should not occur....")
def _side_effect_get_spaces_summary(space):
if space == SPACE1_GUID:
return SERVICES_IN_SPACE1
else:
return {"guid": space,
"services": []}
def _side_effect_get_app_services(space):
return {'resources': []}
with base.nested(
mock.patch.object(self.driver.cloudfoundry,
"get_organizations",
return_value=ORG_DATA),
mock.patch.object(self.driver.cloudfoundry,
"get_organization_spaces",
side_effect=_side_effect_get_org_spaces),
mock.patch.object(self.driver.cloudfoundry,
"get_apps_in_space",
side_effect=_side_effect_get_apps_in_space),
mock.patch.object(self.driver.cloudfoundry,
"get_spaces_summary",
side_effect=_side_effect_get_spaces_summary),
mock.patch.object(self.driver.cloudfoundry,
"get_app_service_bindings",
side_effect=_side_effect_get_app_services),
) as (get_organizations, get_organization_spaces,
get_apps_in_space, get_spaces_summary,
get_app_services_guids):
self.driver.update_from_datasource()
self.assertEqual(self.driver.state, EXPECTED_STATE)
def test_execute(self):
class CloudfoundryClient(object):
def __init__(self):
self.testkey = None
def setServices(self, arg1):
self.testkey = 'arg1=%s' % arg1
cloudfoundry_client = CloudfoundryClient()
self.driver.cloudfoundry = cloudfoundry_client
api_args = {
'positional': ['1']
}
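        # execute() should look up 'setServices' on the stubbed client and call
        # it with the positional args, leaving testkey set to 'arg1=1'.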
expected_ans = 'arg1=1'
self.driver.execute('setServices', api_args)
self.assertEqual(cloudfoundry_client.testkey, expected_ans)
|
|
"""Test script for ftplib module."""
# Modified by Giampaolo Rodola' to test FTP class, IPv6 and TLS
# environment
import ftplib
import asyncore
import asynchat
import socket
import StringIO
import errno
import os
try:
import ssl
except ImportError:
ssl = None
from unittest import TestCase, SkipTest, skipUnless
from test import test_support
from test.test_support import HOST, HOSTv6
threading = test_support.import_module('threading')
TIMEOUT = 3
# the dummy data returned by server over the data channel when
# RETR, LIST and NLST commands are issued
RETR_DATA = 'abcde12345\r\n' * 1000
LIST_DATA = 'foo\r\nbar\r\n'
NLST_DATA = 'foo\r\nbar\r\n'
class DummyDTPHandler(asynchat.async_chat):
dtp_conn_closed = False
def __init__(self, conn, baseclass):
asynchat.async_chat.__init__(self, conn)
self.baseclass = baseclass
self.baseclass.last_received_data = ''
def handle_read(self):
self.baseclass.last_received_data += self.recv(1024)
def handle_close(self):
# XXX: this method can be called many times in a row for a single
# connection, including in clear-text (non-TLS) mode.
# (behaviour witnessed with test_data_connection)
if not self.dtp_conn_closed:
self.baseclass.push('226 transfer complete')
self.close()
self.dtp_conn_closed = True
def handle_error(self):
raise
class DummyFTPHandler(asynchat.async_chat):
dtp_handler = DummyDTPHandler
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
self.set_terminator("\r\n")
self.in_buffer = []
self.dtp = None
self.last_received_cmd = None
self.last_received_data = ''
self.next_response = ''
self.rest = None
self.next_retr_data = RETR_DATA
self.push('220 welcome')
def collect_incoming_data(self, data):
self.in_buffer.append(data)
def found_terminator(self):
line = ''.join(self.in_buffer)
self.in_buffer = []
if self.next_response:
self.push(self.next_response)
self.next_response = ''
cmd = line.split(' ')[0].lower()
self.last_received_cmd = cmd
space = line.find(' ')
if space != -1:
arg = line[space + 1:]
else:
arg = ""
if hasattr(self, 'cmd_' + cmd):
method = getattr(self, 'cmd_' + cmd)
method(arg)
else:
self.push('550 command "%s" not understood.' %cmd)
def handle_error(self):
raise
def push(self, data):
asynchat.async_chat.push(self, data + '\r\n')
def cmd_port(self, arg):
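        # The PORT argument is 'h1,h2,h3,h4,p1,p2'; the data port is p1 * 256 + p2.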
addr = map(int, arg.split(','))
ip = '%d.%d.%d.%d' %tuple(addr[:4])
port = (addr[4] * 256) + addr[5]
s = socket.create_connection((ip, port), timeout=10)
self.dtp = self.dtp_handler(s, baseclass=self)
self.push('200 active data connection established')
def cmd_pasv(self, arg):
sock = socket.socket()
sock.bind((self.socket.getsockname()[0], 0))
sock.listen(5)
sock.settimeout(10)
ip, port = sock.getsockname()[:2]
ip = ip.replace('.', ',')
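        # The 227 reply encodes the listening port as two bytes:
        # port == p1 * 256 + p2.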
p1, p2 = divmod(port, 256)
self.push('227 entering passive mode (%s,%d,%d)' %(ip, p1, p2))
conn, addr = sock.accept()
self.dtp = self.dtp_handler(conn, baseclass=self)
def cmd_eprt(self, arg):
af, ip, port = arg.split(arg[0])[1:-1]
port = int(port)
s = socket.create_connection((ip, port), timeout=10)
self.dtp = self.dtp_handler(s, baseclass=self)
self.push('200 active data connection established')
def cmd_epsv(self, arg):
sock = socket.socket(socket.AF_INET6)
sock.bind((self.socket.getsockname()[0], 0))
sock.listen(5)
sock.settimeout(10)
port = sock.getsockname()[1]
self.push('229 entering extended passive mode (|||%d|)' %port)
conn, addr = sock.accept()
self.dtp = self.dtp_handler(conn, baseclass=self)
def cmd_echo(self, arg):
# sends back the received string (used by the test suite)
self.push(arg)
def cmd_user(self, arg):
self.push('331 username ok')
def cmd_pass(self, arg):
self.push('230 password ok')
def cmd_acct(self, arg):
self.push('230 acct ok')
def cmd_rnfr(self, arg):
self.push('350 rnfr ok')
def cmd_rnto(self, arg):
self.push('250 rnto ok')
def cmd_dele(self, arg):
self.push('250 dele ok')
def cmd_cwd(self, arg):
self.push('250 cwd ok')
def cmd_size(self, arg):
self.push('250 1000')
def cmd_mkd(self, arg):
self.push('257 "%s"' %arg)
def cmd_rmd(self, arg):
self.push('250 rmd ok')
def cmd_pwd(self, arg):
self.push('257 "pwd ok"')
def cmd_type(self, arg):
self.push('200 type ok')
def cmd_quit(self, arg):
self.push('221 quit ok')
self.close()
def cmd_stor(self, arg):
self.push('125 stor ok')
def cmd_rest(self, arg):
self.rest = arg
self.push('350 rest ok')
def cmd_retr(self, arg):
self.push('125 retr ok')
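        # A preceding REST command sets the byte offset from which the transfer
        # resumes; otherwise the full payload is sent.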
if self.rest is not None:
offset = int(self.rest)
else:
offset = 0
self.dtp.push(self.next_retr_data[offset:])
self.dtp.close_when_done()
self.rest = None
def cmd_list(self, arg):
self.push('125 list ok')
self.dtp.push(LIST_DATA)
self.dtp.close_when_done()
def cmd_nlst(self, arg):
self.push('125 nlst ok')
self.dtp.push(NLST_DATA)
self.dtp.close_when_done()
def cmd_setlongretr(self, arg):
# For testing. Next RETR will return long line.
self.next_retr_data = 'x' * int(arg)
self.push('125 setlongretr ok')
class DummyFTPServer(asyncore.dispatcher, threading.Thread):
handler = DummyFTPHandler
def __init__(self, address, af=socket.AF_INET):
threading.Thread.__init__(self)
asyncore.dispatcher.__init__(self)
self.create_socket(af, socket.SOCK_STREAM)
self.bind(address)
self.listen(5)
self.active = False
self.active_lock = threading.Lock()
self.host, self.port = self.socket.getsockname()[:2]
self.handler_instance = None
def start(self):
assert not self.active
self.__flag = threading.Event()
threading.Thread.start(self)
self.__flag.wait()
def run(self):
self.active = True
self.__flag.set()
while self.active and asyncore.socket_map:
self.active_lock.acquire()
asyncore.loop(timeout=0.1, count=1)
self.active_lock.release()
asyncore.close_all(ignore_all=True)
def stop(self):
assert self.active
self.active = False
self.join()
def handle_accept(self):
conn, addr = self.accept()
self.handler_instance = self.handler(conn)
def handle_connect(self):
self.close()
handle_read = handle_connect
def writable(self):
return 0
def handle_error(self):
raise
if ssl is not None:
CERTFILE = os.path.join(os.path.dirname(__file__), "keycert3.pem")
CAFILE = os.path.join(os.path.dirname(__file__), "pycacert.pem")
class SSLConnection(object, asyncore.dispatcher):
"""An asyncore.dispatcher subclass supporting TLS/SSL."""
_ssl_accepting = False
_ssl_closing = False
def secure_connection(self):
socket = ssl.wrap_socket(self.socket, suppress_ragged_eofs=False,
certfile=CERTFILE, server_side=True,
do_handshake_on_connect=False,
ssl_version=ssl.PROTOCOL_SSLv23)
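            # Swap the plain socket for the TLS-wrapped one in asyncore's socket
            # map; the handshake itself completes asynchronously via the
            # read/write event handlers below.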
self.del_channel()
self.set_socket(socket)
self._ssl_accepting = True
def _do_ssl_handshake(self):
try:
self.socket.do_handshake()
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
elif err.args[0] == ssl.SSL_ERROR_EOF:
return self.handle_close()
raise
except socket.error as err:
if err.args[0] == errno.ECONNABORTED:
return self.handle_close()
else:
self._ssl_accepting = False
def _do_ssl_shutdown(self):
self._ssl_closing = True
try:
self.socket = self.socket.unwrap()
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
except socket.error as err:
# Any "socket error" corresponds to a SSL_ERROR_SYSCALL return
# from OpenSSL's SSL_shutdown(), corresponding to a
# closed socket condition. See also:
# http://www.mail-archive.com/openssl-users@openssl.org/msg60710.html
pass
self._ssl_closing = False
if getattr(self, '_ccc', False) is False:
super(SSLConnection, self).close()
else:
pass
def handle_read_event(self):
if self._ssl_accepting:
self._do_ssl_handshake()
elif self._ssl_closing:
self._do_ssl_shutdown()
else:
super(SSLConnection, self).handle_read_event()
def handle_write_event(self):
if self._ssl_accepting:
self._do_ssl_handshake()
elif self._ssl_closing:
self._do_ssl_shutdown()
else:
super(SSLConnection, self).handle_write_event()
def send(self, data):
try:
return super(SSLConnection, self).send(data)
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_EOF, ssl.SSL_ERROR_ZERO_RETURN,
ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return 0
raise
def recv(self, buffer_size):
try:
return super(SSLConnection, self).recv(buffer_size)
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return b''
if err.args[0] in (ssl.SSL_ERROR_EOF, ssl.SSL_ERROR_ZERO_RETURN):
self.handle_close()
return b''
raise
def handle_error(self):
raise
def close(self):
if (isinstance(self.socket, ssl.SSLSocket) and
self.socket._sslobj is not None):
self._do_ssl_shutdown()
else:
super(SSLConnection, self).close()
class DummyTLS_DTPHandler(SSLConnection, DummyDTPHandler):
"""A DummyDTPHandler subclass supporting TLS/SSL."""
def __init__(self, conn, baseclass):
DummyDTPHandler.__init__(self, conn, baseclass)
if self.baseclass.secure_data_channel:
self.secure_connection()
class DummyTLS_FTPHandler(SSLConnection, DummyFTPHandler):
"""A DummyFTPHandler subclass supporting TLS/SSL."""
dtp_handler = DummyTLS_DTPHandler
def __init__(self, conn):
DummyFTPHandler.__init__(self, conn)
self.secure_data_channel = False
def cmd_auth(self, line):
"""Set up secure control channel."""
self.push('234 AUTH TLS successful')
self.secure_connection()
def cmd_pbsz(self, line):
"""Negotiate size of buffer for secure data transfer.
For TLS/SSL the only valid value for the parameter is '0'.
Any other value is accepted but ignored.
"""
self.push('200 PBSZ=0 successful.')
def cmd_prot(self, line):
"""Setup un/secure data channel."""
arg = line.upper()
if arg == 'C':
self.push('200 Protection set to Clear')
self.secure_data_channel = False
elif arg == 'P':
self.push('200 Protection set to Private')
self.secure_data_channel = True
else:
self.push("502 Unrecognized PROT type (use C or P).")
class DummyTLS_FTPServer(DummyFTPServer):
handler = DummyTLS_FTPHandler
class TestFTPClass(TestCase):
def setUp(self):
self.server = DummyFTPServer((HOST, 0))
self.server.start()
self.client = ftplib.FTP(timeout=10)
self.client.connect(self.server.host, self.server.port)
def tearDown(self):
self.client.close()
self.server.stop()
def test_getwelcome(self):
self.assertEqual(self.client.getwelcome(), '220 welcome')
def test_sanitize(self):
self.assertEqual(self.client.sanitize('foo'), repr('foo'))
self.assertEqual(self.client.sanitize('pass 12345'), repr('pass *****'))
self.assertEqual(self.client.sanitize('PASS 12345'), repr('PASS *****'))
def test_exceptions(self):
self.assertRaises(ftplib.error_temp, self.client.sendcmd, 'echo 400')
self.assertRaises(ftplib.error_temp, self.client.sendcmd, 'echo 499')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'echo 500')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'echo 599')
self.assertRaises(ftplib.error_proto, self.client.sendcmd, 'echo 999')
def test_all_errors(self):
exceptions = (ftplib.error_reply, ftplib.error_temp, ftplib.error_perm,
ftplib.error_proto, ftplib.Error, IOError, EOFError)
for x in exceptions:
try:
raise x('exception not included in all_errors set')
except ftplib.all_errors:
pass
def test_set_pasv(self):
# passive mode is supposed to be enabled by default
self.assertTrue(self.client.passiveserver)
self.client.set_pasv(True)
self.assertTrue(self.client.passiveserver)
self.client.set_pasv(False)
self.assertFalse(self.client.passiveserver)
def test_voidcmd(self):
self.client.voidcmd('echo 200')
self.client.voidcmd('echo 299')
self.assertRaises(ftplib.error_reply, self.client.voidcmd, 'echo 199')
self.assertRaises(ftplib.error_reply, self.client.voidcmd, 'echo 300')
def test_login(self):
self.client.login()
def test_acct(self):
self.client.acct('passwd')
def test_rename(self):
self.client.rename('a', 'b')
self.server.handler_instance.next_response = '200'
self.assertRaises(ftplib.error_reply, self.client.rename, 'a', 'b')
def test_delete(self):
self.client.delete('foo')
self.server.handler_instance.next_response = '199'
self.assertRaises(ftplib.error_reply, self.client.delete, 'foo')
def test_size(self):
self.client.size('foo')
def test_mkd(self):
dir = self.client.mkd('/foo')
self.assertEqual(dir, '/foo')
def test_rmd(self):
self.client.rmd('foo')
def test_cwd(self):
dir = self.client.cwd('/foo')
self.assertEqual(dir, '250 cwd ok')
def test_pwd(self):
dir = self.client.pwd()
self.assertEqual(dir, 'pwd ok')
def test_quit(self):
self.assertEqual(self.client.quit(), '221 quit ok')
# Ensure the connection gets closed; sock attribute should be None
self.assertEqual(self.client.sock, None)
def test_retrbinary(self):
received = []
self.client.retrbinary('retr', received.append)
self.assertEqual(''.join(received), RETR_DATA)
def test_retrbinary_rest(self):
for rest in (0, 10, 20):
received = []
self.client.retrbinary('retr', received.append, rest=rest)
self.assertEqual(''.join(received), RETR_DATA[rest:],
msg='rest test case %d %d %d' % (rest,
len(''.join(received)),
len(RETR_DATA[rest:])))
def test_retrlines(self):
received = []
self.client.retrlines('retr', received.append)
self.assertEqual(''.join(received), RETR_DATA.replace('\r\n', ''))
def test_storbinary(self):
f = StringIO.StringIO(RETR_DATA)
self.client.storbinary('stor', f)
self.assertEqual(self.server.handler_instance.last_received_data, RETR_DATA)
# test new callback arg
flag = []
f.seek(0)
self.client.storbinary('stor', f, callback=lambda x: flag.append(None))
self.assertTrue(flag)
def test_storbinary_rest(self):
f = StringIO.StringIO(RETR_DATA)
for r in (30, '30'):
f.seek(0)
self.client.storbinary('stor', f, rest=r)
self.assertEqual(self.server.handler_instance.rest, str(r))
def test_storlines(self):
f = StringIO.StringIO(RETR_DATA.replace('\r\n', '\n'))
self.client.storlines('stor', f)
self.assertEqual(self.server.handler_instance.last_received_data, RETR_DATA)
# test new callback arg
flag = []
f.seek(0)
self.client.storlines('stor foo', f, callback=lambda x: flag.append(None))
self.assertTrue(flag)
def test_nlst(self):
self.client.nlst()
self.assertEqual(self.client.nlst(), NLST_DATA.split('\r\n')[:-1])
def test_dir(self):
l = []
self.client.dir(lambda x: l.append(x))
self.assertEqual(''.join(l), LIST_DATA.replace('\r\n', ''))
def test_makeport(self):
self.client.makeport()
# IPv4 is in use, just make sure send_eprt has not been used
self.assertEqual(self.server.handler_instance.last_received_cmd, 'port')
def test_makepasv(self):
host, port = self.client.makepasv()
conn = socket.create_connection((host, port), 10)
conn.close()
# IPv4 is in use, just make sure send_epsv has not been used
self.assertEqual(self.server.handler_instance.last_received_cmd, 'pasv')
def test_line_too_long(self):
self.assertRaises(ftplib.Error, self.client.sendcmd,
'x' * self.client.maxline * 2)
def test_retrlines_too_long(self):
self.client.sendcmd('SETLONGRETR %d' % (self.client.maxline * 2))
received = []
self.assertRaises(ftplib.Error,
self.client.retrlines, 'retr', received.append)
def test_storlines_too_long(self):
f = StringIO.StringIO('x' * self.client.maxline * 2)
self.assertRaises(ftplib.Error, self.client.storlines, 'stor', f)
@skipUnless(socket.has_ipv6, "IPv6 not enabled")
class TestIPv6Environment(TestCase):
@classmethod
def setUpClass(cls):
try:
DummyFTPServer((HOST, 0), af=socket.AF_INET6)
except socket.error:
raise SkipTest("IPv6 not enabled")
def setUp(self):
self.server = DummyFTPServer((HOSTv6, 0), af=socket.AF_INET6)
self.server.start()
self.client = ftplib.FTP()
self.client.connect(self.server.host, self.server.port)
def tearDown(self):
self.client.close()
self.server.stop()
def test_af(self):
self.assertEqual(self.client.af, socket.AF_INET6)
def test_makeport(self):
self.client.makeport()
self.assertEqual(self.server.handler_instance.last_received_cmd, 'eprt')
def test_makepasv(self):
host, port = self.client.makepasv()
conn = socket.create_connection((host, port), 10)
conn.close()
self.assertEqual(self.server.handler_instance.last_received_cmd, 'epsv')
def test_transfer(self):
def retr():
received = []
self.client.retrbinary('retr', received.append)
self.assertEqual(''.join(received), RETR_DATA)
self.client.set_pasv(True)
retr()
self.client.set_pasv(False)
retr()
@skipUnless(ssl, "SSL not available")
class TestTLS_FTPClassMixin(TestFTPClass):
"""Repeat TestFTPClass tests starting the TLS layer for both control
and data connections first.
"""
def setUp(self):
self.server = DummyTLS_FTPServer((HOST, 0))
self.server.start()
self.client = ftplib.FTP_TLS(timeout=10)
self.client.connect(self.server.host, self.server.port)
# enable TLS
self.client.auth()
self.client.prot_p()
@skipUnless(ssl, "SSL not available")
class TestTLS_FTPClass(TestCase):
"""Specific TLS_FTP class tests."""
def setUp(self):
self.server = DummyTLS_FTPServer((HOST, 0))
self.server.start()
self.client = ftplib.FTP_TLS(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
def tearDown(self):
self.client.close()
self.server.stop()
def test_control_connection(self):
self.assertNotIsInstance(self.client.sock, ssl.SSLSocket)
self.client.auth()
self.assertIsInstance(self.client.sock, ssl.SSLSocket)
def test_data_connection(self):
# clear text
sock = self.client.transfercmd('list')
self.assertNotIsInstance(sock, ssl.SSLSocket)
sock.close()
self.assertEqual(self.client.voidresp(), "226 transfer complete")
# secured, after PROT P
self.client.prot_p()
sock = self.client.transfercmd('list')
self.assertIsInstance(sock, ssl.SSLSocket)
sock.close()
self.assertEqual(self.client.voidresp(), "226 transfer complete")
# PROT C is issued, the connection must be in cleartext again
self.client.prot_c()
sock = self.client.transfercmd('list')
self.assertNotIsInstance(sock, ssl.SSLSocket)
sock.close()
self.assertEqual(self.client.voidresp(), "226 transfer complete")
def test_login(self):
# login() is supposed to implicitly secure the control connection
self.assertNotIsInstance(self.client.sock, ssl.SSLSocket)
self.client.login()
self.assertIsInstance(self.client.sock, ssl.SSLSocket)
# make sure that AUTH TLS doesn't get issued again
self.client.login()
def test_auth_issued_twice(self):
self.client.auth()
self.assertRaises(ValueError, self.client.auth)
def test_auth_ssl(self):
try:
self.client.ssl_version = ssl.PROTOCOL_SSLv23
self.client.auth()
self.assertRaises(ValueError, self.client.auth)
finally:
self.client.ssl_version = ssl.PROTOCOL_TLSv1
def test_context(self):
self.client.quit()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertRaises(ValueError, ftplib.FTP_TLS, keyfile=CERTFILE,
context=ctx)
self.assertRaises(ValueError, ftplib.FTP_TLS, certfile=CERTFILE,
context=ctx)
self.assertRaises(ValueError, ftplib.FTP_TLS, certfile=CERTFILE,
keyfile=CERTFILE, context=ctx)
self.client = ftplib.FTP_TLS(context=ctx, timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.assertNotIsInstance(self.client.sock, ssl.SSLSocket)
self.client.auth()
self.assertIs(self.client.sock.context, ctx)
self.assertIsInstance(self.client.sock, ssl.SSLSocket)
self.client.prot_p()
sock = self.client.transfercmd('list')
try:
self.assertIs(sock.context, ctx)
self.assertIsInstance(sock, ssl.SSLSocket)
finally:
sock.close()
def test_check_hostname(self):
self.client.quit()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.check_hostname = True
ctx.load_verify_locations(CAFILE)
self.client = ftplib.FTP_TLS(context=ctx, timeout=TIMEOUT)
# 127.0.0.1 doesn't match SAN
self.client.connect(self.server.host, self.server.port)
with self.assertRaises(ssl.CertificateError):
self.client.auth()
# exception quits connection
self.client.connect(self.server.host, self.server.port)
self.client.prot_p()
with self.assertRaises(ssl.CertificateError):
self.client.transfercmd("list").close()
self.client.quit()
self.client.connect("localhost", self.server.port)
self.client.auth()
self.client.quit()
self.client.connect("localhost", self.server.port)
self.client.prot_p()
self.client.transfercmd("list").close()
class TestTimeouts(TestCase):
def setUp(self):
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(10)
self.port = test_support.bind_port(self.sock)
threading.Thread(target=self.server, args=(self.evt,self.sock)).start()
# Wait for the server to be ready.
self.evt.wait()
self.evt.clear()
ftplib.FTP.port = self.port
def tearDown(self):
self.evt.wait()
def server(self, evt, serv):
# This method sets the evt 3 times:
# 1) when the connection is ready to be accepted.
# 2) when it is safe for the caller to close the connection
# 3) when we have closed the socket
serv.listen(5)
# (1) Signal the caller that we are ready to accept the connection.
evt.set()
try:
conn, addr = serv.accept()
except socket.timeout:
pass
else:
conn.send("1 Hola mundo\n")
# (2) Signal the caller that it is safe to close the socket.
evt.set()
conn.close()
finally:
serv.close()
# (3) Signal the caller that we are done.
evt.set()
def testTimeoutDefault(self):
# default -- use global socket timeout
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
ftp = ftplib.FTP(HOST)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def testTimeoutNone(self):
# no timeout -- do not use global socket timeout
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
ftp = ftplib.FTP(HOST, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertIsNone(ftp.sock.gettimeout())
self.evt.wait()
ftp.close()
def testTimeoutValue(self):
# a value
ftp = ftplib.FTP(HOST, timeout=30)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def testTimeoutConnect(self):
ftp = ftplib.FTP()
ftp.connect(HOST, timeout=30)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def testTimeoutDifferentOrder(self):
ftp = ftplib.FTP(timeout=30)
ftp.connect(HOST)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def testTimeoutDirectAccess(self):
ftp = ftplib.FTP()
ftp.timeout = 30
ftp.connect(HOST)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def test_main():
tests = [TestFTPClass, TestTimeouts,
TestIPv6Environment,
TestTLS_FTPClassMixin, TestTLS_FTPClass]
thread_info = test_support.threading_setup()
try:
test_support.run_unittest(*tests)
finally:
test_support.threading_cleanup(*thread_info)
if __name__ == '__main__':
test_main()
|
|
# Copyright 2013 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import os
from oslo_log import log as logging
from trove.common import cfg
from trove.common.i18n import _
from trove.common import instance as trove_instance
from trove.common.notification import EndNotification
from trove.guestagent import backup
from trove.guestagent.datastore.cassandra import service
from trove.guestagent.datastore import manager
from trove.guestagent import guest_log
from trove.guestagent import volume
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
MANAGER = CONF.datastore_manager if CONF.datastore_manager else 'cassandra'
class Manager(manager.Manager):
GUEST_LOG_DEFS_SYSTEM_LABEL = 'system'
def __init__(self, manager_name='cassandra'):
super(Manager, self).__init__(manager_name)
self._app = None
self._admin = None
@property
def status(self):
return self.app.status
@property
def app(self):
if self._app is None:
self._app = self.build_app()
return self._app
def build_app(self):
return service.CassandraApp()
@property
def admin(self):
if self._admin is None:
self._admin = self.app.build_admin()
return self._admin
@property
def configuration_manager(self):
return self.app.configuration_manager
@property
def datastore_log_defs(self):
system_log_file = self.validate_log_file(
self.app.cassandra_system_log_file, self.app.cassandra_owner)
return {
self.GUEST_LOG_DEFS_SYSTEM_LABEL: {
self.GUEST_LOG_TYPE_LABEL: guest_log.LogType.USER,
self.GUEST_LOG_USER_LABEL: self.app.cassandra_owner,
self.GUEST_LOG_FILE_LABEL: system_log_file
}
}
def guest_log_enable(self, context, log_name, disable):
if disable:
self.app.set_logging_level('OFF')
else:
log_level = CONF.get(self.manager_name).get('system_log_level')
self.app.set_logging_level(log_level)
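        # returning False signals the caller that no datastore restart is
        # required to apply the logging change (assumed base-manager contract)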
return False
def pre_upgrade(self, context):
LOG.debug('Preparing Cassandra for upgrade.')
self.app.status.begin_restart()
self.app.stop_db()
mount_point = self.app.cassandra_working_dir
upgrade_info = self.app.save_files_pre_upgrade(mount_point)
upgrade_info['mount_point'] = mount_point
return upgrade_info
def post_upgrade(self, context, upgrade_info):
LOG.debug('Finalizing Cassandra upgrade.')
self.app.stop_db()
if 'device' in upgrade_info:
self.mount_volume(context, mount_point=upgrade_info['mount_point'],
device_path=upgrade_info['device'])
self.app.restore_files_post_upgrade(upgrade_info)
# cqlshrc has been restored at this point, need to refresh the
# credentials stored in the app by resetting the app.
self._app = None
self.app.start_db()
def restart(self, context):
self.app.restart()
def start_db_with_conf_changes(self, context, config_contents):
self.app.start_db_with_conf_changes(config_contents)
def stop_db(self, context, do_not_start_on_reboot=False):
self.app.stop_db(do_not_start_on_reboot=do_not_start_on_reboot)
def reset_configuration(self, context, configuration):
self.app.reset_configuration(configuration)
def do_prepare(self, context, packages, databases, memory_mb, users,
device_path, mount_point, backup_info,
config_contents, root_password, overrides,
cluster_config, snapshot):
"""This is called from prepare in the base class."""
self.app.install_if_needed(packages)
self.app.init_storage_structure(mount_point)
if config_contents or device_path or backup_info:
# FIXME(pmalik) Once the cassandra bug
# https://issues.apache.org/jira/browse/CASSANDRA-2356
# is fixed, this code may have to be revisited.
#
# Cassandra generates system keyspaces on the first start.
# The stored properties include the 'cluster_name', which once
# saved cannot be easily changed without removing the system
# tables. It is crucial that the service does not boot up in
# the middle of the configuration procedure.
# We wait here for the service to come up, stop it properly and
# remove the generated keyspaces before proceeding with
# configuration. If it does not start up within the time limit
# we assume it is not going to and proceed with configuration
# right away.
LOG.debug("Waiting for database first boot.")
if (self.app.status.wait_for_real_status_to_change_to(
trove_instance.ServiceStatuses.RUNNING,
CONF.state_change_wait_time,
False)):
LOG.debug("Stopping database prior to initial configuration.")
self.app.stop_db()
self.app._remove_system_tables()
LOG.debug("Starting initial configuration.")
if config_contents:
LOG.debug("Applying configuration.")
self.app.configuration_manager.save_configuration(
config_contents)
cluster_name = None
if cluster_config:
cluster_name = cluster_config.get('id', None)
self.app.apply_initial_guestagent_configuration(
cluster_name=cluster_name)
if cluster_config:
self.app.write_cluster_topology(
cluster_config['dc'], cluster_config['rack'],
prefer_local=True)
if device_path:
LOG.debug("Preparing data volume.")
device = volume.VolumeDevice(device_path)
# unmount if device is already mounted
device.unmount_device(device_path)
device.format()
if os.path.exists(mount_point):
                    # rsync existing data
LOG.debug("Migrating existing data.")
device.migrate_data(mount_point)
# mount the volume
LOG.debug("Mounting new volume.")
device.mount(mount_point)
if not cluster_config:
if backup_info:
self._perform_restore(backup_info, context, mount_point)
LOG.debug("Starting database with configuration changes.")
self.app.start_db(update_db=False)
if not self.app.has_user_config():
LOG.debug("Securing superuser access.")
self.app.secure()
self.app.restart()
self._admin = self.app.build_admin()
if not cluster_config and self.is_root_enabled(context):
self.status.report_root(context, self.app.default_superuser_name)
def change_passwords(self, context, users):
with EndNotification(context):
self.admin.change_passwords(context, users)
def update_attributes(self, context, username, hostname, user_attrs):
with EndNotification(context):
self.admin.update_attributes(context, username, hostname,
user_attrs)
def create_database(self, context, databases):
with EndNotification(context):
self.admin.create_database(context, databases)
def create_user(self, context, users):
with EndNotification(context):
self.admin.create_user(context, users)
def delete_database(self, context, database):
with EndNotification(context):
self.admin.delete_database(context, database)
def delete_user(self, context, user):
with EndNotification(context):
self.admin.delete_user(context, user)
def get_user(self, context, username, hostname):
return self.admin.get_user(context, username, hostname)
def grant_access(self, context, username, hostname, databases):
self.admin.grant_access(context, username, hostname, databases)
def revoke_access(self, context, username, hostname, database):
self.admin.revoke_access(context, username, hostname, database)
def list_access(self, context, username, hostname):
return self.admin.list_access(context, username, hostname)
def list_databases(self, context, limit=None, marker=None,
include_marker=False):
return self.admin.list_databases(context, limit, marker,
include_marker)
def list_users(self, context, limit=None, marker=None,
include_marker=False):
return self.admin.list_users(context, limit, marker, include_marker)
def enable_root(self, context):
return self.app.enable_root()
def enable_root_with_password(self, context, root_password=None):
return self.app.enable_root(root_password=root_password)
def disable_root(self, context):
self.app.enable_root(root_password=None)
def is_root_enabled(self, context):
return self.app.is_root_enabled()
def _perform_restore(self, backup_info, context, restore_location):
LOG.info(_("Restoring database from backup %s.") % backup_info['id'])
try:
backup.restore(context, backup_info, restore_location)
self.app._apply_post_restore_updates(backup_info)
except Exception as e:
LOG.error(e)
LOG.error(_("Error performing restore from backup %s.") %
backup_info['id'])
self.app.status.set_status(trove_instance.ServiceStatuses.FAILED)
raise
LOG.info(_("Restored database successfully."))
def create_backup(self, context, backup_info):
"""
Entry point for initiating a backup for this instance.
The call currently blocks guestagent until the backup is finished.
:param backup_info: a dictionary containing the db instance id of the
backup task, location, type, and other data.
"""
with EndNotification(context):
backup.backup(context, backup_info)
def update_overrides(self, context, overrides, remove=False):
LOG.debug("Updating overrides.")
if remove:
self.app.remove_overrides()
else:
self.app.update_overrides(context, overrides, remove)
def apply_overrides(self, context, overrides):
"""Configuration changes are made in the config YAML file and
require restart, so this is a no-op.
"""
pass
def get_data_center(self, context):
return self.app.get_data_center()
def get_rack(self, context):
return self.app.get_rack()
def set_seeds(self, context, seeds):
self.app.set_seeds(seeds)
def get_seeds(self, context):
return self.app.get_seeds()
def set_auto_bootstrap(self, context, enabled):
self.app.set_auto_bootstrap(enabled)
def node_cleanup_begin(self, context):
self.app.node_cleanup_begin()
def node_cleanup(self, context):
self.app.node_cleanup()
def node_decommission(self, context):
self.app.node_decommission()
def cluster_secure(self, context, password):
os_admin = self.app.cluster_secure(password)
self._admin = self.app.build_admin()
return os_admin
def get_admin_credentials(self, context):
return self.app.get_admin_credentials()
def store_admin_credentials(self, context, admin_credentials):
self.app.store_admin_credentials(admin_credentials)
self._admin = self.app.build_admin()
|
|
"""TcEx testing profile Class."""
# standard library
import json
import logging
import os
import re
import sys
from collections import OrderedDict
from random import randint
# third-party
import colorama as c
from ..app_config_object import InstallJson, LayoutJson, Permutations
from ..env_store import EnvStore
from ..sessions import TcSession
from ..utils import Utils
from .migrate import Migrate
from .populate import Populate
from .rules import Rules
from .session_manager import SessionManager
# autoreset colorama
c.init(autoreset=True, strip=False)
class Profile:
"""Testing Profile Class.
Args:
default_args (dict, optional): The default Args for the profile.
feature (str, optional): The feature name.
        name (str, optional): The filename of the profile in the profiles.d directory.
redis_client (redis.client.Redis, optional): An instance of Redis client.
pytestconfig (?, optional): Pytest config object.
monkeypatch (?, optional): Pytest monkeypatch object.
tcex_testing_context (str, optional): The current context for this profile.
        logger (logging.Logger, optional): A logging instance.
options (dict, optional): ?
"""
def __init__(
self,
default_args=None,
feature=None,
name=None,
redis_client=None,
pytestconfig=None,
monkeypatch=None,
tcex_testing_context=None,
logger=None,
options=None,
):
"""Initialize Class properties."""
self._default_args = default_args or {}
self._feature = feature
self._name = name
self.log = logger or logging.getLogger('profile')
self.redis_client = redis_client
self.pytestconfig = pytestconfig
self.monkeypatch = monkeypatch
self.tcex_testing_context = tcex_testing_context
self.test_options = options
# properties
self._app_path = os.getcwd()
self._context_tracker = []
self._data = None
self._output_variables = None
self._pytest_args = None
self._session_manager = None
self._session = None
self.env_store = EnvStore(logger=self.log)
self.ij = InstallJson(logger=self.log)
self.lj = LayoutJson(logger=self.log)
self.permutations = Permutations(logger=self.log)
self.populate = Populate(self)
self.rules = Rules(self)
self.tc_staged_data = {}
self.utils = Utils()
@staticmethod
def _flatten_inputs(inputs):
"""Flatten the inputs dict."""
inputs_flattened = dict(inputs.get('defaults', {}))
inputs_flattened.update(inputs.get('optional', {}))
inputs_flattened.update(inputs.get('required', {}))
return inputs_flattened
@staticmethod
def _sorted(data):
"""Return a sorted dict as an OrderedDict."""
return json.loads(json.dumps(data, sort_keys=True), object_pairs_hook=OrderedDict)
@property
def _test_case_data(self):
"""Return partially parsed test case data."""
return os.getenv('PYTEST_CURRENT_TEST').split(' ')[0].split('::')
@property
def _test_case_name(self):
"""Return partially parsed test case data."""
return self._test_case_data[-1].replace('/', '-').replace('[', '-').replace(']', '')
def add(self, profile_data=None, profile_name=None, permutation_id=None):
"""Add a profile.
Args:
profile_data (dict, optional): The profile data.
profile_name (str, optional): The name of the profile.
permutation_id (int, optional): The index of the permutation id. Defaults to None.
"""
profile_data = profile_data or {}
if profile_name is not None:
# profile_name is only used for profile migrations
self.name = profile_name
# get input permutations when a permutation_id is passed
input_permutations = None
if permutation_id is not None:
try:
input_permutations = self.permutations.input_dict(permutation_id)
except Exception:
# catch any error
print(f'{c.Fore.RED}Invalid permutation id provided.')
sys.exit(1)
        # this should not be hit since tctest also checks for duplicates
        if os.path.isfile(self.filename): # pragma: no cover
            print(f'{c.Fore.RED}A profile with this name already exists.')
sys.exit(1)
profile = OrderedDict()
profile['_comments_'] = []
profile['environments'] = ['build']
profile['stage'] = profile_data.get('stage', {'kvstore': {}})
profile['configs'] = [] # add config here and remove later to ensure proper order
profile['inputs'] = {} # add inputs here and remove later to ensure proper order
profile['trigger'] = {}
profile['webhook_event'] = {
'body': '',
'headers': [],
'method': 'GET',
'query_params': [],
'trigger_id': '',
}
profile['validation_criteria'] = {}
profile['outputs'] = profile_data.get('outputs')
profile['options'] = profile_data.get(
'options', {'session': {'enabled': False, 'blur': []}},
)
if self.ij.runtime_level.lower() in ['triggerservice', 'webhooktriggerservice']:
profile['configs'].extend(
[
{
'trigger_id': str(randint(1000, 9999)), # nosec
'config': profile_data.get(
'inputs',
{
'optional': self.ij.params_to_args(
input_permutations=input_permutations,
required=False,
service_config=False,
),
'required': self.ij.params_to_args(
input_permutations=input_permutations,
required=True,
service_config=False,
),
},
),
}
]
)
del profile['inputs'] # inputs are for non service Apps
del profile['validation_criteria'] # validation_criteria is for job Apps only
elif self.ij.runtime_level.lower() in ['organization', 'playbook']:
profile['exit_codes'] = profile_data.get('exit_codes', [0])
profile['exit_message'] = None
profile['inputs'].update(
profile_data.get(
'inputs',
{
'optional': self.ij.params_to_args(
required=False, input_permutations=input_permutations
),
'required': self.ij.params_to_args(
required=True, input_permutations=input_permutations
),
},
)
)
            del profile['configs'] # configs are for service Apps only
del profile['trigger'] # trigger is for service Apps only
del profile['webhook_event'] # webhook_event is for service Apps only
if self.ij.runtime_level.lower() == 'organization':
profile['stage']['threatconnect'] = {}
profile['validation_criteria'] = profile_data.get('validation_criteria', {'percent': 5})
del profile['outputs'] # outputs are not used in job Apps
elif self.ij.runtime_level.lower() == 'playbook':
del profile['validation_criteria'] # validation_criteria is for job Apps only
elif self.ij.runtime_level.lower() == 'triggerservice':
del profile['webhook_event'] # webhook_event is for webhooktriggerservice Apps only
elif self.ij.runtime_level.lower() == 'webhooktriggerservice':
del profile['trigger'] # trigger is for triggerservice Apps only
# write the new profile to disk
self.write(profile)
def add_context(self, context):
"""Add a context to the context tracker for this profile.
Args:
context (str): The context (session_id) for this profile.
"""
self._context_tracker.append(context)
def clear_context(self, context):
"""Clear all context data in redis.
Args:
context (str): The context (session_id) to clear in KV store.
"""
keys = self.redis_client.hkeys(context)
if keys:
return self.redis_client.hdel(context, *keys)
return 0
@property
def contents(self):
"""Return mutable copy of profile JSON contents."""
try:
with open(self.filename) as fh:
return json.load(fh, object_pairs_hook=OrderedDict)
except (OSError, ValueError):
print(f'{c.Fore.RED}Could not open/read profile {self.filename}.')
@property
def context_tracker(self):
"""Return the current context trackers for Service Apps."""
if not self._context_tracker:
if self.tcex_testing_context:
self._context_tracker = json.loads(
self.redis_client.hget(self.tcex_testing_context, '_context_tracker') or '[]'
)
return self._context_tracker
@property
def data(self):
"""Return single instance copy of current profile."""
if self._data is None:
self._data = self.contents
self.remove_comments(self._data)
# APP-618 - used in custom test cases
if self._data:
self._data['name'] = self.name
return self._data
@data.setter
def data(self, profile_data):
"""Set profile_data dict."""
self._data = profile_data
self.remove_comments(self._data)
def delete(self):
"""Delete an existing profile."""
raise NotImplementedError('The delete method is not currently implemented.')
@property
def directory(self):
"""Return fully qualified profile directory."""
return os.path.join(self._app_path, 'tests', self.feature, 'profiles.d')
@property
def feature(self):
"""Return the current feature."""
if self._feature is None:
            # when called in the testing framework, get the feature from the pytest env var.
self._feature = self._test_case_data[0].split('/')[1].replace('/', '-')
return self._feature
@property
def feature_directory(self):
"""Return fully qualified feature directory."""
return os.path.join(self._app_path, 'tests', self.feature)
@property
def filename(self):
"""Return profile fully qualified filename."""
return os.path.join(self.directory, f'{self.name}.json')
def init(self):
"""Return the Data (dict) from the current profile."""
if self.data is None:
self.log.error('Profile init failed; loaded profile data is None')
# Now can initialize anything that needs initializing
self.session_manager.init() # initialize session recording/playback
def merge_inputs(self):
"""Merge new inputs and remove undefined inputs."""
if not self.pytest_args.get('merge_inputs'):
return
updated_params = []
# handle non-layout and layout based App appropriately
for profile_inputs, params in self.profile_inputs_params:
profile_inputs_flattened = self._flatten_inputs(profile_inputs)
inputs = {}
merged_inputs = {
'optional': {},
'required': {},
}
for name, data in params.items():
# inputs that are serviceConfig are not applicable for profiles
if data.get('serviceConfig'):
continue
# each non hidden input will be checked for permutations if the App has layout
if not data.get('hidden'):
if not self.permutations.validate_input_variable(name, inputs):
continue
# get the value from the current profile or use default value from install.json
value = profile_inputs_flattened.get(name)
if name not in profile_inputs_flattened and data.get('type').lower() != 'boolean':
# set the value to the default in the install.json file unless the type
# is boolean. changing a boolean value to True when not there will change
# the logic of the test case.
value = data.get('default', None)
# get input type based on install.json required field
input_type = 'optional'
if data.get('required'):
input_type = 'required'
# APP-87 - ensure boolean inputs don't have null values
if data.get('type').lower() == 'boolean':
if not isinstance(value, bool):
value = False
# update inputs for next permutation check
inputs[name] = value
# store merged/updated inputs for writing back to profile
merged_inputs[input_type][name] = value
# ADI-1376 - handle tcex default args (prevent removing)
if profile_inputs.get('defaults'):
merged_inputs['defaults'] = profile_inputs.get('defaults')
updated_params.append(merged_inputs)
# update the profile with merged config/inputs
profile_data = self.contents
if self.ij.runtime_level.lower() in ['triggerservice', 'webhooktriggerservice']:
for index, config_item in enumerate(profile_data.get('configs', [])):
config_item['config'] = updated_params[index]
else:
profile_data['inputs'] = updated_params[0]
# write updated profile
self.write(profile_data)
def migrate(self):
"""Migration the profile to the latest schema"""
migrate = Migrate(self)
self.data = migrate.data
@property
def name(self):
"""Return partially parsed test case data."""
if self._name is None:
name_pattern = r'^test_[a-zA-Z0-9_]+\[(.+)\]$'
self._name = re.search(name_pattern, self._test_case_data[-1]).group(1)
return self._name
@name.setter
def name(self, name):
"""Set the profile name"""
self._name = name
def order_profile(self, profile_data):
"""Order the profile data properly."""
comments = profile_data.pop('_comments_', None)
environments = profile_data.pop('environments', None)
exit_codes = profile_data.pop('exit_codes', None)
exit_message = profile_data.pop('exit_message', None)
configs = profile_data.pop('configs', None)
inputs = profile_data.pop('inputs', None)
options = profile_data.pop('options', None)
outputs = profile_data.pop('outputs', None)
stage = profile_data.pop('stage', None)
trigger = profile_data.pop('trigger', None)
validation_criteria = profile_data.pop('validation_criteria', None)
webhook_event = profile_data.pop('webhook_event', None)
profile = OrderedDict()
if comments is not None:
profile['_comments_'] = comments
if environments is not None:
profile['environments'] = environments
if stage is not None:
profile['stage'] = self._sorted(stage)
if configs is not None:
profile['configs'] = self._sorted(configs)
if inputs is not None:
profile['inputs'] = self._sorted(inputs)
if trigger is not None:
profile['trigger'] = self._sorted(trigger)
if webhook_event is not None:
profile['webhook_event'] = self._sorted(webhook_event)
if validation_criteria is not None:
profile['validation_criteria'] = validation_criteria
if exit_message is not None:
profile['exit_message'] = self._sorted(exit_message)
if outputs is not None:
profile['outputs'] = self._sorted(outputs)
if options is not None:
profile['options'] = self._sorted(options)
if exit_codes is not None:
profile['exit_codes'] = self._sorted(exit_codes)
# add any additional fields not covered above
for k, v in profile_data.items():
profile[k] = v
return profile
@property
def profile_inputs(self):
"""Return the appropriate inputs (config) for the current App type.
        Service Apps use configs and others use inputs.
        "inputs": {
            "defaults": {},
            "optional": {},
            "required": {}
        }
"""
if self.ij.runtime_level.lower() in ['triggerservice', 'webhooktriggerservice']:
for config_data in self.configs:
yield config_data.get('config')
else:
yield self.inputs
@property
def profile_inputs_params(self):
"""Return params for inputs."""
# handle non-layout and layout based App appropriately
for profile_inputs in self.profile_inputs:
params = self.ij.params_dict # params section of install.json build as dict
if self.lj.has_layout:
# using inputs from layout.json since they are required to be in order
# (display field can only use inputs previously defined)
params = {}
for name in self.lj.params_dict:
# get data from install.json based on name
params[name] = self.ij.params_dict.get(name)
# hidden fields will not be in layout.json so they need to be included manually
params.update(self.ij.filter_params_dict(hidden=True))
yield profile_inputs, params
# TODO: BCS - move this
@property
def pytest_args(self):
"""Return dict of pytest config args."""
if self._pytest_args is None:
self._pytest_args = {}
if self.pytestconfig:
args = self.pytestconfig.option # argparse.Namespace
self._pytest_args = {
'merge_inputs': args.merge_inputs or False,
'merge_outputs': args.merge_outputs or False,
'replace_exit_message': args.replace_exit_message or False,
'replace_outputs': args.replace_outputs or False,
'record_session': args.record_session or False,
'ignore_session': args.ignore_session or False,
}
return self._pytest_args
def remove_comments(self, data):
"""Iterate through data and remove any dict field with a value of "comments"
Args:
data (dict): The profile dictionary.
"""
data = data or {}
for _, v in list(data.items()):
try:
del data['_comments_']
except KeyError:
pass
if isinstance(v, dict):
self.remove_comments(v)
@property
def session(self):
"""Return a instance of the session manager."""
if self._session is None:
self._session = TcSession(
self.env_store.getenv('/ninja/tc/tci/exchange_admin/api_access_id'),
self.env_store.getenv('/ninja/tc/tci/exchange_admin/api_secret_key'),
os.getenv('TC_API_PATH'),
)
return self._session
@property
def session_manager(self):
"""Return a instance of the session manager."""
if not self._session_manager:
self._session_manager = SessionManager(self)
return self._session_manager
@property
def test_directory(self):
"""Return fully qualified test directory."""
return os.path.join(self._app_path, 'tests')
def update_exit_message(self):
"""Update validation rules from exit_message section of profile."""
message_tc = ''
if os.path.isfile(self.message_tc_filename):
with open(self.message_tc_filename) as mh:
message_tc = mh.read()
profile_data = self.contents
if (
profile_data.get('exit_message') is None
or isinstance(profile_data.get('exit_message'), str)
or self.pytest_args.get('replace_exit_message')
):
# update the profile
profile_data['exit_message'] = {'expected_output': message_tc, 'op': 'eq'}
self.write(profile_data, 'updating exit message')
def update_outputs(self):
"""Update the validation rules for outputs section of a profile.
By default this method will only update if the current value is null. If the
flag --replace_outputs is passed to pytest (e.g., pytest --replace_outputs)
        the outputs will be replaced regardless of their current value. If the flag
        --merge_outputs is passed to pytest (e.g., pytest --merge_outputs) any new
        outputs will be added and any outputs that are no longer valid will be
        removed.
"""
if self.redis_client is None:
# redis_client is only available for children of TestCasePlaybookCommon
print(f'{c.Fore.RED}An instance of redis_client is not set.')
sys.exit(1)
outputs = {}
trigger_id = None
for context in self.context_tracker:
# get all current keys in current context
redis_data = self.redis_client.hgetall(context)
trigger_id = self.redis_client.hget(context, '_trigger_id')
# updated outputs with validation data
self.update_outputs_variables(outputs, redis_data, trigger_id)
# cleanup redis
self.clear_context(context)
if not self.outputs or self.pytest_args.get('replace_outputs'):
            # update profile if current profile outputs are not set or user specifies --replace_outputs
profile_data = self.contents
profile_data['outputs'] = outputs
self.write(profile_data)
elif self.pytest_args.get('merge_outputs'):
if trigger_id is not None:
# service Apps have a different structure with id: data
merged_outputs = {}
for id_, data in outputs.items():
merged_outputs[id_] = {}
for key in list(data):
if key in self.outputs.get(id_, {}):
# use current profile output value if exists
merged_outputs[id_][key] = self.outputs[id_][key]
else:
merged_outputs[id_][key] = outputs[id_][key]
else:
# update playbook App profile outputs
merged_outputs = {}
for key in list(outputs):
if key in self.outputs:
# use current profile output value if exists
merged_outputs[key] = self.outputs[key]
else:
merged_outputs[key] = outputs[key]
profile_data = self.contents
profile_data['outputs'] = merged_outputs
self.write(profile_data)
def update_outputs_variables(self, outputs, redis_data, trigger_id):
"""Return the outputs section of a profile.
Args:
outputs (dict): The dict to add outputs.
redis_data (dict): The data from KV store for this profile.
trigger_id (str): The current trigger_id (service Apps).
"""
for variable in self.tc_playbook_out_variables:
# TODO: investigate moving to output rules validator
# APP-219 - check for "bad" output variable names
if 'raw.json' in variable:
self.log.data(
'validate',
'Suspect Value',
'Output variable matched a suspect value (raw.json).',
'warning',
)
# get data from redis for current context
data = redis_data.get(variable.encode('utf-8'))
# validate redis variables
if data is None:
if 1 not in self.exit_codes:
# TODO: add feature in testing framework to allow writing null and
# then check if variables exist instead of null value.
# log error for missing output data if not a fail test case (exit code of 1)
self.log.data(
'validate',
'Missing Data',
f'possible missing KV Store data for variable {variable}',
'info',
)
else:
data = json.loads(data.decode('utf-8'))
# validate validation variables
validation_data = (self.outputs or {}).get(variable)
if trigger_id is None and validation_data is None and self.outputs:
self.log.error(f'[{self.name}] Missing validations rule: {variable}')
# make business rules based on data type or content
output_data = {'expected_output': data, 'op': 'eq'}
if 1 not in self.exit_codes:
output_data = self.rules.data(data)
# get trigger id for service Apps
if trigger_id is not None:
if isinstance(trigger_id, bytes):
trigger_id = trigger_id.decode('utf-8')
outputs.setdefault(trigger_id, {})
outputs[trigger_id][variable] = output_data
else:
outputs[variable] = output_data
def validate_inputs(self):
"""Validate required inputs.
This method will also merge input if --merge_inputs is passed to pytest.
"""
errors = []
status = True
# handle non-layout and layout based App appropriately
for profile_inputs, params in self.profile_inputs_params:
profile_inputs_flattened = self._flatten_inputs(profile_inputs)
inputs = {}
for name, data in params.items():
if data.get('serviceConfig'):
# inputs that are serviceConfig are not applicable for profiles
continue
if not data.get('hidden'):
# each non hidden input will be checked for permutations if the App has layout
if not self.permutations.validate_input_variable(name, inputs):
continue
# get the value from the current profile or use default value from install.json
value = profile_inputs_flattened.get(name)
if name not in profile_inputs_flattened:
value = data.get('default', None)
if data.get('required'):
if value in [None, '']: # exclude 0 or False from check
# validation step
errors.append(f'- Missing/Invalid value for required arg ({name})')
status = False
# APP-87 - ensure boolean inputs don't have null values
if data.get('type').lower() == 'boolean':
if not isinstance(value, bool):
# validation step
errors.append(f'- Invalid value for boolean arg ({name})')
status = False
# update inputs
inputs[name] = value
errors = '\n'.join(errors) # convert error to string for assert message
return status, f'\n{errors}'
def write(self, json_data, reason=None):
"""Write updated profile file.
Args:
json_data (dict): The profile data.
reason (str, default:None): The reason for the update.
"""
# Permuted test cases set options to a true value, so disable writeback
if self.test_options:
return
# order the profile data appropriately
json_data = self.order_profile(json_data)
if reason is not None:
self.log.data(
'profile', 'Profile Update', f'writing updated profile for {reason}', 'info'
)
with open(self.filename, 'w') as fh:
json.dump(json_data, fh, indent=2, sort_keys=False)
fh.write('\n')
#
# Properties
#
@property
def args(self):
"""Return combined/flattened args."""
return self.inputs_flattened
@property
def configs(self):
"""Return environments."""
return list(self.data.get('configs', []))
@property
def environments(self):
"""Return environments."""
return self.data.get('environments', ['build'])
@property
def exit_codes(self):
"""Return exit codes."""
return self.data.get('exit_codes', [])
@property
def exit_message(self):
"""Return exit message dict."""
return self.data.get('exit_message', {})
@property
def inputs(self):
"""Return inputs dict."""
return self.data.get('inputs', {})
@property
def inputs_defaults(self):
"""Return required inputs dict."""
return self.inputs.get('defaults', {})
@property
def inputs_flattened(self):
"""Return inputs dict."""
return self._flatten_inputs(self.inputs)
@property
def inputs_optional(self):
"""Return required inputs dict."""
return self.inputs.get('optional', {})
@property
def inputs_required(self):
"""Return required inputs dict."""
return self.inputs.get('required', {})
@property
def message_tc_filename(self):
"""Return the fqpn for message_tc file relative to profile."""
return os.path.join(
self._default_args.get('tc_out_path'), self.feature, self._test_case_name, 'message.tc'
)
@property
def options(self):
"""Return options dict."""
if self.data.get('options') is None:
self.data['options'] = {}
return self.data.get('options')
@property
def owner(self):
"""Return the owner value."""
return self.inputs_flattened.get('owner')
@property
def outputs(self):
"""Return outputs dict."""
return self.data.get('outputs')
@property
def rargs(self):
"""Return combined/flattened args with value from staging data if required."""
rargs = {}
for arg, value in self.args.items():
if re.match(self.utils.variable_match, value):
# look for value in staging data
if self.stage_kvstore.get(value) is not None:
value = self.stage_kvstore.get(value)
rargs[arg] = value
return rargs
@property
def stage(self):
"""Return stage dict."""
if self.data.get('stage') is None:
self.data['stage'] = {}
return self.data.get('stage', {})
@property
def stage_kvstore(self):
"""Return stage kv store dict."""
return self.stage.get('kvstore', {})
@property
def stage_threatconnect(self):
"""Return stage threatconnect dict."""
return self.stage.get('threatconnect', {})
@property
def tc_in_path(self):
"""Return fqpn tc_in_path arg relative to profile."""
if self.ij.runtime_level.lower() in [
'apiservice',
'triggerservice',
'webhooktriggerservice',
]:
tc_in_path = os.path.join(self._default_args.get('tc_in_path'), self.feature)
else:
tc_in_path = os.path.join(
self._default_args.get('tc_in_path'), self.feature, self._test_case_name
)
return tc_in_path
@property
def tc_log_path(self):
"""Return fqpn tc_log_path arg relative to profile."""
if self.ij.runtime_level.lower() in [
'apiservice',
'triggerservice',
'webhooktriggerservice',
]:
tc_log_path = os.path.join(self._default_args.get('tc_log_path'), self.feature)
else:
tc_log_path = os.path.join(
self._default_args.get('tc_log_path'), self.feature, self._test_case_name
)
return tc_log_path
@property
def tc_out_path(self):
"""Return fqpn tc_out_path arg relative to profile."""
if self.ij.runtime_level.lower() in [
'apiservice',
'triggerservice',
'webhooktriggerservice',
]:
tc_out_path = os.path.join(self._default_args.get('tc_out_path'), self.feature)
else:
tc_out_path = os.path.join(
self._default_args.get('tc_out_path'), self.feature, self._test_case_name
)
return tc_out_path
@property
def tc_playbook_out_variables(self):
"""Return calculated output variables.
* iterate over all inputs:
* if input key has exposePlaybookKeyAs defined
* if value a variable
* lookup value in stage.kvstore data
* for each key add to output variables
"""
output_variables = self.ij.tc_playbook_out_variables
if self.lj.has_layout:
# if layout based App get valid outputs
output_variables = self.ij.create_output_variables(
self.permutations.outputs_by_inputs(self.inputs_flattened)
)
for arg, value in self.inputs_flattened.items():
# get full input data from install.json
input_data = self.ij.params_dict.get(arg, {})
# check to see if it support dynamic output variables
if 'exposePlaybookKeyAs' not in input_data:
continue
# get the output variable type from install.json input data
variable_type = input_data.get('exposePlaybookKeyAs')
# staged data for this dynamic input must be a KeyValueArray
for data in self.stage_kvstore.get(value, []):
# create a variable using key value
variable = self.ij.create_variable(data.get('key'), variable_type, job_id=9876)
output_variables.append(variable)
# APP-77 - add _fired for service Apps
if self.ij.runtime_level.lower() in ['triggerservice', 'webhooktriggerservice']:
output_variables.append('#Trigger:9876:_fired!String')
return output_variables
@property
def tc_temp_path(self):
"""Return fqpn tc_temp_path arg relative to profile."""
if self.ij.runtime_level.lower() in [
'apiservice',
'triggerservice',
'webhooktriggerservice',
]:
tc_temp_path = os.path.join(self._default_args.get('tc_temp_path'), self.feature)
else:
tc_temp_path = os.path.join(
self._default_args.get('tc_temp_path'), self.feature, self._test_case_name
)
return tc_temp_path
@property
def validation_criteria(self):
"""Return the validation_criteria value."""
return self.data.get('validation_criteria', {})
@property
def webhook_event(self):
"""Return webhook event dict."""
return self.data.get('webhook_event', {})
@property
def webhook_marshall_event(self):
"""Return webhook event dict."""
return self.data.get('webhook_marshall_event', {})
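# Illustrative sketch (not part of the original module): a minimal, hedged
# demonstration of the two pure static helpers above. The sample values are
# placeholders and no tcex test environment is required to run this.
if __name__ == '__main__':
    sample_inputs = {
        'defaults': {'tc_log_level': 'debug'},
        'optional': {'confidence_rating': '50'},
        'required': {'owner': 'Example Org'},
    }
    # defaults/optional/required collapse into a single flat dict
    print(Profile._flatten_inputs(sample_inputs))
    # key-sorted OrderedDict copy of the same data
    print(Profile._sorted(sample_inputs))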
|
|
import requests
import datetime
import json
class XboxApi(object):
def __init__(self, api_key=None, language=None, base_url=""):
self.api_key = api_key
if not self.api_key:
import os
self.api_key = os.environ.get("XBOXAPIKEY", None)
if not self.api_key:
raise ValueError("You must provide an XboxAPI.com API key or set XBOXAPIKEY environment variable")
self.language = language
self.base_url = base_url
if not self.base_url:
self.base_url = "https://xboxapi.com/v2"
def get(self, url):
"""
Make a GET request to the Xbox API
:param url: Endpoint and parameters to append to self.base_url
:return: XboxApiResponse
"""
# Build headers
headers = {"X-AUTH": self.api_key}
if self.language:
headers["Accept-Language"] = self.language
# Make request and build response
response = requests.get("{}{}".format(self.base_url, url), headers=headers)
return XboxApiResponse(self, url, response)
def post(self, url, data):
"""
Make POST request to the Xbox API for sending messages
:param url: Endpoint and parameters to append to self.base_url
:param data: Message payload to send
:return: Success boolean
"""
headers = {
"X-AUTH": self.api_key,
"Content-Type": "application/json"
}
response = requests.post("{}{}".format(self.base_url, url), headers=headers, data=json.dumps(data))
return response.status_code == 200
def get_profile(self):
"""This is your profile information"""
return self.get("/profile")
def get_xuid(self):
"""This is your account XUID (Xbox Account User ID)"""
return self.get("/accountXuid")
def get_messages(self):
"""These are your message with full preview"""
return self.get("/messages")
def get_conversations(self):
"""These are your conversations with full preview of the last message sent/received"""
return self.get("/conversations")
def get_xuid_by_gamertag(self, gamertag):
"""This is the XUID for a specified Gamertag (Xbox Account User ID)"""
return self.get("/xuid/{}".format(gamertag))
def get_gamertag_by_xuid(self, xuid):
"""This is the Gamertag for a specified XUID (Xbox Account User ID)"""
return self.get("/gamertag/{}".format(xuid))
def get_user_profile(self, xuid):
"""This is the Profile for a specified XUID"""
return self.get("/{}/profile".format(xuid))
def get_user_gamercard(self, xuid):
"""This is the Gamercard information for a specified XUID"""
return self.get("/{}/gamercard".format(xuid))
def get_user_presence(self, xuid):
"""This is the current presence information for a specified XUID"""
return self.get("/{}/presence".format(xuid))
def get_user_activity(self, xuid):
"""This is the current activity information for a specified XUID"""
return self.get("/{}/activity".format(xuid))
def get_user_activity_recent(self, xuid):
"""This is the recent activity information for a specified XUID"""
return self.get("/{}/activity/recent".format(xuid))
def get_user_friends(self, xuid):
"""This is the friends information for a specified XUID"""
return self.get("/{}/friends".format(xuid))
def get_user_followers(self, xuid):
"""This is the followers information for a specified XUID"""
return self.get("/{}/followers".format(xuid))
def get_recent_players(self):
"""This is accounts recent players information"""
return self.get("/recent-players")
def get_user_gameclips(self, xuid):
"""This is the game clips for a specified XUID"""
return self.get("/{}/game-clips".format(xuid))
def get_user_saved_gameclips(self, xuid):
"""This is the saved game clips for a specified XUID"""
return self.get("/{}/game-clips/saved".format(xuid))
def get_user_saved_gameclips_by_title(self, xuid, title_id):
"""This is the saved game clips for a specified XUID, and Game (titleId)"""
return self.get("/{}/game-clips/{}".format(xuid, title_id))
def get_saved_gameclips(self, title_id):
"""This is the saved game clips for a specified Game (titleId)"""
return self.get("/game-clips/{}".format(title_id))
def get_user_screenshots(self, xuid):
"""This is the screenshots for a specified XUID"""
return self.get("/{}/screenshots".format(xuid))
def get_user_saved_screenshots(self, xuid, title_id):
"""This is the saved screenshots for a specified XUID, and Game (titleId)"""
return self.get("/{}/screenshots/{}".format(xuid, title_id))
def get_saved_screenshots(self, title_id):
"""This is the saved screenshots for a specified Game (titleId)"""
return self.get("/screenshots/{}".format(title_id))
def get_user_game_stats(self, xuid, title_id):
"""This is the game stats for a specified XUID, on a specified game. (i.e. Driver Level on Forza etc.)"""
return self.get("/{}/game-stats/{}".format(xuid, title_id))
def get_user_xbox360games(self, xuid):
"""This is the Xbox 360 Games List for a specified XUID"""
return self.get("/{}/xbox360games".format(xuid))
def get_user_xboxonegames(self, xuid):
"""This is the Xbox One Games List for a specified XUID"""
return self.get("/{}/xboxonegames".format(xuid))
def get_user_achievements(self, xuid, title_id):
"""This is the Xbox Games Achievements for a specified XUID"""
return self.get("/{}/achievements/{}".format(xuid, title_id))
def get_game_info_hex(self, game_id):
"""This is the Xbox Game Information (using the game id in hex format)"""
return self.get("/game-details-hex/{}".format(game_id))
def get_game_info(self, product_id):
"""This is the Xbox Game Information (using the product id)"""
return self.get("/game-details/{}".format(product_id))
def get_game_addons(self, product_id):
"""This is the Xbox Game Information (using the product id)"""
return self.get("/game-details/{}/addons".format(product_id))
def get_game_related(self, product_id):
"""This is the Xbox Game Information (using the product id)"""
return self.get("/game-details/{}/related".format(product_id))
def get_latest_xbox360games(self):
"""This gets the latest Xbox 360 Games from the Xbox LIVE marketplace"""
return self.get("/latest-xbox360-games")
def get_latest_xboxonegames(self):
"""This gets the latest Xbox One Games from the Xbox LIVE marketplace"""
return self.get("/latest-xboxone-games")
def get_latest_xboxoneapps(self):
"""This gets the latest Xbox One Apps from the Xbox LIVE marketplace"""
return self.get("/latest-xboxone-apps")
def get_xboxone_gold(self):
"""These are the free "Games with Gold", and "Deals with Gold" from the Xbox LIVE marketplace"""
return self.get("/xboxone-gold-lounge")
def get_xbox360games(self):
"""Browse the Xbox LIVE marketplace for Xbox 360 content."""
return self.get("/browse-marketplace/xbox360/1?sort=releaseDate")
def get_xboxonegames(self):
"""Browse the Xbox LIVE marketplace for Xbox One Game content."""
return self.get("/browse-marketplace/games/1?sort=releaseDate")
def get_xboxoneapps(self):
"""Browse the Xbox LIVE marketplace for Xbox One App content."""
return self.get("/browse-marketplace/apps/1?sort=releaseDate")
def get_user_activity_feed(self):
"""Show your activity feed."""
return self.get("/activity-feed")
def get_user_titlehub_achievements(self, xuid):
"""Show your achievements list by game with friends who also play. (New TitleHub endpoint)"""
return self.get("/{}/titlehub-achievement-list".format(xuid))
def send_message(self, message, xuid=None, xuids=None):
"""Send a message from your account to other users"""
payload = {
"message": message,
"to": []
}
if not xuid and not xuids:
raise ValueError("You must provide an xuid or list of xuids to send a message to.")
if not xuids:
xuids = []
if xuid:
xuids.append(xuid)
for xuid in xuids:
payload["to"].append(xuid)
return self.post("/messages", payload)
def send_activity_feed(self, message):
"""Send a post to your activity feed"""
payload = {
"message": message
}
return self.post("activity-feed", payload)
class XboxApiResponse(object):
def __init__(self, api, url, response):
self.api = api
self.url = url
self.response = response
        try:
            body = response.json()
        except ValueError:
            body = response.content
        self.data = body
self._set_metadata(response)
def _set_metadata(self, response):
self.next_url = None
self.status_code = response.status_code
self.continuation_token = response.headers.get("X-Continuation-Token")
self.rate_limit = int(response.headers.get("X-RateLimit-Limit"))
self.rate_remaining = int(response.headers.get("X-RateLimit-Remaining"))
self.rate_reset = int(response.headers.get("X-RateLimit-Reset"))
self.rate_reset_datetime = datetime.datetime.now() + datetime.timedelta(seconds=self.rate_reset)
if self.continuation_token:
self.next_url = "{}{}continuationToken={}".format(self.url,
"&" if "?" in self.url else "?",
self.continuation_token)
def __iter__(self):
return self
    def next(self):
        if self.continuation_token:
            return self.api.get(self.next_url)
        else:
            raise StopIteration()
    __next__ = next  # alias so iteration also works under Python 3
def fill_data(self, maximum=None):
"""
Make additional API calls and aggregate subsequent records up to `maximum` or
until no continuation token is returned.
:param maximum: Maximum number of records to return
        :return: None (records are aggregated in place on self.data)
"""
        while self.continuation_token and isinstance(self.data, list):
next_response = self.next()
# Merge new list with existing list
self.data.extend(next_response.data)
# Overwrite this responses metadata with latest
self._set_metadata(next_response.response)
if maximum and len(self.data) >= maximum:
self.data = self.data[:maximum]
break
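# Illustrative usage sketch (not part of the original module): shows how the
# pagination helper above might be used. The XUID below is a placeholder, and a
# valid XboxAPI.com key must be available via XBOXAPIKEY (or the constructor)
# for this to actually run.
if __name__ == "__main__":
    api = XboxApi()
    clips = api.get_user_gameclips("2533274884045330")  # placeholder XUID
    clips.fill_data(maximum=50)  # follow continuation tokens, cap at 50 records
    print("records: {} rate remaining: {}".format(len(clips.data), clips.rate_remaining))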
|
|
import hashlib
import json
import os
from random import randrange
import falcon
import mock
from mock import patch
from deuce import conf
from deuce.tests import ControllerTest
from deuce.util.misc import relative_uri
class TestVaults(ControllerTest):
def setUp(self):
super(TestVaults, self).setUp()
self._hdrs = {"x-project-id": self.create_project_id()}
def tearDown(self):
super(TestVaults, self).tearDown()
def test_vault_leaf(self):
hdrs = self._hdrs
vault_path = 'http://localhost/v1.0/vaults/'
# Create an empty root path in the storage.
self.helper_create_vault('vault_0', hdrs)
self.helper_delete_vault('vault_0', hdrs)
response = self.simulate_get('/v1.0/vaults/',
headers=hdrs)
self.assertEqual(str(response[0].decode()), str('{}'))
# Prepare several vaults in the storage.
for cnt in range(5):
self.helper_create_vault('vault_{0}'.format(cnt), hdrs)
# No limit nor marker
response = self.simulate_get('/v1.0/vaults/',
headers=hdrs)
self.assertEqual(json.loads(response[0].decode()),
{"vault_3": {"url": vault_path + "vault_3"},
"vault_4": {"url": vault_path + "vault_4"},
"vault_2": {"url": vault_path + "vault_2"},
"vault_1": {"url": vault_path + "vault_1"},
"vault_0": {"url": vault_path + "vault_0"}}
)
response = self.simulate_get('/v1.0/vaults/',
query_string='marker=vault_0',
headers=hdrs)
self.assertEqual(json.loads(response[0].decode()),
{"vault_4": {"url": vault_path + "vault_4"},
"vault_2": {"url": vault_path + "vault_2"},
"vault_3": {"url": vault_path + "vault_3"},
"vault_0": {"url": vault_path + "vault_0"},
"vault_1": {"url": vault_path + "vault_1"}}
)
# Only limit
response = self.simulate_get('/v1.0/vaults/',
query_string='limit=99',
headers=hdrs)
self.assertEqual(json.loads(response[0].decode()),
{"vault_4": {"url": vault_path + "vault_4"},
"vault_2": {"url": vault_path + "vault_2"},
"vault_3": {"url": vault_path + "vault_3"},
"vault_0": {"url": vault_path + "vault_0"},
"vault_1": {"url": vault_path + "vault_1"}}
)
response = self.simulate_get('/v1.0/vaults/',
query_string='limit=1',
headers=hdrs)
self.assertEqual(json.loads(response[0].decode()),
{"vault_0": {"url": vault_path + "vault_0"}}
)
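        # follow the X-Next-Batch pagination link returned with the limit=1 page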
next_url = self.srmock.headers_dict["X-Next-Batch"]
uri, querystring = relative_uri(next_url)
new_querystring = querystring.replace('limit=1', 'limit=99')
response = self.simulate_get(uri,
query_string=new_querystring,
headers=hdrs)
self.assertEqual(json.loads(response[0].decode()),
{"vault_4": {"url": vault_path + "vault_4"},
"vault_2": {"url": vault_path + "vault_2"},
"vault_3": {"url": vault_path + "vault_3"},
"vault_1": {"url": vault_path + "vault_1"}}
)
response = self.simulate_get(uri,
query_string=querystring,
headers=hdrs)
self.assertEqual(json.loads(response[0].decode()),
{"vault_1": {"url": vault_path + "vault_1"}}
)
response = self.simulate_get('/v1.0/vaults/',
query_string='marker=vault_not_exists'
'&limit=99',
headers=hdrs)
self.assertEqual(str(response[0].decode()), str('{}'))
# Cleanup
for cnt in range(5):
self.helper_delete_vault('vault_{0}'.format(cnt), hdrs)
def test_invalid_vault_id(self):
vault_name = '@#$@#$@$'
vault_path = '/v1.0/vaults/{0}'.format(vault_name)
# regex validation.
response = self.simulate_put(vault_path, headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_400)
response = self.simulate_head(vault_path, headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_404)
response = self.simulate_get('/v1.0/vaults',
query_string='marker=*',
headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_404)
def test_vault_health(self):
# Get health of non-existent vault
vault_health_path = '/v1.0/vaults/{0}/health'.format('vault_health')
response = self.simulate_get(vault_health_path, headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_404)
self.helper_create_vault('vault_health', self._hdrs)
# Get health of existing vault
response = self.simulate_get(vault_health_path, headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_200)
resp_body = json.loads(response[0].decode())
self.assertEqual(resp_body['Vault'], 'vault_health')
self.assertEqual(resp_body['Bad Blocks'], 0)
self.assertEqual(resp_body['Bad Files'], 0)
def test_vault_deletion(self):
# 1. Delete non-existent vault
vault_name = self.create_vault_id()
vault_path = '/v1.0/vaults/{0}'.format(vault_name)
response = self.simulate_delete(vault_path,
headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_404)
# 2. Create Vault and Delete it (Empty Vault)
vault_name = self.create_vault_id()
vault_path = '/v1.0/vaults/{0}'.format(vault_name)
response = self.simulate_put(vault_path,
headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_201)
response = self.simulate_delete(vault_path,
headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_204)
# 3. Create Vault, Add a Block, and Delete It (Non-Empty Vault)
vault_name = self.create_vault_id()
vault_path = '/v1.0/vaults/{0}'.format(vault_name)
response = self.simulate_put(vault_path,
headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_201)
# Build a dummy block
block_data = os.urandom(randrange(1, 2000))
block_hash = hashlib.sha1()
block_hash.update(block_data)
block_id = block_hash.hexdigest()
block_path = '{0:}/blocks/{1:}'.format(vault_path, block_id)
# Upload a dummy block
headers = {}
headers.update(self._hdrs)
headers['content-type'] = 'application/binary'
headers['content-length'] = str(len(block_data))
response = self.simulate_put(block_path, headers=headers,
body=block_data)
self.assertEqual(self.srmock.status, falcon.HTTP_201)
# Delete the vault
response = self.simulate_delete(vault_path,
headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_409)
# Delete the dummy block
response = self.simulate_delete(block_path, headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_204)
# Delete the vault
response = self.simulate_delete(vault_path,
headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_204)
def test_vault_crud(self):
vault_name = self.create_vault_id()
vault_path = '/v1.0/vaults/{0}'.format(vault_name)
# If we try to head the vault before it exists, it should
# return a 404
response = self.simulate_head(vault_path,
headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_404)
# If we try to get the statistics on the vault before it
# exists, it should return a 404
response = self.simulate_get(vault_path,
headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_404)
# Now we create the vault, which should return a 201 (created)
response = self.simulate_put(vault_path, headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_201)
# Now verify the vault exists
response = self.simulate_head(vault_path, headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_204)
# Now get the statistics, what do we get?
# Base statistics:
# metadata (file count = 0, file-block count = 0, blocks = 0)
# storage (size = 0,...)
# For now, just enforce we get a 200
response = self.simulate_get(vault_path, headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_200)
        # Now delete the vault (this should be OK since it
        # contains nothing in it).
response = self.simulate_delete(vault_path, headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_204)
# Now we should get a 404 when trying to head the vault
response = self.simulate_head(vault_path, headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_404)
# Try to delete again, this time it should be a 404
response = self.simulate_delete(vault_path, headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_404)
        # Recreate the vault and verify that a non-empty vault cannot be deleted.
response = self.simulate_put(vault_path, headers=self._hdrs)
# Add a real block to it.
block_data = os.urandom(2000) # Data size : 2000.
sha1 = hashlib.sha1()
sha1.update(block_data)
blockid = sha1.hexdigest()
block_path = '{0}/blocks/{1}'.format(vault_path, blockid)
block_headers = {
"Content-Type": "application/binary",
"Content-Length": "2000",
}
block_headers.update(self._hdrs)
response = self.simulate_put(block_path, headers=block_headers,
body=block_data)
# Delete should fail.
response = self.simulate_delete(vault_path, headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_409)
def test_vault_error(self):
from deuce.model import Vault
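        # Force Vault.create to report failure so the PUT surfaces a 500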
with patch.object(Vault, 'create', return_value=False):
self.simulate_put('/v1.0/vaults/error_vault', headers=self._hdrs)
self.assertEqual(self.srmock.status, falcon.HTTP_500)
|
|
# Copyright 2013 Rackspace Development Company, L.P.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
from mock import Mock, MagicMock, patch
from trove.common import remote
from trove.common.strategies.storage.swift import StreamReader
from trove.common.strategies.storage.swift \
import SwiftDownloadIntegrityError
from trove.common.strategies.storage.swift import SwiftStorage
from trove.tests.fakes.swift import FakeSwiftConnection
from trove.tests.unittests.backup.test_backupagent \
import MockBackup as MockBackupRunner
from trove.tests.unittests import trove_testtools
class SwiftStorageSaveChecksumTests(trove_testtools.TestCase):
"""SwiftStorage.save is used to save a backup to Swift."""
def setUp(self):
super(SwiftStorageSaveChecksumTests, self).setUp()
def tearDown(self):
super(SwiftStorageSaveChecksumTests, self).tearDown()
def test_swift_checksum_save(self):
"""This tests that SwiftStorage.save returns the swift checksum."""
context = trove_testtools.TroveTestContext(self)
backup_id = '123'
user = 'user'
password = 'password'
swift_client = FakeSwiftConnection()
with patch.object(remote, 'create_swift_client',
return_value=swift_client):
storage_strategy = SwiftStorage(context)
with MockBackupRunner(filename=backup_id,
user=user,
password=password) as runner:
(success,
note,
checksum,
location) = storage_strategy.save(runner.manifest, runner)
self.assertTrue(success, "The backup should have been successful.")
self.assertIsNotNone(note, "A note should have been returned.")
self.assertEqual('http://mockswift/v1/database_backups/123.gz.enc',
location,
"Incorrect swift location was returned.")
@patch('trove.common.strategies.storage.swift.LOG')
def test_swift_segment_checksum_etag_mismatch(self, mock_logging):
"""This tests that when etag doesn't match segment uploaded checksum
False is returned and None for checksum and location
"""
context = trove_testtools.TroveTestContext(self)
# this backup_id will trigger fake swift client with calculate_etag
# enabled to spit out a bad etag when a segment object is uploaded
backup_id = 'bad_segment_etag_123'
user = 'user'
password = 'password'
swift_client = FakeSwiftConnection()
with patch.object(remote, 'create_swift_client',
return_value=swift_client):
storage_strategy = SwiftStorage(context)
with MockBackupRunner(filename=backup_id,
user=user,
password=password) as runner:
(success,
note,
checksum,
location) = storage_strategy.save(runner.manifest, runner)
self.assertFalse(success, "The backup should have failed!")
self.assertTrue(note.startswith("Error saving data to Swift!"))
self.assertIsNone(checksum,
"Swift checksum should be None for failed backup.")
self.assertEqual('http://mockswift/v1/database_backups/'
'bad_segment_etag_123.gz.enc',
location,
"Incorrect swift location was returned.")
@patch('trove.common.strategies.storage.swift.LOG')
def test_swift_checksum_etag_mismatch(self, mock_logging):
"""This tests that when etag doesn't match swift checksum False is
returned and None for checksum and location
"""
context = trove_testtools.TroveTestContext(self)
        # this backup_id will trigger the fake swift client with calculate_etag
        # enabled to spit out a bad etag when the manifest object is uploaded
backup_id = 'bad_manifest_etag_123'
user = 'user'
password = 'password'
swift_client = FakeSwiftConnection()
with patch.object(remote, 'create_swift_client',
return_value=swift_client):
storage_strategy = SwiftStorage(context)
with MockBackupRunner(filename=backup_id,
user=user,
password=password) as runner:
(success,
note,
checksum,
location) = storage_strategy.save(runner.manifest, runner)
self.assertFalse(success, "The backup should have failed!")
self.assertTrue(note.startswith("Error saving data to Swift!"))
self.assertIsNone(checksum,
"Swift checksum should be None for failed backup.")
self.assertEqual('http://mockswift/v1/database_backups/'
'bad_manifest_etag_123.gz.enc',
location,
"Incorrect swift location was returned.")
class SwiftStorageUtils(trove_testtools.TestCase):
def setUp(self):
super(SwiftStorageUtils, self).setUp()
self.context = trove_testtools.TroveTestContext(self)
self.swift_client = FakeSwiftConnection()
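        # Route SwiftStorage's swift client creation to the fake connection
        # for every test in this class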
self.create_swift_client_patch = patch.object(
remote, 'create_swift_client',
MagicMock(return_value=self.swift_client))
self.create_swift_client_mock = self.create_swift_client_patch.start()
self.addCleanup(self.create_swift_client_patch.stop)
self.swift = SwiftStorage(self.context)
def tearDown(self):
super(SwiftStorageUtils, self).tearDown()
def test_explode_location(self):
location = 'http://mockswift.com/v1/545433/backups/mybackup.tar'
url, container, filename = self.swift._explodeLocation(location)
self.assertEqual('http://mockswift.com/v1/545433', url)
self.assertEqual('backups', container)
self.assertEqual('mybackup.tar', filename)
def test_validate_checksum_good(self):
match = self.swift._verify_checksum('"my-good-etag"', 'my-good-etag')
self.assertTrue(match)
@patch('trove.common.strategies.storage.swift.LOG')
def test_verify_checksum_bad(self, mock_logging):
self.assertRaises(SwiftDownloadIntegrityError,
self.swift._verify_checksum,
'"THE-GOOD-THE-BAD"',
'AND-THE-UGLY')
class SwiftStorageLoad(trove_testtools.TestCase):
"""SwiftStorage.load is used to return SwiftDownloadStream which is used
to download a backup object from Swift
"""
def setUp(self):
super(SwiftStorageLoad, self).setUp()
def tearDown(self):
super(SwiftStorageLoad, self).tearDown()
def test_run_verify_checksum(self):
"""This tests that swift download cmd runs if original backup checksum
matches swift object etag
"""
context = trove_testtools.TroveTestContext(self)
location = "/backup/location/123"
backup_checksum = "fake-md5-sum"
swift_client = FakeSwiftConnection()
with patch.object(remote, 'create_swift_client',
return_value=swift_client):
storage_strategy = SwiftStorage(context)
download_stream = storage_strategy.load(location, backup_checksum)
self.assertIsNotNone(download_stream)
@patch('trove.common.strategies.storage.swift.LOG')
def test_run_verify_checksum_mismatch(self, mock_logging):
"""This tests that SwiftDownloadIntegrityError is raised and swift
download cmd does not run when original backup checksum
does not match swift object etag
"""
context = trove_testtools.TroveTestContext(self)
location = "/backup/location/123"
backup_checksum = "checksum_different_then_fake_swift_etag"
swift_client = FakeSwiftConnection()
with patch.object(remote, 'create_swift_client',
return_value=swift_client):
storage_strategy = SwiftStorage(context)
self.assertRaises(SwiftDownloadIntegrityError,
storage_strategy.load,
location,
backup_checksum)
class MockBackupStream(MockBackupRunner):
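    # Backup runner whose read() returns an endless stream of 'X' bytes,
    # giving the StreamReader tests deterministic input data.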
def read(self, chunk_size):
return 'X' * chunk_size
class StreamReaderTests(trove_testtools.TestCase):
def setUp(self):
super(StreamReaderTests, self).setUp()
self.runner = MockBackupStream(filename='123.xbstream.enc.gz',
user='user',
password='password')
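        # max_file_size=100 keeps the Swift segment size tiny so the tests
        # below can cross segment boundaries with only a few bytes of input.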
self.stream = StreamReader(self.runner,
self.runner.manifest,
'database_backups',
max_file_size=100)
def test_base_filename(self):
self.assertEqual('123', self.stream.base_filename)
def test_base_filename_no_extension(self):
stream_reader = StreamReader(self.runner, 'foo', 'database_backups')
self.assertEqual('foo', stream_reader.base_filename)
def test_prefix(self):
self.assertEqual('database_backups/123_', self.stream.prefix)
def test_segment(self):
self.assertEqual('123_00000000', self.stream.segment)
def test_end_of_file(self):
self.assertFalse(self.stream.end_of_file)
def test_end_of_segment(self):
self.assertFalse(self.stream.end_of_segment)
def test_segment_almost_complete(self):
self.stream.segment_length = 98
results = self.stream.read(2)
self.assertEqual('XX', results)
self.assertEqual('123_00000000', self.stream.segment,
"The Segment should still be the same")
self.assertEqual(100, self.stream.segment_length)
checksum = hashlib.md5('XX')
checksum = checksum.hexdigest()
segment_checksum = self.stream.segment_checksum.hexdigest()
self.assertEqual(checksum, segment_checksum,
"Segment checksum did not match")
def test_segment_complete(self):
self.stream.segment_length = 99
results = self.stream.read(2)
self.assertEqual('', results, "Results should be empty.")
self.assertEqual('123_00000001', self.stream.segment)
def test_stream_complete(self):
results = self.stream.read(0)
self.assertEqual('', results, "Results should be empty.")
self.assertTrue(self.stream.end_of_file)
class SwiftMetadataTests(trove_testtools.TestCase):
def setUp(self):
super(SwiftMetadataTests, self).setUp()
self.swift_client = FakeSwiftConnection()
self.context = trove_testtools.TroveTestContext(self)
self.create_swift_client_patch = patch.object(
remote, 'create_swift_client',
MagicMock(return_value=self.swift_client))
self.create_swift_client_mock = self.create_swift_client_patch.start()
self.addCleanup(self.create_swift_client_patch.stop)
self.swift = SwiftStorage(self.context)
def tearDown(self):
super(SwiftMetadataTests, self).tearDown()
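    # _get_attr/_set_attr translate between Swift object headers such as
    # 'x-object-meta-foo' and plain metadata attribute names.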
def test__get_attr(self):
normal_header = self.swift._get_attr('content-type')
self.assertEqual('content_type', normal_header)
meta_header = self.swift._get_attr('x-object-meta-foo')
self.assertEqual('foo', meta_header)
meta_header_two = self.swift._get_attr('x-object-meta-foo-bar')
self.assertEqual('foo_bar', meta_header_two)
def test__set_attr(self):
meta_header = self.swift._set_attr('foo')
self.assertEqual('X-Object-Meta-foo', meta_header)
meta_header_two = self.swift._set_attr('foo_bar')
self.assertEqual('X-Object-Meta-foo-bar', meta_header_two)
def test_load_metadata(self):
location = 'http://mockswift.com/v1/545433/backups/mybackup.tar'
headers = {
'etag': '"fake-md5-sum"',
'x-object-meta-lsn': '1234567'
}
with patch.object(self.swift_client, 'head_object',
return_value=headers):
metadata = self.swift.load_metadata(location, 'fake-md5-sum')
self.assertEqual({'lsn': '1234567'}, metadata)
def test_save_metadata(self):
location = 'http://mockswift.com/v1/545433/backups/mybackup.tar'
metadata = {'lsn': '1234567'}
self.swift_client.post_object = Mock()
self.swift.save_metadata(location, metadata=metadata)
headers = {
'X-Object-Meta-lsn': '1234567',
'X-Object-Manifest': None
}
self.swift_client.post_object.assert_called_with(
'backups', 'mybackup.tar', headers=headers)
|
|
import imp
import logging
import os
import sys
from mock import Mock, patch
from gparray import GpDB, GpArray
from gppylib.operations.startSegments import StartSegmentsResult
from gppylib.test.unit.gp_unittest import GpTestCase, run_tests
class GpStart(GpTestCase):
def setUp(self):
# because gpstart does not have a .py extension,
# we have to use imp to import it
# if we had a gpstart.py, this is equivalent to:
# import gpstart
# self.subject = gpstart
gpstart_file = os.path.abspath(os.path.dirname(__file__) + "/../../../gpstart")
self.subject = imp.load_source('gpstart', gpstart_file)
self.subject.logger = Mock(
spec=['log', 'warn', 'info', 'debug', 'error', 'warning', 'fatal', 'warning_to_file_only'])
self.os_environ = dict(MASTER_DATA_DIRECTORY='/tmp/mdd', GPHOME='/tmp/gphome', GP_MGMT_PROCESS_COUNT=1,
LANGUAGE=None)
self.gparray = self._createGpArrayWith2Primary2Mirrors()
self.segments_by_content_id = GpArray.getSegmentsByContentId(self.gparray.getSegDbList())
start_result = StartSegmentsResult()
start_result.addSuccess(self.primary0)
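        # Patch out everything gpstart touches (filesystem, gp utilities,
        # catalog, user input) so run() can execute without a live cluster.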
self.apply_patches([
patch('os.getenv', side_effect=self._get_env),
patch('gpstart.os.path.exists'),
patch('gpstart.gp'),
patch('gpstart.pgconf'),
patch('gpstart.unix'),
patch('gpstart.dbconn.DbURL'),
patch('gpstart.dbconn.connect'),
patch('gpstart.GpArray.initFromCatalog', return_value=self.gparray),
patch('gpstart.GpArray.getSegmentsByContentId', return_value=self.segments_by_content_id),
patch('gpstart.GpArray.getSegmentsGroupedByValue',
side_effect=[{2: self.primary0, 3: self.primary1}, [], []]),
patch('gpstart.catalog.getCollationSettings', return_value=("x", "x", "x")),
patch('gpstart.GpDbidFile'),
patch('gpstart.GpEraFile'),
patch('gpstart.userinput'),
patch('gpstart.HeapChecksum'),
patch('gpstart.log_to_file_only'),
patch("gpstart.is_filespace_configured", return_value=True),
patch("gpstart.CheckFilespaceConsistency"),
patch("gpstart.StartSegmentsOperation"),
patch("gpstart.base.WorkerPool"),
patch("gpstart.gp.MasterStart.local"),
patch("gpstart.pg.DbStatus.local"),
patch("gpstart.TableLogger"),
patch('gpstart.PgControlData'),
])
self.mockFilespaceConsistency = self.get_mock_from_apply_patch("CheckFilespaceConsistency")
self.mockFilespaceConsistency.return_value.run.return_value = True
self.mock_start_result = self.get_mock_from_apply_patch('StartSegmentsOperation')
self.mock_start_result.return_value.startSegments.return_value.getSuccessfulSegments.return_value = start_result.getSuccessfulSegments()
self.mock_os_path_exists = self.get_mock_from_apply_patch('exists')
self.mock_gp = self.get_mock_from_apply_patch('gp')
self.mock_pgconf = self.get_mock_from_apply_patch('pgconf')
self.mock_userinput = self.get_mock_from_apply_patch('userinput')
self.mock_heap_checksum = self.get_mock_from_apply_patch('HeapChecksum')
self.mock_heap_checksum.return_value.get_segments_checksum_settings.return_value = ([1], [1])
self.mock_heap_checksum.return_value.are_segments_consistent.return_value = True
self.mock_heap_checksum.return_value.check_segment_consistency.return_value = ([], [], None)
self.mock_pgconf.readfile.return_value = Mock()
self.mock_gplog_log_to_file_only = self.get_mock_from_apply_patch("log_to_file_only")
self.mock_gp.get_masterdatadir.return_value = 'masterdatadir'
self.mock_gp.GpCatVersion.local.return_value = 1
self.mock_gp.GpCatVersionDirectory.local.return_value = 1
sys.argv = ["gpstart"] # reset to relatively empty args list
def tearDown(self):
super(GpStart, self).tearDown()
def test_option_master_success_without_auto_accept(self):
sys.argv = ["gpstart", "-m"]
self.mock_userinput.ask_yesno.return_value = True
self.subject.unix.PgPortIsActive.local.return_value = False
self.mock_os_path_exists.side_effect = os_exists_check
parser = self.subject.GpStart.createParser()
options, args = parser.parse_args()
gpstart = self.subject.GpStart.createProgram(options, args)
return_code = gpstart.run()
self.assertEqual(self.mock_userinput.ask_yesno.call_count, 1)
self.mock_userinput.ask_yesno.assert_called_once_with(None, '\nContinue with master-only startup', 'N')
self.subject.logger.info.assert_any_call('Starting Master instance in admin mode')
self.subject.logger.info.assert_any_call('Master Started...')
self.assertEqual(return_code, 0)
def test_option_master_success_with_auto_accept(self):
sys.argv = ["gpstart", "-m", "-a"]
self.mock_userinput.ask_yesno.return_value = True
self.subject.unix.PgPortIsActive.local.return_value = False
self.mock_os_path_exists.side_effect = os_exists_check
parser = self.subject.GpStart.createParser()
options, args = parser.parse_args()
gpstart = self.subject.GpStart.createProgram(options, args)
return_code = gpstart.run()
self.assertEqual(self.mock_userinput.ask_yesno.call_count, 0)
self.subject.logger.info.assert_any_call('Starting Master instance in admin mode')
self.subject.logger.info.assert_any_call('Master Started...')
self.assertEqual(return_code, 0)
def test_output_to_stdout_and_log_for_master_only_happens_before_heap_checksum(self):
sys.argv = ["gpstart", "-m"]
self.mock_userinput.ask_yesno.return_value = True
self.subject.unix.PgPortIsActive.local.return_value = False
self.mock_os_path_exists.side_effect = os_exists_check
parser = self.subject.GpStart.createParser()
options, args = parser.parse_args()
gpstart = self.subject.GpStart.createProgram(options, args)
return_code = gpstart.run()
self.assertEqual(return_code, 0)
self.assertEqual(self.mock_userinput.ask_yesno.call_count, 1)
self.mock_userinput.ask_yesno.assert_called_once_with(None, '\nContinue with master-only startup', 'N')
self.subject.logger.info.assert_any_call('Starting Master instance in admin mode')
self.subject.logger.info.assert_any_call('Master Started...')
        self.assertEqual(self.mock_gplog_log_to_file_only.call_count, 0)
def test_output_to_stdout_and_log_differs_for_heap_checksum(self):
sys.argv = ["gpstart", "-a"]
self.mock_heap_checksum.return_value.are_segments_consistent.return_value = False
self.subject.unix.PgPortIsActive.local.return_value = False
self.mock_os_path_exists.side_effect = os_exists_check
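        # Make the mocked checksum report primary1 (checksum 0) as
        # inconsistent with the master (checksum 1) so startup must abort.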
self.primary1.heap_checksum = 0
self.master.heap_checksum = '1'
self.mock_heap_checksum.return_value.check_segment_consistency.return_value = (
[self.primary0], [self.primary1], self.master.heap_checksum)
parser = self.subject.GpStart.createParser()
options, args = parser.parse_args()
gpstart = self.subject.GpStart.createProgram(options, args)
return_code = gpstart.run()
self.assertEqual(return_code, 1)
self.subject.logger.fatal.assert_any_call('Cluster heap checksum setting differences reported.')
self.mock_gplog_log_to_file_only.assert_any_call('Failed checksum consistency validation:', logging.WARN)
self.mock_gplog_log_to_file_only.assert_any_call('dbid: %s '
'checksum set to %s differs from '
'master checksum set to %s' %
(self.primary1.getSegmentDbId(), 0, 1), logging.WARN)
self.subject.logger.fatal.assert_any_call("Shutting down master")
        self.assertEqual(self.mock_gp.GpStop.call_count, 1)
def test_failed_to_contact_segments_causes_logging_and_failure(self):
sys.argv = ["gpstart", "-a"]
self.mock_heap_checksum.return_value.get_segments_checksum_settings.return_value = ([], [1])
self.subject.unix.PgPortIsActive.local.return_value = False
self.mock_os_path_exists.side_effect = os_exists_check
parser = self.subject.GpStart.createParser()
options, args = parser.parse_args()
gpstart = self.subject.GpStart.createProgram(options, args)
return_code = gpstart.run()
self.assertEqual(return_code, 1)
self.subject.logger.fatal.assert_any_call(
'No segments responded to ssh query for heap checksum. Not starting the array.')
def test_checksum_consistent(self):
sys.argv = ["gpstart", "-a"]
self.mock_heap_checksum.return_value.get_segments_checksum_settings.return_value = ([1], [1])
self.subject.unix.PgPortIsActive.local.return_value = False
self.mock_os_path_exists.side_effect = os_exists_check
parser = self.subject.GpStart.createParser()
options, args = parser.parse_args()
gpstart = self.subject.GpStart.createProgram(options, args)
return_code = gpstart.run()
self.assertEqual(return_code, 0)
self.subject.logger.info.assert_any_call('Heap checksum setting is consistent across the cluster')
def test_skip_checksum_validation_succeeds(self):
sys.argv = ["gpstart", "-a", "--skip-heap-checksum-validation"]
self.mock_heap_checksum.return_value.get_segments_checksum_settings.return_value = ([1], [1])
self.subject.unix.PgPortIsActive.local.return_value = False
self.mock_os_path_exists.side_effect = os_exists_check
parser = self.subject.GpStart.createParser()
options, args = parser.parse_args()
gpstart = self.subject.GpStart.createProgram(options, args)
return_code = gpstart.run()
self.assertEqual(return_code, 0)
messages = [msg[0][0] for msg in self.subject.logger.info.call_args_list]
self.assertNotIn('Heap checksum setting is consistent across the cluster', messages)
self.subject.logger.warning.assert_any_call('Because of --skip-heap-checksum-validation, '
'the GUC for data_checksums '
'will not be checked between master and segments')
def test_gpstart_fails_if_standby_heap_checksum_doesnt_match_master(self):
sys.argv = ["gpstart", "-a"]
self.gparray = GpArray([self.master, self.primary0, self.primary1, self.mirror0, self.mirror1, self.standby])
self.segments_by_content_id = GpArray.getSegmentsByContentId(self.gparray.getSegDbList())
self.mock_os_path_exists.side_effect = os_exists_check
self.subject.unix.PgPortIsActive.local.return_value = False
self.mock_heap_checksum.return_value.get_master_value.return_value = 1
self.mock_heap_checksum.return_value.get_standby_value.return_value = 0
parser = self.subject.GpStart.createParser()
options, args = parser.parse_args()
gpstart = self.subject.GpStart.createProgram(options, args)
with patch("gpstart.GpArray.initFromCatalog", return_value=self.gparray):
return_code = gpstart.run()
self.assertEqual(return_code, 1)
self.subject.logger.warning.assert_any_call("Heap checksum settings on standby master do not match master <<<<<<<<")
self.subject.logger.error.assert_any_call("gpstart error: Heap checksum settings are not consistent across the cluster.")
def _createGpArrayWith2Primary2Mirrors(self):
self.master = GpDB.initFromString(
"1|-1|p|p|s|u|mdw|mdw|5432|5532|/data/master||/data/master/base/10899,/data/master/base/1,/data/master/base/10898,/data/master/base/25780,/data/master/base/34782")
self.primary0 = GpDB.initFromString(
"2|0|p|p|s|u|sdw1|sdw1|40000|41000|/data/primary0||/data/primary0/base/10899,/data/primary0/base/1,/data/primary0/base/10898,/data/primary0/base/25780,/data/primary0/base/34782")
self.primary1 = GpDB.initFromString(
"3|1|p|p|s|u|sdw2|sdw2|40001|41001|/data/primary1||/data/primary1/base/10899,/data/primary1/base/1,/data/primary1/base/10898,/data/primary1/base/25780,/data/primary1/base/34782")
self.mirror0 = GpDB.initFromString(
"4|0|m|m|s|u|sdw2|sdw2|50000|51000|/data/mirror0||/data/mirror0/base/10899,/data/mirror0/base/1,/data/mirror0/base/10898,/data/mirror0/base/25780,/data/mirror0/base/34782")
self.mirror1 = GpDB.initFromString(
"5|1|m|m|s|u|sdw1|sdw1|50001|51001|/data/mirror1||/data/mirror1/base/10899,/data/mirror1/base/1,/data/mirror1/base/10898,/data/mirror1/base/25780,/data/mirror1/base/34782")
self.standby = GpDB.initFromString(
"6|-1|m|m|s|u|sdw3|sdw3|5433|5533|/data/standby||/data/standby/base/10899,/data/standby/base/1,/data/standby/base/10898,/data/standby/base/25780,/data/standby/base/34782")
return GpArray([self.master, self.primary0, self.primary1, self.mirror0, self.mirror1])
def _get_env(self, arg):
if arg not in self.os_environ:
return None
return self.os_environ[arg]
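# side_effect for the mocked os.path.exists: pg_log directories exist, but
# postmaster.pid and the PostgreSQL lock socket do not, i.e. an installed
# cluster that is not currently running.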
def os_exists_check(arg):
# Skip file related checks
if 'pg_log' in arg:
return True
elif 'postmaster.pid' in arg or '.s.PGSQL' in arg:
return False
return False
if __name__ == '__main__':
run_tests()
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .week_details import WeekDetails
from .day_details import DayDetails
from .hour_details import HourDetails
from .notification_settings import NotificationSettings
from .schedule import Schedule
from .applicable_schedule import ApplicableSchedule
from .week_details_fragment import WeekDetailsFragment
from .day_details_fragment import DayDetailsFragment
from .hour_details_fragment import HourDetailsFragment
from .notification_settings_fragment import NotificationSettingsFragment
from .schedule_fragment import ScheduleFragment
from .applicable_schedule_fragment import ApplicableScheduleFragment
from .artifact_parameter_properties import ArtifactParameterProperties
from .artifact_install_properties import ArtifactInstallProperties
from .apply_artifacts_request import ApplyArtifactsRequest
from .parameters_value_file_info import ParametersValueFileInfo
from .arm_template import ArmTemplate
from .arm_template_info import ArmTemplateInfo
from .arm_template_parameter_properties import ArmTemplateParameterProperties
from .artifact import Artifact
from .artifact_deployment_status_properties import ArtifactDeploymentStatusProperties
from .artifact_deployment_status_properties_fragment import ArtifactDeploymentStatusPropertiesFragment
from .artifact_parameter_properties_fragment import ArtifactParameterPropertiesFragment
from .artifact_install_properties_fragment import ArtifactInstallPropertiesFragment
from .artifact_source import ArtifactSource
from .artifact_source_fragment import ArtifactSourceFragment
from .attach_disk_properties import AttachDiskProperties
from .attach_new_data_disk_options import AttachNewDataDiskOptions
from .bulk_creation_parameters import BulkCreationParameters
from .compute_data_disk import ComputeDataDisk
from .compute_data_disk_fragment import ComputeDataDiskFragment
from .compute_vm_instance_view_status import ComputeVmInstanceViewStatus
from .compute_vm_instance_view_status_fragment import ComputeVmInstanceViewStatusFragment
from .compute_vm_properties import ComputeVmProperties
from .compute_vm_properties_fragment import ComputeVmPropertiesFragment
from .percentage_cost_threshold_properties import PercentageCostThresholdProperties
from .cost_threshold_properties import CostThresholdProperties
from .windows_os_info import WindowsOsInfo
from .linux_os_info import LinuxOsInfo
from .custom_image_properties_from_vm import CustomImagePropertiesFromVm
from .custom_image_properties_custom import CustomImagePropertiesCustom
from .custom_image import CustomImage
from .data_disk_properties import DataDiskProperties
from .detach_data_disk_properties import DetachDataDiskProperties
from .detach_disk_properties import DetachDiskProperties
from .disk import Disk
from .environment_deployment_properties import EnvironmentDeploymentProperties
from .dtl_environment import DtlEnvironment
from .evaluate_policies_properties import EvaluatePoliciesProperties
from .evaluate_policies_request import EvaluatePoliciesRequest
from .policy_violation import PolicyViolation
from .policy_set_result import PolicySetResult
from .evaluate_policies_response import EvaluatePoliciesResponse
from .event import Event
from .event_fragment import EventFragment
from .export_resource_usage_parameters import ExportResourceUsageParameters
from .external_subnet import ExternalSubnet
from .external_subnet_fragment import ExternalSubnetFragment
from .gallery_image_reference import GalleryImageReference
from .inbound_nat_rule import InboundNatRule
from .shared_public_ip_address_configuration import SharedPublicIpAddressConfiguration
from .network_interface_properties import NetworkInterfaceProperties
from .lab_virtual_machine_creation_parameter import LabVirtualMachineCreationParameter
from .formula_properties_from_vm import FormulaPropertiesFromVm
from .formula import Formula
from .gallery_image import GalleryImage
from .gallery_image_reference_fragment import GalleryImageReferenceFragment
from .parameter_info import ParameterInfo
from .generate_arm_template_request import GenerateArmTemplateRequest
from .generate_upload_uri_parameter import GenerateUploadUriParameter
from .generate_upload_uri_response import GenerateUploadUriResponse
from .identity_properties import IdentityProperties
from .inbound_nat_rule_fragment import InboundNatRuleFragment
from .lab import Lab
from .target_cost_properties import TargetCostProperties
from .lab_cost_summary_properties import LabCostSummaryProperties
from .lab_cost_details_properties import LabCostDetailsProperties
from .lab_resource_cost_properties import LabResourceCostProperties
from .lab_cost import LabCost
from .lab_fragment import LabFragment
from .lab_vhd import LabVhd
from .lab_virtual_machine import LabVirtualMachine
from .shared_public_ip_address_configuration_fragment import SharedPublicIpAddressConfigurationFragment
from .network_interface_properties_fragment import NetworkInterfacePropertiesFragment
from .lab_virtual_machine_fragment import LabVirtualMachineFragment
from .notification_channel import NotificationChannel
from .notification_channel_fragment import NotificationChannelFragment
from .notify_parameters import NotifyParameters
from .policy import Policy
from .policy_fragment import PolicyFragment
from .port import Port
from .port_fragment import PortFragment
from .resource import Resource
from .secret import Secret
from .service_runner import ServiceRunner
from .user_identity import UserIdentity
from .user_secret_store import UserSecretStore
from .user import User
from .subnet import Subnet
from .subnet_shared_public_ip_address_configuration import SubnetSharedPublicIpAddressConfiguration
from .subnet_override import SubnetOverride
from .virtual_network import VirtualNetwork
from .retarget_schedule_properties import RetargetScheduleProperties
from .shutdown_notification_content import ShutdownNotificationContent
from .subnet_fragment import SubnetFragment
from .subnet_shared_public_ip_address_configuration_fragment import SubnetSharedPublicIpAddressConfigurationFragment
from .subnet_override_fragment import SubnetOverrideFragment
from .user_identity_fragment import UserIdentityFragment
from .user_secret_store_fragment import UserSecretStoreFragment
from .user_fragment import UserFragment
from .virtual_network_fragment import VirtualNetworkFragment
from .lab_paged import LabPaged
from .lab_vhd_paged import LabVhdPaged
from .schedule_paged import SchedulePaged
from .artifact_source_paged import ArtifactSourcePaged
from .arm_template_paged import ArmTemplatePaged
from .artifact_paged import ArtifactPaged
from .custom_image_paged import CustomImagePaged
from .formula_paged import FormulaPaged
from .gallery_image_paged import GalleryImagePaged
from .notification_channel_paged import NotificationChannelPaged
from .policy_paged import PolicyPaged
from .service_runner_paged import ServiceRunnerPaged
from .user_paged import UserPaged
from .disk_paged import DiskPaged
from .dtl_environment_paged import DtlEnvironmentPaged
from .secret_paged import SecretPaged
from .lab_virtual_machine_paged import LabVirtualMachinePaged
from .virtual_network_paged import VirtualNetworkPaged
from .dev_test_labs_client_enums import (
EnableStatus,
NotificationStatus,
SourceControlType,
StorageType,
CostThresholdStatus,
WindowsOsState,
LinuxOsState,
CustomImageOsType,
HostCachingOptions,
NotificationChannelEventType,
TransportProtocol,
VirtualMachineCreationSource,
FileUploadOptions,
PremiumDataDisk,
TargetCostStatus,
ReportingCycleType,
CostType,
PolicyStatus,
PolicyFactName,
PolicyEvaluatorType,
UsagePermissionType,
)
__all__ = [
'WeekDetails',
'DayDetails',
'HourDetails',
'NotificationSettings',
'Schedule',
'ApplicableSchedule',
'WeekDetailsFragment',
'DayDetailsFragment',
'HourDetailsFragment',
'NotificationSettingsFragment',
'ScheduleFragment',
'ApplicableScheduleFragment',
'ArtifactParameterProperties',
'ArtifactInstallProperties',
'ApplyArtifactsRequest',
'ParametersValueFileInfo',
'ArmTemplate',
'ArmTemplateInfo',
'ArmTemplateParameterProperties',
'Artifact',
'ArtifactDeploymentStatusProperties',
'ArtifactDeploymentStatusPropertiesFragment',
'ArtifactParameterPropertiesFragment',
'ArtifactInstallPropertiesFragment',
'ArtifactSource',
'ArtifactSourceFragment',
'AttachDiskProperties',
'AttachNewDataDiskOptions',
'BulkCreationParameters',
'ComputeDataDisk',
'ComputeDataDiskFragment',
'ComputeVmInstanceViewStatus',
'ComputeVmInstanceViewStatusFragment',
'ComputeVmProperties',
'ComputeVmPropertiesFragment',
'PercentageCostThresholdProperties',
'CostThresholdProperties',
'WindowsOsInfo',
'LinuxOsInfo',
'CustomImagePropertiesFromVm',
'CustomImagePropertiesCustom',
'CustomImage',
'DataDiskProperties',
'DetachDataDiskProperties',
'DetachDiskProperties',
'Disk',
'EnvironmentDeploymentProperties',
'DtlEnvironment',
'EvaluatePoliciesProperties',
'EvaluatePoliciesRequest',
'PolicyViolation',
'PolicySetResult',
'EvaluatePoliciesResponse',
'Event',
'EventFragment',
'ExportResourceUsageParameters',
'ExternalSubnet',
'ExternalSubnetFragment',
'GalleryImageReference',
'InboundNatRule',
'SharedPublicIpAddressConfiguration',
'NetworkInterfaceProperties',
'LabVirtualMachineCreationParameter',
'FormulaPropertiesFromVm',
'Formula',
'GalleryImage',
'GalleryImageReferenceFragment',
'ParameterInfo',
'GenerateArmTemplateRequest',
'GenerateUploadUriParameter',
'GenerateUploadUriResponse',
'IdentityProperties',
'InboundNatRuleFragment',
'Lab',
'TargetCostProperties',
'LabCostSummaryProperties',
'LabCostDetailsProperties',
'LabResourceCostProperties',
'LabCost',
'LabFragment',
'LabVhd',
'LabVirtualMachine',
'SharedPublicIpAddressConfigurationFragment',
'NetworkInterfacePropertiesFragment',
'LabVirtualMachineFragment',
'NotificationChannel',
'NotificationChannelFragment',
'NotifyParameters',
'Policy',
'PolicyFragment',
'Port',
'PortFragment',
'Resource',
'Secret',
'ServiceRunner',
'UserIdentity',
'UserSecretStore',
'User',
'Subnet',
'SubnetSharedPublicIpAddressConfiguration',
'SubnetOverride',
'VirtualNetwork',
'RetargetScheduleProperties',
'ShutdownNotificationContent',
'SubnetFragment',
'SubnetSharedPublicIpAddressConfigurationFragment',
'SubnetOverrideFragment',
'UserIdentityFragment',
'UserSecretStoreFragment',
'UserFragment',
'VirtualNetworkFragment',
'LabPaged',
'LabVhdPaged',
'SchedulePaged',
'ArtifactSourcePaged',
'ArmTemplatePaged',
'ArtifactPaged',
'CustomImagePaged',
'FormulaPaged',
'GalleryImagePaged',
'NotificationChannelPaged',
'PolicyPaged',
'ServiceRunnerPaged',
'UserPaged',
'DiskPaged',
'DtlEnvironmentPaged',
'SecretPaged',
'LabVirtualMachinePaged',
'VirtualNetworkPaged',
'EnableStatus',
'NotificationStatus',
'SourceControlType',
'StorageType',
'CostThresholdStatus',
'WindowsOsState',
'LinuxOsState',
'CustomImageOsType',
'HostCachingOptions',
'NotificationChannelEventType',
'TransportProtocol',
'VirtualMachineCreationSource',
'FileUploadOptions',
'PremiumDataDisk',
'TargetCostStatus',
'ReportingCycleType',
'CostType',
'PolicyStatus',
'PolicyFactName',
'PolicyEvaluatorType',
'UsagePermissionType',
]
|
|
from __future__ import unicode_literals
import datetime
import os
import re
import sys
import types
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.http import (HttpResponse, HttpResponseServerError,
HttpResponseNotFound, HttpRequest, build_request_repr)
from django.template import Template, Context, TemplateDoesNotExist
from django.template.defaultfilters import force_escape, pprint
from django.utils.html import escape
from django.utils.importlib import import_module
from django.utils.encoding import force_bytes, smart_text
from django.utils import six
HIDDEN_SETTINGS = re.compile('API|TOKEN|KEY|SECRET|PASS|PROFANITIES_LIST|SIGNATURE')
CLEANSED_SUBSTITUTE = '********************'
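# Yield the offset of the first character of each line in template_source,
# followed by one final offset just past the end of the string.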
def linebreak_iter(template_source):
yield 0
p = template_source.find('\n')
while p >= 0:
yield p+1
p = template_source.find('\n', p+1)
yield len(template_source) + 1
def cleanse_setting(key, value):
"""Cleanse an individual setting key/value of sensitive content.
If the value is a dictionary, recursively cleanse the keys in
that dictionary.
"""
try:
if HIDDEN_SETTINGS.search(key):
cleansed = CLEANSED_SUBSTITUTE
else:
if isinstance(value, dict):
cleansed = dict((k, cleanse_setting(k, v)) for k,v in value.items())
else:
cleansed = value
except TypeError:
# If the key isn't regex-able, just return as-is.
cleansed = value
return cleansed
def get_safe_settings():
"Returns a dictionary of the settings module, with sensitive settings blurred out."
settings_dict = {}
for k in dir(settings):
if k.isupper():
settings_dict[k] = cleanse_setting(k, getattr(settings, k))
return settings_dict
def technical_500_response(request, exc_type, exc_value, tb):
"""
Create a technical server error response. The last three arguments are
the values returned from sys.exc_info() and friends.
"""
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
if request.is_ajax():
text = reporter.get_traceback_text()
return HttpResponseServerError(text, content_type='text/plain')
else:
html = reporter.get_traceback_html()
return HttpResponseServerError(html, content_type='text/html')
# Cache for the default exception reporter filter instance.
default_exception_reporter_filter = None
def get_exception_reporter_filter(request):
global default_exception_reporter_filter
if default_exception_reporter_filter is None:
# Load the default filter for the first time and cache it.
modpath = settings.DEFAULT_EXCEPTION_REPORTER_FILTER
modname, classname = modpath.rsplit('.', 1)
try:
mod = import_module(modname)
except ImportError as e:
raise ImproperlyConfigured(
'Error importing default exception reporter filter %s: "%s"' % (modpath, e))
try:
default_exception_reporter_filter = getattr(mod, classname)()
except AttributeError:
raise ImproperlyConfigured('Default exception reporter filter module "%s" does not define a "%s" class' % (modname, classname))
if request:
return getattr(request, 'exception_reporter_filter', default_exception_reporter_filter)
else:
return default_exception_reporter_filter
class ExceptionReporterFilter(object):
"""
Base for all exception reporter filter classes. All overridable hooks
contain lenient default behaviors.
"""
def get_request_repr(self, request):
if request is None:
return repr(None)
else:
return build_request_repr(request, POST_override=self.get_post_parameters(request))
def get_post_parameters(self, request):
if request is None:
return {}
else:
return request.POST
def get_traceback_frame_variables(self, request, tb_frame):
return list(six.iteritems(tb_frame.f_locals))
class SafeExceptionReporterFilter(ExceptionReporterFilter):
"""
Use annotations made by the sensitive_post_parameters and
sensitive_variables decorators to filter out sensitive information.
"""
def is_active(self, request):
"""
This filter is to add safety in production environments (i.e. DEBUG
is False). If DEBUG is True then your site is not safe anyway.
This hook is provided as a convenience to easily activate or
deactivate the filter on a per request basis.
"""
return settings.DEBUG is False
def get_post_parameters(self, request):
"""
Replaces the values of POST parameters marked as sensitive with
stars (*********).
"""
if request is None:
return {}
else:
sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', [])
if self.is_active(request) and sensitive_post_parameters:
cleansed = request.POST.copy()
if sensitive_post_parameters == '__ALL__':
# Cleanse all parameters.
for k, v in cleansed.items():
cleansed[k] = CLEANSED_SUBSTITUTE
return cleansed
else:
# Cleanse only the specified parameters.
for param in sensitive_post_parameters:
if param in cleansed:
cleansed[param] = CLEANSED_SUBSTITUTE
return cleansed
else:
return request.POST
def get_traceback_frame_variables(self, request, tb_frame):
"""
Replaces the values of variables marked as sensitive with
stars (*********).
"""
# Loop through the frame's callers to see if the sensitive_variables
# decorator was used.
current_frame = tb_frame.f_back
sensitive_variables = None
while current_frame is not None:
if (current_frame.f_code.co_name == 'sensitive_variables_wrapper'
and 'sensitive_variables_wrapper' in current_frame.f_locals):
# The sensitive_variables decorator was used, so we take note
# of the sensitive variables' names.
wrapper = current_frame.f_locals['sensitive_variables_wrapper']
sensitive_variables = getattr(wrapper, 'sensitive_variables', None)
break
current_frame = current_frame.f_back
cleansed = []
if self.is_active(request) and sensitive_variables:
if sensitive_variables == '__ALL__':
# Cleanse all variables
for name, value in tb_frame.f_locals.items():
cleansed.append((name, CLEANSED_SUBSTITUTE))
return cleansed
else:
# Cleanse specified variables
for name, value in tb_frame.f_locals.items():
if name in sensitive_variables:
value = CLEANSED_SUBSTITUTE
elif isinstance(value, HttpRequest):
# Cleanse the request's POST parameters.
value = self.get_request_repr(value)
cleansed.append((name, value))
return cleansed
else:
# Potentially cleanse only the request if it's one of the frame variables.
for name, value in tb_frame.f_locals.items():
if isinstance(value, HttpRequest):
# Cleanse the request's POST parameters.
value = self.get_request_repr(value)
cleansed.append((name, value))
return cleansed
class ExceptionReporter(object):
"""
A class to organize and coordinate reporting on exceptions.
"""
def __init__(self, request, exc_type, exc_value, tb, is_email=False):
self.request = request
self.filter = get_exception_reporter_filter(self.request)
self.exc_type = exc_type
self.exc_value = exc_value
self.tb = tb
self.is_email = is_email
self.template_info = None
self.template_does_not_exist = False
self.loader_debug_info = None
# Handle deprecated string exceptions
if isinstance(self.exc_type, six.string_types):
self.exc_value = Exception('Deprecated String Exception: %r' % self.exc_type)
self.exc_type = type(self.exc_value)
def get_traceback_data(self):
"Return a Context instance containing traceback information."
if self.exc_type and issubclass(self.exc_type, TemplateDoesNotExist):
from django.template.loader import template_source_loaders
self.template_does_not_exist = True
self.loader_debug_info = []
for loader in template_source_loaders:
try:
source_list_func = loader.get_template_sources
# NOTE: This assumes exc_value is the name of the template that
# the loader attempted to load.
template_list = [{'name': t, 'exists': os.path.exists(t)} \
for t in source_list_func(str(self.exc_value))]
except AttributeError:
template_list = []
loader_name = loader.__module__ + '.' + loader.__class__.__name__
self.loader_debug_info.append({
'loader': loader_name,
'templates': template_list,
})
if (settings.TEMPLATE_DEBUG and
hasattr(self.exc_value, 'django_template_source')):
self.get_template_exception_info()
frames = self.get_traceback_frames()
for i, frame in enumerate(frames):
if 'vars' in frame:
frame['vars'] = [(k, force_escape(pprint(v))) for k, v in frame['vars']]
frames[i] = frame
unicode_hint = ''
if self.exc_type and issubclass(self.exc_type, UnicodeError):
start = getattr(self.exc_value, 'start', None)
end = getattr(self.exc_value, 'end', None)
if start is not None and end is not None:
unicode_str = self.exc_value.args[1]
unicode_hint = smart_text(unicode_str[max(start-5, 0):min(end+5, len(unicode_str))], 'ascii', errors='replace')
from django import get_version
c = {
'is_email': self.is_email,
'unicode_hint': unicode_hint,
'frames': frames,
'request': self.request,
'filtered_POST': self.filter.get_post_parameters(self.request),
'settings': get_safe_settings(),
'sys_executable': sys.executable,
'sys_version_info': '%d.%d.%d' % sys.version_info[0:3],
'server_time': datetime.datetime.now(),
'django_version_info': get_version(),
'sys_path' : sys.path,
'template_info': self.template_info,
'template_does_not_exist': self.template_does_not_exist,
'loader_debug_info': self.loader_debug_info,
}
# Check whether exception info is available
if self.exc_type:
c['exception_type'] = self.exc_type.__name__
if self.exc_value:
c['exception_value'] = smart_text(self.exc_value, errors='replace')
if frames:
c['lastframe'] = frames[-1]
return c
def get_traceback_html(self):
"Return HTML version of debug 500 HTTP error page."
t = Template(TECHNICAL_500_TEMPLATE, name='Technical 500 template')
c = Context(self.get_traceback_data())
return t.render(c)
def get_traceback_text(self):
"Return plain text version of debug 500 HTTP error page."
t = Template(TECHNICAL_500_TEXT_TEMPLATE, name='Technical 500 template')
c = Context(self.get_traceback_data(), autoescape=False)
return t.render(c)
def get_template_exception_info(self):
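        # django_template_source carries the template origin and the
        # (start, end) character offsets of the failing node; rebuild the
        # surrounding source lines for display on the debug page.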
origin, (start, end) = self.exc_value.django_template_source
template_source = origin.reload()
context_lines = 10
line = 0
upto = 0
source_lines = []
before = during = after = ""
for num, next in enumerate(linebreak_iter(template_source)):
if start >= upto and end <= next:
line = num
before = escape(template_source[upto:start])
during = escape(template_source[start:end])
after = escape(template_source[end:next])
source_lines.append( (num, escape(template_source[upto:next])) )
upto = next
total = len(source_lines)
top = max(1, line - context_lines)
bottom = min(total, line + 1 + context_lines)
# In some rare cases, exc_value.args might be empty.
try:
message = self.exc_value.args[0]
except IndexError:
message = '(Could not get exception message)'
self.template_info = {
'message': message,
'source_lines': source_lines[top:bottom],
'before': before,
'during': during,
'after': after,
'top': top,
'bottom': bottom,
'total': total,
'line': line,
'name': origin.name,
}
def _get_lines_from_file(self, filename, lineno, context_lines, loader=None, module_name=None):
"""
Returns context_lines before and after lineno from file.
Returns (pre_context_lineno, pre_context, context_line, post_context).
"""
source = None
if loader is not None and hasattr(loader, "get_source"):
source = loader.get_source(module_name)
if source is not None:
source = source.splitlines()
if source is None:
try:
with open(filename, 'rb') as fp:
source = fp.readlines()
except (OSError, IOError):
pass
if source is None:
return None, [], None, []
encoding = 'ascii'
for line in source[:2]:
# File coding may be specified. Match pattern from PEP-263
# (http://www.python.org/dev/peps/pep-0263/)
match = re.search(br'coding[:=]\s*([-\w.]+)', line)
if match:
encoding = match.group(1)
break
source = [six.text_type(sline, encoding, 'replace') for sline in source]
lower_bound = max(0, lineno - context_lines)
upper_bound = lineno + context_lines
pre_context = [line.strip('\n') for line in source[lower_bound:lineno]]
context_line = source[lineno].strip('\n')
post_context = [line.strip('\n') for line in source[lineno+1:upper_bound]]
return lower_bound, pre_context, context_line, post_context
def get_traceback_frames(self):
frames = []
tb = self.tb
while tb is not None:
# Support for __traceback_hide__ which is used by a few libraries
# to hide internal frames.
if tb.tb_frame.f_locals.get('__traceback_hide__'):
tb = tb.tb_next
continue
filename = tb.tb_frame.f_code.co_filename
function = tb.tb_frame.f_code.co_name
lineno = tb.tb_lineno - 1
loader = tb.tb_frame.f_globals.get('__loader__')
module_name = tb.tb_frame.f_globals.get('__name__') or ''
pre_context_lineno, pre_context, context_line, post_context = self._get_lines_from_file(filename, lineno, 7, loader, module_name)
if pre_context_lineno is not None:
frames.append({
'tb': tb,
'type': module_name.startswith('django.') and 'django' or 'user',
'filename': filename,
'function': function,
'lineno': lineno + 1,
'vars': self.filter.get_traceback_frame_variables(self.request, tb.tb_frame),
'id': id(tb),
'pre_context': pre_context,
'context_line': context_line,
'post_context': post_context,
'pre_context_lineno': pre_context_lineno + 1,
})
tb = tb.tb_next
return frames
def format_exception(self):
"""
Return the same data as from traceback.format_exception.
"""
import traceback
frames = self.get_traceback_frames()
tb = [ (f['filename'], f['lineno'], f['function'], f['context_line']) for f in frames ]
        lines = ['Traceback (most recent call last):\n']
        lines += traceback.format_list(tb)
        lines += traceback.format_exception_only(self.exc_type, self.exc_value)
        return lines
def technical_404_response(request, exception):
"Create a technical 404 error response. The exception should be the Http404."
try:
tried = exception.args[0]['tried']
except (IndexError, TypeError, KeyError):
tried = []
else:
if not tried:
# tried exists but is an empty list. The URLconf must've been empty.
return empty_urlconf(request)
urlconf = getattr(request, 'urlconf', settings.ROOT_URLCONF)
if isinstance(urlconf, types.ModuleType):
urlconf = urlconf.__name__
t = Template(TECHNICAL_404_TEMPLATE, name='Technical 404 template')
c = Context({
'urlconf': urlconf,
'root_urlconf': settings.ROOT_URLCONF,
'request_path': request.path_info[1:], # Trim leading slash
'urlpatterns': tried,
'reason': force_bytes(exception, errors='replace'),
'request': request,
'settings': get_safe_settings(),
})
return HttpResponseNotFound(t.render(c), content_type='text/html')
def empty_urlconf(request):
"Create an empty URLconf 404 error response."
t = Template(EMPTY_URLCONF_TEMPLATE, name='Empty URLConf template')
c = Context({
'project_name': settings.SETTINGS_MODULE.split('.')[0]
})
return HttpResponse(t.render(c), content_type='text/html')
#
# Templates are embedded in the file so that we know the error handler will
# always work even if the template loader is broken.
#
TECHNICAL_500_TEMPLATE = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="robots" content="NONE,NOARCHIVE">
<title>{% if exception_type %}{{ exception_type }}{% else %}Report{% endif %}{% if request %} at {{ request.path_info|escape }}{% endif %}</title>
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; }
h2 { margin-bottom:.8em; }
h2 span { font-size:80%; color:#666; font-weight:normal; }
h3 { margin:1em 0 .5em 0; }
h4 { margin:0 0 .5em 0; font-weight: normal; }
code, pre { font-size: 100%; white-space: pre-wrap; }
table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; }
tbody td, tbody th { vertical-align:top; padding:2px 3px; }
thead th { padding:1px 6px 1px 3px; background:#fefefe; text-align:left; font-weight:normal; font-size:11px; border:1px solid #ddd; }
tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; }
table.vars { margin:5px 0 2px 40px; }
table.vars td, table.req td { font-family:monospace; }
table td.code { width:100%; }
table td.code pre { overflow:hidden; }
table.source th { color:#666; }
table.source td { font-family:monospace; white-space:pre; border-bottom:1px solid #eee; }
ul.traceback { list-style-type:none; color: #222; }
ul.traceback li.frame { padding-bottom:1em; color:#666; }
ul.traceback li.user { background-color:#e0e0e0; color:#000 }
div.context { padding:10px 0; overflow:hidden; }
div.context ol { padding-left:30px; margin:0 10px; list-style-position: inside; }
div.context ol li { font-family:monospace; white-space:pre; color:#777; cursor:pointer; }
div.context ol li pre { display:inline; }
div.context ol.context-line li { color:#505050; background-color:#dfdfdf; }
div.context ol.context-line li span { position:absolute; right:32px; }
.user div.context ol.context-line li { background-color:#bbb; color:#000; }
.user div.context ol li { color:#666; }
div.commands { margin-left: 40px; }
div.commands a { color:#555; text-decoration:none; }
.user div.commands a { color: black; }
#summary { background: #ffc; }
#summary h2 { font-weight: normal; color: #666; }
#explanation { background:#eee; }
#template, #template-not-exist { background:#f6f6f6; }
#template-not-exist ul { margin: 0 0 0 20px; }
#unicode-hint { background:#eee; }
#traceback { background:#eee; }
#requestinfo { background:#f6f6f6; padding-left:120px; }
#summary table { border:none; background:transparent; }
#requestinfo h2, #requestinfo h3 { position:relative; margin-left:-100px; }
#requestinfo h3 { margin-bottom:-1em; }
.error { background: #ffc; }
.specific { color:#cc3300; font-weight:bold; }
h2 span.commands { font-size:.7em;}
span.commands a:link {color:#5E5694;}
pre.exception_value { font-family: sans-serif; color: #666; font-size: 1.5em; margin: 10px 0 10px 0; }
</style>
{% if not is_email %}
<script type="text/javascript">
//<!--
function getElementsByClassName(oElm, strTagName, strClassName){
// Written by Jonathan Snook, http://www.snook.ca/jon; Add-ons by Robert Nyman, http://www.robertnyman.com
var arrElements = (strTagName == "*" && document.all)? document.all :
oElm.getElementsByTagName(strTagName);
var arrReturnElements = new Array();
strClassName = strClassName.replace(/\-/g, "\\-");
var oRegExp = new RegExp("(^|\\s)" + strClassName + "(\\s|$)");
var oElement;
for(var i=0; i<arrElements.length; i++){
oElement = arrElements[i];
if(oRegExp.test(oElement.className)){
arrReturnElements.push(oElement);
}
}
return (arrReturnElements)
}
function hideAll(elems) {
for (var e = 0; e < elems.length; e++) {
elems[e].style.display = 'none';
}
}
window.onload = function() {
hideAll(getElementsByClassName(document, 'table', 'vars'));
hideAll(getElementsByClassName(document, 'ol', 'pre-context'));
hideAll(getElementsByClassName(document, 'ol', 'post-context'));
hideAll(getElementsByClassName(document, 'div', 'pastebin'));
}
function toggle() {
for (var i = 0; i < arguments.length; i++) {
var e = document.getElementById(arguments[i]);
if (e) {
e.style.display = e.style.display == 'none' ? 'block' : 'none';
}
}
return false;
}
function varToggle(link, id) {
toggle('v' + id);
var s = link.getElementsByTagName('span')[0];
var uarr = String.fromCharCode(0x25b6);
var darr = String.fromCharCode(0x25bc);
s.innerHTML = s.innerHTML == uarr ? darr : uarr;
return false;
}
function switchPastebinFriendly(link) {
s1 = "Switch to copy-and-paste view";
s2 = "Switch back to interactive view";
link.innerHTML = link.innerHTML == s1 ? s2 : s1;
toggle('browserTraceback', 'pastebinTraceback');
return false;
}
//-->
</script>
{% endif %}
</head>
<body>
<div id="summary">
<h1>{% if exception_type %}{{ exception_type }}{% else %}Report{% endif %}{% if request %} at {{ request.path_info|escape }}{% endif %}</h1>
<pre class="exception_value">{% if exception_value %}{{ exception_value|force_escape }}{% else %}No exception supplied{% endif %}</pre>
<table class="meta">
{% if request %}
<tr>
<th>Request Method:</th>
<td>{{ request.META.REQUEST_METHOD }}</td>
</tr>
<tr>
<th>Request URL:</th>
<td>{{ request.build_absolute_uri|escape }}</td>
</tr>
{% endif %}
<tr>
<th>Django Version:</th>
<td>{{ django_version_info }}</td>
</tr>
{% if exception_type %}
<tr>
<th>Exception Type:</th>
<td>{{ exception_type }}</td>
</tr>
{% endif %}
{% if exception_type and exception_value %}
<tr>
<th>Exception Value:</th>
<td><pre>{{ exception_value|force_escape }}</pre></td>
</tr>
{% endif %}
{% if lastframe %}
<tr>
<th>Exception Location:</th>
<td>{{ lastframe.filename|escape }} in {{ lastframe.function|escape }}, line {{ lastframe.lineno }}</td>
</tr>
{% endif %}
<tr>
<th>Python Executable:</th>
<td>{{ sys_executable|escape }}</td>
</tr>
<tr>
<th>Python Version:</th>
<td>{{ sys_version_info }}</td>
</tr>
<tr>
<th>Python Path:</th>
<td><pre>{{ sys_path|pprint }}</pre></td>
</tr>
<tr>
<th>Server time:</th>
<td>{{server_time|date:"r"}}</td>
</tr>
</table>
</div>
{% if unicode_hint %}
<div id="unicode-hint">
<h2>Unicode error hint</h2>
<p>The string that could not be encoded/decoded was: <strong>{{ unicode_hint|force_escape }}</strong></p>
</div>
{% endif %}
{% if template_does_not_exist %}
<div id="template-not-exist">
<h2>Template-loader postmortem</h2>
{% if loader_debug_info %}
<p>Django tried loading these templates, in this order:</p>
<ul>
{% for loader in loader_debug_info %}
<li>Using loader <code>{{ loader.loader }}</code>:
<ul>{% for t in loader.templates %}<li><code>{{ t.name }}</code> (File {% if t.exists %}exists{% else %}does not exist{% endif %})</li>{% endfor %}</ul>
</li>
{% endfor %}
</ul>
{% else %}
<p>Django couldn't find any templates because your <code>TEMPLATE_LOADERS</code> setting is empty!</p>
{% endif %}
</div>
{% endif %}
{% if template_info %}
<div id="template">
<h2>Error during template rendering</h2>
<p>In template <code>{{ template_info.name }}</code>, error at line <strong>{{ template_info.line }}</strong></p>
<h3>{{ template_info.message }}</h3>
<table class="source{% if template_info.top %} cut-top{% endif %}{% ifnotequal template_info.bottom template_info.total %} cut-bottom{% endifnotequal %}">
{% for source_line in template_info.source_lines %}
{% ifequal source_line.0 template_info.line %}
<tr class="error"><th>{{ source_line.0 }}</th>
<td>{{ template_info.before }}<span class="specific">{{ template_info.during }}</span>{{ template_info.after }}</td></tr>
{% else %}
<tr><th>{{ source_line.0 }}</th>
<td>{{ source_line.1 }}</td></tr>
{% endifequal %}
{% endfor %}
</table>
</div>
{% endif %}
{% if frames %}
<div id="traceback">
<h2>Traceback <span class="commands">{% if not is_email %}<a href="#" onclick="return switchPastebinFriendly(this);">Switch to copy-and-paste view</a></span>{% endif %}</h2>
{% autoescape off %}
<div id="browserTraceback">
<ul class="traceback">
{% for frame in frames %}
<li class="frame {{ frame.type }}">
<code>{{ frame.filename|escape }}</code> in <code>{{ frame.function|escape }}</code>
{% if frame.context_line %}
<div class="context" id="c{{ frame.id }}">
{% if frame.pre_context and not is_email %}
<ol start="{{ frame.pre_context_lineno }}" class="pre-context" id="pre{{ frame.id }}">{% for line in frame.pre_context %}<li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>{{ line|escape }}</pre></li>{% endfor %}</ol>
{% endif %}
<ol start="{{ frame.lineno }}" class="context-line"><li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>{{ frame.context_line|escape }}</pre>{% if not is_email %} <span>...</span>{% endif %}</li></ol>
{% if frame.post_context and not is_email %}
<ol start='{{ frame.lineno|add:"1" }}' class="post-context" id="post{{ frame.id }}">{% for line in frame.post_context %}<li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>{{ line|escape }}</pre></li>{% endfor %}</ol>
{% endif %}
</div>
{% endif %}
{% if frame.vars %}
<div class="commands">
{% if is_email %}
<h2>Local Vars</h2>
{% else %}
<a href="#" onclick="return varToggle(this, '{{ frame.id }}')"><span>▶</span> Local vars</a>
{% endif %}
</div>
<table class="vars" id="v{{ frame.id }}">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in frame.vars|dictsort:"0" %}
<tr>
<td>{{ var.0|force_escape }}</td>
<td class="code"><pre>{{ var.1 }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% endif %}
</li>
{% endfor %}
</ul>
</div>
{% endautoescape %}
<form action="http://dpaste.com/" name="pasteform" id="pasteform" method="post">
{% if not is_email %}
<div id="pastebinTraceback" class="pastebin">
<input type="hidden" name="language" value="PythonConsole">
<input type="hidden" name="title" value="{{ exception_type|escape }}{% if request %} at {{ request.path_info|escape }}{% endif %}">
<input type="hidden" name="source" value="Django Dpaste Agent">
<input type="hidden" name="poster" value="Django">
<textarea name="content" id="traceback_area" cols="140" rows="25">
Environment:
{% if request %}
Request Method: {{ request.META.REQUEST_METHOD }}
Request URL: {{ request.build_absolute_uri|escape }}
{% endif %}
Django Version: {{ django_version_info }}
Python Version: {{ sys_version_info }}
Installed Applications:
{{ settings.INSTALLED_APPS|pprint }}
Installed Middleware:
{{ settings.MIDDLEWARE_CLASSES|pprint }}
{% if template_does_not_exist %}Template Loader Error:
{% if loader_debug_info %}Django tried loading these templates, in this order:
{% for loader in loader_debug_info %}Using loader {{ loader.loader }}:
{% for t in loader.templates %}{{ t.name }} (File {% if t.exists %}exists{% else %}does not exist{% endif %})
{% endfor %}{% endfor %}
{% else %}Django couldn't find any templates because your TEMPLATE_LOADERS setting is empty!
{% endif %}
{% endif %}{% if template_info %}
Template error:
In template {{ template_info.name }}, error at line {{ template_info.line }}
{{ template_info.message }}{% for source_line in template_info.source_lines %}{% ifequal source_line.0 template_info.line %}
{{ source_line.0 }} : {{ template_info.before }} {{ template_info.during }} {{ template_info.after }}
{% else %}
{{ source_line.0 }} : {{ source_line.1 }}
{% endifequal %}{% endfor %}{% endif %}
Traceback:
{% for frame in frames %}File "{{ frame.filename|escape }}" in {{ frame.function|escape }}
{% if frame.context_line %} {{ frame.lineno }}. {{ frame.context_line|escape }}{% endif %}
{% endfor %}
Exception Type: {{ exception_type|escape }}{% if request %} at {{ request.path_info|escape }}{% endif %}
Exception Value: {{ exception_value|force_escape }}
</textarea>
<br><br>
<input type="submit" value="Share this traceback on a public Web site">
</div>
</form>
</div>
{% endif %}
{% endif %}
<div id="requestinfo">
<h2>Request information</h2>
{% if request %}
<h3 id="get-info">GET</h3>
{% if request.GET %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.GET.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No GET data</p>
{% endif %}
<h3 id="post-info">POST</h3>
{% if filtered_POST %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in filtered_POST.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No POST data</p>
{% endif %}
<h3 id="files-info">FILES</h3>
{% if request.FILES %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.FILES.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No FILES data</p>
{% endif %}
<h3 id="cookie-info">COOKIES</h3>
{% if request.COOKIES %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.COOKIES.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No cookie data</p>
{% endif %}
<h3 id="meta-info">META</h3>
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.META.items|dictsort:"0" %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>Request data not supplied</p>
{% endif %}
<h3 id="settings-info">Settings</h3>
<h4>Using settings module <code>{{ settings.SETTINGS_MODULE }}</code></h4>
<table class="req">
<thead>
<tr>
<th>Setting</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in settings.items|dictsort:"0" %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
{% if not is_email %}
<div id="explanation">
<p>
You're seeing this error because you have <code>DEBUG = True</code> in your
Django settings file. Change that to <code>False</code>, and Django will
display a standard 500 page.
</p>
</div>
{% endif %}
</body>
</html>
"""
TECHNICAL_500_TEXT_TEMPLATE = """{% firstof exception_type 'Report' %}{% if request %} at {{ request.path_info }}{% endif %}
{% firstof exception_value 'No exception supplied' %}
{% if request %}
Request Method: {{ request.META.REQUEST_METHOD }}
Request URL: {{ request.build_absolute_uri }}{% endif %}
Django Version: {{ django_version_info }}
Python Executable: {{ sys_executable }}
Python Version: {{ sys_version_info }}
Python Path: {{ sys_path }}
Server time: {{server_time|date:"r"}}
Installed Applications:
{{ settings.INSTALLED_APPS|pprint }}
Installed Middleware:
{{ settings.MIDDLEWARE_CLASSES|pprint }}
{% if template_does_not_exist %}Template loader Error:
{% if loader_debug_info %}Django tried loading these templates, in this order:
{% for loader in loader_debug_info %}Using loader {{ loader.loader }}:
{% for t in loader.templates %}{{ t.name }} (File {% if t.exists %}exists{% else %}does not exist{% endif %})
{% endfor %}{% endfor %}
{% else %}Django couldn't find any templates because your TEMPLATE_LOADERS setting is empty!
{% endif %}
{% endif %}{% if template_info %}
Template error:
In template {{ template_info.name }}, error at line {{ template_info.line }}
{{ template_info.message }}{% for source_line in template_info.source_lines %}{% ifequal source_line.0 template_info.line %}
{{ source_line.0 }} : {{ template_info.before }} {{ template_info.during }} {{ template_info.after }}
{% else %}
{{ source_line.0 }} : {{ source_line.1 }}
{% endifequal %}{% endfor %}{% endif %}{% if frames %}
Traceback:
{% for frame in frames %}File "{{ frame.filename }}" in {{ frame.function }}
{% if frame.context_line %} {{ frame.lineno }}. {{ frame.context_line }}{% endif %}
{% endfor %}
{% if exception_type %}Exception Type: {{ exception_type }}{% if request %} at {{ request.path_info }}{% endif %}
{% if exception_value %}Exception Value: {{ exception_value }}{% endif %}{% endif %}{% endif %}
{% if request %}Request information:
GET:{% for k, v in request.GET.items %}
{{ k }} = {{ v|stringformat:"r" }}{% empty %} No GET data{% endfor %}
POST:{% for k, v in filtered_POST.items %}
{{ k }} = {{ v|stringformat:"r" }}{% empty %} No POST data{% endfor %}
FILES:{% for k, v in request.FILES.items %}
{{ k }} = {{ v|stringformat:"r" }}{% empty %} No FILES data{% endfor %}
COOKIES:{% for k, v in request.COOKIES.items %}
{{ k }} = {{ v|stringformat:"r" }}{% empty %} No cookie data{% endfor %}
META:{% for k, v in request.META.items|dictsort:"0" %}
{{ k }} = {{ v|stringformat:"r" }}{% endfor %}
{% else %}Request data not supplied
{% endif %}
Settings:
Using settings module {{ settings.SETTINGS_MODULE }}{% for k, v in settings.items|dictsort:"0" %}
{{ k }} = {{ v|stringformat:"r" }}{% endfor %}
You're seeing this error because you have DEBUG = True in your
Django settings file. Change that to False, and Django will
display a standard 500 page.
"""
TECHNICAL_404_TEMPLATE = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<title>Page not found at {{ request.path_info|escape }}</title>
<meta name="robots" content="NONE,NOARCHIVE">
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; background:#eee; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; margin-bottom:.4em; }
h1 span { font-size:60%; color:#666; font-weight:normal; }
table { border:none; border-collapse: collapse; width:100%; }
td, th { vertical-align:top; padding:2px 3px; }
th { width:12em; text-align:right; color:#666; padding-right:.5em; }
#info { background:#f6f6f6; }
#info ol { margin: 0.5em 4em; }
#info ol li { font-family: monospace; }
#summary { background: #ffc; }
#explanation { background:#eee; border-bottom: 0px none; }
</style>
</head>
<body>
<div id="summary">
<h1>Page not found <span>(404)</span></h1>
<table class="meta">
<tr>
<th>Request Method:</th>
<td>{{ request.META.REQUEST_METHOD }}</td>
</tr>
<tr>
<th>Request URL:</th>
<td>{{ request.build_absolute_uri|escape }}</td>
</tr>
</table>
</div>
<div id="info">
{% if urlpatterns %}
<p>
Using the URLconf defined in <code>{{ urlconf }}</code>,
Django tried these URL patterns, in this order:
</p>
<ol>
{% for pattern in urlpatterns %}
<li>
{% for pat in pattern %}
{{ pat.regex.pattern }}
{% if forloop.last and pat.name %}[name='{{ pat.name }}']{% endif %}
{% endfor %}
</li>
{% endfor %}
</ol>
<p>The current URL, <code>{{ request_path|escape }}</code>, didn't match any of these.</p>
{% else %}
<p>{{ reason }}</p>
{% endif %}
</div>
<div id="explanation">
<p>
You're seeing this error because you have <code>DEBUG = True</code> in
your Django settings file. Change that to <code>False</code>, and Django
will display a standard 404 page.
</p>
</div>
</body>
</html>
"""
EMPTY_URLCONF_TEMPLATE = """
<!DOCTYPE html>
<html lang="en"><head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="robots" content="NONE,NOARCHIVE"><title>Welcome to Django</title>
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; }
h2 { margin-bottom:.8em; }
h2 span { font-size:80%; color:#666; font-weight:normal; }
h3 { margin:1em 0 .5em 0; }
h4 { margin:0 0 .5em 0; font-weight: normal; }
table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; }
tbody td, tbody th { vertical-align:top; padding:2px 3px; }
thead th { padding:1px 6px 1px 3px; background:#fefefe; text-align:left; font-weight:normal; font-size:11px; border:1px solid #ddd; }
tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; }
ul { margin-left: 2em; margin-top: 1em; }
#summary { background: #e0ebff; }
#summary h2 { font-weight: normal; color: #666; }
#explanation { background:#eee; }
#instructions { background:#f6f6f6; }
#summary table { border:none; background:transparent; }
</style>
</head>
<body>
<div id="summary">
<h1>It worked!</h1>
<h2>Congratulations on your first Django-powered page.</h2>
</div>
<div id="instructions">
<p>Of course, you haven't actually done any work yet. Here's what to do next:</p>
<ul>
<li>If you plan to use a database, edit the <code>DATABASES</code> setting in <code>{{ project_name }}/settings.py</code>.</li>
<li>Start your first app by running <code>python manage.py startapp [appname]</code>.</li>
</ul>
</div>
<div id="explanation">
<p>
You're seeing this message because you have <code>DEBUG = True</code> in your
Django settings file and you haven't configured any URLs. Get to work!
</p>
</div>
</body></html>
"""
|
|
"""FHIaims calculator interface."""
# FHIaims.py - IO routines for phonopy-FHI-aims
# methods compatible with the corresponding ones from ase.io.aims
# only minimal subset of functionality required within phonopy context is implemented
#
# Copyright (C) 2009-2011 Joerg Meyer (jm)
# All rights reserved.
#
# This file is part of phonopy.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of the phonopy project nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Modified 2020 by Florian Knoop
import sys
import numpy as np
from phonopy.interface.vasp import check_forces, get_drift_forces
from phonopy.structure.atoms import PhonopyAtoms as Atoms
# FK 2018/07/19
def lmap(func, lis):
"""Python2/3 compatibility.
replace map(int, list) with lmap(int, list) that always returns a list
instead of an iterator. Otherwise conflicts with np.array in python3.
"""
return list(map(func, lis))
def read_aims(filename):
"""Read FHI-aims geometry files in phonopy context."""
    with open(filename, "r") as f:
        lines = f.readlines()
cell = []
is_frac = []
positions = []
symbols = []
magmoms = []
for line in lines:
fields = line.split()
if not len(fields):
continue
if fields[0] == "lattice_vector":
vec = lmap(float, fields[1:4])
cell.append(vec)
elif fields[0][0:4] == "atom":
if fields[0] == "atom":
frac = False
elif fields[0] == "atom_frac":
frac = True
pos = lmap(float, fields[1:4])
sym = fields[4]
is_frac.append(frac)
positions.append(pos)
symbols.append(sym)
magmoms.append(None)
        # Implicitly assume that initial_moment lines adhere to the FHI-aims
        # geometry.in specification, i.e. that two subsequent initial_moment
        # lines do not occur. If they do, the value from the last line is
        # taken here, without any warning.
elif fields[0] == "initial_moment":
magmoms[-1] = float(fields[1])
for (n, frac) in enumerate(is_frac):
if frac:
pos = [
sum([positions[n][ll] * cell[ll][i] for ll in range(3)])
for i in range(3)
]
positions[n] = pos
if None in magmoms:
atoms = Atoms(cell=cell, symbols=symbols, positions=positions)
else:
atoms = Atoms(cell=cell, symbols=symbols, positions=positions, magmoms=magmoms)
return atoms
def write_aims(filename, atoms):
"""Write FHI-aims geometry files in phonopy context."""
lines = ""
lines += "# geometry.in for FHI-aims \n"
lines += "# | generated by phonopy.FHIaims.write_aims() \n"
lattice_vector_line = "lattice_vector " + "%16.16f " * 3 + "\n"
for vec in atoms.get_cell():
lines += lattice_vector_line % tuple(vec)
N = atoms.get_number_of_atoms()
atom_line = "atom " + "%16.16f " * 3 + "%s \n"
positions = atoms.get_positions()
symbols = atoms.get_chemical_symbols()
initial_moment_line = "initial_moment %16.6f\n"
magmoms = atoms.get_magnetic_moments()
for n in range(N):
lines += atom_line % (tuple(positions[n]) + (symbols[n],))
if magmoms is not None:
lines += initial_moment_line % magmoms[n]
with open(filename, "w") as f:
f.write(lines)
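# Minimal round-trip sketch (the file names here are assumptions, not part of
# phonopy itself):
#
#     atoms = read_aims("geometry.in")
#     write_aims("geometry.in.copy", atoms)
#
# Note that read_aims() converts fractional coordinates to Cartesian, so the
# copy is written with plain "atom" lines even if the input used "atom_frac".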
class Atoms_with_forces(Atoms):
"""Hack to phonopy.atoms to maintain ASE compatibility also for forces."""
def get_forces(self):
"""Return forces."""
return self.forces
def read_aims_output(filename):
"""Read aims output.
Read FHI-aims output and return geometry, energy and forces
from last self-consistency iteration.
"""
    with open(filename, "r") as f:
        lines = f.readlines()
ll = 0
N = 0
while ll < len(lines):
line = lines[ll]
if "| Number of atoms" in line:
N = int(line.split()[5])
elif "| Unit cell:" in line:
cell = []
for i in range(3):
ll += 1
vec = lmap(float, lines[ll].split()[1:4])
cell.append(vec)
elif ("Atomic structure:" in line) or ("Updated atomic structure:" in line):
if "Atomic structure:" in line:
i_sym = 3
i_pos_min = 4
i_pos_max = 7
elif "Updated atomic structure:" in line:
i_sym = 4
i_pos_min = 1
i_pos_max = 4
ll += 1
symbols = []
positions = []
for n in range(N):
ll += 1
fields = lines[ll].split()
sym = fields[i_sym]
pos = lmap(float, fields[i_pos_min:i_pos_max])
symbols.append(sym)
positions.append(pos)
elif "Total atomic forces" in line:
forces = []
for i in range(N):
ll += 1
force = lmap(float, lines[ll].split()[-3:])
forces.append(force)
ll += 1
atoms = Atoms_with_forces(cell=cell, symbols=symbols, positions=positions)
atoms.forces = forces
return atoms
def write_supercells_with_displacements(
supercell, cells_with_disps, ids, pre_filename="geometry.in", width=3
):
"""Write perfect supercell and supercells with displacements.
Args:
supercell: perfect supercell
cells_with_disps: supercells with displaced atoms
        ids: displacement ids used to number the output files
        pre_filename: root filename of the written geometry files
        width: zero-padding width for the displacement id in the filename
"""
# original cell
write_aims(pre_filename + ".supercell", supercell)
# displaced cells
for i, cell in zip(ids, cells_with_disps):
filename = "{pre_filename}-{0:0{width}}".format(
i, pre_filename=pre_filename, width=width
)
write_aims(filename, cell)
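# With the defaults above, the perfect cell goes to "geometry.in.supercell" and
# a displaced cell with, for example, id 1 and width=3 goes to
# "geometry.in-001"; the id is zero-padded to `width` digits by the format
# string.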
def parse_set_of_forces(num_atoms, forces_filenames, verbose=True):
"""Parse the forces from output files in ``forces_filenames``."""
is_parsed = True
force_sets = []
for i, filename in enumerate(forces_filenames):
if verbose:
sys.stdout.write("%d. " % (i + 1))
atoms = read_aims_output(filename)
forces = atoms.forces
if check_forces(forces, num_atoms, filename, verbose=verbose):
drift_force = get_drift_forces(forces, filename=filename, verbose=verbose)
force_sets.append(np.array(forces) - drift_force)
else:
is_parsed = False
if is_parsed:
return force_sets
else:
return []
|
|
import datetime
from dateutil.relativedelta import relativedelta
from django import forms
from django.db.models import Q
from selectable import forms as selectable
from timepiece import utils
from timepiece.crm.models import Project, ProjectRelationship
from timepiece.entries.models import Entry, Location, ProjectHours
from timepiece.entries.lookups import ActivityLookup
from timepiece.forms import (
INPUT_FORMATS, TimepieceSplitDateTimeField, TimepieceDateInput)
class ClockInForm(forms.ModelForm):
active_comment = forms.CharField(
label='Notes for the active entry', widget=forms.Textarea,
required=False)
start_time = TimepieceSplitDateTimeField(required=False)
class Meta:
model = Entry
fields = ('active_comment', 'location', 'project', 'activity',
'start_time', 'comments')
widgets = {
'activity': selectable.AutoComboboxSelectWidget(lookup_class=ActivityLookup),
}
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
self.active = kwargs.pop('active', None)
initial = kwargs.get('initial', {})
default_loc = utils.get_setting('TIMEPIECE_DEFAULT_LOCATION_SLUG')
if default_loc:
try:
loc = Location.objects.get(slug=default_loc)
except Location.DoesNotExist:
loc = None
if loc:
initial['location'] = loc.pk
project = initial.get('project', None)
try:
last_project_entry = Entry.objects.filter(
user=self.user, project=project).order_by('-end_time')[0]
except IndexError:
initial['activity'] = None
else:
initial['activity'] = last_project_entry.activity.pk
super(ClockInForm, self).__init__(*args, **kwargs)
self.fields['start_time'].initial = datetime.datetime.now()
self.fields['project'].queryset = Project.trackable.filter(
users=self.user)
if not self.active:
self.fields.pop('active_comment')
else:
self.fields['active_comment'].initial = self.active.comments
self.instance.user = self.user
def clean_start_time(self):
"""
        Make sure that the start time does not fall on or before the start
        of any currently active entry.
"""
start = self.cleaned_data.get('start_time')
if not start:
return start
active_entries = self.user.timepiece_entries.filter(
start_time__gte=start, end_time__isnull=True)
for entry in active_entries:
output = ('The start time is on or before the current entry: '
'%s - %s starting at %s' % (entry.project, entry.activity,
entry.start_time.strftime('%H:%M:%S')))
raise forms.ValidationError(output)
return start
def clean(self):
start_time = self.clean_start_time()
data = self.cleaned_data
if not start_time:
return data
if self.active:
self.active.unpause()
self.active.comments = data['active_comment']
self.active.end_time = start_time - relativedelta(seconds=1)
if not self.active.clean():
raise forms.ValidationError(data)
return data
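    # Illustrative timing of the hand-off above: clocking in at 09:00:00 while
    # another entry is active closes that entry at 08:59:59, one second before
    # the new start, so the two entries never overlap.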
def save(self, commit=True):
self.instance.hours = 0
entry = super(ClockInForm, self).save(commit=commit)
if self.active and commit:
self.active.save()
return entry
class ClockOutForm(forms.ModelForm):
start_time = TimepieceSplitDateTimeField()
end_time = TimepieceSplitDateTimeField()
class Meta:
model = Entry
fields = ('location', 'start_time', 'end_time', 'comments')
def __init__(self, *args, **kwargs):
kwargs['initial'] = kwargs.get('initial', None) or {}
kwargs['initial']['end_time'] = datetime.datetime.now()
super(ClockOutForm, self).__init__(*args, **kwargs)
def save(self, commit=True):
entry = super(ClockOutForm, self).save(commit=False)
entry.unpause(entry.end_time)
if commit:
entry.save()
return entry
class AddUpdateEntryForm(forms.ModelForm):
start_time = TimepieceSplitDateTimeField()
end_time = TimepieceSplitDateTimeField()
class Meta:
model = Entry
exclude = ('user', 'pause_time', 'site', 'hours', 'status',
'entry_group')
widgets = {
'activity': selectable.AutoComboboxSelectWidget(lookup_class=ActivityLookup),
}
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
self.acting_user = kwargs.pop('acting_user')
super(AddUpdateEntryForm, self).__init__(*args, **kwargs)
self.instance.user = self.user
self.fields['project'].queryset = Project.trackable.filter(
users=self.user)
# If editing the active entry, remove the end_time field.
if self.instance.start_time and not self.instance.end_time:
self.fields.pop('end_time')
def clean(self):
"""
If we're not editing the active entry, ensure that this entry doesn't
conflict with or come after the active entry.
"""
active = utils.get_active_entry(self.user)
start_time = self.cleaned_data.get('start_time', None)
end_time = self.cleaned_data.get('end_time', None)
if active and active.pk != self.instance.pk:
if (start_time and start_time > active.start_time) or \
(end_time and end_time > active.start_time):
raise forms.ValidationError(
'The start time or end time conflict with the active '
'entry: {activity} on {project} starting at '
'{start_time}.'.format(
project=active.project,
activity=active.activity,
start_time=active.start_time.strftime('%H:%M:%S'),
))
month_start = utils.get_month_start(start_time)
next_month = month_start + relativedelta(months=1)
entries = self.instance.user.timepiece_entries.filter(
Q(status=Entry.APPROVED) | Q(status=Entry.INVOICED),
start_time__gte=month_start,
end_time__lt=next_month
)
entry = self.instance
if not self.acting_user.is_superuser:
            if ((entries.exists() and not entry.id) or
                    (entry.id and entry.status == Entry.INVOICED)):
message = 'You cannot add/edit entries after a timesheet has been ' \
'approved or invoiced. Please correct the start and end times.'
raise forms.ValidationError(message)
return self.cleaned_data
class ProjectHoursForm(forms.ModelForm):
class Meta:
model = ProjectHours
fields = ['week_start', 'project', 'user', 'hours', 'published']
def save(self, commit=True):
ph = super(ProjectHoursForm, self).save()
# since hours are being assigned to a user, add the user
# to the project if they are not already in it so they can track time
ProjectRelationship.objects.get_or_create(user=self.cleaned_data['user'],
project=self.cleaned_data['project'])
return ph
class ProjectHoursSearchForm(forms.Form):
week_start = forms.DateField(
label='Week of', required=False,
input_formats=INPUT_FORMATS, widget=TimepieceDateInput())
def clean_week_start(self):
week_start = self.cleaned_data.get('week_start', None)
return utils.get_week_start(week_start, False) if week_start else None
|
|
# Copyright (c) 2014 The Beardcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
# Add python-bitcoinrpc to module search path:
import os
import sys
from decimal import Decimal, ROUND_DOWN
import json
import random
import shutil
import subprocess
import time
import re
from authproxy import AuthServiceProxy, JSONRPCException
from util import *
def p2p_port(n):
return 11000 + n + os.getpid()%999
def rpc_port(n):
return 12000 + n + os.getpid()%999
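# Illustrative port assignment (the actual offset depends on the current PID):
# with a hypothetical os.getpid() of 12345, 12345 % 999 == 357, so node 0 uses
# p2p port 11357 and RPC port 12357, node 1 uses 11358/12358, and so on.
# Deriving the offset from the PID keeps concurrent test runs from colliding
# on the same ports.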
def check_json_precision():
"""Make sure json library being used does not lose precision converting BDC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def sync_blocks(rpc_connections, wait=1):
"""
Wait until everybody has the same block count
"""
while True:
counts = [ x.getblockcount() for x in rpc_connections ]
if counts == [ counts[0] ]*len(counts):
break
time.sleep(wait)
def sync_mempools(rpc_connections, wait=1):
"""
Wait until everybody has the same transactions in their memory
pools
"""
while True:
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
for i in range(1, len(rpc_connections)):
if set(rpc_connections[i].getrawmempool()) == pool:
num_match = num_match+1
if num_match == len(rpc_connections):
break
time.sleep(wait)
bitcoind_processes = {}
def initialize_datadir(dirname, n):
datadir = os.path.join(dirname, "node"+str(n))
if not os.path.isdir(datadir):
os.makedirs(datadir)
with open(os.path.join(datadir, "beardcoin.conf"), 'w') as f:
f.write("regtest=1\n");
f.write("rpcuser=rt\n");
f.write("rpcpassword=rt\n");
f.write("port="+str(p2p_port(n))+"\n");
f.write("rpcport="+str(rpc_port(n))+"\n");
return datadir
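# For illustration, initialize_datadir("cache", 0) creates cache/node0/ and
# writes a beardcoin.conf containing regtest=1, rpcuser=rt, rpcpassword=rt plus
# the PID-derived port= and rpcport= lines, which is why the RPC URLs below can
# always use the rt:rt credentials.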
def initialize_chain(test_dir):
"""
Create (or copy from cache) a 200-block-long chain and
4 wallets.
    bitcoind and beardcoin-cli must be in the search path.
"""
if (not os.path.isdir(os.path.join("cache","node0"))
or not os.path.isdir(os.path.join("cache","node1"))
or not os.path.isdir(os.path.join("cache","node2"))
or not os.path.isdir(os.path.join("cache","node3"))):
        # Find and delete old cache directories if any exist.
for i in range(4):
if os.path.isdir(os.path.join("cache","node"+str(i))):
shutil.rmtree(os.path.join("cache","node"+str(i)))
devnull = open(os.devnull, "w")
# Create cache directories, run bitcoinds:
for i in range(4):
datadir=initialize_datadir("cache", i)
args = [ os.getenv("BITCOIND", "bitcoind"), "-keypool=1", "-datadir="+datadir, "-discover=0" ]
if i > 0:
args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
bitcoind_processes[i] = subprocess.Popen(args)
if os.getenv("PYTHON_DEBUG", ""):
print "initialize_chain: bitcoind started, calling beardcoin-cli -rpcwait getblockcount"
subprocess.check_call([ os.getenv("BITCOINCLI", "beardcoin-cli"), "-datadir="+datadir,
"-rpcwait", "getblockcount"], stdout=devnull)
if os.getenv("PYTHON_DEBUG", ""):
print "initialize_chain: beardcoin-cli -rpcwait getblockcount completed"
devnull.close()
rpcs = []
for i in range(4):
try:
url = "http://rt:rt@127.0.0.1:%d"%(rpc_port(i),)
rpcs.append(AuthServiceProxy(url))
except:
sys.stderr.write("Error connecting to "+url+"\n")
sys.exit(1)
# Create a 200-block-long chain; each of the 4 nodes
# gets 25 mature blocks and 25 immature.
# blocks are created with timestamps 10 minutes apart, starting
# at 1 Jan 2014
block_time = 1388534400
for i in range(2):
for peer in range(4):
for j in range(25):
set_node_times(rpcs, block_time)
rpcs[peer].generate(1)
block_time += 10*60
# Must sync before next peer starts generating blocks
sync_blocks(rpcs)
# Shut them down, and clean up cache directories:
stop_nodes(rpcs)
wait_bitcoinds()
for i in range(4):
os.remove(log_filename("cache", i, "debug.log"))
os.remove(log_filename("cache", i, "db.log"))
os.remove(log_filename("cache", i, "peers.dat"))
os.remove(log_filename("cache", i, "fee_estimates.dat"))
for i in range(4):
from_dir = os.path.join("cache", "node"+str(i))
to_dir = os.path.join(test_dir, "node"+str(i))
shutil.copytree(from_dir, to_dir)
initialize_datadir(test_dir, i) # Overwrite port/rpcport in beardcoin.conf
def initialize_chain_clean(test_dir, num_nodes):
"""
Create an empty blockchain and num_nodes wallets.
Useful if a test case wants complete control over initialization.
"""
for i in range(num_nodes):
datadir=initialize_datadir(test_dir, i)
def _rpchost_to_args(rpchost):
'''Convert optional IP:port spec to rpcconnect/rpcport args'''
if rpchost is None:
return []
    match = re.match(r'(\[[0-9a-fA-F:]+\]|[^:]+)(?::([0-9]+))?$', rpchost)
if not match:
raise ValueError('Invalid RPC host spec ' + rpchost)
rpcconnect = match.group(1)
rpcport = match.group(2)
if rpcconnect.startswith('['): # remove IPv6 [...] wrapping
rpcconnect = rpcconnect[1:-1]
rv = ['-rpcconnect=' + rpcconnect]
if rpcport:
rv += ['-rpcport=' + rpcport]
return rv
def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None):
"""
Start a bitcoind and return RPC connection to it
"""
datadir = os.path.join(dirname, "node"+str(i))
if binary is None:
binary = os.getenv("BITCOIND", "bitcoind")
args = [ binary, "-datadir="+datadir, "-keypool=1", "-discover=0", "-rest" ]
if extra_args is not None: args.extend(extra_args)
bitcoind_processes[i] = subprocess.Popen(args)
devnull = open(os.devnull, "w")
if os.getenv("PYTHON_DEBUG", ""):
print "start_node: bitcoind started, calling beardcoin-cli -rpcwait getblockcount"
subprocess.check_call([ os.getenv("BITCOINCLI", "beardcoin-cli"), "-datadir="+datadir] +
_rpchost_to_args(rpchost) +
["-rpcwait", "getblockcount"], stdout=devnull)
if os.getenv("PYTHON_DEBUG", ""):
print "start_node: calling beardcoin-cli -rpcwait getblockcount returned"
devnull.close()
url = "http://rt:rt@%s:%d" % (rpchost or '127.0.0.1', rpc_port(i))
if timewait is not None:
proxy = AuthServiceProxy(url, timeout=timewait)
else:
proxy = AuthServiceProxy(url)
proxy.url = url # store URL on proxy for info
return proxy
def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, binary=None):
"""
Start multiple bitcoinds, return RPC connections to them
"""
if extra_args is None: extra_args = [ None for i in range(num_nodes) ]
if binary is None: binary = [ None for i in range(num_nodes) ]
return [ start_node(i, dirname, extra_args[i], rpchost, binary=binary[i]) for i in range(num_nodes) ]
def log_filename(dirname, n_node, logname):
return os.path.join(dirname, "node"+str(n_node), "regtest", logname)
def stop_node(node, i):
node.stop()
bitcoind_processes[i].wait()
del bitcoind_processes[i]
def stop_nodes(nodes):
for node in nodes:
node.stop()
del nodes[:] # Emptying array closes connections as a side effect
def set_node_times(nodes, t):
for node in nodes:
node.setmocktime(t)
def wait_bitcoinds():
# Wait for all bitcoinds to cleanly exit
for bitcoind in bitcoind_processes.values():
bitcoind.wait()
bitcoind_processes.clear()
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:"+str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
    # poll until the version handshake is complete to avoid race conditions
# with transaction relaying
while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
connect_nodes(nodes[a], b)
connect_nodes(nodes[b], a)
def find_output(node, txid, amount):
"""
Return index to output of txid with value amount
Raises exception if there is none.
"""
txdata = node.getrawtransaction(txid, 1)
for i in range(len(txdata["vout"])):
if txdata["vout"][i]["value"] == amount:
return i
raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
"""
assert(confirmations_required >=0)
utxo = from_node.listunspent(confirmations_required)
random.shuffle(utxo)
inputs = []
total_in = Decimal("0.00000000")
while total_in < amount_needed and len(utxo) > 0:
t = utxo.pop()
total_in += t["amount"]
inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] } )
if total_in < amount_needed:
raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in))
return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
"""
outputs = {}
amount = amount_out+fee
change = amount_in - amount
if change > amount*2:
# Create an extra change output to break up big inputs
change_address = from_node.getnewaddress()
# Split change in two, being careful of rounding:
outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
change = amount_in - amount - outputs[change_address]
if change > 0:
outputs[from_node.getnewaddress()] = change
return outputs
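# Illustrative numbers (not taken from the original suite): with amount_in=20,
# amount_out=3 and fee=0.5, the change of 16.5 exceeds 2 * (amount_out + fee)
# == 7, so it is split into two outputs of 8.25 each; with amount_in=5 the
# remaining 1.5 goes to a single fresh address.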
def send_zeropri_transaction(from_node, to_node, amount, fee):
"""
Create&broadcast a zero-priority transaction.
Returns (txid, hex-encoded-txdata)
Ensures transaction is zero-priority by first creating a send-to-self,
then using its output
"""
# Create a send-to-self with confirmed inputs:
self_address = from_node.getnewaddress()
(total_in, inputs) = gather_inputs(from_node, amount+fee*2)
outputs = make_change(from_node, total_in, amount+fee, fee)
outputs[self_address] = float(amount+fee)
self_rawtx = from_node.createrawtransaction(inputs, outputs)
self_signresult = from_node.signrawtransaction(self_rawtx)
self_txid = from_node.sendrawtransaction(self_signresult["hex"], True)
vout = find_output(from_node, self_txid, amount+fee)
# Now immediately spend the output to create a 1-input, 1-output
# zero-priority transaction:
inputs = [ { "txid" : self_txid, "vout" : vout } ]
outputs = { to_node.getnewaddress() : float(amount) }
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"])
def random_zeropri_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random zero-priority transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(txid, txhex) = send_zeropri_transaction(from_node, to_node, amount, fee)
return (txid, txhex, fee)
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(total_in, inputs) = gather_inputs(from_node, amount+fee)
outputs = make_change(from_node, total_in, amount, fee)
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"], fee)
def assert_equal(thing1, thing2):
if thing1 != thing2:
raise AssertionError("%s != %s"%(str(thing1),str(thing2)))
def assert_greater_than(thing1, thing2):
if thing1 <= thing2:
raise AssertionError("%s <= %s"%(str(thing1),str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
try:
fun(*args, **kwds)
except exc:
pass
except Exception as e:
raise AssertionError("Unexpected exception raised: "+type(e).__name__)
else:
raise AssertionError("No exception raised")
|
|
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import assert_array_almost_equal, run_module_suite
from scipy.stats import (binned_statistic, binned_statistic_2d,
binned_statistic_dd)
from scipy.lib.six import u
class TestBinnedStatistic(object):
@classmethod
def setup_class(cls):
np.random.seed(9865)
cls.x = np.random.random(100)
cls.y = np.random.random(100)
cls.v = np.random.random(100)
cls.X = np.random.random((100, 3))
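    # The fixed seed above makes x, y, v and X reproducible, which is what lets
    # the *_bincode tests below compare against hard-coded binnumber arrays
    # instead of recomputing them.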
def test_1d_count(self):
x = self.x
v = self.v
count1, edges1, bc = binned_statistic(x, v, 'count', bins=10)
count2, edges2 = np.histogram(x, bins=10)
assert_array_almost_equal(count1, count2)
assert_array_almost_equal(edges1, edges2)
def test_1d_sum(self):
x = self.x
v = self.v
sum1, edges1, bc = binned_statistic(x, v, 'sum', bins=10)
sum2, edges2 = np.histogram(x, bins=10, weights=v)
assert_array_almost_equal(sum1, sum2)
assert_array_almost_equal(edges1, edges2)
def test_1d_mean(self):
x = self.x
v = self.v
stat1, edges1, bc = binned_statistic(x, v, 'mean', bins=10)
stat2, edges2, bc = binned_statistic(x, v, np.mean, bins=10)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(edges1, edges2)
def test_1d_std(self):
x = self.x
v = self.v
stat1, edges1, bc = binned_statistic(x, v, 'std', bins=10)
stat2, edges2, bc = binned_statistic(x, v, np.std, bins=10)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(edges1, edges2)
def test_1d_median(self):
x = self.x
v = self.v
stat1, edges1, bc = binned_statistic(x, v, 'median', bins=10)
stat2, edges2, bc = binned_statistic(x, v, np.median, bins=10)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(edges1, edges2)
def test_1d_bincode(self):
x = self.x[:20]
v = self.v[:20]
count1, edges1, bc = binned_statistic(x, v, 'count', bins=3)
bc2 = np.array([3, 2, 1, 3, 2, 3, 3, 3, 3, 1, 1, 3, 3, 1, 2, 3, 1,
1, 2, 1])
bcount = [(bc == i).sum() for i in np.unique(bc)]
assert_array_almost_equal(bc, bc2)
assert_array_almost_equal(bcount, count1)
def test_1d_range_keyword(self):
# Regression test for gh-3063, range can be (min, max) or [(min, max)]
np.random.seed(9865)
x = np.arange(30)
data = np.random.random(30)
mean, bins, _ = binned_statistic(x[:15], data[:15])
mean_range, bins_range, _ = binned_statistic(x, data, range=[(0, 14)])
mean_range2, bins_range2, _ = binned_statistic(x, data, range=(0, 14))
assert_array_almost_equal(mean, mean_range)
assert_array_almost_equal(bins, bins_range)
assert_array_almost_equal(mean, mean_range2)
assert_array_almost_equal(bins, bins_range2)
def test_2d_count(self):
x = self.x
y = self.y
v = self.v
count1, binx1, biny1, bc = binned_statistic_2d(x, y, v, 'count', bins=5)
count2, binx2, biny2 = np.histogram2d(x, y, bins=5)
assert_array_almost_equal(count1, count2)
assert_array_almost_equal(binx1, binx2)
assert_array_almost_equal(biny1, biny2)
def test_2d_sum(self):
x = self.x
y = self.y
v = self.v
sum1, binx1, biny1, bc = binned_statistic_2d(x, y, v, 'sum', bins=5)
sum2, binx2, biny2 = np.histogram2d(x, y, bins=5, weights=v)
assert_array_almost_equal(sum1, sum2)
assert_array_almost_equal(binx1, binx2)
assert_array_almost_equal(biny1, biny2)
def test_2d_mean(self):
x = self.x
y = self.y
v = self.v
stat1, binx1, biny1, bc = binned_statistic_2d(x, y, v, 'mean', bins=5)
stat2, binx2, biny2, bc = binned_statistic_2d(x, y, v, np.mean, bins=5)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(binx1, binx2)
assert_array_almost_equal(biny1, biny2)
def test_2d_mean_unicode(self):
x = self.x
y = self.y
v = self.v
stat1, binx1, biny1, bc = binned_statistic_2d(x, y, v, u('mean'), bins=5)
stat2, binx2, biny2, bc = binned_statistic_2d(x, y, v, np.mean, bins=5)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(binx1, binx2)
assert_array_almost_equal(biny1, biny2)
def test_2d_std(self):
x = self.x
y = self.y
v = self.v
stat1, binx1, biny1, bc = binned_statistic_2d(x, y, v, 'std', bins=5)
stat2, binx2, biny2, bc = binned_statistic_2d(x, y, v, np.std, bins=5)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(binx1, binx2)
assert_array_almost_equal(biny1, biny2)
def test_2d_median(self):
x = self.x
y = self.y
v = self.v
stat1, binx1, biny1, bc = binned_statistic_2d(x, y, v, 'median', bins=5)
stat2, binx2, biny2, bc = binned_statistic_2d(x, y, v, np.median, bins=5)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(binx1, binx2)
assert_array_almost_equal(biny1, biny2)
def test_2d_bincode(self):
x = self.x[:20]
y = self.y[:20]
v = self.v[:20]
count1, binx1, biny1, bc = binned_statistic_2d(x, y, v, 'count', bins=3)
bc2 = np.array([17, 11, 6, 16, 11, 17, 18, 17, 17, 7, 6, 18, 16,
6, 11, 16, 6, 6, 11, 8])
bcount = [(bc == i).sum() for i in np.unique(bc)]
assert_array_almost_equal(bc, bc2)
count1adj = count1[count1.nonzero()]
assert_array_almost_equal(bcount, count1adj)
def test_dd_count(self):
X = self.X
v = self.v
count1, edges1, bc = binned_statistic_dd(X, v, 'count', bins=3)
count2, edges2 = np.histogramdd(X, bins=3)
assert_array_almost_equal(count1, count2)
assert_array_almost_equal(edges1, edges2)
def test_dd_sum(self):
X = self.X
v = self.v
sum1, edges1, bc = binned_statistic_dd(X, v, 'sum', bins=3)
sum2, edges2 = np.histogramdd(X, bins=3, weights=v)
assert_array_almost_equal(sum1, sum2)
assert_array_almost_equal(edges1, edges2)
def test_dd_mean(self):
X = self.X
v = self.v
stat1, edges1, bc = binned_statistic_dd(X, v, 'mean', bins=3)
stat2, edges2, bc = binned_statistic_dd(X, v, np.mean, bins=3)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(edges1, edges2)
def test_dd_std(self):
X = self.X
v = self.v
stat1, edges1, bc = binned_statistic_dd(X, v, 'std', bins=3)
stat2, edges2, bc = binned_statistic_dd(X, v, np.std, bins=3)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(edges1, edges2)
def test_dd_median(self):
X = self.X
v = self.v
stat1, edges1, bc = binned_statistic_dd(X, v, 'median', bins=3)
stat2, edges2, bc = binned_statistic_dd(X, v, np.median, bins=3)
assert_array_almost_equal(stat1, stat2)
assert_array_almost_equal(edges1, edges2)
def test_dd_bincode(self):
X = self.X[:20]
v = self.v[:20]
count1, edges1, bc = binned_statistic_dd(X, v, 'count', bins=3)
bc2 = np.array([63, 33, 86, 83, 88, 67, 57, 33, 42, 41, 82, 83, 92,
32, 36, 91, 43, 87, 81, 81])
bcount = [(bc == i).sum() for i in np.unique(bc)]
assert_array_almost_equal(bc, bc2)
count1adj = count1[count1.nonzero()]
assert_array_almost_equal(bcount, count1adj)
if __name__ == "__main__":
run_module_suite()
|
|
# Copyright 2011 OpenStack Foundation.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""OpenStack logging handler.
This module extends the standard logging functionality with the option to
specify a context object when calling the various log methods. If no context
object is specified, default formatting is used. Additionally, an instance
uuid may be passed as part of the log message, which is intended to make it
easier for admins to find messages related to a specific instance.
It also allows setting the formatting information through conf.
"""
import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import socket
import sys
import traceback
from oslo.config import cfg
from oslo_serialization import jsonutils
from oslo.utils import importutils
import six
from six import moves
_PY26 = sys.version_info[0:2] == (2, 6)
from openstack_auth.openstack.common._i18n import _
from openstack_auth.openstack.common import local
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
common_cli_opts = [
cfg.BoolOpt('debug',
short='d',
default=False,
help='Print debugging output (set logging level to '
'DEBUG instead of default WARNING level).'),
cfg.BoolOpt('verbose',
short='v',
default=False,
help='Print more verbose output (set logging level to '
'INFO instead of default WARNING level).'),
]
logging_cli_opts = [
cfg.StrOpt('log-config-append',
metavar='PATH',
deprecated_name='log-config',
help='The name of a logging configuration file. This file '
'is appended to any existing logging configuration '
'files. For details about logging configuration files, '
'see the Python logging module documentation.'),
cfg.StrOpt('log-format',
metavar='FORMAT',
help='DEPRECATED. '
'A logging.Formatter log message format string which may '
'use any of the available logging.LogRecord attributes. '
'This option is deprecated. Please use '
'logging_context_format_string and '
'logging_default_format_string instead.'),
cfg.StrOpt('log-date-format',
default=_DEFAULT_LOG_DATE_FORMAT,
metavar='DATE_FORMAT',
help='Format string for %%(asctime)s in log records. '
'Default: %(default)s .'),
cfg.StrOpt('log-file',
metavar='PATH',
deprecated_name='logfile',
help='(Optional) Name of log file to output to. '
'If no default is set, logging will go to stdout.'),
cfg.StrOpt('log-dir',
deprecated_name='logdir',
help='(Optional) The base directory used for relative '
'--log-file paths.'),
cfg.BoolOpt('use-syslog',
default=False,
help='Use syslog for logging. '
'Existing syslog format is DEPRECATED during I, '
'and will change in J to honor RFC5424.'),
cfg.BoolOpt('use-syslog-rfc-format',
# TODO(bogdando) remove or use True after existing
# syslog format deprecation in J
default=False,
help='(Optional) Enables or disables syslog rfc5424 format '
'for logging. If enabled, prefixes the MSG part of the '
'syslog message with APP-NAME (RFC5424). The '
'format without the APP-NAME is deprecated in I, '
'and will be removed in J.'),
cfg.StrOpt('syslog-log-facility',
default='LOG_USER',
help='Syslog facility to receive log lines.')
]
generic_log_opts = [
cfg.BoolOpt('use_stderr',
default=True,
help='Log output to standard error.')
]
DEFAULT_LOG_LEVELS = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN',
'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO',
'oslo.messaging=INFO', 'iso8601=WARN',
'requests.packages.urllib3.connectionpool=WARN',
'urllib3.connectionpool=WARN', 'websocket=WARN',
"keystonemiddleware=WARN", "routes.middleware=WARN",
"stevedore=WARN"]
log_opts = [
cfg.StrOpt('logging_context_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [%(request_id)s %(user_identity)s] '
'%(instance)s%(message)s',
help='Format string to use for log messages with context.'),
cfg.StrOpt('logging_default_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [-] %(instance)s%(message)s',
help='Format string to use for log messages without context.'),
cfg.StrOpt('logging_debug_format_suffix',
default='%(funcName)s %(pathname)s:%(lineno)d',
help='Data to append to log format when level is DEBUG.'),
cfg.StrOpt('logging_exception_prefix',
default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
'%(instance)s',
help='Prefix each line of exception output with this format.'),
cfg.ListOpt('default_log_levels',
default=DEFAULT_LOG_LEVELS,
help='List of logger=LEVEL pairs.'),
cfg.BoolOpt('publish_errors',
default=False,
help='Enables or disables publication of error events.'),
cfg.BoolOpt('fatal_deprecations',
default=False,
help='Enables or disables fatal status of deprecations.'),
# NOTE(mikal): there are two options here because sometimes we are handed
# a full instance (and could include more information), and other times we
# are just handed a UUID for the instance.
cfg.StrOpt('instance_format',
default='[instance: %(uuid)s] ',
help='The format for an instance that is passed with the log '
'message.'),
cfg.StrOpt('instance_uuid_format',
default='[instance: %(uuid)s] ',
help='The format for an instance UUID that is passed with the '
'log message.'),
]
CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)
# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
# module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')
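# With the standard library's logging.INFO == 20, AUDIT is registered as level
# 21, so it sits between INFO and WARNING and any handler or logger that
# accepts INFO-level records also accepts AUDIT ones.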
try:
NullHandler = logging.NullHandler
except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7
class NullHandler(logging.Handler):
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = None
def _dictify_context(context):
if context is None:
return None
if not isinstance(context, dict) and getattr(context, 'to_dict', None):
context = context.to_dict()
return context
def _get_binary_name():
return os.path.basename(inspect.stack()[-1][1])
def _get_log_file_path(binary=None):
logfile = CONF.log_file
logdir = CONF.log_dir
if logfile and not logdir:
return logfile
if logfile and logdir:
return os.path.join(logdir, logfile)
if logdir:
binary = binary or _get_binary_name()
return '%s.log' % (os.path.join(logdir, binary),)
return None
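# Resolution sketch for the helper above (paths are illustrative): with
# log_file="svc.log" and log_dir="/var/log/myapp" the result is
# "/var/log/myapp/svc.log"; with only log_dir set, the basename of the running
# binary is used, e.g. "/var/log/myapp/<binary>.log"; with neither set, None is
# returned and logging falls back to the stream handlers configured below.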
class BaseLoggerAdapter(logging.LoggerAdapter):
def audit(self, msg, *args, **kwargs):
self.log(logging.AUDIT, msg, *args, **kwargs)
def isEnabledFor(self, level):
if _PY26:
            # LoggerAdapter.isEnabledFor() was added in Python 2.7 and simply
            # delegates to the wrapped logger; do the same here so that
            # Python 2.6 has the same capability.
return self.logger.isEnabledFor(level)
else:
return super(BaseLoggerAdapter, self).isEnabledFor(level)
class LazyAdapter(BaseLoggerAdapter):
def __init__(self, name='unknown', version='unknown'):
self._logger = None
self.extra = {}
self.name = name
self.version = version
@property
def logger(self):
if not self._logger:
self._logger = getLogger(self.name, self.version)
if six.PY3:
# In Python 3, the code fails because the 'manager' attribute
# cannot be found when using a LoggerAdapter as the
# underlying logger. Work around this issue.
self._logger.manager = self._logger.logger.manager
return self._logger
class ContextAdapter(BaseLoggerAdapter):
warn = logging.LoggerAdapter.warning
def __init__(self, logger, project_name, version_string):
self.logger = logger
self.project = project_name
self.version = version_string
self._deprecated_messages_sent = dict()
@property
def handlers(self):
return self.logger.handlers
def deprecated(self, msg, *args, **kwargs):
"""Call this method when a deprecated feature is used.
If the system is configured for fatal deprecations then the message
is logged at the 'critical' level and :class:`DeprecatedConfig` will
be raised.
Otherwise, the message will be logged (once) at the 'warn' level.
:raises: :class:`DeprecatedConfig` if the system is configured for
fatal deprecations.
"""
stdmsg = _("Deprecated: %s") % msg
if CONF.fatal_deprecations:
self.critical(stdmsg, *args, **kwargs)
raise DeprecatedConfig(msg=stdmsg)
# Using a list because a tuple with dict can't be stored in a set.
sent_args = self._deprecated_messages_sent.setdefault(msg, list())
if args in sent_args:
# Already logged this message, so don't log it again.
return
sent_args.append(args)
self.warn(stdmsg, *args, **kwargs)
def process(self, msg, kwargs):
# NOTE(jecarey): If msg is not unicode, coerce it into unicode
# before it can get to the python logging and
# possibly cause string encoding trouble
if not isinstance(msg, six.text_type):
msg = six.text_type(msg)
if 'extra' not in kwargs:
kwargs['extra'] = {}
extra = kwargs['extra']
context = kwargs.pop('context', None)
if not context:
context = getattr(local.store, 'context', None)
if context:
extra.update(_dictify_context(context))
instance = kwargs.pop('instance', None)
instance_uuid = (extra.get('instance_uuid') or
kwargs.pop('instance_uuid', None))
instance_extra = ''
if instance:
instance_extra = CONF.instance_format % instance
elif instance_uuid:
instance_extra = (CONF.instance_uuid_format
% {'uuid': instance_uuid})
extra['instance'] = instance_extra
extra.setdefault('user_identity', kwargs.pop('user_identity', None))
extra['project'] = self.project
extra['version'] = self.version
extra['extra'] = extra.copy()
return msg, kwargs
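# Illustrative effect of process() above (names are assumptions): a call such
# as LOG.info("booting", context=ctx, instance_uuid="abc-123") ends up with
# extra['instance'] == "[instance: abc-123] " (the default
# instance_uuid_format), which the context-aware formatter can prepend to the
# message.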
class JSONFormatter(logging.Formatter):
def __init__(self, fmt=None, datefmt=None):
        # NOTE(jkoelker) we ignore the fmt argument, but it's still there
# since logging.config.fileConfig passes it.
self.datefmt = datefmt
def formatException(self, ei, strip_newlines=True):
lines = traceback.format_exception(*ei)
if strip_newlines:
lines = [moves.filter(
lambda x: x,
line.rstrip().splitlines()) for line in lines]
lines = list(itertools.chain(*lines))
return lines
def format(self, record):
message = {'message': record.getMessage(),
'asctime': self.formatTime(record, self.datefmt),
'name': record.name,
'msg': record.msg,
'args': record.args,
'levelname': record.levelname,
'levelno': record.levelno,
'pathname': record.pathname,
'filename': record.filename,
'module': record.module,
'lineno': record.lineno,
'funcname': record.funcName,
'created': record.created,
'msecs': record.msecs,
'relative_created': record.relativeCreated,
'thread': record.thread,
'thread_name': record.threadName,
'process_name': record.processName,
'process': record.process,
'traceback': None}
if hasattr(record, 'extra'):
message['extra'] = record.extra
if record.exc_info:
message['traceback'] = self.formatException(record.exc_info)
return jsonutils.dumps(message)
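# A record formatted by the class above serializes to one JSON object with keys
# such as "message", "levelname", "module" and "traceback"; "traceback" stays
# null unless the record carries exc_info, in which case it holds the list of
# traceback lines produced by formatException().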
def _create_logging_excepthook(product_name):
def logging_excepthook(exc_type, value, tb):
extra = {'exc_info': (exc_type, value, tb)}
getLogger(product_name).critical(
"".join(traceback.format_exception_only(exc_type, value)),
**extra)
return logging_excepthook
class LogConfigError(Exception):
message = _('Error loading logging config %(log_config)s: %(err_msg)s')
def __init__(self, log_config, err_msg):
self.log_config = log_config
self.err_msg = err_msg
def __str__(self):
return self.message % dict(log_config=self.log_config,
err_msg=self.err_msg)
def _load_log_config(log_config_append):
try:
logging.config.fileConfig(log_config_append,
disable_existing_loggers=False)
except (moves.configparser.Error, KeyError) as exc:
raise LogConfigError(log_config_append, six.text_type(exc))
def setup(product_name, version='unknown'):
"""Setup logging."""
if CONF.log_config_append:
_load_log_config(CONF.log_config_append)
else:
_setup_logging_from_conf(product_name, version)
sys.excepthook = _create_logging_excepthook(product_name)
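# --- Illustrative sketch (not part of the original module) -------------------
# Typical bootstrap for a consuming service, assuming CONF has already been
# populated by the service's option parsing; defined but never called here.
def _example_setup_usage():
    setup('example-service', version='1.0')   # installs handlers + excepthook
    LOG = getLogger(__name__)
    LOG.info('logging configured')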
def set_defaults(logging_context_format_string=None,
default_log_levels=None):
# Just in case the caller is not setting the
# default_log_levels. This is insurance because
# we introduced the default_log_levels parameter
# later in a backwards-incompatible change
if default_log_levels is not None:
cfg.set_defaults(
log_opts,
default_log_levels=default_log_levels)
if logging_context_format_string is not None:
cfg.set_defaults(
log_opts,
logging_context_format_string=logging_context_format_string)
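# --- Illustrative sketch (not part of the original module) -------------------
# A consuming project overriding the library defaults before calling setup();
# the level strings follow the ``module=LEVEL`` form that
# _setup_logging_from_conf() parses below. Defined but never called here.
def _example_set_defaults():
    set_defaults(default_log_levels=['amqp=WARN', 'urllib3.connectionpool=WARN'])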
def _find_facility_from_conf():
facility_names = logging.handlers.SysLogHandler.facility_names
facility = getattr(logging.handlers.SysLogHandler,
CONF.syslog_log_facility,
None)
if facility is None and CONF.syslog_log_facility in facility_names:
facility = facility_names.get(CONF.syslog_log_facility)
if facility is None:
valid_facilities = facility_names.keys()
consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
valid_facilities.extend(consts)
raise TypeError(_('syslog facility must be one of: %s') %
', '.join("'%s'" % fac
for fac in valid_facilities))
return facility
class RFCSysLogHandler(logging.handlers.SysLogHandler):
def __init__(self, *args, **kwargs):
self.binary_name = _get_binary_name()
# Do not use super() unless type(logging.handlers.SysLogHandler)
# is 'type' (Python 2.7).
# Use old style calls, if the type is 'classobj' (Python 2.6)
logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)
def format(self, record):
# Do not use super() unless type(logging.handlers.SysLogHandler)
# is 'type' (Python 2.7).
# Use old style calls, if the type is 'classobj' (Python 2.6)
msg = logging.handlers.SysLogHandler.format(self, record)
msg = self.binary_name + ' ' + msg
return msg
def _setup_logging_from_conf(project, version):
log_root = getLogger(None).logger
for handler in log_root.handlers:
log_root.removeHandler(handler)
logpath = _get_log_file_path()
if logpath:
filelog = logging.handlers.WatchedFileHandler(logpath)
log_root.addHandler(filelog)
if CONF.use_stderr:
streamlog = ColorHandler()
log_root.addHandler(streamlog)
elif not logpath:
# pass sys.stdout as a positional argument
# python2.6 calls the argument strm, in 2.7 it's stream
streamlog = logging.StreamHandler(sys.stdout)
log_root.addHandler(streamlog)
if CONF.publish_errors:
handler = importutils.import_object(
"oslo.messaging.notify.log_handler.PublishErrorsHandler",
logging.ERROR)
log_root.addHandler(handler)
datefmt = CONF.log_date_format
for handler in log_root.handlers:
# NOTE(alaski): CONF.log_format overrides everything currently. This
# should be deprecated in favor of context aware formatting.
if CONF.log_format:
handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
datefmt=datefmt))
log_root.info('Deprecated: log_format is now deprecated and will '
'be removed in the next release')
else:
handler.setFormatter(ContextFormatter(project=project,
version=version,
datefmt=datefmt))
if CONF.debug:
log_root.setLevel(logging.DEBUG)
elif CONF.verbose:
log_root.setLevel(logging.INFO)
else:
log_root.setLevel(logging.WARNING)
for pair in CONF.default_log_levels:
mod, _sep, level_name = pair.partition('=')
logger = logging.getLogger(mod)
# NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name
# to integer code.
if sys.version_info < (2, 7):
level = logging.getLevelName(level_name)
logger.setLevel(level)
else:
logger.setLevel(level_name)
if CONF.use_syslog:
try:
facility = _find_facility_from_conf()
# TODO(bogdando) use the format provided by RFCSysLogHandler
# after existing syslog format deprecation in J
if CONF.use_syslog_rfc_format:
syslog = RFCSysLogHandler(facility=facility)
else:
syslog = logging.handlers.SysLogHandler(facility=facility)
log_root.addHandler(syslog)
except socket.error:
log_root.error('Unable to add syslog handler. Verify that syslog '
'is running.')
_loggers = {}
def getLogger(name='unknown', version='unknown'):
if name not in _loggers:
_loggers[name] = ContextAdapter(logging.getLogger(name),
name,
version)
return _loggers[name]
def getLazyLogger(name='unknown', version='unknown'):
"""Returns lazy logger.
Creates a pass-through logger that does not create the real logger
until it is really needed and delegates all calls to the real logger
once it is created.
"""
return LazyAdapter(name, version)
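# --- Illustrative sketch (not part of the original module) -------------------
# Difference between the two factories above: getLogger() builds (and caches)
# the ContextAdapter immediately, while getLazyLogger() returns a LazyAdapter
# that only creates the real logger on first use. Names are examples only;
# defined but never called here.
def _example_logger_factories():
    eager = getLogger('example.eager')     # ContextAdapter created now
    lazy = getLazyLogger('example.lazy')   # real logger created on first call
    eager.info('eager logger ready')
    lazy.info('lazy logger materialized by this call')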
class WritableLogger(object):
"""A thin wrapper that responds to `write` and logs."""
def __init__(self, logger, level=logging.INFO):
self.logger = logger
self.level = level
def write(self, msg):
self.logger.log(self.level, msg.rstrip())
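# --- Illustrative sketch (not part of the original module) -------------------
# WritableLogger lets code that expects a file-like object (anything calling
# .write()) feed a logger instead, e.g. as a WSGI server's error stream.
# Defined but never called here.
def _example_writable_logger():
    stream = WritableLogger(getLogger('example.wsgi'), logging.DEBUG)
    stream.write('GET /healthcheck 200\n')  # logged at DEBUG, newline stripped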
class ContextFormatter(logging.Formatter):
"""A context.RequestContext aware formatter configured through flags.
The flags used to set format strings are: logging_context_format_string
and logging_default_format_string. You can also specify
logging_debug_format_suffix to append extra formatting if the log level is
debug.
For information about what variables are available for the formatter see:
http://docs.python.org/library/logging.html#formatter
If available, uses the context value stored in TLS - local.store.context
"""
def __init__(self, *args, **kwargs):
"""Initialize ContextFormatter instance
Takes additional keyword arguments which can be used in the message
format string.
:keyword project: project name
:type project: string
:keyword version: project version
:type version: string
"""
self.project = kwargs.pop('project', 'unknown')
self.version = kwargs.pop('version', 'unknown')
logging.Formatter.__init__(self, *args, **kwargs)
def format(self, record):
"""Uses contextstring if request_id is set, otherwise default."""
# NOTE(jecarey): If msg is not unicode, coerce it into unicode
# before it can get to the python logging and
# possibly cause string encoding trouble
if not isinstance(record.msg, six.text_type):
record.msg = six.text_type(record.msg)
# store project info
record.project = self.project
record.version = self.version
# store request info
context = getattr(local.store, 'context', None)
if context:
d = _dictify_context(context)
for k, v in d.items():
setattr(record, k, v)
# NOTE(sdague): default the fancier formatting params
# to an empty string so we don't throw an exception if
# they get used
for key in ('instance', 'color', 'user_identity'):
if key not in record.__dict__:
record.__dict__[key] = ''
if record.__dict__.get('request_id'):
fmt = CONF.logging_context_format_string
else:
fmt = CONF.logging_default_format_string
if (record.levelno == logging.DEBUG and
CONF.logging_debug_format_suffix):
fmt += " " + CONF.logging_debug_format_suffix
if sys.version_info < (3, 2):
self._fmt = fmt
else:
self._style = logging.PercentStyle(fmt)
self._fmt = self._style._fmt
# Cache this on the record, Logger will respect our formatted copy
if record.exc_info:
record.exc_text = self.formatException(record.exc_info, record)
return logging.Formatter.format(self, record)
def formatException(self, exc_info, record=None):
"""Format exception output with CONF.logging_exception_prefix."""
if not record:
return logging.Formatter.formatException(self, exc_info)
stringbuffer = moves.StringIO()
traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
None, stringbuffer)
lines = stringbuffer.getvalue().split('\n')
stringbuffer.close()
if CONF.logging_exception_prefix.find('%(asctime)') != -1:
record.asctime = self.formatTime(record, self.datefmt)
formatted_lines = []
for line in lines:
pl = CONF.logging_exception_prefix % record.__dict__
fl = '%s%s' % (pl, line)
formatted_lines.append(fl)
return '\n'.join(formatted_lines)
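# --- Illustrative sketch (not part of the original module) -------------------
# Wiring ContextFormatter onto a handler by hand (setup() normally does this).
# Whether logging_context_format_string or logging_default_format_string is
# used depends on whether the record carries a request_id, which the adapter
# fills in from the thread-local context. Defined but never called here.
def _example_context_formatter():
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(ContextFormatter(project='example', version='1.0'))
    logging.getLogger('example.ctx').addHandler(handler)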
class ColorHandler(logging.StreamHandler):
LEVEL_COLORS = {
logging.DEBUG: '\033[00;32m', # GREEN
logging.INFO: '\033[00;36m', # CYAN
logging.AUDIT: '\033[01;36m', # BOLD CYAN
logging.WARN: '\033[01;33m', # BOLD YELLOW
logging.ERROR: '\033[01;31m', # BOLD RED
logging.CRITICAL: '\033[01;31m', # BOLD RED
}
def format(self, record):
record.color = self.LEVEL_COLORS[record.levelno]
return logging.StreamHandler.format(self, record)
class DeprecatedConfig(Exception):
message = _("Fatal call to deprecated config: %(msg)s")
def __init__(self, msg):
super(Exception, self).__init__(self.message % dict(msg=msg))
|
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase, RequestFactory
from django.core.urlresolvers import reverse
from django.contrib.auth import get_user_model
from django.utils import timezone
from django.template import Template, Context
from django.utils.html import strip_tags
from ...core.tests import utils
from .models import CommentPoll, CommentPollChoice, CommentPollVote, PollMode
from .forms import PollVoteManyForm
from .utils.render_static import post_render_static_polls
from .utils import render
User = get_user_model()
class PollViewTest(TestCase):
def setUp(self):
utils.cache_clear()
self.user = utils.create_user()
self.user2 = utils.create_user()
self.category = utils.create_category()
self.topic = utils.create_topic(self.category, user=self.user)
self.topic2 = utils.create_topic(self.category, user=self.user2)
self.comment = utils.create_comment(topic=self.topic)
self.user_comment = utils.create_comment(topic=self.topic, user=self.user)
self.poll = CommentPoll.objects.create(comment=self.comment, name='foo')
self.poll_multi = CommentPoll.objects.create(comment=self.comment, name='bar', choice_max=2)
def test_poll_close_logged_in(self):
"""
User must be logged in
"""
response = self.client.post(reverse('spirit:comment:poll:close', kwargs={'pk': self.poll.pk, }),
{})
self.assertEqual(response.status_code, 302)
def test_poll_close_wrong_user(self):
"""
Trying to close another user's poll should return 404
"""
utils.login(self)
response = self.client.post(reverse('spirit:comment:poll:close', kwargs={'pk': self.poll.pk, }),
{})
self.assertEqual(response.status_code, 404)
def test_poll_close_get(self):
"""
GET, poll_close
"""
utils.login(self)
response = self.client.get(reverse('spirit:comment:poll:close', kwargs={'pk': self.poll.pk, }))
self.assertEqual(response.status_code, 405)
def test_poll_close_post(self):
"""
POST, poll_close
"""
utils.login(self)
poll = CommentPoll.objects.create(comment=self.user_comment, name='foo')
response = self.client.post(reverse('spirit:comment:poll:close', kwargs={'pk': poll.pk, }),
{})
expected_url = poll.get_absolute_url()
self.assertRedirects(response, expected_url, status_code=302, target_status_code=302)
self.assertTrue(CommentPoll.objects.get(pk=poll.pk).is_closed)
def test_poll_close_open_post(self):
"""
POST, poll_open
"""
utils.login(self)
poll = CommentPoll.objects.create(comment=self.user_comment, name='foo', close_at=timezone.now())
self.assertTrue(poll.is_closed)
response = self.client.post(reverse('spirit:comment:poll:open', kwargs={'pk': poll.pk, }),
{})
expected_url = poll.get_absolute_url()
self.assertRedirects(response, expected_url, status_code=302, target_status_code=302)
self.assertFalse(CommentPoll.objects.get(pk=poll.pk).is_closed)
def test_poll_vote_logged_in(self):
"""
User must be logged in
"""
response = self.client.post(reverse('spirit:comment:poll:vote', kwargs={'pk': self.poll.pk, }),
{})
expected_url = reverse('spirit:user:auth:login') + "?next=" + self.poll.get_absolute_url()
self.assertRedirects(response, expected_url, status_code=302)
def test_poll_vote_get(self):
"""
GET, poll_vote
POST is required
"""
utils.login(self)
response = self.client.get(reverse('spirit:comment:poll:vote', kwargs={'pk': self.poll.pk, }))
self.assertEqual(response.status_code, 405)
def test_poll_vote_post(self):
"""
POST, poll_vote
"""
utils.login(self)
choice = CommentPollChoice.objects.create(poll=self.poll, number=1, description="op1")
form_data = {'choices': choice.pk, }
response = self.client.post(reverse('spirit:comment:poll:vote', kwargs={'pk': self.poll.pk, }),
form_data)
expected_url = self.poll.get_absolute_url()
self.assertRedirects(response, expected_url, status_code=302, target_status_code=302)
self.assertEqual(len(CommentPollVote.objects.filter(choice=choice)), 1)
def test_poll_vote_post_invalid(self):
"""
POST, poll_vote
"""
utils.login(self)
response = self.client.post(reverse('spirit:comment:poll:vote', kwargs={'pk': self.poll.pk, }),
{}, follow=True)
self.assertEqual(len(response.context['messages']), 1) # error message
def test_poll_vote_post_invalid_redirect(self):
"""
POST, poll_vote
"""
utils.login(self)
response = self.client.post(reverse('spirit:comment:poll:vote', kwargs={'pk': self.poll.pk, }),
{})
expected_url = self.poll.get_absolute_url()
self.assertRedirects(response, expected_url, status_code=302, target_status_code=302)
def test_poll_vote_post_multi(self):
"""
Should be able to vote for multiple options
"""
utils.login(self)
choice_a = CommentPollChoice.objects.create(poll=self.poll_multi, number=1, description="op a")
choice_b = CommentPollChoice.objects.create(poll=self.poll_multi, number=2, description="op b")
CommentPollChoice.objects.create(poll=self.poll_multi, number=3, description="op c")
form_data = {'choices': [choice_a.pk, choice_b.pk]}
response = self.client.post(reverse('spirit:comment:poll:vote', kwargs={'pk': self.poll_multi.pk, }),
form_data)
expected_url = self.poll.get_absolute_url()
self.assertRedirects(response, expected_url, status_code=302, target_status_code=302)
self.assertEqual(len(CommentPollVote.objects.all()), 2)
self.assertEqual(len(CommentPollVote.objects.filter(choice=choice_a.pk)), 1)
self.assertEqual(len(CommentPollVote.objects.filter(choice=choice_b.pk)), 1)
def test_poll_vote_post_count(self):
"""
Should increase the vote counters
"""
utils.login(self)
choice_a = CommentPollChoice.objects.create(poll=self.poll_multi, number=1, description="op a")
choice_b = CommentPollChoice.objects.create(poll=self.poll_multi, number=2, description="op b")
choice_c = CommentPollChoice.objects.create(poll=self.poll_multi, number=3, description="op c")
form_data = {'choices': [choice_a.pk, choice_b.pk]}
response = self.client.post(
reverse('spirit:comment:poll:vote', kwargs={'pk': self.poll_multi.pk, }), form_data
)
expected_url = self.poll.get_absolute_url()
self.assertRedirects(response, expected_url, status_code=302, target_status_code=302)
self.assertEqual(CommentPollChoice.objects.get(pk=choice_a.pk).vote_count, 1)
self.assertEqual(CommentPollChoice.objects.get(pk=choice_b.pk).vote_count, 1)
self.assertEqual(CommentPollChoice.objects.get(pk=choice_c.pk).vote_count, 0)
form_data = {'choices': [choice_a.pk]}
response = self.client.post(
reverse('spirit:comment:poll:vote', kwargs={'pk': self.poll_multi.pk, }), form_data
)
expected_url = self.poll.get_absolute_url()
self.assertRedirects(response, expected_url, status_code=302, target_status_code=302)
self.assertEqual(CommentPollChoice.objects.get(pk=choice_a.pk).vote_count, 1)
self.assertEqual(CommentPollChoice.objects.get(pk=choice_b.pk).vote_count, 0)
self.assertEqual(CommentPollChoice.objects.get(pk=choice_c.pk).vote_count, 0)
def test_poll_voters_logged_in(self):
"""
User must be logged in
"""
poll_choice = CommentPollChoice.objects.create(poll=self.poll, number=1, description="op1")
response = self.client.get(reverse('spirit:comment:poll:voters', kwargs={'pk': poll_choice.pk, }))
self.assertEqual(response.status_code, 302)
def test_poll_voters(self):
"""
Should query choice voters
"""
poll_choice = CommentPollChoice.objects.create(poll=self.poll, number=1, description="op1")
poll_choice2 = CommentPollChoice.objects.create(poll=self.poll, number=2, description="op2")
vote = CommentPollVote.objects.create(voter=self.user, choice=poll_choice)
CommentPollVote.objects.create(voter=self.user2, choice=poll_choice2)
utils.login(self)
response = self.client.get(reverse('spirit:comment:poll:voters', kwargs={'pk': poll_choice.pk, }))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['choice'], poll_choice)
self.assertEqual(list(response.context['votes']), [vote])
def test_poll_voters_secret(self):
"""
Should forbid viewing the voters of a secret poll while it is not closed
"""
poll = CommentPoll.objects.create(comment=self.comment, name='foobar', mode=PollMode.SECRET)
poll_choice = CommentPollChoice.objects.create(poll=poll, number=1, description="op1")
utils.login(self)
response = self.client.get(reverse('spirit:comment:poll:voters', kwargs={'pk': poll_choice.pk, }))
self.assertEqual(response.status_code, 403)
def test_poll_voters_secret_closed(self):
"""
Should allow viewing the voters of a secret poll once it is closed
"""
yesterday = timezone.now() - timezone.timedelta(days=1)
poll = CommentPoll.objects.create(comment=self.comment, name='foobar',
mode=PollMode.SECRET, close_at=yesterday)
poll_choice = CommentPollChoice.objects.create(poll=poll, number=1, description="op1")
utils.login(self)
response = self.client.get(reverse('spirit:comment:poll:voters', kwargs={'pk': poll_choice.pk, }))
self.assertEqual(response.status_code, 200)
class PollFormTest(TestCase):
def setUp(self):
utils.cache_clear()
self.user = utils.create_user()
self.user2 = utils.create_user()
self.category = utils.create_category()
self.topic = utils.create_topic(self.category, user=self.user)
self.comment = utils.create_comment(topic=self.topic)
self.comment2 = utils.create_comment(topic=self.topic)
# Single choice
self.poll = CommentPoll.objects.create(comment=self.comment, name='foo')
self.poll_choice = CommentPollChoice.objects.create(poll=self.poll, number=1, description="op1")
self.poll_choice2 = CommentPollChoice.objects.create(poll=self.poll, number=2, description="op2")
self.poll_vote = CommentPollVote.objects.create(voter=self.user, choice=self.poll_choice)
self.poll_vote2 = CommentPollVote.objects.create(voter=self.user2, choice=self.poll_choice)
# ...poor man's prefetch
self.poll_choice.votes = [self.poll_vote]
self.poll.choices = [self.poll_choice, self.poll_choice2]
# Multi choice
self.poll_multi = CommentPoll.objects.create(comment=self.comment2, name='bar', choice_max=2)
self.poll_multi_choice = CommentPollChoice.objects.create(poll=self.poll_multi, number=1, description="op1")
self.poll_multi_choice2 = CommentPollChoice.objects.create(poll=self.poll_multi, number=2, description="op2")
self.poll_multi_choice3 = CommentPollChoice.objects.create(poll=self.poll_multi, number=3, description="op3")
self.poll_multi_vote = CommentPollVote.objects.create(voter=self.user, choice=self.poll_multi_choice)
self.poll_multi_vote2 = CommentPollVote.objects.create(voter=self.user, choice=self.poll_multi_choice2)
self.poll_multi_vote3 = CommentPollVote.objects.create(voter=self.user2, choice=self.poll_multi_choice)
# ...poor man's prefetch
self.poll_multi_choice.votes = [self.poll_multi_vote]
self.poll_multi_choice2.votes = [self.poll_multi_vote2]
self.poll_multi.choices = [self.poll_multi_choice, self.poll_multi_choice2]
def test_vote_load_initial_single(self):
"""
PollVoteManyForm: should load the user's vote as initial data (single choice)
"""
form = PollVoteManyForm(user=self.user, poll=self.poll)
form.load_initial()
self.assertEqual(form.initial, {'choices': self.poll_choice.pk, })
def test_vote_load_initial_multi(self):
"""
PollVoteManyForm: should load the user's votes as initial data (multiple choice)
"""
form = PollVoteManyForm(user=self.user, poll=self.poll_multi)
form.load_initial()
self.assertDictEqual(form.initial, {'choices': [self.poll_multi_choice.pk, self.poll_multi_choice2.pk], })
def test_vote_load_initial_empty(self):
"""
PollVoteManyForm: should load empty initial data when the user has not voted
"""
CommentPollVote.objects.all().delete()
self.poll_choice.votes = []
form = PollVoteManyForm(user=self.user, poll=self.poll)
form.load_initial()
self.assertEqual(form.initial, {})
def test_vote_load_initial_choice_limit(self):
"""
Load initial data for polls whose choice_max changed (multi to single, and single to multi)
"""
# multi to single
self.poll_multi.choice_max = 1
form = PollVoteManyForm(user=self.user, poll=self.poll_multi)
form.load_initial()
self.assertDictEqual(form.initial, {'choices': self.poll_multi_choice.pk, })
# single to multi
self.poll.choice_max = 2
form = PollVoteManyForm(user=self.user, poll=self.poll)
form.load_initial()
self.assertDictEqual(form.initial, {'choices': [self.poll_choice.pk, ], })
def test_vote_poll_closed(self):
"""
Can't vote on a closed poll
"""
self.poll.close_at = timezone.now()
self.poll.save()
form_data = {'choices': self.poll_choice.pk, }
form = PollVoteManyForm(user=self.user, poll=self.poll, data=form_data)
self.assertFalse(form.is_valid())
def test_create_vote_single(self):
"""
PollVoteManyForm: should create a vote (single choice)
"""
CommentPollVote.objects.all().delete()
form_data = {'choices': self.poll_choice.pk, }
form = PollVoteManyForm(user=self.user, poll=self.poll, data=form_data)
self.assertTrue(form.is_valid())
form.save_m2m()
self.assertEqual(len(CommentPollVote.objects.all()), 1)
self.assertEqual(len(CommentPollVote.objects.filter(choice=self.poll_choice, is_removed=False)), 1)
def test_create_vote_multi(self):
"""
PollVoteManyForm: should create votes (multiple choice)
"""
CommentPollVote.objects.all().delete()
self.poll_multi_choice.votes = []
self.poll_multi_choice2.votes = []
form_data = {'choices': [self.poll_multi_choice.pk, self.poll_multi_choice2.pk], }
form = PollVoteManyForm(user=self.user, poll=self.poll_multi, data=form_data)
self.assertTrue(form.is_valid())
def test_create_vote_multi_invalid(self):
"""
Should not allow selecting more choices than choice_max
"""
CommentPollVote.objects.all().delete()
self.poll_multi_choice.votes = []
self.poll_multi_choice2.votes = []
form_data = {'choices': [self.poll_multi_choice.pk,
self.poll_multi_choice2.pk,
self.poll_multi_choice3.pk], }
form = PollVoteManyForm(user=self.user, poll=self.poll_multi, data=form_data)
self.assertFalse(form.is_valid())
def test_update_vote_single(self):
"""
PollVoteManyForm: should update the user's vote (single choice)
"""
self.assertEqual(len(CommentPollVote.objects.filter(choice=self.poll_choice2, is_removed=False)), 0)
self.assertEqual(len(CommentPollVote.objects.filter(choice=self.poll_choice, is_removed=False)), 2)
form_data = {'choices': self.poll_choice2.pk, }
form = PollVoteManyForm(user=self.user, poll=self.poll, data=form_data)
self.assertTrue(form.is_valid())
form.save_m2m()
self.assertEqual(len(CommentPollVote.objects.filter(choice=self.poll_choice2, is_removed=False)), 1)
self.assertEqual(len(CommentPollVote.objects.filter(choice=self.poll_choice, is_removed=False)), 1)
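# --- Illustrative sketch (not part of the original tests) --------------------
# The vote flow exercised by PollFormTest above, roughly as a view would drive
# it; the arguments are placeholders supplied by the caller. Defined but never
# called here.
def _example_vote_flow(user, poll, post_data):
    form = PollVoteManyForm(user=user, poll=poll, data=post_data)
    if form.is_valid():
        form.save_m2m()   # records the user's vote(s), replacing previous ones
        return True
    return False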
class CommentPollTemplateTagsTest(TestCase):
def setUp(self):
utils.cache_clear()
self.user = utils.create_user()
self.category = utils.create_category()
self.topic = utils.create_topic(category=self.category)
self.user_comment = utils.create_comment(topic=self.topic, user=self.user, comment_html="<poll name=foo>")
self.user_poll = CommentPoll.objects.create(comment=self.user_comment, name='foo')
self.user_comment_with_polls = self.user_comment.__class__.objects\
.filter(pk=self.user_comment.pk)\
.with_polls(self.user)\
.first()
self.request = RequestFactory().get('/')
self.request.user = self.user
def test_render_polls_form(self):
"""
Should display poll vote form
"""
res = []
def mock_render_to_string(tlt, ctx):
res.append(tlt)
res.append(ctx)
org_render_to_string, render.render_to_string = render.render_to_string, mock_render_to_string
try:
render.render_polls(self.user_comment_with_polls, self.request, 'csrf_token_foo')
self.assertEqual(len(res), 2)
template, context = res[0], res[1]
self.assertEqual(template, 'spirit/comment/poll/_form.html')
self.assertEqual(context['form'].poll, self.user_poll)
self.assertIsInstance(context['poll'], CommentPoll)
self.assertEqual(context['user'], self.user)
self.assertEqual(context['comment'], self.user_comment_with_polls)
self.assertEqual(context['request'], self.request)
self.assertEqual(context['csrf_token'], 'csrf_token_foo')
finally:
render.render_to_string = org_render_to_string
def test_render_polls_template_form(self):
"""
Should display poll vote form
"""
out = Template(
"{% load spirit_tags %}"
"{% post_render_comment comment=comment %}"
).render(Context({'comment': self.user_comment_with_polls, 'request': self.request, 'csrf_token': 'foo'}))
self.assertNotEqual(out.strip(), "")
self.assertTrue("<poll" not in out)
form_id = 'id="p%s"' % self.user_poll.pk
self.assertTrue(form_id in out)
show_link = '?show_poll=%(pk)s#p%(pk)s' % {'pk': self.user_poll.pk}
self.assertTrue(show_link in out)
def test_render_polls_template_form_not_author(self):
"""
Should display the poll vote form to a user who is not the comment author
"""
request = RequestFactory().get('/')
request.user = utils.create_user()
out = Template(
"{% load spirit_tags %}"
"{% post_render_comment comment=comment %}"
).render(Context({'comment': self.user_comment_with_polls, 'request': request, 'csrf_token': 'foo'}))
self.assertNotEqual(out.strip(), "")
form_id = 'id="p%s"' % self.user_poll.pk
self.assertTrue(form_id in out)
def test_render_polls_template_form_close(self):
"""
Should display the close button
"""
out = Template(
"{% load spirit_tags %}"
"{% post_render_comment comment=comment %}"
).render(Context({'comment': self.user_comment_with_polls, 'request': self.request, 'csrf_token': 'foo'}))
self.assertNotEqual(out.strip(), "")
close_link = reverse('spirit:comment:poll:close', kwargs={'pk': self.user_poll.pk})
self.assertTrue(close_link in out)
def test_render_polls_template_form_close_not_author(self):
"""
Should *not* display the close button to a user who is not the poll author
"""
request = RequestFactory().get('/')
request.user = utils.create_user()
out = Template(
"{% load spirit_tags %}"
"{% post_render_comment comment=comment %}"
).render(Context({'comment': self.user_comment_with_polls, 'request': request, 'csrf_token': 'foo'}))
self.assertNotEqual(out.strip(), "")
close_link = reverse('spirit:comment:poll:close', kwargs={'pk': self.user_poll.pk})
self.assertTrue(close_link not in out)
def test_render_polls_template_form_open(self):
"""
Should display the open button
"""
self.user_comment_with_polls.polls[0].close_at = timezone.now() # renders results.html
out = Template(
"{% load spirit_tags %}"
"{% post_render_comment comment=comment %}"
).render(Context({'comment': self.user_comment_with_polls, 'request': self.request, 'csrf_token': 'foo'}))
self.assertNotEqual(out.strip(), "")
open_link = reverse('spirit:comment:poll:open', kwargs={'pk': self.user_poll.pk})
self.assertTrue(open_link in out)
def test_render_polls_secret(self):
"""
Should not display the 'Show results' link when the poll is secret and not yet closed
"""
comment = utils.create_comment(topic=self.topic, comment_html="<poll name=bar>")
CommentPoll.objects.create(comment=comment, name='bar', mode=PollMode.SECRET)
user_comment_with_polls = comment.__class__.objects\
.filter(pk=comment.pk)\
.with_polls(self.user)\
.first()
out = Template(
"{% load spirit_tags %}"
"{% post_render_comment comment=comment %}"
).render(Context({'comment': user_comment_with_polls, 'request': self.request, 'csrf_token': 'foo'}))
self.assertNotEqual(out.strip(), "")
self.assertFalse('Show results' in out)
self.assertTrue('form' in out)
def test_render_polls_secret_closed(self):
"""
Should display the results when the poll is secret and closed
"""
comment = utils.create_comment(topic=self.topic, comment_html="<poll name=bar>")
yesterday = timezone.now() - timezone.timedelta(days=1)
CommentPoll.objects.create(comment=comment, name='bar', mode=PollMode.SECRET, close_at=yesterday)
user_comment_with_polls = comment.__class__.objects\
.filter(pk=comment.pk)\
.with_polls(self.user)\
.first()
out = Template(
"{% load spirit_tags %}"
"{% post_render_comment comment=comment %}"
).render(Context({'comment': user_comment_with_polls, 'request': self.request, 'csrf_token': 'foo'}))
self.assertNotEqual(out.strip(), "")
self.assertFalse('show_poll=' in out)
self.assertFalse('form' in out)
self.assertTrue('comment-poll' in out)
class PollModelsTest(TestCase):
def setUp(self):
utils.cache_clear()
self.user = utils.create_user()
self.category = utils.create_category()
self.topic = utils.create_topic(category=self.category, user=self.user)
self.comment = utils.create_comment(topic=self.topic)
self.poll = CommentPoll.objects.create(comment=self.comment, name='foo')
self.choice = CommentPollChoice.objects.create(poll=self.poll, number=1, description=1)
self.vote = CommentPollVote.objects.create(choice=self.choice, voter=self.user)
# Kinda like comment.with_polls()
self.poll.choices = list(CommentPollChoice.objects.filter(poll=self.poll))
for c in self.poll.choices:
c.votes = list(CommentPollVote.objects.filter(choice=c, voter=self.user))
def test_poll_is_multiple_choice(self):
"""
Should be true when choice_max > 1
"""
poll = CommentPoll.objects.create(comment=self.comment, name='bar', choice_max=2)
self.assertFalse(self.poll.is_multiple_choice)
self.assertTrue(poll.is_multiple_choice)
def test_poll_has_choice_min(self):
"""
Should be true when choice_min > 1
"""
poll = CommentPoll.objects.create(comment=self.comment, name='bar', choice_min=2)
self.assertFalse(self.poll.has_choice_min)
self.assertTrue(poll.has_choice_min)
def test_poll_is_closed(self):
"""
Should be true when close_at is in the past
"""
yesterday = timezone.now() - timezone.timedelta(days=1)
tomorrow = timezone.now() + timezone.timedelta(days=1)
poll_old = CommentPoll.objects.create(comment=self.comment, name='bar', close_at=yesterday)
poll_new = CommentPoll.objects.create(comment=self.comment, name='bar2', close_at=tomorrow)
self.assertFalse(self.poll.is_closed)
self.assertTrue(poll_old.is_closed)
self.assertFalse(poll_new.is_closed)
def test_poll_has_user_voted(self):
"""
Should be true when the user has voted
"""
poll = CommentPoll.objects.create(comment=self.comment, name='bar')
CommentPollChoice.objects.create(poll=poll, number=1, description=1)
poll.choices = list(CommentPollChoice.objects.filter(poll=poll))
for c in poll.choices:
c.votes = []
self.assertTrue(self.poll.has_user_voted)
self.assertFalse(poll.has_user_voted)
def test_poll_mode_txt(self):
"""
Should return the mode description
"""
poll = CommentPoll.objects.create(comment=self.comment, name='bar')
self.assertEqual(poll.mode_txt, 'default')
poll = CommentPoll.objects.create(comment=self.comment, name='bar2', mode=PollMode.SECRET)
self.assertEqual(poll.mode_txt, 'secret')
def test_poll_total_votes(self):
"""
Should return the total votes
"""
poll = CommentPoll.objects.create(comment=self.comment, name='bar')
CommentPollChoice.objects.create(poll=poll, number=1, description='foo', vote_count=5)
CommentPollChoice.objects.create(poll=poll, number=2, description='bar', vote_count=5)
poll.choices = list(CommentPollChoice.objects.filter(poll=poll))
self.assertEqual(poll.total_votes, 10)
def test_poll_is_secret(self):
"""
Should return whether the poll is secret or not
"""
poll = CommentPoll.objects.create(comment=self.comment, name='bar')
self.assertFalse(poll.is_secret)
poll.mode = PollMode.SECRET
self.assertTrue(poll.is_secret)
def test_poll_can_show_results(self):
"""
Should return whether the poll results can be shown or not depending on the mode
"""
poll = CommentPoll.objects.create(comment=self.comment, name='bar')
self.assertTrue(poll.can_show_results)
poll.mode = PollMode.SECRET
self.assertFalse(poll.can_show_results)
yesterday = timezone.now() - timezone.timedelta(days=1)
poll.close_at = yesterday
self.assertTrue(poll.can_show_results)
def test_poll_update_or_create_many(self):
"""
Should create or update many polls for a given comment
"""
poll_raw = {'name': 'foo_raw', 'title': 'foo', 'choice_min': 2,
'choice_max': 2, 'close_at': timezone.now(), 'mode': PollMode.SECRET}
CommentPoll.update_or_create_many(comment=self.comment, polls_raw=[poll_raw])
poll = CommentPoll.objects.all().order_by('pk').last()
self.assertEqual(poll.name, poll_raw['name'])
self.assertEqual(poll.title, poll_raw['title'])
self.assertEqual(poll.choice_min, poll_raw['choice_min'])
self.assertEqual(poll.choice_max, poll_raw['choice_max'])
self.assertEqual(poll.close_at, poll_raw['close_at'])
self.assertEqual(poll.mode, poll_raw['mode'])
# Update
CommentPoll.update_or_create_many(comment=self.comment, polls_raw=[{'name': poll.name, 'title': 'bar'}])
poll_updated = CommentPoll.objects.all().order_by('pk').last()
self.assertEqual(poll.pk, poll_updated.pk)
self.assertEqual(poll_updated.title, 'bar')
def test_poll_update_or_create_many_update_un_remove(self):
"""
Should mark the poll as not removed on update
"""
poll = CommentPoll.objects.create(comment=self.comment, name='foo_rm', is_removed=True)
CommentPoll.update_or_create_many(comment=poll.comment, polls_raw=[{'name': poll.name}])
poll_updated = CommentPoll.objects.all().order_by('pk').last()
self.assertEqual(poll.pk, poll_updated.pk)
self.assertFalse(poll_updated.is_removed)
def test_poll_choice_vote(self):
"""
Should return the user's vote for a given choice
"""
choice = CommentPollChoice.objects.create(poll=self.poll, number=5, description="foobar")
vote = CommentPollVote.objects.create(choice=choice, voter=self.user)
choice.votes = list(CommentPollVote.objects.filter(choice=choice, voter=self.user))
self.assertEqual(choice.vote, vote)
choice.votes = []
self.assertIsNone(choice.vote)
del choice.votes
self.assertIsNone(choice.vote)
choice.votes = [vote, vote]
self.assertRaises(AssertionError, lambda: choice.vote)
def test_poll_choice_votes_percentage(self):
"""
Should return the percentage of votes for a choice
"""
poll = CommentPoll.objects.create(comment=self.comment, name='percentage')
choice = CommentPollChoice.objects.create(poll=poll, number=1, description="foobar", vote_count=1)
poll.total_votes = 2
self.assertEqual(choice.votes_percentage, 50)
poll.total_votes = 3
self.assertEqual('{:.2f}'.format(choice.votes_percentage), '33.33')
poll.total_votes = 0
self.assertEqual(choice.votes_percentage, 0)
def test_poll_choice_increase_vote_count(self):
"""
Should increase the vote count of all choices for a given user and poll
"""
poll = CommentPoll.objects.create(comment=self.comment, name='percentage')
choice = CommentPollChoice.objects.create(poll=poll, number=1, description="foobar")
choice2 = CommentPollChoice.objects.create(poll=poll, number=2, description="foobar")
CommentPollVote.objects.create(choice=choice, voter=self.user)
CommentPollVote.objects.create(choice=choice2, voter=self.user)
user2 = utils.create_user()
CommentPollVote.objects.create(choice=choice, voter=user2)
CommentPollChoice.increase_vote_count(poll, self.user)
self.assertEqual(CommentPollChoice.objects.get(pk=self.choice.pk).vote_count, 0)
self.assertEqual(CommentPollChoice.objects.get(pk=choice.pk).vote_count, 1)
self.assertEqual(CommentPollChoice.objects.get(pk=choice2.pk).vote_count, 1)
CommentPollChoice.objects.filter(pk=choice.pk).update(is_removed=True)
CommentPollChoice.increase_vote_count(poll, self.user)
self.assertEqual(CommentPollChoice.objects.get(pk=choice.pk).vote_count, 1)
self.assertEqual(CommentPollChoice.objects.get(pk=choice2.pk).vote_count, 2)
def test_poll_choice_decrease_vote_count(self):
"""
Should decrease the vote count of all choices for a given user and poll
"""
poll = CommentPoll.objects.create(comment=self.comment, name='percentage')
choice = CommentPollChoice.objects.create(poll=poll, number=1, description="foobar", vote_count=2)
choice2 = CommentPollChoice.objects.create(poll=poll, number=2, description="foobar", vote_count=2)
CommentPollVote.objects.create(choice=choice, voter=self.user)
CommentPollVote.objects.create(choice=choice2, voter=self.user)
user2 = utils.create_user()
CommentPollVote.objects.create(choice=choice, voter=user2)
CommentPollChoice.decrease_vote_count(poll, self.user)
self.assertEqual(CommentPollChoice.objects.get(pk=self.choice.pk).vote_count, 0)
self.assertEqual(CommentPollChoice.objects.get(pk=choice.pk).vote_count, 1)
self.assertEqual(CommentPollChoice.objects.get(pk=choice2.pk).vote_count, 1)
CommentPollChoice.objects.filter(pk=choice.pk).update(is_removed=True)
CommentPollChoice.decrease_vote_count(poll, self.user)
self.assertEqual(CommentPollChoice.objects.get(pk=choice.pk).vote_count, 1)
self.assertEqual(CommentPollChoice.objects.get(pk=choice2.pk).vote_count, 0)
def test_poll_choice_update_or_create_many(self):
"""
Should create or update many choices for a given poll
"""
choice_raw = {'poll_name': 'foo', 'number': 2, 'description': '2 bar'}
CommentPollChoice.update_or_create_many(comment=self.comment, choices_raw=[choice_raw])
choice = CommentPollChoice.objects.all().order_by('pk').last()
self.assertTrue(CommentPollChoice.objects.get(pk=self.choice.pk).is_removed)
self.assertEqual(choice.poll, self.poll)
self.assertEqual(choice.number, 2)
self.assertEqual(choice.description, '2 bar')
self.assertFalse(choice.is_removed)
# Update
choice_raw2 = {'poll_name': 'foo', 'number': 1, 'description': '1 bar'}
choice_raw['description'] = '2 foo'
CommentPollChoice.update_or_create_many(comment=self.comment, choices_raw=[choice_raw, choice_raw2])
choice_updated = CommentPollChoice.objects.all().order_by('pk').last()
self.assertFalse(CommentPollChoice.objects.get(pk=self.choice.pk).is_removed)
self.assertEqual(choice_updated.poll, self.poll)
self.assertEqual(choice_updated.number, 2)
self.assertEqual(choice_updated.description, '2 foo')
self.assertFalse(choice.is_removed)
def test_poll_choice_update_or_create_many_removed_poll(self):
"""
Should raise a KeyError if the poll is_removed
"""
CommentPoll.objects.filter(pk=self.poll.pk).update(is_removed=True)
choice_raw = {'poll_name': 'foo', 'number': 2, 'description': '2 bar'}
self.assertRaises(KeyError, CommentPollChoice.update_or_create_many,
comment=self.comment, choices_raw=[choice_raw])
class PollUtilsTest(TestCase):
def setUp(self):
utils.cache_clear()
self.user = utils.create_user()
self.category = utils.create_category()
self.topic = utils.create_topic(category=self.category, user=self.user)
self.comment = utils.create_comment(topic=self.topic, comment_html="<poll name=foo>")
self.poll = CommentPoll.objects.create(comment=self.comment, name='foo', title="my poll")
self.choice = CommentPollChoice.objects.create(poll=self.poll, number=1, description="choice 1")
self.choice = CommentPollChoice.objects.create(poll=self.poll, number=2, description="choice 2")
def test_post_render_static_polls(self):
"""
Should render the static polls
"""
comment_html = post_render_static_polls(self.comment)
self.assertTrue('my poll' in comment_html)
comment_parts = [
l.strip()
for l in strip_tags(comment_html).splitlines()
if l.strip()
]
self.assertEqual(comment_parts, [
'my poll',
'#1 choice 1',
'#2 choice 2',
'Name: foo, choice selection: from 1 up to 1, mode: default'
])
def test_post_render_static_polls_many(self):
"""
Should render multiple static polls
"""
comment = utils.create_comment(topic=self.topic, comment_html="<poll name=foo>\n<poll name=bar>")
CommentPoll.objects.create(comment=comment, name='foo', title="my poll")
CommentPoll.objects.create(comment=comment, name='bar', title="my other poll")
comment_html = post_render_static_polls(comment)
self.assertTrue('my poll' in comment_html)
self.assertTrue('my other poll' in comment_html)
def test_post_render_static_polls_close_at(self):
"""
Should render the static polls with close_at
"""
now = timezone.now()
comment = utils.create_comment(topic=self.topic, comment_html="<poll name=foo>")
CommentPoll.objects.create(comment=comment, name='foo', title="my poll", close_at=now)
comment_html = post_render_static_polls(comment)
self.assertTrue('close at:' in comment_html)
self.assertTrue('Name:' in comment_html)
self.assertTrue('choice selection:' in comment_html)
self.assertTrue('mode:' in comment_html)
def test_post_render_static_polls_no_poll(self):
"""
Should render the comment with no poll
"""
comment = utils.create_comment(topic=self.topic, comment_html="foo")
comment_html = post_render_static_polls(comment)
self.assertEqual(comment_html, 'foo')
def test_post_render_static_polls_removed_poll(self):
"""
Should not render removed polls
"""
self.poll.is_removed = True
self.poll.save()
comment_html = post_render_static_polls(self.comment)
self.assertEqual(comment_html, "<poll name=foo>")
|
|
# Generated by Haxe 3.3.0
import math as python_lib_Math
import math as Math
import requests as Requests
import functools as python_lib_Functools
import inspect as python_lib_Inspect
import json as python_lib_Json
import random as python_lib_Random
class _hx_AnonObject:
def __init__(self, fields):
self.__dict__ = fields
class Enum:
_hx_class_name = "Enum"
__slots__ = ("tag", "index", "params")
_hx_fields = ["tag", "index", "params"]
_hx_methods = ["__str__"]
def __init__(self,tag,index,params):
self.tag = tag
self.index = index
self.params = params
def __str__(self):
if (self.params is None):
return self.tag
else:
return (((HxOverrides.stringOrNull(self.tag) + "(") + HxOverrides.stringOrNull(",".join([python_Boot.toString1(x1,'') for x1 in self.params]))) + ")")
class KwCall:
_hx_class_name = "KwCall"
__slots__ = ()
class IterableAdaptor:
_hx_class_name = "IterableAdaptor"
__slots__ = ()
_hx_statics = ["iterator"]
@staticmethod
def iterator(it):
_this_x = it
return python_HaxeIterator(_this_x.__iter__())
class IteratorAdaptor:
_hx_class_name = "IteratorAdaptor"
__slots__ = ()
_hx_statics = ["iterator"]
@staticmethod
def iterator(it):
return python_HaxeIterator(it)
class DynamicIterationAdaptor:
_hx_class_name = "DynamicIterationAdaptor"
__slots__ = ()
_hx_statics = ["iterator"]
@staticmethod
def iterator(it):
_this_x = it
return python_HaxeIterator(_this_x.__iter__())
class Reflect:
_hx_class_name = "Reflect"
__slots__ = ()
_hx_statics = ["field"]
@staticmethod
def field(o,field):
return python_Boot.field(o,field)
class Script:
_hx_class_name = "Script"
__slots__ = ()
_hx_statics = ["main", "create_parameters"]
@staticmethod
def main():
_g = haxe_ds_StringMap()
_g.h["lat"] = 37.78
_g.h["lon"] = -122.41
parameters = _g
kwArgs = dict()
kwArgs["params"] = Script.create_parameters(parameters)
this1 = kwArgs
response = Requests.get("http://api.open-notify.org/iss-pass.json",**this1)
response_string = "{\"response\": [{\"duration\": 369, \"risetime\": 1441456672}, {\"duration\": 626, \"risetime\": 1441462284}, {\"duration\": 581, \"risetime\": 1441468104}, {\"duration\": 482, \"risetime\": 1441474000}, {\"duration\": 509, \"risetime\": 1441479853}], \"message\": \"success\", \"request\": {\"longitude\": -122.41, \"altitude\": 100, \"latitude\": 37.78, \"datetime\": 1441417753, \"passes\": 5}}"
data = python_lib_Json.loads(response_string)
first_pass_duration = data['response'][0]['duration']
print(str(first_pass_duration))
@staticmethod
def create_parameters(input_parameters):
output_parameters = ""
tmp = input_parameters.keys()
while tmp.hasNext():
key = tmp.next()
output_parameters = (("null" if output_parameters is None else output_parameters) + HxOverrides.stringOrNull(((((("null" if key is None else key) + "=") + Std.string(input_parameters.h.get(key,None))) + "&"))))
output_parameters = HxString.substr(output_parameters,0,(len(output_parameters) - 1))
return output_parameters
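# --- Illustrative sketch (not part of the Haxe-generated output) -------------
# What Script.create_parameters() produces for the map built in main(); key
# order follows the underlying dict iteration order. Defined but never called.
def _example_create_parameters():
    m = haxe_ds_StringMap()
    m.h["lat"] = 37.78
    m.h["lon"] = -122.41
    return Script.create_parameters(m)  # -> "lat=37.78&lon=-122.41"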
class Std:
_hx_class_name = "Std"
__slots__ = ()
_hx_statics = ["string"]
@staticmethod
def string(s):
return python_Boot.toString1(s,"")
class haxe_IMap:
_hx_class_name = "haxe.IMap"
__slots__ = ()
class haxe_ds_StringMap:
_hx_class_name = "haxe.ds.StringMap"
__slots__ = ("h",)
_hx_fields = ["h"]
_hx_methods = ["keys"]
def __init__(self):
self.h = dict()
def keys(self):
return python_HaxeIterator(iter(self.h.keys()))
class python_Boot:
_hx_class_name = "python.Boot"
__slots__ = ()
_hx_statics = ["keywords", "toString1", "fields", "simpleField", "field", "getInstanceFields", "getSuperClass", "getClassFields", "prefixLength", "unhandleKeywords"]
@staticmethod
def toString1(o,s):
if (o is None):
return "null"
if isinstance(o,str):
return o
if (s is None):
s = ""
if (len(s) >= 5):
return "<...>"
if isinstance(o,bool):
if o:
return "true"
else:
return "false"
if isinstance(o,int):
return str(o)
if isinstance(o,float):
try:
if (o == int(o)):
return str(Math.floor((o + 0.5)))
else:
return str(o)
except Exception as _hx_e:
_hx_e1 = _hx_e
e = _hx_e1
return str(o)
if isinstance(o,list):
o1 = o
l = len(o1)
st = "["
s = (("null" if s is None else s) + "\t")
_g1 = 0
_g = l
while (_g1 < _g):
i = _g1
_g1 = (_g1 + 1)
prefix = ""
if (i > 0):
prefix = ","
st = (("null" if st is None else st) + HxOverrides.stringOrNull(((("null" if prefix is None else prefix) + HxOverrides.stringOrNull(python_Boot.toString1((o1[i] if i >= 0 and i < len(o1) else None),s))))))
st = (("null" if st is None else st) + "]")
return st
try:
if hasattr(o,"toString"):
return o.toString()
except Exception as _hx_e:
_hx_e1 = _hx_e
pass
if (python_lib_Inspect.isfunction(o) or python_lib_Inspect.ismethod(o)):
return "<function>"
if hasattr(o,"__class__"):
if isinstance(o,_hx_AnonObject):
toStr = None
try:
fields = python_Boot.fields(o)
_g2 = []
_g11 = 0
while (_g11 < len(fields)):
f = (fields[_g11] if _g11 >= 0 and _g11 < len(fields) else None)
_g11 = (_g11 + 1)
tmp = (("" + ("null" if f is None else f)) + " : ")
tmp1 = python_Boot.toString1(python_Boot.simpleField(o,f),(("null" if s is None else s) + "\t"))
_g2.append((("null" if tmp is None else tmp) + ("null" if tmp1 is None else tmp1)))
fieldsStr = _g2
toStr = (("{ " + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr]))) + " }")
except Exception as _hx_e:
_hx_e1 = _hx_e
e2 = _hx_e1
return "{ ... }"
if (toStr is None):
return "{ ... }"
else:
return toStr
if isinstance(o,Enum):
o2 = o
l1 = len(o2.params)
hasParams = (l1 > 0)
if hasParams:
paramsStr = ""
_g12 = 0
_g3 = l1
while (_g12 < _g3):
i1 = _g12
_g12 = (_g12 + 1)
prefix1 = ""
if (i1 > 0):
prefix1 = ","
paramsStr = (("null" if paramsStr is None else paramsStr) + HxOverrides.stringOrNull(((("null" if prefix1 is None else prefix1) + HxOverrides.stringOrNull(python_Boot.toString1((o2.params[i1] if i1 >= 0 and i1 < len(o2.params) else None),s))))))
return (((HxOverrides.stringOrNull(o2.tag) + "(") + ("null" if paramsStr is None else paramsStr)) + ")")
else:
return o2.tag
if hasattr(o,"_hx_class_name"):
if (o.__class__.__name__ != "type"):
fields1 = python_Boot.getInstanceFields(o)
_g4 = []
_g13 = 0
while (_g13 < len(fields1)):
f1 = (fields1[_g13] if _g13 >= 0 and _g13 < len(fields1) else None)
_g13 = (_g13 + 1)
tmp2 = (("" + ("null" if f1 is None else f1)) + " : ")
tmp3 = python_Boot.toString1(python_Boot.simpleField(o,f1),(("null" if s is None else s) + "\t"))
_g4.append((("null" if tmp2 is None else tmp2) + ("null" if tmp3 is None else tmp3)))
fieldsStr1 = _g4
toStr1 = (((HxOverrides.stringOrNull(o._hx_class_name) + "( ") + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr1]))) + " )")
return toStr1
else:
fields2 = python_Boot.getClassFields(o)
_g5 = []
_g14 = 0
while (_g14 < len(fields2)):
f2 = (fields2[_g14] if _g14 >= 0 and _g14 < len(fields2) else None)
_g14 = (_g14 + 1)
tmp4 = (("" + ("null" if f2 is None else f2)) + " : ")
tmp5 = python_Boot.toString1(python_Boot.simpleField(o,f2),(("null" if s is None else s) + "\t"))
_g5.append((("null" if tmp4 is None else tmp4) + ("null" if tmp5 is None else tmp5)))
fieldsStr2 = _g5
toStr2 = (((("#" + HxOverrides.stringOrNull(o._hx_class_name)) + "( ") + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr2]))) + " )")
return toStr2
if (o == str):
return "#String"
if (o == list):
return "#Array"
if callable(o):
return "function"
try:
if hasattr(o,"__repr__"):
return o.__repr__()
except Exception as _hx_e:
_hx_e1 = _hx_e
pass
if hasattr(o,"__str__"):
return o.__str__([])
if hasattr(o,"__name__"):
return o.__name__
return "???"
else:
return str(o)
@staticmethod
def fields(o):
a = []
if (o is not None):
if hasattr(o,"_hx_fields"):
fields = o._hx_fields
return list(fields)
if isinstance(o,_hx_AnonObject):
d = o.__dict__
keys = d.keys()
handler = python_Boot.unhandleKeywords
for k in keys:
a.append(handler(k))
elif hasattr(o,"__dict__"):
d1 = o.__dict__
keys1 = d1.keys()
for k in keys1:
a.append(k)
return a
@staticmethod
def simpleField(o,field):
if (field is None):
return None
field1 = (("_hx_" + field) if ((field in python_Boot.keywords)) else (("_hx_" + field) if (((((len(field) > 2) and ((ord(field[0]) == 95))) and ((ord(field[1]) == 95))) and ((ord(field[(len(field) - 1)]) != 95)))) else field))
if hasattr(o,field1):
return getattr(o,field1)
else:
return None
@staticmethod
def field(o,field):
if (field is None):
return None
_hx_local_0 = len(field)
if (_hx_local_0 == 10):
if (field == "charCodeAt"):
if isinstance(o,str):
s1 = o
def _hx_local_1(a11):
return HxString.charCodeAt(s1,a11)
return _hx_local_1
elif (_hx_local_0 == 11):
if (field == "lastIndexOf"):
if isinstance(o,str):
s3 = o
def _hx_local_2(a15):
return HxString.lastIndexOf(s3,a15)
return _hx_local_2
elif isinstance(o,list):
a4 = o
def _hx_local_3(x4):
return python_internal_ArrayImpl.lastIndexOf(a4,x4)
return _hx_local_3
elif (field == "toLowerCase"):
if isinstance(o,str):
s7 = o
def _hx_local_4():
return HxString.toLowerCase(s7)
return _hx_local_4
elif (field == "toUpperCase"):
if isinstance(o,str):
s9 = o
def _hx_local_5():
return HxString.toUpperCase(s9)
return _hx_local_5
elif (_hx_local_0 == 9):
if (field == "substring"):
if isinstance(o,str):
s6 = o
def _hx_local_6(a19):
return HxString.substring(s6,a19)
return _hx_local_6
elif (_hx_local_0 == 4):
if (field == "copy"):
if isinstance(o,list):
def _hx_local_7():
return list(o)
return _hx_local_7
elif (field == "join"):
if isinstance(o,list):
def _hx_local_8(sep):
return sep.join([python_Boot.toString1(x1,'') for x1 in o])
return _hx_local_8
elif (field == "push"):
if isinstance(o,list):
x7 = o
def _hx_local_9(e):
return python_internal_ArrayImpl.push(x7,e)
return _hx_local_9
elif (field == "sort"):
if isinstance(o,list):
x11 = o
def _hx_local_10(f2):
python_internal_ArrayImpl.sort(x11,f2)
return _hx_local_10
elif (_hx_local_0 == 5):
if (field == "shift"):
if isinstance(o,list):
x9 = o
def _hx_local_11():
return python_internal_ArrayImpl.shift(x9)
return _hx_local_11
elif (field == "slice"):
if isinstance(o,list):
x10 = o
def _hx_local_12(a16):
return python_internal_ArrayImpl.slice(x10,a16)
return _hx_local_12
elif (field == "split"):
if isinstance(o,str):
s4 = o
def _hx_local_13(d):
return HxString.split(s4,d)
return _hx_local_13
elif (_hx_local_0 == 7):
if (field == "indexOf"):
if isinstance(o,str):
s2 = o
def _hx_local_14(a13):
return HxString.indexOf(s2,a13)
return _hx_local_14
elif isinstance(o,list):
a = o
def _hx_local_15(x1):
return python_internal_ArrayImpl.indexOf(a,x1)
return _hx_local_15
elif (field == "reverse"):
if isinstance(o,list):
a5 = o
def _hx_local_16():
python_internal_ArrayImpl.reverse(a5)
return _hx_local_16
elif (field == "unshift"):
if isinstance(o,list):
x14 = o
def _hx_local_17(e2):
python_internal_ArrayImpl.unshift(x14,e2)
return _hx_local_17
elif (_hx_local_0 == 3):
if (field == "map"):
if isinstance(o,list):
x5 = o
def _hx_local_18(f1):
return python_internal_ArrayImpl.map(x5,f1)
return _hx_local_18
elif (field == "pop"):
if isinstance(o,list):
x6 = o
def _hx_local_19():
return python_internal_ArrayImpl.pop(x6)
return _hx_local_19
elif (_hx_local_0 == 8):
if (field == "iterator"):
if isinstance(o,list):
x3 = o
def _hx_local_20():
return python_internal_ArrayImpl.iterator(x3)
return _hx_local_20
elif (field == "toString"):
if isinstance(o,str):
s8 = o
def _hx_local_21():
return HxString.toString(s8)
return _hx_local_21
elif isinstance(o,list):
x13 = o
def _hx_local_22():
return python_internal_ArrayImpl.toString(x13)
return _hx_local_22
elif (_hx_local_0 == 6):
if (field == "charAt"):
if isinstance(o,str):
s = o
def _hx_local_23(a1):
return HxString.charAt(s,a1)
return _hx_local_23
elif (field == "concat"):
if isinstance(o,list):
a12 = o
def _hx_local_24(a2):
return python_internal_ArrayImpl.concat(a12,a2)
return _hx_local_24
elif (field == "filter"):
if isinstance(o,list):
x = o
def _hx_local_25(f):
return python_internal_ArrayImpl.filter(x,f)
return _hx_local_25
elif (field == "insert"):
if isinstance(o,list):
a3 = o
def _hx_local_26(a14,x2):
python_internal_ArrayImpl.insert(a3,a14,x2)
return _hx_local_26
elif (field == "length"):
if isinstance(o,str):
return len(o)
elif isinstance(o,list):
return len(o)
elif (field == "remove"):
if isinstance(o,list):
x8 = o
def _hx_local_27(e1):
return python_internal_ArrayImpl.remove(x8,e1)
return _hx_local_27
elif (field == "splice"):
if isinstance(o,list):
x12 = o
def _hx_local_28(a17,a21):
return python_internal_ArrayImpl.splice(x12,a17,a21)
return _hx_local_28
elif (field == "substr"):
if isinstance(o,str):
s5 = o
def _hx_local_29(a18):
return HxString.substr(s5,a18)
return _hx_local_29
else:
pass
field1 = (("_hx_" + field) if ((field in python_Boot.keywords)) else (("_hx_" + field) if (((((len(field) > 2) and ((ord(field[0]) == 95))) and ((ord(field[1]) == 95))) and ((ord(field[(len(field) - 1)]) != 95)))) else field))
if hasattr(o,field1):
return getattr(o,field1)
else:
return None
@staticmethod
def getInstanceFields(c):
f = (c._hx_fields if (hasattr(c,"_hx_fields")) else [])
if hasattr(c,"_hx_methods"):
f = (f + c._hx_methods)
sc = python_Boot.getSuperClass(c)
if (sc is None):
return f
else:
scArr = python_Boot.getInstanceFields(sc)
scMap = set(scArr)
_g = 0
while (_g < len(f)):
f1 = (f[_g] if _g >= 0 and _g < len(f) else None)
_g = (_g + 1)
if (not (f1 in scMap)):
scArr.append(f1)
return scArr
@staticmethod
def getSuperClass(c):
if (c is None):
return None
try:
if hasattr(c,"_hx_super"):
return c._hx_super
return None
except Exception as _hx_e:
_hx_e1 = _hx_e
pass
return None
@staticmethod
def getClassFields(c):
if hasattr(c,"_hx_statics"):
x = c._hx_statics
return list(x)
else:
return []
@staticmethod
def unhandleKeywords(name):
if (HxString.substr(name,0,python_Boot.prefixLength) == "_hx_"):
real = HxString.substr(name,python_Boot.prefixLength,None)
if (real in python_Boot.keywords):
return real
return name
class python_HaxeIterator:
_hx_class_name = "python.HaxeIterator"
__slots__ = ("it", "x", "has", "checked")
_hx_fields = ["it", "x", "has", "checked"]
_hx_methods = ["next", "hasNext"]
def __init__(self,it):
self.checked = False
self.has = False
self.x = None
self.it = it
def next(self):
if (not self.checked):
self.hasNext()
self.checked = False
return self.x
def hasNext(self):
if (not self.checked):
try:
self.x = self.it.__next__()
self.has = True
except Exception as _hx_e:
_hx_e1 = _hx_e
if isinstance(_hx_e1, StopIteration):
s = _hx_e1
self.has = False
self.x = None
else:
raise _hx_e
self.checked = True
return self.has
class python_internal_ArrayImpl:
_hx_class_name = "python.internal.ArrayImpl"
__slots__ = ()
_hx_statics = ["concat", "iterator", "indexOf", "lastIndexOf", "toString", "pop", "push", "unshift", "remove", "shift", "slice", "sort", "splice", "map", "filter", "insert", "reverse", "_get"]
@staticmethod
def concat(a1,a2):
return (a1 + a2)
@staticmethod
def iterator(x):
return python_HaxeIterator(x.__iter__())
@staticmethod
def indexOf(a,x,fromIndex = None):
_hx_len = len(a)
l = (0 if ((fromIndex is None)) else ((_hx_len + fromIndex) if ((fromIndex < 0)) else fromIndex))
if (l < 0):
l = 0
_g1 = l
while (_g1 < _hx_len):
i = _g1
_g1 = (_g1 + 1)
if (a[i] == x):
return i
return -1
@staticmethod
def lastIndexOf(a,x,fromIndex = None):
_hx_len = len(a)
l = (_hx_len if ((fromIndex is None)) else (((_hx_len + fromIndex) + 1) if ((fromIndex < 0)) else (fromIndex + 1)))
if (l > _hx_len):
l = _hx_len
while True:
l = (l - 1)
tmp = l
if (not ((tmp > -1))):
break
if (a[l] == x):
return l
return -1
@staticmethod
def toString(x):
return (("[" + HxOverrides.stringOrNull(",".join([python_Boot.toString1(x1,'') for x1 in x]))) + "]")
@staticmethod
def pop(x):
if (len(x) == 0):
return None
else:
return x.pop()
@staticmethod
def push(x,e):
x.append(e)
return len(x)
@staticmethod
def unshift(x,e):
x.insert(0, e)
@staticmethod
def remove(x,e):
try:
x.remove(e)
return True
except Exception as _hx_e:
_hx_e1 = _hx_e
e1 = _hx_e1
return False
@staticmethod
def shift(x):
if (len(x) == 0):
return None
return x.pop(0)
@staticmethod
def slice(x,pos,end = None):
return x[pos:end]
@staticmethod
def sort(x,f):
x.sort(key= python_lib_Functools.cmp_to_key(f))
@staticmethod
def splice(x,pos,_hx_len):
if (pos < 0):
pos = (len(x) + pos)
if (pos < 0):
pos = 0
res = x[pos:(pos + _hx_len)]
del x[pos:(pos + _hx_len)]
return res
@staticmethod
def map(x,f):
return list(map(f,x))
@staticmethod
def filter(x,f):
return list(filter(f,x))
@staticmethod
def insert(a,pos,x):
a.insert(pos, x)
@staticmethod
def reverse(a):
a.reverse()
@staticmethod
def _get(x,idx):
if ((idx > -1) and ((idx < len(x)))):
return x[idx]
else:
return None
class HxOverrides:
_hx_class_name = "HxOverrides"
__slots__ = ()
_hx_statics = ["eq", "stringOrNull"]
@staticmethod
def eq(a,b):
if (isinstance(a,list) or isinstance(b,list)):
return a is b
return (a == b)
@staticmethod
def stringOrNull(s):
if (s is None):
return "null"
else:
return s
class HxString:
_hx_class_name = "HxString"
__slots__ = ()
_hx_statics = ["split", "charCodeAt", "charAt", "lastIndexOf", "toUpperCase", "toLowerCase", "indexOf", "toString", "substring", "substr"]
@staticmethod
def split(s,d):
if (d == ""):
return list(s)
else:
return s.split(d)
@staticmethod
def charCodeAt(s,index):
if ((((s is None) or ((len(s) == 0))) or ((index < 0))) or ((index >= len(s)))):
return None
else:
return ord(s[index])
@staticmethod
def charAt(s,index):
if ((index < 0) or ((index >= len(s)))):
return ""
else:
return s[index]
@staticmethod
def lastIndexOf(s,_hx_str,startIndex = None):
if (startIndex is None):
return s.rfind(_hx_str, 0, len(s))
else:
i = s.rfind(_hx_str, 0, (startIndex + 1))
startLeft = (max(0,((startIndex + 1) - len(_hx_str))) if ((i == -1)) else (i + 1))
check = s.find(_hx_str, startLeft, len(s))
if ((check > i) and ((check <= startIndex))):
return check
else:
return i
@staticmethod
def toUpperCase(s):
return s.upper()
@staticmethod
def toLowerCase(s):
return s.lower()
@staticmethod
def indexOf(s,_hx_str,startIndex = None):
if (startIndex is None):
return s.find(_hx_str)
else:
return s.find(_hx_str, startIndex)
@staticmethod
def toString(s):
return s
@staticmethod
def substring(s,startIndex,endIndex = None):
if (startIndex < 0):
startIndex = 0
if (endIndex is None):
return s[startIndex:]
else:
if (endIndex < 0):
endIndex = 0
if (endIndex < startIndex):
return s[endIndex:startIndex]
else:
return s[startIndex:endIndex]
@staticmethod
def substr(s,startIndex,_hx_len = None):
if (_hx_len is None):
return s[startIndex:]
else:
if (_hx_len == 0):
return ""
return s[startIndex:(startIndex + _hx_len)]
Math.NEGATIVE_INFINITY = float("-inf")
Math.POSITIVE_INFINITY = float("inf")
Math.NaN = float("nan")
Math.PI = python_lib_Math.pi
python_Boot.keywords = set(["and", "del", "from", "not", "with", "as", "elif", "global", "or", "yield", "assert", "else", "if", "pass", "None", "break", "except", "import", "raise", "True", "class", "exec", "in", "return", "False", "continue", "finally", "is", "try", "def", "for", "lambda", "while"])
python_Boot.prefixLength = len("_hx_")
Script.main()
|
|
#!/usr/bin/env python2.7
# Copyright 2016 Autodesk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Multiple inheritance for your dockerfiles.
Requires: python 2.7, docker-py, pyyaml (RUN: easy_install pip; pip install docker-py pyyaml)
"""
import json
import sys
import os
import textwrap
from collections import OrderedDict
from io import StringIO, BytesIO
import argparse
import pprint
import docker, docker.utils
import yaml
class DockerMaker(object):
def __init__(self, makefile, repository=None,
build_images=True,
print_dockerfiles=False,
no_cache=False,
tag=None,
pull=False):
self._sources = set()
self.makefile_path = makefile
self.img_defs = self.parse_yaml(self.makefile_path)
self.all_targets = self.img_defs.pop('_ALL_', None)
# Connect to docker daemon if necessary
if build_images:
connection = docker.utils.kwargs_from_env()
if 'tls' in connection:
connection['tls'].assert_hostname = False
self.client = docker.Client(**connection)
else:
self.client = None
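        # Normalize the repository prefix so image names can simply be appended:
        # add a trailing '/' unless the prefix already ends in '/' or ':'.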
if repository and repository[-1] not in '/:':
self.repo = repository + '/'
elif repository is None:
self.repo = ''
else:
self.repo = repository
self.tag = tag
self.build_images = build_images
self.print_dockerfiles = print_dockerfiles
self.pull = pull
self.no_cache = no_cache
def parse_yaml(self, filename):
fname = os.path.expanduser(filename)
        print 'READING %s' % fname
if fname in self._sources: raise ValueError('Circular _SOURCE_')
self._sources.add(fname)
with open(fname, 'r') as yaml_file:
yamldefs = yaml.load(yaml_file)
sourcedefs = {}
for s in yamldefs.get('_SOURCES_', []):
sourcedefs.update(self.parse_yaml(s))
sourcedefs.update(yamldefs)
return sourcedefs
def build(self, image):
"""
Drives the build of the final image - get the list of steps and execute them.
:param image: name of the image from the yaml file to build
:return: final tagged image name
"""
print 'docker-make starting build for %s' % image
build_steps = self.generate_build_order(image)
for istep, step in enumerate(build_steps):
print ' **** DockerMake Step %d/%d: %s ***' % (istep + 1, len(build_steps), ','.join(step.images))
print ' * Build directory: %s' % step.build_dir
print ' * Target image name: %s' % step.tag
dockerfile = '\n'.join(step.dockerfile)
# build the image
if self.build_images:
self.build_step(step, dockerfile)
# Dump the dockerfile to a file
if self.print_dockerfiles:
if not os.path.exists('docker_makefiles'):
os.makedirs('docker_makefiles')
if '/' in step.tag:
filename = 'docker_makefiles/Dockerfile.%s' % image
else:
filename = 'docker_makefiles/Dockerfile.%s' % step.tag
with open(filename, 'w') as dfout:
print >> dfout, dockerfile
return step.tag
def build_step(self, step, dockerfile):
"""
Drives an individual build step. Build steps are separated by build_directory.
        If a build has zero or one build_directory, it will be built in a
        single step.
"""
# set up the build context
build_args = dict(decode=True, tag=step.tag, pull=self.pull,
fileobj=None, path=None, dockerfile=None,
nocache=self.no_cache)
if step.build_dir is not None:
tempname = '_docker_make_tmp/'
tempdir = '%s/%s' % (step.build_dir, tempname)
temp_df = tempdir + 'Dockerfile'
if not os.path.isdir(tempdir):
os.makedirs(tempdir)
with open(temp_df, 'w') as df_out:
print >> df_out, dockerfile
build_args['path'] = os.path.abspath(step.build_dir)
build_args['dockerfile'] = tempname + 'Dockerfile'
else:
build_args['fileobj'] = StringIO(unicode(dockerfile))
# TODO: remove this workaround for docker/docker-py#1134 -- AMV 7/19/16
build_args['decode'] = False
# start the build
stream = self.client.build(**build_args)
# monitor the output
for item in stream:
# TODO: this is more workaround for docker/docker-py#1134
try:
item = json.loads(item)
except ValueError:
print item,
continue
#### end of workaround - this can be removed once resolved - AMV 7/19/16
if item.keys() == ['stream']:
print item['stream'].strip()
elif 'errorDetail' in item or 'error' in item:
raise BuildError(dockerfile, item, build_args)
else:
print item,
# remove the temporary dockerfile
if step.build_dir is not None:
os.unlink(temp_df)
os.rmdir(tempdir)
def generate_build_order(self, image):
"""
Separate the build into a series of one or more intermediate steps.
Each specified build directory gets its own step
"""
repo_name = self.repo + image
if self.tag:
if ':' in repo_name:
repo_name += '-' + self.tag
else:
repo_name += ':' + self.tag
dependencies = self.sort_dependencies(image)
base = self.get_external_base_image(image, dependencies)
build_steps = [BuildStep(base)]
step = build_steps[0]
for d in dependencies:
dep_definition = self.img_defs[d]
mydir = dep_definition.get('build_directory', None)
if mydir is not None:
mydir = os.path.expanduser(mydir) # expands `~` to home directory
if step.build_dir is not None:
# Create a new build step if there's already a build directory
step.tag = '%dbuild_%s' % (len(build_steps), image)
build_steps.append(BuildStep(step.tag))
step = build_steps[-1]
step.build_dir = mydir
step.images.append(d)
if 'build' in dep_definition:
step.dockerfile.append('\n#Commands for %s' % d)
step.dockerfile.append(dep_definition['build'])
else:
step.dockerfile.append('\n####end of requirements for %s\n' % d)
# Sets the last step's name to the final build target
step.tag = repo_name
for step in build_steps:
step.dockerfile.insert(0, '#Build directory: %s\n#tag: %s' %
(step.build_dir, step.tag))
return build_steps
def sort_dependencies(self, com, dependencies=None):
"""
Topologically sort the docker commands by their requirements
TODO: sort using a "maximum common tree"?
:param com: process this docker image's dependencies
:param dependencies: running cache of sorted dependencies (ordered dict)
:return type: OrderedDict
"""
if dependencies is None: dependencies = OrderedDict()
if com in dependencies: return
requires = self.img_defs[com].get('requires', [])
assert type(requires) == list, 'Requirements for %s are not a list' % com
for dep in requires:
self.sort_dependencies(dep, dependencies)
if com in dependencies:
raise ValueError('Circular dependency found', dependencies)
dependencies[com] = None
return dependencies
def get_external_base_image(self, image, dependencies):
"""
Makes sure that this image has exactly one external base image
"""
base = None
base_for = None
for d in dependencies:
this_base = self.img_defs[d].get('FROM', None)
if this_base is not None and base is not None and this_base != base:
error = ('Multiple external dependencies: image %s depends on:\n' % image +
' %s (FROM: %s), and\n' % (base_for, base) +
' %s (FROM: %s).' % (d, this_base))
raise ValueError(error)
if this_base is not None:
base = this_base
base_for = d
if base is None:
raise ValueError("No base image found in %s's dependencies" % image)
return base
class BuildError(Exception):
def __init__(self, dockerfile, item, build_args):
with open('dockerfile.fail', 'w') as dff:
print>> dff, dockerfile
with BytesIO() as stream:
print >> stream, '\n -------- Docker daemon output --------'
pprint.pprint(item, stream, indent=4)
print >> stream, ' -------- Arguments to client.build --------'
pprint.pprint(build_args, stream, indent=4)
print >> stream, 'This dockerfile was written to dockerfile.fail'
stream.seek(0)
super(BuildError, self).__init__(stream.read())
class BuildStep(object):
def __init__(self, baseimage):
self.dockerfile = ['FROM %s\n' % baseimage]
self.tag = None
self.build_dir = None
self.images = []
def main():
args = make_arg_parser().parse_args()
# Help and exit
if args.help_yaml:
print_yaml_help()
return
# Otherwise, parse the yaml file
maker = DockerMaker(args.makefile, repository=args.repository,
build_images=not (args.no_build or args.list),
print_dockerfiles=(args.print_dockerfiles or args.no_build),
pull=args.pull, no_cache=args.no_cache, tag=args.tag)
if args.list:
print 'TARGETS in `%s`' % args.makefile
for item in maker.img_defs.keys(): print ' *', item
return
# Assemble custom requirements target
if args.requires or args.name:
assert args.requires and args.name
assert args.name not in maker.img_defs
maker.img_defs[args.name] = {'requires': args.requires}
targets = [args.name]
elif args.all:
assert len(args.TARGETS) == 0, "Pass either a list of targets or `--all`, not both"
if maker.all_targets is not None:
targets = maker.all_targets
else:
targets = maker.img_defs.keys()
else:
targets = args.TARGETS
if not targets:
print 'No build targets specified!'
print 'Targets in `%s`:' % args.makefile
for item in maker.img_defs.keys(): print ' *', item
return
# Actually build the images! (or Dockerfiles)
built, warnings = [], []
for t in targets:
name = maker.build(t)
print ' docker-make built:', name
built.append(name)
if args.push_to_registry:
success, w = push(maker, name)
warnings.extend(w)
if not success: built[-1] += ' -- PUSH FAILED'
else: built[-1] += ' -- pushed to %s' % name.split('/')[0]
# Summarize the build process
print '\ndocker-make finished.'
print 'Built: '
for item in built: print ' *', item
if warnings:
print 'Warnings:'
for item in warnings: print ' *', item
def push(maker, name):
success = False
warnings = []
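    # Only push when the first path component of the name looks like a registry
    # hostname (i.e. it contains a dot), e.g. quay.io/elvis/hello-world.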
if '/' not in name or name.split('/')[0].find('.') < 0:
warn = 'WARNING: could not push %s - ' \
'repository name does not contain a registry URL' % name
warnings.append(warn)
print warn
else:
print ' Pushing %s to %s:' % (name, name.split('/')[0])
line = {'error': 'no push information received'}
_lastid = None
for line in maker.client.push(name, stream=True):
line = yaml.load(line)
if 'status' in line:
if line.get('id', None) == _lastid and line['status'] == 'Pushing':
print '\r', line['status'], line['id'], line.get('progress', ''),
sys.stdout.flush()
else:
print line['status'], line.get('id', '')
_lastid = line.get('id', None)
else:
print line
if 'error' in line:
warnings.append('WARNING: push failed for %s. Message: %s' % (name, line['error']))
else:
success = True
return success, warnings
def print_yaml_help():
print "A brief introduction to writing Dockerfile.yml files:\n"
print 'SYNTAX:'
print printable_code("""[image_name]:
build_directory: [relative path where the ADD and COPY commands will look for files]
requires:
- [other image name]
- [yet another image name]
FROM: [named_base_image]
build: |
RUN [something]
ADD [something else]
[Dockerfile commands go here]
[other image name]: ...
[yet another image name]: ...""")
print
print textwrap.fill("The idea is to write dockerfile commands for each specific "
'piece of functionality in the build field, and "inherit" all other'
' functionality from a list of other components that your image requires. '
'If you need to add files with the ADD and COPY commands, specify the root'
' directory for those files with build_directory. Your tree of '
'"requires" must have exactly one unique named base image '
'in the FROM field.')
print '\n\nAN EXAMPLE:'
print printable_code("""devbase:
FROM: phusion/baseimage
build: |
RUN apt-get -y update && apt-get -y install build-essential
airline_data:
requires:
- devbase
build_directory: sample_data/airline_data
build: |
ADD AirlinePassengers.csv
python_image:
requires:
- devbase
build: |
RUN apt-get -y update \
&& apt-get install -y python python-pip \
&& pip install pandas
data_science:
requires:
- python_image
- airline_data""")
def printable_code(c):
output = []
dedented = textwrap.dedent(c)
for line in dedented.split('\n'):
output.append(' >> ' + line)
return '\n'.join(output)
def make_arg_parser():
parser = argparse.ArgumentParser(description=
"NOTE: Docker environmental variables must be set.\n"
"For a docker-machine, run "
"`eval $(docker-machine env [machine-name])`")
bo = parser.add_argument_group('Choosing what to build')
bo.add_argument('TARGETS', nargs="*",
help='Docker images to build as specified in the YAML file')
bo.add_argument('-f', '--makefile',
default='DockerMake.yml',
help='YAML file containing build instructions')
bo.add_argument('-a', '--all', action='store_true',
help="Print or build all images (or those specified by _ALL_)")
bo.add_argument('-l', '--list', action='store_true',
help='List all available targets in the file, then exit.')
bo.add_argument('--requires', nargs="*",
help='Build a special image from these requirements. Requires --name')
bo.add_argument('--name', type=str,
help="Name for custom docker images (requires --requires)")
df = parser.add_argument_group('Dockerfiles')
df.add_argument('-p', '--print_dockerfiles', action='store_true',
help="Print out the generated dockerfiles named `Dockerfile.[image]`")
df.add_argument('-n', '--no_build', action='store_true',
help='Only print Dockerfiles, don\'t build them. Implies --print.')
ca = parser.add_argument_group('Image caching')
ca.add_argument('--pull', action='store_true',
help='Always try to pull updated FROM images')
ca.add_argument('--no-cache', action='store_true',
help="Rebuild every layer")
# TODO: add a way to invalidate a specific target
rt = parser.add_argument_group('Repositories and tags')
rt.add_argument('--repository', '-r', '-u',
help="Prepend this repository to all built images, e.g.\n"
"`docker-make hello-world -u quay.io/elvis` will tag the image "
"as `quay.io/elvis/hello-world`. You can add a ':' to the end to "
"image names into tags:\n `docker-make -u quay.io/elvis/repo: hello-world` "
"will create the image in the elvis repository: quay.io/elvis/repo:hello-world")
rt.add_argument('--tag', '-t', type=str,
help='Tag all built images with this tag. If image names are ALREADY tags (i.e.,'
' your repo name ends in a ":"), this will append the tag name with a dash. '
'For example: `docker-make hello-world -u elvis/repo: -t 1.0` will create '
'the image "elvis/repo:hello-world-1.0')
rt.add_argument('--push-to-registry', '-P', action='store_true',
help='Push all built images to the repository specified '
'(only if image repository contains a URL) -- to push to dockerhub.com, '
                        'use index.docker.io as the registry.')
hh = parser.add_argument_group('Help')
hh.add_argument('--help-yaml', action='store_true',
help="Print summary of YAML file format and exit.")
return parser
__license__ = """Copyright (c) 2016, Autodesk Research
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."""
if __name__ == '__main__': main()
|
|
"""Implementation of code management magic functions.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012 The IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Stdlib
import inspect
import io
import os
import re
import sys
import ast
from itertools import chain
from urllib.request import urlopen
from urllib.parse import urlencode
# Our own packages
from IPython.core.error import TryNext, StdinNotImplementedError, UsageError
from IPython.core.macro import Macro
from IPython.core.magic import Magics, magics_class, line_magic
from IPython.core.oinspect import find_file, find_source_lines
from IPython.testing.skipdoctest import skip_doctest
from IPython.utils.contexts import preserve_keys
from IPython.utils.path import get_py_filename
from warnings import warn
from logging import error
from IPython.utils.text import get_text_list
#-----------------------------------------------------------------------------
# Magic implementation classes
#-----------------------------------------------------------------------------
# Used for exception handling in magic_edit
class MacroToEdit(ValueError): pass
ipython_input_pat = re.compile(r"<ipython\-input\-(\d+)-[a-z\d]+>$")
# To match, e.g. 8-10 1:5 :10 3-
range_re = re.compile(r"""
(?P<start>\d+)?
((?P<sep>[\-:])
(?P<end>\d+)?)?
$""", re.VERBOSE)
def extract_code_ranges(ranges_str):
"""Turn a string of range for %%load into 2-tuples of (start, stop)
ready to use as a slice of the content split by lines.
Examples
--------
    list(extract_code_ranges("5-10 2"))
[(4, 10), (1, 2)]
"""
for range_str in ranges_str.split():
rmatch = range_re.match(range_str)
if not rmatch:
continue
sep = rmatch.group("sep")
start = rmatch.group("start")
end = rmatch.group("end")
if sep == '-':
start = int(start) - 1 if start else None
end = int(end) if end else None
elif sep == ':':
start = int(start) - 1 if start else None
end = int(end) - 1 if end else None
else:
end = int(start)
start = int(start) - 1
yield (start, end)
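# Worked example (illustrative; not part of the original docstring): applying
# the rules above, list(extract_code_ranges("1-3 5:8 :2")) == [(0, 3), (4, 7), (None, 1)].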
def extract_symbols(code, symbols):
"""
Return a tuple (blocks, not_found)
where ``blocks`` is a list of code fragments
for each symbol parsed from code, and ``not_found`` are
symbols not found in the code.
For example::
In [1]: code = '''a = 10
...: def b(): return 42
...: class A: pass'''
In [2]: extract_symbols(code, 'A,b,z')
Out[2]: (['class A: pass\\n', 'def b(): return 42\\n'], ['z'])
"""
symbols = symbols.split(',')
# this will raise SyntaxError if code isn't valid Python
py_code = ast.parse(code)
marks = [(getattr(s, 'name', None), s.lineno) for s in py_code.body]
code = code.split('\n')
symbols_lines = {}
# we already know the start_lineno of each symbol (marks).
    # To find each end_lineno, we traverse the symbols in reverse order and
    # trim trailing blank lines from the end of each block.
end = len(code)
for name, start in reversed(marks):
while not code[end - 1].strip():
end -= 1
if name:
symbols_lines[name] = (start - 1, end)
end = start - 1
# Now symbols_lines is a map
# {'symbol_name': (start_lineno, end_lineno), ...}
# fill a list with chunks of codes for each requested symbol
blocks = []
not_found = []
for symbol in symbols:
if symbol in symbols_lines:
start, end = symbols_lines[symbol]
blocks.append('\n'.join(code[start:end]) + '\n')
else:
not_found.append(symbol)
return blocks, not_found
def strip_initial_indent(lines):
"""For %load, strip indent from lines until finding an unindented line.
https://github.com/ipython/ipython/issues/9775
"""
indent_re = re.compile(r'\s+')
it = iter(lines)
first_line = next(it)
indent_match = indent_re.match(first_line)
if indent_match:
# First line was indented
indent = indent_match.group()
yield first_line[len(indent):]
for line in it:
if line.startswith(indent):
yield line[len(indent):]
else:
# Less indented than the first line - stop dedenting
yield line
break
else:
yield first_line
# Pass the remaining lines through without dedenting
for line in it:
yield line
class InteractivelyDefined(Exception):
"""Exception for interactively defined variable in magic_edit"""
def __init__(self, index):
self.index = index
@magics_class
class CodeMagics(Magics):
"""Magics related to code management (loading, saving, editing, ...)."""
def __init__(self, *args, **kwargs):
self._knowntemps = set()
super(CodeMagics, self).__init__(*args, **kwargs)
@line_magic
def save(self, parameter_s=''):
"""Save a set of lines or a macro to a given filename.
Usage:\\
%save [options] filename n1-n2 n3-n4 ... n5 .. n6 ...
Options:
-r: use 'raw' input. By default, the 'processed' history is used,
so that magics are loaded in their transformed version to valid
        Python. If this option is given, the raw input as typed at the
command line is used instead.
-f: force overwrite. If file exists, %save will prompt for overwrite
unless -f is given.
-a: append to the file instead of overwriting it.
This function uses the same syntax as %history for input ranges,
then saves the lines to the filename you specify.
It adds a '.py' extension to the file if you don't do so yourself, and
it asks for confirmation before overwriting existing files.
        If the `-r` option is used, the default extension is `.ipy`.
"""
opts,args = self.parse_options(parameter_s,'fra',mode='list')
if not args:
raise UsageError('Missing filename.')
raw = 'r' in opts
force = 'f' in opts
append = 'a' in opts
mode = 'a' if append else 'w'
ext = '.ipy' if raw else '.py'
fname, codefrom = args[0], " ".join(args[1:])
if not fname.endswith(('.py','.ipy')):
fname += ext
file_exists = os.path.isfile(fname)
if file_exists and not force and not append:
try:
overwrite = self.shell.ask_yes_no('File `%s` exists. Overwrite (y/[N])? ' % fname, default='n')
except StdinNotImplementedError:
print("File `%s` exists. Use `%%save -f %s` to force overwrite" % (fname, parameter_s))
return
if not overwrite :
print('Operation cancelled.')
return
try:
cmds = self.shell.find_user_code(codefrom,raw)
except (TypeError, ValueError) as e:
print(e.args[0])
return
with io.open(fname, mode, encoding="utf-8") as f:
if not file_exists or not append:
f.write("# coding: utf-8\n")
f.write(cmds)
# make sure we end on a newline
if not cmds.endswith('\n'):
f.write('\n')
print('The following commands were written to file `%s`:' % fname)
print(cmds)
@line_magic
def pastebin(self, parameter_s=''):
"""Upload code to dpaste's paste bin, returning the URL.
Usage:\\
%pastebin [-d "Custom description"] 1-7
The argument can be an input history range, a filename, or the name of a
string or macro.
Options:
        -d: Pass a custom description for the paste. The default will say
"Pasted from IPython".
"""
opts, args = self.parse_options(parameter_s, 'd:')
try:
code = self.shell.find_user_code(args)
except (ValueError, TypeError) as e:
print(e.args[0])
return
post_data = urlencode({
"title": opts.get('d', "Pasted from IPython"),
"syntax": "python3",
"content": code
}).encode('utf-8')
response = urlopen("http://dpaste.com/api/v2/", post_data)
return response.headers.get('Location')
@line_magic
def loadpy(self, arg_s):
"""Alias of `%load`
`%loadpy` has gained some flexibility and dropped the requirement of a `.py`
        extension, so it has been renamed simply to %load. You can look at
`%load`'s docstring for more info.
"""
self.load(arg_s)
@line_magic
def load(self, arg_s):
"""Load code into the current frontend.
Usage:\\
%load [options] source
where source can be a filename, URL, input history range, macro, or
element in the user namespace
Options:
-r <lines>: Specify lines or ranges of lines to load from the source.
Ranges could be specified as x-y (x..y) or in python-style x:y
(x..(y-1)). Both limits x and y can be left blank (meaning the
beginning and end of the file, respectively).
-s <symbols>: Specify function or classes to load from python source.
-y : Don't ask confirmation for loading source above 200 000 characters.
-n : Include the user's namespace when searching for source code.
        This magic command can either take a local filename, a URL, a history
        range (see %history) or a macro as argument; it will prompt for
confirmation before loading source with more than 200 000 characters, unless
-y flag is passed or if the frontend does not support raw_input::
%load myscript.py
%load 7-27
%load myMacro
%load http://www.example.com/myscript.py
%load -r 5-10 myscript.py
%load -r 10-20,30,40: foo.py
%load -s MyClass,wonder_function myscript.py
%load -n MyClass
%load -n my_module.wonder_function
"""
opts,args = self.parse_options(arg_s,'yns:r:')
if not args:
raise UsageError('Missing filename, URL, input history range, '
'macro, or element in the user namespace.')
search_ns = 'n' in opts
contents = self.shell.find_user_code(args, search_ns=search_ns)
if 's' in opts:
try:
blocks, not_found = extract_symbols(contents, opts['s'])
except SyntaxError:
# non python code
error("Unable to parse the input as valid Python code")
return
if len(not_found) == 1:
warn('The symbol `%s` was not found' % not_found[0])
elif len(not_found) > 1:
warn('The symbols %s were not found' % get_text_list(not_found,
wrap_item_with='`')
)
contents = '\n'.join(blocks)
if 'r' in opts:
ranges = opts['r'].replace(',', ' ')
lines = contents.split('\n')
slices = extract_code_ranges(ranges)
contents = [lines[slice(*slc)] for slc in slices]
contents = '\n'.join(strip_initial_indent(chain.from_iterable(contents)))
l = len(contents)
# 200 000 is ~ 2500 full 80 character lines
        # so on average, more than 5000 lines
if l > 200000 and 'y' not in opts:
try:
ans = self.shell.ask_yes_no(("The text you're trying to load seems pretty big"\
" (%d characters). Continue (y/[N]) ?" % l), default='n' )
except StdinNotImplementedError:
#assume yes if raw input not implemented
ans = True
if ans is False :
print('Operation cancelled.')
return
contents = "# %load {}\n".format(arg_s) + contents
self.shell.set_next_input(contents, replace=True)
@staticmethod
def _find_edit_target(shell, args, opts, last_call):
"""Utility method used by magic_edit to find what to edit."""
def make_filename(arg):
"Make a filename from the given args"
try:
filename = get_py_filename(arg)
except IOError:
# If it ends with .py but doesn't already exist, assume we want
# a new file.
if arg.endswith('.py'):
filename = arg
else:
filename = None
return filename
# Set a few locals from the options for convenience:
opts_prev = 'p' in opts
opts_raw = 'r' in opts
# custom exceptions
class DataIsObject(Exception): pass
# Default line number value
lineno = opts.get('n',None)
if opts_prev:
args = '_%s' % last_call[0]
if args not in shell.user_ns:
args = last_call[1]
# by default this is done with temp files, except when the given
# arg is a filename
use_temp = True
data = ''
# First, see if the arguments should be a filename.
filename = make_filename(args)
if filename:
use_temp = False
elif args:
# Mode where user specifies ranges of lines, like in %macro.
data = shell.extract_input_lines(args, opts_raw)
if not data:
try:
# Load the parameter given as a variable. If not a string,
# process it as an object instead (below)
#print '*** args',args,'type',type(args) # dbg
data = eval(args, shell.user_ns)
if not isinstance(data, str):
raise DataIsObject
except (NameError,SyntaxError):
# given argument is not a variable, try as a filename
filename = make_filename(args)
if filename is None:
warn("Argument given (%s) can't be found as a variable "
"or as a filename." % args)
return (None, None, None)
use_temp = False
except DataIsObject:
# macros have a special edit function
if isinstance(data, Macro):
raise MacroToEdit(data)
# For objects, try to edit the file where they are defined
filename = find_file(data)
if filename:
if 'fakemodule' in filename.lower() and \
inspect.isclass(data):
# class created by %edit? Try to find source
# by looking for method definitions instead, the
# __module__ in those classes is FakeModule.
attrs = [getattr(data, aname) for aname in dir(data)]
for attr in attrs:
if not inspect.ismethod(attr):
continue
filename = find_file(attr)
if filename and \
'fakemodule' not in filename.lower():
# change the attribute to be the edit
# target instead
data = attr
break
m = ipython_input_pat.match(os.path.basename(filename))
if m:
raise InteractivelyDefined(int(m.groups()[0]))
datafile = 1
if filename is None:
filename = make_filename(args)
datafile = 1
if filename is not None:
# only warn about this if we get a real name
warn('Could not find file where `%s` is defined.\n'
'Opening a file named `%s`' % (args, filename))
# Now, make sure we can actually read the source (if it was
# in a temp file it's gone by now).
if datafile:
if lineno is None:
lineno = find_source_lines(data)
if lineno is None:
filename = make_filename(args)
if filename is None:
warn('The file where `%s` was defined '
'cannot be read or found.' % data)
return (None, None, None)
use_temp = False
if use_temp:
filename = shell.mktempfile(data)
print('IPython will make a temporary file named:',filename)
# use last_call to remember the state of the previous call, but don't
# let it be clobbered by successive '-p' calls.
try:
last_call[0] = shell.displayhook.prompt_count
if not opts_prev:
last_call[1] = args
except:
pass
return filename, lineno, use_temp
def _edit_macro(self,mname,macro):
"""open an editor with the macro data in a file"""
filename = self.shell.mktempfile(macro.value)
self.shell.hooks.editor(filename)
# and make a new macro object, to replace the old one
with open(filename) as mfile:
mvalue = mfile.read()
self.shell.user_ns[mname] = Macro(mvalue)
@skip_doctest
@line_magic
def edit(self, parameter_s='',last_call=['','']):
"""Bring up an editor and execute the resulting code.
Usage:
%edit [options] [args]
%edit runs IPython's editor hook. The default version of this hook is
set to call the editor specified by your $EDITOR environment variable.
If this isn't found, it will default to vi under Linux/Unix and to
notepad under Windows. See the end of this docstring for how to change
the editor hook.
You can also set the value of this editor via the
``TerminalInteractiveShell.editor`` option in your configuration file.
This is useful if you wish to use a different editor from your typical
default with IPython (and for Windows users who typically don't set
environment variables).
This command allows you to conveniently edit multi-line code right in
your IPython session.
If called without arguments, %edit opens up an empty editor with a
temporary file and will execute the contents of this file when you
close it (don't forget to save it!).
Options:
-n <number>: open the editor at a specified line number. By default,
the IPython editor hook uses the unix syntax 'editor +N filename', but
you can configure this by providing your own modified hook if your
favorite editor supports line-number specifications with a different
syntax.
-p: this will call the editor with the same data as the previous time
it was used, regardless of how long ago (in your current session) it
was.
-r: use 'raw' input. This option only applies to input taken from the
user's history. By default, the 'processed' history is used, so that
magics are loaded in their transformed version to valid Python. If
        this option is given, the raw input as typed at the command line is
used instead. When you exit the editor, it will be executed by
IPython's own processor.
-x: do not execute the edited code immediately upon exit. This is
mainly useful if you are editing programs which need to be called with
command line arguments, which you can then do using %run.
Arguments:
If arguments are given, the following possibilities exist:
- If the argument is a filename, IPython will load that into the
editor. It will execute its contents with execfile() when you exit,
loading any code in the file into your interactive namespace.
- The arguments are ranges of input history, e.g. "7 ~1/4-6".
The syntax is the same as in the %history magic.
- If the argument is a string variable, its contents are loaded
into the editor. You can thus edit any string which contains
python code (including the result of previous edits).
- If the argument is the name of an object (other than a string),
IPython will try to locate the file where it was defined and open the
editor at the point where it is defined. You can use `%edit function`
to load an editor exactly at the point where 'function' is defined,
edit it and have the file be executed automatically.
- If the object is a macro (see %macro for details), this opens up your
specified editor with a temporary file containing the macro's data.
Upon exit, the macro is reloaded with the contents of the file.
Note: opening at an exact line is only supported under Unix, and some
editors (like kedit and gedit up to Gnome 2.8) do not understand the
'+NUMBER' parameter necessary for this feature. Good editors like
(X)Emacs, vi, jed, pico and joe all do.
After executing your code, %edit will return as output the code you
typed in the editor (except when it was an existing file). This way
you can reload the code in further invocations of %edit as a variable,
via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
the output.
Note that %edit is also available through the alias %ed.
This is an example of creating a simple function inside the editor and
then modifying it. First, start up the editor::
In [1]: edit
Editing... done. Executing edited code...
Out[1]: 'def foo():\\n print "foo() was defined in an editing
session"\\n'
We can then call the function foo()::
In [2]: foo()
foo() was defined in an editing session
Now we edit foo. IPython automatically loads the editor with the
(temporary) file where foo() was previously defined::
In [3]: edit foo
Editing... done. Executing edited code...
And if we call foo() again we get the modified version::
In [4]: foo()
foo() has now been changed!
        Here is an example of how to edit a code snippet several times in
        succession. First we call the editor::
In [5]: edit
Editing... done. Executing edited code...
hello
Out[5]: "print 'hello'\\n"
Now we call it again with the previous output (stored in _)::
In [6]: edit _
Editing... done. Executing edited code...
hello world
Out[6]: "print 'hello world'\\n"
Now we call it with the output #8 (stored in _8, also as Out[8])::
In [7]: edit _8
Editing... done. Executing edited code...
hello again
Out[7]: "print 'hello again'\\n"
Changing the default editor hook:
If you wish to write your own editor hook, you can put it in a
configuration file which you load at startup time. The default hook
is defined in the IPython.core.hooks module, and you can use that as a
starting example for further modifications. That file also has
general instructions on how to set a new hook for use once you've
defined it."""
opts,args = self.parse_options(parameter_s,'prxn:')
try:
filename, lineno, is_temp = self._find_edit_target(self.shell,
args, opts, last_call)
except MacroToEdit as e:
self._edit_macro(args, e.args[0])
return
except InteractivelyDefined as e:
print("Editing In[%i]" % e.index)
args = str(e.index)
filename, lineno, is_temp = self._find_edit_target(self.shell,
args, opts, last_call)
if filename is None:
# nothing was found, warnings have already been issued,
# just give up.
return
if is_temp:
self._knowntemps.add(filename)
elif (filename in self._knowntemps):
is_temp = True
# do actual editing here
print('Editing...', end=' ')
sys.stdout.flush()
try:
# Quote filenames that may have spaces in them
if ' ' in filename:
filename = "'%s'" % filename
self.shell.hooks.editor(filename,lineno)
except TryNext:
warn('Could not open editor')
return
# XXX TODO: should this be generalized for all string vars?
# For now, this is special-cased to blocks created by cpaste
if args.strip() == 'pasted_block':
with open(filename, 'r') as f:
self.shell.user_ns['pasted_block'] = f.read()
if 'x' in opts: # -x prevents actual execution
print()
else:
print('done. Executing edited code...')
with preserve_keys(self.shell.user_ns, '__file__'):
if not is_temp:
self.shell.user_ns['__file__'] = filename
if 'r' in opts: # Untranslated IPython code
with open(filename, 'r') as f:
source = f.read()
self.shell.run_cell(source, store_history=False)
else:
self.shell.safe_execfile(filename, self.shell.user_ns,
self.shell.user_ns)
if is_temp:
try:
with open(filename) as f:
return f.read()
except IOError as msg:
if msg.filename == filename:
warn('File not found. Did you forget to save?')
return
else:
self.shell.showtraceback()
|
|
"""The tests for the MQTT sensor platform."""
from datetime import datetime, timedelta
import json
from unittest.mock import ANY, patch
from homeassistant.components import mqtt
from homeassistant.components.mqtt.discovery import async_start
import homeassistant.components.sensor as sensor
from homeassistant.const import EVENT_STATE_CHANGED, STATE_UNAVAILABLE
import homeassistant.core as ha
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import (
MockConfigEntry,
async_fire_mqtt_message,
async_fire_time_changed,
async_mock_mqtt_component,
mock_registry,
)
async def test_setting_sensor_value_via_mqtt_message(hass, mqtt_mock):
"""Test the setting of the value via MQTT."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"unit_of_measurement": "fav unit",
}
},
)
async_fire_mqtt_message(hass, "test-topic", "100")
state = hass.states.get("sensor.test")
assert state.state == "100"
assert state.attributes.get("unit_of_measurement") == "fav unit"
async def test_setting_sensor_value_expires(hass, mqtt_mock, caplog):
"""Test the expiration of the value."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"unit_of_measurement": "fav unit",
"expire_after": "4",
"force_update": True,
}
},
)
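    # expire_after is set to 4 seconds above, so the state should fall back to
    # "unknown" once more than 4 seconds pass without a new message.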
state = hass.states.get("sensor.test")
assert state.state == "unknown"
now = datetime(2017, 1, 1, 1, tzinfo=dt_util.UTC)
with patch(("homeassistant.helpers.event." "dt_util.utcnow"), return_value=now):
async_fire_time_changed(hass, now)
async_fire_mqtt_message(hass, "test-topic", "100")
await hass.async_block_till_done()
# Value was set correctly.
state = hass.states.get("sensor.test")
assert state.state == "100"
# Time jump +3s
now = now + timedelta(seconds=3)
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
# Value is not yet expired
state = hass.states.get("sensor.test")
assert state.state == "100"
# Next message resets timer
with patch(("homeassistant.helpers.event." "dt_util.utcnow"), return_value=now):
async_fire_time_changed(hass, now)
async_fire_mqtt_message(hass, "test-topic", "101")
await hass.async_block_till_done()
# Value was updated correctly.
state = hass.states.get("sensor.test")
assert state.state == "101"
# Time jump +3s
now = now + timedelta(seconds=3)
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
# Value is not yet expired
state = hass.states.get("sensor.test")
assert state.state == "101"
# Time jump +2s
now = now + timedelta(seconds=2)
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
# Value is expired now
state = hass.states.get("sensor.test")
assert state.state == "unknown"
async def test_setting_sensor_value_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of the value via MQTT with JSON payload."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"unit_of_measurement": "fav unit",
"value_template": "{{ value_json.val }}",
}
},
)
async_fire_mqtt_message(hass, "test-topic", '{ "val": "100" }')
state = hass.states.get("sensor.test")
assert state.state == "100"
async def test_force_update_disabled(hass, mqtt_mock):
"""Test force update option."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"unit_of_measurement": "fav unit",
}
},
)
events = []
@ha.callback
def callback(event):
events.append(event)
hass.bus.async_listen(EVENT_STATE_CHANGED, callback)
async_fire_mqtt_message(hass, "test-topic", "100")
await hass.async_block_till_done()
assert len(events) == 1
async_fire_mqtt_message(hass, "test-topic", "100")
await hass.async_block_till_done()
assert len(events) == 1
async def test_force_update_enabled(hass, mqtt_mock):
"""Test force update option."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"unit_of_measurement": "fav unit",
"force_update": True,
}
},
)
events = []
@ha.callback
def callback(event):
events.append(event)
hass.bus.async_listen(EVENT_STATE_CHANGED, callback)
async_fire_mqtt_message(hass, "test-topic", "100")
await hass.async_block_till_done()
assert len(events) == 1
async_fire_mqtt_message(hass, "test-topic", "100")
await hass.async_block_till_done()
assert len(events) == 2
async def test_default_availability_payload(hass, mqtt_mock):
"""Test availability by default payload with defined topic."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"availability_topic": "availability-topic",
}
},
)
state = hass.states.get("sensor.test")
assert state.state == STATE_UNAVAILABLE
async_fire_mqtt_message(hass, "availability-topic", "online")
state = hass.states.get("sensor.test")
assert state.state != STATE_UNAVAILABLE
async_fire_mqtt_message(hass, "availability-topic", "offline")
state = hass.states.get("sensor.test")
assert state.state == STATE_UNAVAILABLE
async def test_custom_availability_payload(hass, mqtt_mock):
"""Test availability by custom payload with defined topic."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"availability_topic": "availability-topic",
"payload_available": "good",
"payload_not_available": "nogood",
}
},
)
state = hass.states.get("sensor.test")
assert state.state == STATE_UNAVAILABLE
async_fire_mqtt_message(hass, "availability-topic", "good")
state = hass.states.get("sensor.test")
assert state.state != STATE_UNAVAILABLE
async_fire_mqtt_message(hass, "availability-topic", "nogood")
state = hass.states.get("sensor.test")
assert state.state == STATE_UNAVAILABLE
async def test_setting_sensor_attribute_via_legacy_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"unit_of_measurement": "fav unit",
"json_attributes_topic": "test-attributes-topic",
}
},
)
async_fire_mqtt_message(hass, "test-attributes-topic", '{ "val": "100" }')
state = hass.states.get("sensor.test")
assert state.attributes.get("val") == "100"
async def test_update_with_legacy_json_attrs_not_dict(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"unit_of_measurement": "fav unit",
"json_attributes_topic": "test-attributes-topic",
}
},
)
async_fire_mqtt_message(hass, "test-attributes-topic", '[ "list", "of", "things"]')
state = hass.states.get("sensor.test")
assert state.attributes.get("val") is None
assert "JSON result was not a dictionary" in caplog.text
async def test_update_with_legacy_json_attrs_bad_JSON(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"unit_of_measurement": "fav unit",
"json_attributes_topic": "test-attributes-topic",
}
},
)
async_fire_mqtt_message(hass, "test-attributes-topic", "This is not JSON")
state = hass.states.get("sensor.test")
assert state.attributes.get("val") is None
assert "Erroneous JSON: This is not JSON" in caplog.text
async def test_update_with_legacy_json_attrs_and_template(hass, mqtt_mock):
"""Test attributes get extracted from a JSON result."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"unit_of_measurement": "fav unit",
"value_template": "{{ value_json.val }}",
"json_attributes": "val",
}
},
)
async_fire_mqtt_message(hass, "test-topic", '{ "val": "100" }')
state = hass.states.get("sensor.test")
assert state.attributes.get("val") == "100"
assert state.state == "100"
async def test_invalid_device_class(hass, mqtt_mock):
"""Test device_class option with invalid value."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"device_class": "foobarnotreal",
}
},
)
state = hass.states.get("sensor.test")
assert state is None
async def test_valid_device_class(hass, mqtt_mock):
"""Test device_class option with valid values."""
assert await async_setup_component(
hass,
"sensor",
{
"sensor": [
{
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"device_class": "temperature",
},
{"platform": "mqtt", "name": "Test 2", "state_topic": "test-topic"},
]
},
)
await hass.async_block_till_done()
state = hass.states.get("sensor.test_1")
assert state.attributes["device_class"] == "temperature"
state = hass.states.get("sensor.test_2")
assert "device_class" not in state.attributes
async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"json_attributes_topic": "attr-topic",
}
},
)
async_fire_mqtt_message(hass, "attr-topic", '{ "val": "100" }')
state = hass.states.get("sensor.test")
assert state.attributes.get("val") == "100"
async def test_setting_attribute_with_template(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"json_attributes_topic": "attr-topic",
"json_attributes_template": "{{ value_json['Timer1'] | tojson }}",
}
},
)
async_fire_mqtt_message(
hass, "attr-topic", json.dumps({"Timer1": {"Arm": 0, "Time": "22:18"}})
)
state = hass.states.get("sensor.test")
assert state.attributes.get("Arm") == 0
assert state.attributes.get("Time") == "22:18"
async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"json_attributes_topic": "attr-topic",
}
},
)
async_fire_mqtt_message(hass, "attr-topic", '[ "list", "of", "things"]')
state = hass.states.get("sensor.test")
assert state.attributes.get("val") is None
assert "JSON result was not a dictionary" in caplog.text
async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"json_attributes_topic": "attr-topic",
}
},
)
async_fire_mqtt_message(hass, "attr-topic", "This is not JSON")
state = hass.states.get("sensor.test")
assert state.attributes.get("val") is None
assert "Erroneous JSON: This is not JSON" in caplog.text
async def test_discovery_update_attr(hass, mqtt_mock, caplog):
"""Test update of discovered MQTTAttributes."""
entry = MockConfigEntry(domain=mqtt.DOMAIN)
await async_start(hass, "homeassistant", {}, entry)
data1 = (
'{ "name": "Beer",'
' "state_topic": "test_topic",'
' "json_attributes_topic": "attr-topic1" }'
)
data2 = (
'{ "name": "Beer",'
' "state_topic": "test_topic",'
' "json_attributes_topic": "attr-topic2" }'
)
async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data1)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "attr-topic1", '{ "val": "100" }')
state = hass.states.get("sensor.beer")
assert state.attributes.get("val") == "100"
# Change json_attributes_topic
async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data2)
await hass.async_block_till_done()
# Verify we are no longer subscribing to the old topic
async_fire_mqtt_message(hass, "attr-topic1", '{ "val": "50" }')
state = hass.states.get("sensor.beer")
assert state.attributes.get("val") == "100"
# Verify we are subscribing to the new topic
async_fire_mqtt_message(hass, "attr-topic2", '{ "val": "75" }')
state = hass.states.get("sensor.beer")
assert state.attributes.get("val") == "75"
async def test_unique_id(hass):
"""Test unique id option only creates one sensor per unique_id."""
await async_mock_mqtt_component(hass)
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: [
{
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"unique_id": "TOTALLY_UNIQUE",
},
{
"platform": "mqtt",
"name": "Test 2",
"state_topic": "test-topic",
"unique_id": "TOTALLY_UNIQUE",
},
]
},
)
async_fire_mqtt_message(hass, "test-topic", "payload")
assert len(hass.states.async_all()) == 1
async def test_discovery_removal_sensor(hass, mqtt_mock, caplog):
"""Test removal of discovered sensor."""
entry = MockConfigEntry(domain=mqtt.DOMAIN)
await async_start(hass, "homeassistant", {}, entry)
data = '{ "name": "Beer",' ' "state_topic": "test_topic" }'
async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data)
await hass.async_block_till_done()
state = hass.states.get("sensor.beer")
assert state is not None
assert state.name == "Beer"
async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", "")
await hass.async_block_till_done()
state = hass.states.get("sensor.beer")
assert state is None
async def test_discovery_update_sensor(hass, mqtt_mock, caplog):
"""Test update of discovered sensor."""
entry = MockConfigEntry(domain=mqtt.DOMAIN)
await async_start(hass, "homeassistant", {}, entry)
data1 = '{ "name": "Beer",' ' "state_topic": "test_topic" }'
data2 = '{ "name": "Milk",' ' "state_topic": "test_topic" }'
async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data1)
await hass.async_block_till_done()
state = hass.states.get("sensor.beer")
assert state is not None
assert state.name == "Beer"
async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data2)
await hass.async_block_till_done()
state = hass.states.get("sensor.beer")
assert state is not None
assert state.name == "Milk"
state = hass.states.get("sensor.milk")
assert state is None
async def test_discovery_broken(hass, mqtt_mock, caplog):
"""Test handling of bad discovery message."""
entry = MockConfigEntry(domain=mqtt.DOMAIN)
await async_start(hass, "homeassistant", {}, entry)
data1 = '{ "name": "Beer",' ' "state_topic": "test_topic#" }'
data2 = '{ "name": "Milk",' ' "state_topic": "test_topic" }'
async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data1)
await hass.async_block_till_done()
state = hass.states.get("sensor.beer")
assert state is None
async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data2)
await hass.async_block_till_done()
state = hass.states.get("sensor.milk")
assert state is not None
assert state.name == "Milk"
state = hass.states.get("sensor.beer")
assert state is None
async def test_entity_device_info_with_identifier(hass, mqtt_mock):
"""Test MQTT sensor device registry integration."""
entry = MockConfigEntry(domain=mqtt.DOMAIN)
entry.add_to_hass(hass)
await async_start(hass, "homeassistant", {}, entry)
registry = await hass.helpers.device_registry.async_get_registry()
data = json.dumps(
{
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"device": {
"identifiers": ["helloworld"],
"connections": [["mac", "02:5b:26:a8:dc:12"]],
"manufacturer": "Whatever",
"name": "Beer",
"model": "Glass",
"sw_version": "0.1-beta",
},
"unique_id": "veryunique",
}
)
async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device({("mqtt", "helloworld")}, set())
assert device is not None
assert device.identifiers == {("mqtt", "helloworld")}
assert device.connections == {("mac", "02:5b:26:a8:dc:12")}
assert device.manufacturer == "Whatever"
assert device.name == "Beer"
assert device.model == "Glass"
assert device.sw_version == "0.1-beta"
async def test_entity_device_info_update(hass, mqtt_mock):
"""Test device registry update."""
entry = MockConfigEntry(domain=mqtt.DOMAIN)
entry.add_to_hass(hass)
await async_start(hass, "homeassistant", {}, entry)
registry = await hass.helpers.device_registry.async_get_registry()
config = {
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"device": {
"identifiers": ["helloworld"],
"connections": [["mac", "02:5b:26:a8:dc:12"]],
"manufacturer": "Whatever",
"name": "Beer",
"model": "Glass",
"sw_version": "0.1-beta",
},
"unique_id": "veryunique",
}
data = json.dumps(config)
async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device({("mqtt", "helloworld")}, set())
assert device is not None
assert device.name == "Beer"
config["device"]["name"] = "Milk"
data = json.dumps(config)
async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device({("mqtt", "helloworld")}, set())
assert device is not None
assert device.name == "Milk"
async def test_entity_id_update(hass, mqtt_mock):
"""Test MQTT subscriptions are managed when entity_id is updated."""
registry = mock_registry(hass, {})
mock_mqtt = await async_mock_mqtt_component(hass)
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: [
{
"platform": "mqtt",
"name": "beer",
"state_topic": "test-topic",
"availability_topic": "avty-topic",
"unique_id": "TOTALLY_UNIQUE",
}
]
},
)
state = hass.states.get("sensor.beer")
assert state is not None
assert mock_mqtt.async_subscribe.call_count == 2
mock_mqtt.async_subscribe.assert_any_call("test-topic", ANY, 0, "utf-8")
mock_mqtt.async_subscribe.assert_any_call("avty-topic", ANY, 0, "utf-8")
mock_mqtt.async_subscribe.reset_mock()
registry.async_update_entity("sensor.beer", new_entity_id="sensor.milk")
await hass.async_block_till_done()
state = hass.states.get("sensor.beer")
assert state is None
state = hass.states.get("sensor.milk")
assert state is not None
assert mock_mqtt.async_subscribe.call_count == 2
mock_mqtt.async_subscribe.assert_any_call("test-topic", ANY, 0, "utf-8")
mock_mqtt.async_subscribe.assert_any_call("avty-topic", ANY, 0, "utf-8")
async def test_entity_device_info_with_hub(hass, mqtt_mock):
"""Test MQTT sensor device registry integration."""
entry = MockConfigEntry(domain=mqtt.DOMAIN)
entry.add_to_hass(hass)
await async_start(hass, "homeassistant", {}, entry)
registry = await hass.helpers.device_registry.async_get_registry()
hub = registry.async_get_or_create(
config_entry_id="123",
connections=set(),
identifiers={("mqtt", "hub-id")},
manufacturer="manufacturer",
model="hub",
)
data = json.dumps(
{
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"device": {"identifiers": ["helloworld"], "via_device": "hub-id"},
"unique_id": "veryunique",
}
)
async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device({("mqtt", "helloworld")}, set())
assert device is not None
assert device.via_device_id == hub.id
|
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common.i18n import _
from heat.engine import clients
from heat.engine import constraints
from heat.engine import properties
from heat.engine import resource
from heat.engine import support
class MonascaAlarmDefinition(resource.Resource):
"""Heat Template Resource for Monasca Alarm definition.
    This plug-in requires python-monascaclient>=1.0.22. To enable it,
    install the client library and restart the heat-engine.
"""
support_status = support.SupportStatus(
version='5.0.0',
status=support.UNSUPPORTED)
default_client_name = 'monasca'
entity = 'alarm_definitions'
SEVERITY_LEVELS = (
LOW, MEDIUM, HIGH, CRITICAL
) = (
'low', 'medium', 'high', 'critical'
)
PROPERTIES = (
NAME, DESCRIPTION, EXPRESSION, MATCH_BY, SEVERITY,
OK_ACTIONS, ALARM_ACTIONS, UNDETERMINED_ACTIONS,
ACTIONS_ENABLED
) = (
'name', 'description', 'expression', 'match_by', 'severity',
'ok_actions', 'alarm_actions', 'undetermined_actions',
'actions_enabled'
)
properties_schema = {
NAME: properties.Schema(
properties.Schema.STRING,
_('Name of the alarm. By default, physical resource name is '
'used.'),
update_allowed=True
),
DESCRIPTION: properties.Schema(
properties.Schema.STRING,
_('Description of the alarm.'),
update_allowed=True
),
EXPRESSION: properties.Schema(
properties.Schema.STRING,
_('Expression of the alarm to evaluate.'),
update_allowed=False,
required=True
),
MATCH_BY: properties.Schema(
properties.Schema.LIST,
_('The metric dimensions to match to the alarm dimensions. '
'One or more dimension key names separated by a comma.')
),
SEVERITY: properties.Schema(
properties.Schema.STRING,
_('Severity of the alarm.'),
update_allowed=True,
constraints=[constraints.AllowedValues(
SEVERITY_LEVELS
)],
default=LOW
),
OK_ACTIONS: properties.Schema(
properties.Schema.LIST,
_('The notification methods to use when an alarm state is OK.'),
update_allowed=True,
schema=properties.Schema(
properties.Schema.STRING,
_('Monasca notification'),
constraints=[constraints.CustomConstraint(
'monasca.notification')
]
)
),
ALARM_ACTIONS: properties.Schema(
properties.Schema.LIST,
_('The notification methods to use when an alarm state is ALARM.'),
update_allowed=True,
schema=properties.Schema(
properties.Schema.STRING,
_('Monasca notification'),
constraints=[constraints.CustomConstraint(
'monasca.notification')
]
)
),
UNDETERMINED_ACTIONS: properties.Schema(
properties.Schema.LIST,
_('The notification methods to use when an alarm state is '
'UNDETERMINED.'),
update_allowed=True,
schema=properties.Schema(
properties.Schema.STRING,
_('Monasca notification'),
constraints=[constraints.CustomConstraint(
'monasca.notification')
]
)
),
ACTIONS_ENABLED: properties.Schema(
properties.Schema.BOOLEAN,
_('Whether to enable the actions or not.'),
update_allowed=True,
default=True,
),
}
def handle_create(self):
args = dict(
name=(self.properties[self.NAME] or
self.physical_resource_name()),
description=self.properties[self.DESCRIPTION],
expression=self.properties[self.EXPRESSION],
match_by=self.properties[self.MATCH_BY],
severity=self.properties[self.SEVERITY],
ok_actions=self.properties[self.OK_ACTIONS],
alarm_actions=self.properties[self.ALARM_ACTIONS],
undetermined_actions=self.properties[
self.UNDETERMINED_ACTIONS]
)
alarm = self.client().alarm_definitions.create(**args)
self.resource_id_set(alarm['id'])
        # Monasca enables actions by default
actions_enabled = self.properties[self.ACTIONS_ENABLED]
if not actions_enabled:
self.client().alarm_definitions.patch(
alarm_id=self.resource_id,
actions_enabled=actions_enabled
)
def handle_update(self, prop_diff, json_snippet=None, tmpl_diff=None):
args = dict(alarm_id=self.resource_id)
if prop_diff.get(self.NAME):
args['name'] = prop_diff.get(self.NAME)
if prop_diff.get(self.DESCRIPTION):
args['description'] = prop_diff.get(self.DESCRIPTION)
if prop_diff.get(self.SEVERITY):
args['severity'] = prop_diff.get(self.SEVERITY)
if prop_diff.get(self.OK_ACTIONS):
args['ok_actions'] = prop_diff.get(self.OK_ACTIONS)
if prop_diff.get(self.ALARM_ACTIONS):
args['alarm_actions'] = prop_diff.get(self.ALARM_ACTIONS)
if prop_diff.get(self.UNDETERMINED_ACTIONS):
args['undetermined_actions'] = prop_diff.get(
self.UNDETERMINED_ACTIONS
)
if prop_diff.get(self.ACTIONS_ENABLED):
args['actions_enabled'] = prop_diff.get(self.ACTIONS_ENABLED)
self.client().alarm_definitions.patch(**args)
def handle_delete(self):
if self.resource_id is not None:
try:
self.client().alarm_definitions.delete(
alarm_id=self.resource_id)
except Exception as ex:
self.client_plugin().ignore_not_found(ex)
# FIXME(kanagaraj-manickam) Remove this method once monasca defect 1484900
# is fixed.
def _show_resource(self):
return self.client().alarm_definitions.get(self.resource_id)
def resource_mapping():
return {
'OS::Monasca::AlarmDefinition': MonascaAlarmDefinition
}
def available_resource_mapping():
if not clients.has_client(MonascaAlarmDefinition.default_client_name):
return {}
return resource_mapping()
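# Illustrative check, not part of the plugin itself: when the monasca client
# plugin cannot be created, available_resource_mapping() returns {}; otherwise
# it returns {'OS::Monasca::AlarmDefinition': MonascaAlarmDefinition}.
#
#     mapping = available_resource_mapping()
#     assert mapping in ({}, resource_mapping())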
|
|
# This module is part of Hypatia and is released under the
# MIT License: http://opensource.org/licenses/MIT
"""Why stuff is drawn; logic flow for the game.
Game logic, game component interaction.
Glues various modules/game components together with behaviors defined
in methods belonging to Game().
Note:
    I have not decided firmly on the approach to take, so expect heavy
    changes in the future. Documentation is sparse because no real
    architecture has been settled on for this module yet; here I'm
    loosely imitating Flask's app object.
"""
import os
import sys
import xml.etree.ElementTree as ET
try:
import ConfigParser as configparser
except ImportError:
import configparser
import pygame
from hypatia import tiles
from hypatia import dialog
from hypatia import render
from hypatia import player
from hypatia import sprites
from hypatia import physics
from hypatia import resources
from hypatia import constants
from hypatia import controllers
class TMXException(Exception):
"""Base class for all exceptions related to TMX.
See Also:
:class:`TMX`
"""
pass
class TMXMissingPlayerStartPosition(TMXException):
"""TMX file parsed does not have a player start
position, which is required to create scenes.
See Also:
:class:`TMX`
"""
def __init__(self):
message = "TMX file missing player_start_position"
super(TMXMissingPlayerStartPosition, self).__init__(message)
class TMXTooManyTilesheets(TMXException):
"""A TMX file was attempted to be imported through
`TileMap.from_tmx()`, but the TMX defined more than
one tilesheet. This is a feature Hypatia does not
support.
See Also:
:meth:`TileMap.from_tmx()` and :class:`TMX`.
"""
def __init__(self):
"""The exception message is this class' docstring.
Note:
Mostly scaffolding, plus won't be here for long.
"""
        message = TMXTooManyTilesheets.__doc__
super(TMXTooManyTilesheets, self).__init__(message)
class TMXVersionUnsupported(TMXException):
"""Attempted to create a TileMap from a TMX map, but
the TMX map version is unsupported.
    Attributes:
map_version (str): the version which was attempted
"""
def __init__(self, map_version):
"""
Args:
map_version (str): the map version which is
unsupported. This becomes the map_version
attribute.
"""
message = 'version %s unsupported' % map_version
super(TMXVersionUnsupported, self).__init__(message)
self.map_version = map_version
class TMXLayersNotCSV(TMXException):
"""The data encoding used for layers during Tilemap.from_tmx()
is not supported. Only CSV is supported.
    Attributes:
data_encoding (str): the failed data encoding.
"""
def __init__(self, data_encoding):
"""
Args:
data_encoding (str): the failed data encoding
"""
message = 'tmx layer data encoding %s unsupported' % data_encoding
super(TMXLayersNotCSV, self).__init__(message)
        self.data_encoding = data_encoding
# not in use
class Hypatia(object):
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
class Game(object):
"""Simulates the interaction between game components."""
def __init__(self, screen=None, scene=None,
viewport_size=None, dialogbox=None):
self.screen = screen or render.Screen()
self.viewport = render.Viewport(viewport_size)
self.dialogbox = dialogbox or dialog.DialogBox(self.viewport.rect.size)
# everything has been added, run runtime_setup() on each
# relevant item
self.scene = scene
self.scene.runtime_setup()
self.start_loop()
# will be removed
def old_render(self):
"""Drawing behavior for game objects.
        Parts of this should go to their respective classes, e.g.,
scene.
Needs to be updated to use sprite groups.
"""
first_tilemap_layer = self.scene.tilemap.layer_images[0]
self.viewport.center_on(self.scene.human_player.walkabout,
first_tilemap_layer.get_rect())
self.viewport.blit(first_tilemap_layer)
self.scene.tilemap.blit_layer_animated_tiles(self.viewport, 0)
# render each npc walkabout
for npc in self.scene.npcs:
npc.walkabout.blit(self.screen.clock,
self.viewport.surface,
self.viewport.rect.topleft)
# finally human and rest map layers last
self.scene.human_player.walkabout.blit(self.screen.clock,
self.viewport.surface,
self.viewport.rect.topleft)
for i, layer in enumerate(self.scene.tilemap.layer_images[1:], 1):
self.viewport.blit(layer)
self.scene.tilemap.blit_layer_animated_tiles(self.viewport, i)
self.dialogbox.blit(self.viewport.surface)
def render(self):
"""Drawing behavior for game objects.
        Parts of this should go to their respective classes, e.g.,
scene.
Needs to be updated to use sprite groups.
"""
self.scene.render(self.viewport, self.screen.clock)
self.dialogbox.blit(self.viewport.surface)
def start_loop(self):
controller = controllers.WorldController(self)
        while controller.handle_input():
            self.screen.update(self.viewport.surface)
            self.render()
pygame.quit()
sys.exit()
class Scene(object):
"""A map with configuration data/meta, e.g., NPCs.
Attributes:
tilemap (hypatia.tiles.Tilemap): --
player_start_position (tuple): (x, y); two integer tuple
denoting the starting position for human player.
human_player (hypatia.player.Player): the human player object.
npcs (list): a list of hypatia.player.NPC objects
Notes:
Should have methods for managing npcs, e.g., add/remove.
"""
def __init__(self, tilemap, player_start_position,
human_player, npcs=None):
"""
Args:
tilemap (tiles.TileMap): --
player_start_position (tuple): x, y pixel coordinates
for the human player's starting position.
human_player (players.HumanPlayer): --
npcs (List[players.Npc]): --
npc_sprite_group (pygame.sprite.Group): --
"""
self.tilemap = tilemap
self.player_start_position = player_start_position
self.human_player = human_player
self.npcs = npcs or []
npc_walkabouts = [n.walkabout for n in self.npcs]
self.npc_sprite_group = pygame.sprite.Group(*npc_walkabouts)
@staticmethod
def create_human_player(start_position):
"""Currently mostly scaffolding for creating/loading the
human character into the scene.
Args:
start_position (tuple): x, y pixel coordinates
for the human player's starting position.
Returns:
player.HumanPlayer: --
"""
# .. create player with player scene data
bow = sprites.Walkabout('bow')
human_walkabout = sprites.Walkabout('slime',
position=start_position,
children=[bow])
velocity = physics.Velocity(20, 20)
human_player = player.HumanPlayer(walkabout=human_walkabout,
velocity=velocity)
return human_player
def to_tmx_resource(self, tmx_name):
"""Scaffolding.
"""
pass
@classmethod
def from_tmx_resource(cls, tmx_name):
"""Create a scene from a Tiled editor TMX file in
the scenes resource directory.
Returns:
Scene: A scene created using all compatible
data from designated TMX file.
"""
file_path = os.path.join('resources', 'scenes', tmx_name + '.tmx')
tmx = TMX(file_path)
human_player = cls.create_human_player(tmx.player_start_position)
return Scene(tilemap=tmx.tilemap,
player_start_position=tmx.player_start_position,
human_player=human_player,
npcs=tmx.npcs)
@classmethod
    def from_resource(cls, scene_name):
"""The native format, and hopefully most reliable,
stable, and generally best way of saving, loading,
or creating Hypatia scenes.
This defines the standard by which all
other Scene constructors must follow.
Args:
scene_name (str): the name of the directory which corresponds
to the map you want to load from resources/maps.
"""
# load the scene zip from the scene resource and read
# the general scene configuration, first.
resource = resources.Resource('scenes', scene_name)
scene_ini = resource['scene.ini']
# Construct a TileMap from the tilemap.txt
# contents from the scene resource.
tilemap_string = resource['tilemap.txt']
tilemap = tiles.TileMap.from_string(tilemap_string)
# Get the player's starting position from the
# general scene configuration.
player_start_x = scene_ini.getint('general', 'player_start_x')
player_start_y = scene_ini.getint('general', 'player_start_y')
player_start_position = (player_start_x, player_start_y)
# Create a player using the player
# start position found.
        human_player = cls.create_human_player(player_start_position)
# npcs.ini
#
# Create a list of NPCs using a configuration file
# from the scene resource.
npcs_ini = resource['npcs.ini']
npcs = []
# each section title is the npc's name,
# each sections key/value pairs are
# the NPC's attributes.
for npc_name in npcs_ini.sections():
if npcs_ini.has_option(npc_name, 'walkabout'):
# The NPC's walkabout resource name
walkabout_name = npcs_ini.get(npc_name, 'walkabout')
# the required (x, y) pixel coordinates referring
# to the position of this NPC
position_x = npcs_ini.getint(npc_name, 'position_x')
position_y = npcs_ini.getint(npc_name, 'position_y')
position = (position_x, position_y)
# create the NPC's walkabout using the
# designated walkabout name and position
# from the NPC's config.
npc_walkabout = sprites.Walkabout(walkabout_name,
position=position)
if npcs_ini.has_option(npc_name, 'say'):
# Load some say text for the NPC, so when
# an actor uses talk() on them, they say
# this message--the say_text!
say_text = npcs_ini.get(npc_name, 'say')
else:
say_text = None
npc = player.Npc(walkabout=npc_walkabout, say_text=say_text)
npcs.append(npc)
return Scene(tilemap=tilemap,
player_start_position=player_start_position,
human_player=human_player,
npcs=npcs)
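    # Illustrative layout of a scene resource as read by from_resource() above
    # (the file names and keys come from the code; the values are made-up
    # examples):
    #
    #   scene.ini    [general]
    #                player_start_x = 32
    #                player_start_y = 48
    #   tilemap.txt  a tilemap string parsed by tiles.TileMap.from_string()
    #   npcs.ini     [some_npc]
    #                walkabout = oldman
    #                position_x = 64
    #                position_y = 64
    #                say = Hello there!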
def collide_check(self, rect):
"""Returns True if there are collisions with rect.
Args:
rect (pygame.Rect): The area/rectangle which
to test for collisions against NPCs and
the tilemap's wallmap.
Notes:
Should use pygame.sprite.spritecollide()
"""
possible_collisions = self.tilemap.impassable_rects
for npc in self.npcs:
possible_collisions.append(npc.walkabout.rect)
return rect.collidelist(possible_collisions) != -1
def runtime_setup(self):
"""Initialize all the NPCs, tilemap, etc.
Is this a horrible way of doing this? I dunno,
not the fondest...
"""
npcs_to_setup = tuple(npc.walkabout for npc in self.npcs)
objects_to_setup = (self.tilemap, self.human_player.walkabout,)
objects_to_setup = objects_to_setup + npcs_to_setup
        for object_to_setup in objects_to_setup:
object_to_setup.runtime_setup()
def render(self, viewport, clock):
"""Render this Scene onto viewport.
Args:
viewport (render.Viewport): The global/master viewport,
where stuff will be blitted to. Also used for some
calculations.
clock (pygame.time.Clock): Global/master/the game
clock used for timing in this game.
"""
        self.tilemap.tilesheet.animated_tiles_group.update(
            clock, viewport.surface, viewport.rect.topleft)
first_tilemap_layer = self.tilemap.layer_images[0]
viewport.center_on(self.human_player.walkabout,
first_tilemap_layer.get_rect())
viewport.blit(first_tilemap_layer)
self.tilemap.blit_layer_animated_tiles(viewport, 0)
# render each npc walkabout
# should use group draw
for npc in self.npcs:
npc.walkabout.blit(clock,
viewport.surface,
viewport.rect.topleft)
# finally human and rest map layers last
self.human_player.walkabout.blit(clock,
viewport.surface,
viewport.rect.topleft)
for i, layer in enumerate(self.tilemap.layer_images[1:], 1):
viewport.blit(layer)
self.tilemap.blit_layer_animated_tiles(viewport, i)
class TMX(object):
"""`TMX` object to represent and "translate"
supported Scene data from a TMX file.
TMX files are capable of providing the information
required to instantiate TileMap and Scene.
TMX file must have the following settings:
* orientation: orthogonal
* tile layer format: csv
* tile render order: right down
You must also specify the tilesheet name you want to use
in Hypatia, as your tileset image name. You may only use
one image.
Constants:
SUPPORTED (str): the TMX file format which is supported.
Attributes:
root (ElementTree): the XML ElementTree root of the TMX file.
player_start_position (tuple): (x, y) coordinate in which
the player begins this scene at.
layers (list): a 3D list of tile IDs referring to a tile
by id in a Tilesheet. This data is extrapolated from
a CSV-format list of tile IDs.
npcs (List[players.Npc]): --
See Also:
http://doc.mapeditor.org/reference/tmx-map-format/
"""
SUPPORTED = '1.0'
def __init__(self, path_or_readable):
"""Read XML from path_or_readable, validate the TMX as being
supported by Hypatia, and set all supported information as
attributes.
Args:
path_or_readable (str|file-like-object): This is
plopped right into ElementTree.parse().
Note:
This method is under-documented!
"""
# parse TMXML for TileMap-specific/supported data
tree = ET.parse(path_or_readable)
self.root = tree.getroot() # <map ...>
# check the version first, make sure it's supported
map_version = self.root.attrib['version']
if map_version != self.SUPPORTED:
raise TMXVersionUnsupported(map_version)
# Get the Tilesheet (tileset) name from the tileset
tileset_images = self.root.findall('.//tileset/image')
if len(tileset_images) > 1:
# too many tilesets!
raise TMXTooManyTilesheets()
tileset = self.root.find('.//tileset')
tilesheet_name = tileset.attrib['name']
# get the 3D constructor/blueprint of TileMap,
# which simply references, by integer, the
# tile from tilesheet.
layers = []
for layer_data in self.root.findall(".//layer/data"):
data_encoding = layer_data.attrib['encoding']
if data_encoding != 'csv':
raise TMXLayersNotCSV(data_encoding)
layer_csv = layer_data.text.strip()
rows = layer_csv.split('\n')
parsed_rows = []
for row in rows:
                # TMX tilesets start their ids at 1, Hypatia Tilesheets
                # start ids at 0.
cells = row.split(',')[:-1] # trailing comma
parsed_row = [int(tile_id) - 1 for tile_id in cells]
parsed_rows.append(parsed_row)
layers.append(parsed_rows)
self.tilemap = tiles.TileMap(tilesheet_name, layers)
# loop through objects in the object layer to find the player's
# start position and NPC information.
self.npcs = []
self.player_start_position = None
for tmx_object in self.root.findall(".//objectgroup/object"):
object_type = tmx_object.attrib['type']
x = int(tmx_object.attrib['x'])
y = int(tmx_object.attrib['y'])
if object_type == 'player_start_position':
self.player_start_position = (x, y)
elif object_type == 'npc':
properties = tmx_object.find('properties')
xpath = ".//property[@name='%s']"
position = (x, y)
walkabout_name = (properties.find(xpath % 'walkabout').
attrib['value'])
walkabout = sprites.Walkabout(walkabout_name, position)
say_text = properties.find(xpath % 'say').attrib['value']
npc = player.Npc(walkabout=walkabout, say_text=say_text)
self.npcs.append(npc)
# should use xpath before loading all npcs...
if self.player_start_position is None:
raise TMXMissingPlayerStartPosition()
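# A minimal usage sketch, assuming a 'debug' TMX map exists under
# resources/scenes/ and that pygame can open a window on this machine:
#
#     scene = Scene.from_tmx_resource('debug')
#     Game(scene=scene, viewport_size=(256, 240))
#
# Note that Game.__init__() runs scene.runtime_setup() and then start_loop()
# itself, so constructing the Game object is enough to enter the main loop.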
|
|
"""
PureMVC Python Demo - wxPython Employee Admin
By Toby de Havilland <toby.de.havilland@puremvc.org>
Copyright(c) 2007-08 Toby de Havilland, Some rights reserved.
"""
import puremvc.interfaces
import puremvc.patterns.mediator
import model, enum, main, wx, vo
class DialogMediator(puremvc.patterns.mediator.Mediator, puremvc.interfaces.IMediator):
NAME = 'DialogMediator'
def __init__(self, viewComponent):
super(DialogMediator, self).__init__(DialogMediator.NAME, viewComponent)
def listNotificationInterests(self):
return [
main.AppFacade.SHOW_DIALOG,
]
def handleNotification(self, note):
noteName = note.getName()
if noteName == main.AppFacade.SHOW_DIALOG:
dlg = wx.MessageDialog(self.viewComponent, note.getBody(),'Alert',style=wx.OK|wx.ICON_EXCLAMATION)
result = dlg.ShowModal()
dlg.Destroy()
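# Typical wiring, shown for illustration only (the view-component attribute
# names below are assumptions; registration normally happens in the
# application facade at startup):
#
#     facade.registerMediator(DialogMediator(frame))
#     facade.registerMediator(UserFormMediator(frame.userForm))
#     facade.registerMediator(UserListMediator(frame.userList))
#     facade.registerMediator(RolePanelMediator(frame.rolePanel))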
class UserFormMediator(puremvc.patterns.mediator.Mediator, puremvc.interfaces.IMediator):
NAME = 'UserFormMediator'
userProxy = None
def __init__(self, viewComponent):
super(UserFormMediator, self).__init__(UserFormMediator.NAME, viewComponent)
self.userProxy = self.facade.retrieveProxy(model.UserProxy.NAME)
self.viewComponent.updateDepartmentCombo(enum.DeptList, enum.DEPT_NONE_SELECTED)
self.viewComponent.Bind(self.viewComponent.EVT_ADD,self.onAdd)
self.viewComponent.Bind(self.viewComponent.EVT_UPDATE,self.onUpdate)
self.viewComponent.Bind(self.viewComponent.EVT_CANCEL,self.onCancel)
def listNotificationInterests(self):
return [
main.AppFacade.NEW_USER,
main.AppFacade.USER_DELETED,
main.AppFacade.USER_SELECTED
]
def handleNotification(self, note):
noteName = note.getName()
if noteName == main.AppFacade.NEW_USER:
self.viewComponent.updateMode(self.viewComponent.MODE_ADD)
self.clearForm()
self.viewComponent.firstInput.SetFocus()
if noteName == main.AppFacade.USER_DELETED:
self.viewComponent.user = None
self.clearForm()
if noteName == main.AppFacade.USER_SELECTED:
self.viewComponent.updateUser(note.getBody())
self.viewComponent.updateMode(self.viewComponent.MODE_EDIT)
def clearForm(self):
self.viewComponent.user = None
self.viewComponent.usernameInput.SetValue('')
self.viewComponent.firstInput.SetValue('')
self.viewComponent.lastInput.SetValue('')
self.viewComponent.emailInput.SetValue('')
self.viewComponent.passwordInput.SetValue('')
self.viewComponent.confirmInput.SetValue('')
self.viewComponent.departmentCombo.SetValue(enum.DEPT_NONE_SELECTED)
def onAdd(self, evt):
user = vo.UserVO(self.viewComponent.usernameInput.GetValue(),
self.viewComponent.firstInput.GetValue(),
self.viewComponent.lastInput.GetValue(),
self.viewComponent.emailInput.GetValue(),
self.viewComponent.passwordInput.GetValue(),
self.viewComponent.departmentCombo.GetValue())
self.viewComponent.user = user
self.userProxy.addItem(user)
self.sendNotification(main.AppFacade.USER_ADDED, user)
self.clearForm()
def onUpdate(self, evt):
user = vo.UserVO(self.viewComponent.usernameInput.GetValue(),
self.viewComponent.firstInput.GetValue(),
self.viewComponent.lastInput.GetValue(),
self.viewComponent.emailInput.GetValue(),
self.viewComponent.passwordInput.GetValue(),
self.viewComponent.departmentCombo.GetValue())
self.viewComponent.user = user
self.userProxy.updateItem(user)
self.sendNotification(main.AppFacade.USER_UPDATED, user)
self.clearForm()
def onCancel(self, evt):
self.sendNotification(main.AppFacade.CANCEL_SELECTED)
self.clearForm()
class UserListMediator(puremvc.patterns.mediator.Mediator, puremvc.interfaces.IMediator):
NAME = 'UserListMediator'
userProxy = None
def __init__(self, viewComponent):
super(UserListMediator, self).__init__(UserListMediator.NAME, viewComponent)
self.userProxy = self.facade.retrieveProxy(model.UserProxy.NAME)
self.viewComponent.updateUserGrid(self.userProxy.getUsers())
self.viewComponent.Bind(self.viewComponent.EVT_USER_SELECTED,self.onSelect)
self.viewComponent.Bind(self.viewComponent.EVT_NEW,self.onNew)
self.viewComponent.Bind(self.viewComponent.EVT_DELETE,self.onDelete)
def listNotificationInterests(self):
return [
main.AppFacade.CANCEL_SELECTED,
main.AppFacade.USER_UPDATED,
main.AppFacade.USER_ADDED,
main.AppFacade.USER_DELETED
]
def handleNotification(self, note):
noteName = note.getName()
        if noteName in (main.AppFacade.CANCEL_SELECTED,
                        main.AppFacade.USER_UPDATED,
                        main.AppFacade.USER_ADDED,
                        main.AppFacade.USER_DELETED):
            self.viewComponent.deSelect()
            self.viewComponent.updateUserGrid(self.userProxy.getUsers())
def onSelect(self, evt):
self.sendNotification(main.AppFacade.USER_SELECTED,self.viewComponent.selectedUser)
def onNew(self, evt):
user = vo.UserVO()
self.sendNotification(main.AppFacade.NEW_USER, user)
def onDelete(self, evt):
self.sendNotification(main.AppFacade.DELETE_USER,self.viewComponent.selectedUser)
class RolePanelMediator(puremvc.patterns.mediator.Mediator, puremvc.interfaces.IMediator):
NAME = 'RolePanelMediator'
roleProxy = None
def __init__(self, viewComponent):
super(RolePanelMediator, self).__init__(RolePanelMediator.NAME, viewComponent)
self.roleProxy = self.facade.retrieveProxy(model.RoleProxy.NAME)
self.viewComponent.updateRoleCombo(enum.RoleList, enum.ROLE_NONE_SELECTED)
self.viewComponent.Bind(self.viewComponent.EVT_ADD_ROLE,self.onAddRole)
self.viewComponent.Bind(self.viewComponent.EVT_REMOVE_ROLE,self.onRemoveRole)
def getRolePanel(self):
        return self.viewComponent
def onAddRole(self,evt):
self.roleProxy.addRoleToUser(self.viewComponent.user, self.viewComponent.selectedRole)
def onRemoveRole(self,evt):
self.roleProxy.removeRoleFromUser(self.viewComponent.user, self.viewComponent.selectedRole)
self.viewComponent.updateRoleList(self.roleProxy.getUserRoles(self.viewComponent.user.username))
def listNotificationInterests(self):
return [
main.AppFacade.NEW_USER,
main.AppFacade.USER_ADDED,
main.AppFacade.USER_UPDATED,
main.AppFacade.USER_DELETED,
main.AppFacade.CANCEL_SELECTED,
main.AppFacade.USER_SELECTED,
main.AppFacade.ADD_ROLE_RESULT
]
def handleNotification(self, note):
noteName = note.getName()
if noteName == main.AppFacade.NEW_USER:
self.clearForm()
elif noteName == main.AppFacade.USER_ADDED:
self.viewComponent.user = note.getBody()
roleVO = vo.RoleVO(self.viewComponent.user.username)
self.roleProxy.addItem(roleVO)
self.clearForm()
elif noteName == main.AppFacade.USER_UPDATED:
self.clearForm()
elif noteName == main.AppFacade.USER_DELETED:
self.clearForm()
elif noteName == main.AppFacade.CANCEL_SELECTED:
self.clearForm()
elif noteName == main.AppFacade.USER_SELECTED:
self.viewComponent.user = note.getBody()
self.viewComponent.updateRoleList(self.roleProxy.getUserRoles(self.viewComponent.user.username))
elif noteName == main.AppFacade.ADD_ROLE_RESULT:
self.viewComponent.updateRoleList(self.roleProxy.getUserRoles(self.viewComponent.user.username))
def clearForm(self):
self.viewComponent.user = None
self.viewComponent.updateRoleList([])
self.viewComponent.roleCombo.SetStringSelection(enum.ROLE_NONE_SELECTED)
|
|
from __future__ import division, print_function, absolute_import
import signal
from functools import wraps
from .util import range, class_from_instancemethod
def _set_metainfo(method, key, value):
key = '_benchmarking_%s' % key
setattr(method, key, value)
def _get_metainfo(method, key):
key = '_benchmarking_%s' % key
return getattr(method, key, getattr(class_from_instancemethod(method), key, None))
def project(name):
"""Set project name."""
def f(method):
_set_metainfo(method, 'project', name)
return method
return f
def calls(number):
"""Specifies a number of benchmark method calls per single repeat."""
def f(method):
@wraps(method)
def wrapper(*args, **kwargs):
for _ in range(number):
method(*args, **kwargs)
return number
return wrapper
return f
def seconds(func=None, max_seconds=3):
"""Specifies a number of seconds for single repeat."""
def f(method):
@wraps(method)
def wrapper(*args, **kwargs):
cycle = {'stopped': False}
def stop_cycle(_, __):
cycle['stopped'] = True
signal.signal(signal.SIGALRM, stop_cycle)
signal.setitimer(signal.ITIMER_REAL, max_seconds)
calls = 0
try:
while not cycle['stopped']:
method(*args, **kwargs)
calls += 1
finally:
signal.signal(signal.SIGALRM, signal.SIG_DFL)
signal.setitimer(signal.ITIMER_REAL, 0)
return calls
return wrapper
if func is None:
return f
else:
return f(func)
def repeats(number):
"""Specifies a number of repeats."""
def f(method):
_set_metainfo(method, 'repeats', number)
return method
return f
def data(*args):
"""Specifies data arguments for benchmark."""
def f(method):
_set_metainfo(method, 'data_function', lambda: zip(args, args))
return method
return f
def data_function(func):
"""Specifies data function for benchmark."""
def f(method):
_set_metainfo(method, 'data_function', func)
return method
return f
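# Usage sketch (assumption: a benchmark runner collects 'benchmark_*' methods
# and reads the metainfo attached by the decorators above):
#
#     class JoinBenchmark(object):
#         @project('demo')
#         @repeats(5)
#         @calls(1000)
#         def benchmark_join(self):
#             ''.join(str(i) for i in range(100))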
def deferred_data_function(func):
"""Wraps up data function for benchmark when it returns Deferred."""
def deferred_generator():
for (label, data) in func():
data = deferred(lambda: data)()
yield (label, data)
return data_function(deferred_generator)
class TimeoutError(Exception):
pass
class ReactorError(Exception):
pass
def deferred(func_or_class=None, max_seconds=120):
"""
Class or function decorator that makes deferred synchronous with reactor stop/start around.
For function: wraps up deferred function to become synchronous with reactor stop/start around.
For class: wraps up all class methods starting with 'benchmark_' with deferred decorator, makes
setUp and tearDown deferred and run inside the same reactor start/stop pair.
@param max_seconds: maximum running time for reactor
@type max_seconds: C{int}
"""
def _deferred(func_or_class):
if isinstance(func_or_class, type):
klass = func_or_class
setUp = klass.setUp
tearDown = klass.tearDown
klass.setUp = lambda self: None
klass.tearDown = lambda self: None
for method in dir(klass):
if method.startswith('benchmark_'):
setattr(klass, method, deferred(max_seconds=max_seconds)(deferred_setup_teardown(setUp=setUp, tearDown=tearDown)(getattr(klass, method))))
return klass
else:
func = func_or_class
@wraps(func)
def wrapper(*args, **kwargs):
"""
Waits for deffered to callback.
@type d: deferred; otherwise returns it as is.
"""
from twisted.internet import defer, reactor
d = func(*args, **kwargs)
if not isinstance(d, defer.Deferred):
return d
res = {}
def store_result(result):
res['result'] = result
def store_exception(failure):
res['exception'] = failure.value
d.addCallbacks(store_result, store_exception)
def stop_reactor():
if timeout_guard.active():
timeout_guard.cancel()
reactor.iterate(0)
reactor.disconnectAll()
reactor.crash()
if reactor.threadpool is not None:
reactor._stopThreadPool()
if len(reactor.getDelayedCalls()) != 0:
calls = reactor.getDelayedCalls()
for call in calls:
call.cancel()
if 'exception' not in res:
res['exception'] = ReactorError("Reactor unclean: delayed calls %s" % (map(str, calls), ))
timeout_guard = reactor.callLater(max_seconds,
lambda: d.errback(TimeoutError("%r is still running after %d seconds" % (d, max_seconds))))
reactor.callWhenRunning(d.addCallback, lambda _: reactor.callLater(0, stop_reactor))
reactor.run()
if 'exception' in res:
raise res['exception']
return res['result']
return wrapper
if func_or_class is None:
return _deferred
else:
return _deferred(func_or_class)
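# Usage sketch for the decorator above (assumes Twisted is installed; the
# benchmark body is illustrative only):
#
#     @deferred(max_seconds=10)
#     def benchmark_deferred_sleep():
#         from twisted.internet import task, reactor
#         return task.deferLater(reactor, 0.01, lambda: None)
#
# The wrapper starts the reactor, waits for the Deferred to fire (or fails
# after max_seconds), and returns the Deferred's result synchronously.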
def deferred_setup_teardown(setUp, tearDown):
"""
@param setUp: function to be called before running the deferred
@type setUp: C{func}
@param tearDown: function to be called after running the deferred
@type tearDown: C{func}
"""
def _deferred_setup_teardown(func):
@wraps(func)
def wrapper(*args, **kwargs):
"""
Waits for deffered to callback.
@type d: deferred; otherwise returns it as is.
"""
from twisted.internet import defer
return defer.maybeDeferred(setUp, args[0]).addCallback(lambda _: func(*args, **kwargs) \
.addBoth(lambda result: defer.maybeDeferred(tearDown, args[0]).addCallback(lambda _: result)))
return wrapper
return _deferred_setup_teardown
def async(func=None, concurrency=1, requests=None, duration=None):
"""
Asynchronous benchmark runner.
Runs wrapped deferred action with concurrency and limiting number of requests OR test duration.
One of C{requests} or C{duration} should be specified, but not both.
Example::
@async(concurrency=10)
def benchmark_example():
return defer.succeed(None)
@param concurrency: maximum number of concurrent actions
@type concurrency: C{int}
@param requests: overall number of calls to perform
@type requests: C{int}
@param duration: length of test in seconds
@type duration: C{float}
"""
assert requests is not None or duration is not None, "either duration or requests should be specified"
assert requests is None or duration is None, "can't specify both duration and requests"
def _async(func):
@wraps(func)
def wrapper(*args, **kwargs):
from twisted.internet import defer, task, reactor
d = defer.Deferred()
sem = defer.DeferredSemaphore(concurrency)
req = {'done': 0}
if requests is not None:
req['left'] = requests
else:
def finish():
starter.stop()
if sem.tokens == concurrency:
d.callback(req['done'])
reactor.callLater(duration, finish)
def startMore():
def release(_):
req['done'] += 1
sem.release()
if not starter.running and sem.tokens == concurrency:
d.callback(req['done'])
return _
def gotError(fail):
if not d.called:
d.errback(fail)
else:
print(fail)
def acquired(_):
d = defer.maybeDeferred(func, *args, **kwargs).addErrback(gotError).addBoth(release)
if sem.tokens > 0 and not d.called:
startMore()
if requests is not None:
req['left'] -= 1
if req['left'] == 0:
starter.stop()
return sem.acquire().addCallback(acquired)
starter = task.LoopingCall(startMore)
starter.start(0, True)
return d
return wrapper
if func is None:
return _async
else:
return _async(func)
|
|
# Copyright 2015 Huawei Technologies Co.,LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_serialization import jsonutils
from magnum.conductor import k8s_monitor
from magnum.conductor import mesos_monitor
from magnum.conductor import monitors
from magnum.conductor import swarm_monitor
from magnum import objects
from magnum.tests import base
from magnum.tests.unit.db import utils
from magnum.tests.unit.objects import utils as obj_utils
class MonitorsTestCase(base.TestCase):
test_metrics_spec = {
'metric1': {
'unit': 'metric1_unit',
'func': 'metric1_func',
},
'metric2': {
'unit': 'metric2_unit',
'func': 'metric2_func',
},
}
def setUp(self):
super(MonitorsTestCase, self).setUp()
bay = utils.get_test_bay(node_addresses=['1.2.3.4'],
api_address='https://5.6.7.8:2376',
master_addresses=['10.0.0.6'])
self.bay = objects.Bay(self.context, **bay)
self.monitor = swarm_monitor.SwarmMonitor(self.context, self.bay)
self.k8s_monitor = k8s_monitor.K8sMonitor(self.context, self.bay)
self.mesos_monitor = mesos_monitor.MesosMonitor(self.context,
self.bay)
p = mock.patch('magnum.conductor.swarm_monitor.SwarmMonitor.'
'metrics_spec', new_callable=mock.PropertyMock)
self.mock_metrics_spec = p.start()
self.mock_metrics_spec.return_value = self.test_metrics_spec
self.addCleanup(p.stop)
def test_create_monitor_success(self):
self.bay.baymodel = obj_utils.get_test_baymodel(
self.context, uuid=self.bay.baymodel_id, coe='swarm')
monitor = monitors.create_monitor(self.context, self.bay)
self.assertIsInstance(monitor, swarm_monitor.SwarmMonitor)
def test_create_monitor_k8s_bay(self):
self.bay.baymodel = obj_utils.get_test_baymodel(
self.context, uuid=self.bay.baymodel_id, coe='kubernetes')
monitor = monitors.create_monitor(self.context, self.bay)
self.assertIsInstance(monitor, k8s_monitor.K8sMonitor)
def test_create_monitor_mesos_bay(self):
self.bay.baymodel = obj_utils.get_test_baymodel(
self.context, uuid=self.bay.baymodel_id, coe='mesos')
monitor = monitors.create_monitor(self.context, self.bay)
self.assertIsInstance(monitor, mesos_monitor.MesosMonitor)
@mock.patch('magnum.common.docker_utils.docker_for_bay')
def test_swarm_monitor_pull_data_success(self, mock_docker_for_bay):
mock_docker = mock.MagicMock()
mock_docker.info.return_value = {'DriverStatus': [[
u' \u2514 Reserved Memory', u'0 B / 1 GiB']]}
mock_docker.containers.return_value = [mock.MagicMock()]
mock_docker.inspect_container.return_value = 'test_container'
mock_docker_for_bay.return_value.__enter__.return_value = mock_docker
self.monitor.pull_data()
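        # The Docker DriverStatus entry '0 B / 1 GiB' is parsed into bytes:
        # 1 GiB == 1073741824.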
self.assertEqual([{'MemTotal': 1073741824.0}],
self.monitor.data['nodes'])
self.assertEqual(['test_container'], self.monitor.data['containers'])
@mock.patch('magnum.common.docker_utils.docker_for_bay')
def test_swarm_monitor_pull_data_raise(self, mock_docker_for_bay):
mock_container = mock.MagicMock()
mock_docker = mock.MagicMock()
mock_docker.info.return_value = {'DriverStatus': [[
u' \u2514 Reserved Memory', u'0 B / 1 GiB']]}
mock_docker.containers.return_value = [mock_container]
mock_docker.inspect_container.side_effect = Exception("inspect error")
mock_docker_for_bay.return_value.__enter__.return_value = mock_docker
self.monitor.pull_data()
self.assertEqual([{'MemTotal': 1073741824.0}],
self.monitor.data['nodes'])
self.assertEqual([mock_container], self.monitor.data['containers'])
def test_swarm_monitor_get_metric_names(self):
names = self.monitor.get_metric_names()
self.assertEqual(sorted(['metric1', 'metric2']), sorted(names))
def test_swarm_monitor_get_metric_unit(self):
unit = self.monitor.get_metric_unit('metric1')
self.assertEqual('metric1_unit', unit)
def test_swarm_monitor_compute_metric_value(self):
mock_func = mock.MagicMock()
mock_func.return_value = 'metric1_value'
self.monitor.metric1_func = mock_func
value = self.monitor.compute_metric_value('metric1')
self.assertEqual('metric1_value', value)
def test_swarm_monitor_compute_memory_util(self):
test_data = {
'nodes': [
{
'Name': 'node',
'MemTotal': 20,
},
],
'containers': [
{
'Name': 'container',
'HostConfig': {
'Memory': 10,
},
},
],
}
self.monitor.data = test_data
mem_util = self.monitor.compute_memory_util()
self.assertEqual(50, mem_util)
test_data = {
'nodes': [],
'containers': [],
}
self.monitor.data = test_data
mem_util = self.monitor.compute_memory_util()
self.assertEqual(0, mem_util)
@mock.patch('magnum.conductor.k8s_api.create_k8s_api')
def test_k8s_monitor_pull_data_success(self, mock_k8s_api):
mock_nodes = mock.MagicMock()
mock_node = mock.MagicMock()
mock_node.status = mock.MagicMock()
mock_node.status.capacity = "{'memory': '2000Ki', 'cpu': '1'}"
mock_nodes.items = [mock_node]
mock_k8s_api.return_value.list_namespaced_node.return_value = (
mock_nodes)
mock_pods = mock.MagicMock()
mock_pod = mock.MagicMock()
mock_pod.spec = mock.MagicMock()
mock_container = mock.MagicMock()
mock_container.resources = mock.MagicMock()
mock_container.resources.limits = "{'memory': '100Mi', 'cpu': '500m'}"
mock_pod.spec.containers = [mock_container]
mock_pods.items = [mock_pod]
mock_k8s_api.return_value.list_namespaced_pod.return_value = mock_pods
self.k8s_monitor.pull_data()
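        # The capacity/limit strings are normalised: 2000Ki -> 2048000 bytes,
        # 100Mi -> 104857600 bytes, and '500m' CPU -> 0.5 cores.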
self.assertEqual(self.k8s_monitor.data['nodes'],
[{'Memory': 2048000.0, 'Cpu': 1}])
self.assertEqual(self.k8s_monitor.data['pods'],
[{'Memory': 104857600.0, 'Cpu': 0.5}])
def test_k8s_monitor_get_metric_names(self):
k8s_metric_spec = 'magnum.conductor.k8s_monitor.K8sMonitor.'\
'metrics_spec'
with mock.patch(k8s_metric_spec,
new_callable=mock.PropertyMock) as mock_k8s_metric:
mock_k8s_metric.return_value = self.test_metrics_spec
names = self.k8s_monitor.get_metric_names()
self.assertEqual(sorted(['metric1', 'metric2']), sorted(names))
def test_k8s_monitor_get_metric_unit(self):
k8s_metric_spec = 'magnum.conductor.k8s_monitor.K8sMonitor.' \
'metrics_spec'
with mock.patch(k8s_metric_spec,
new_callable=mock.PropertyMock) as mock_k8s_metric:
mock_k8s_metric.return_value = self.test_metrics_spec
unit = self.k8s_monitor.get_metric_unit('metric1')
self.assertEqual('metric1_unit', unit)
def test_k8s_monitor_compute_memory_util(self):
test_data = {
'nodes': [
{
'Memory': 20,
},
],
'pods': [
{
'Memory': 10,
},
],
}
self.k8s_monitor.data = test_data
mem_util = self.k8s_monitor.compute_memory_util()
self.assertEqual(50, mem_util)
test_data = {
'nodes': [],
'pods': [],
}
self.k8s_monitor.data = test_data
mem_util = self.k8s_monitor.compute_memory_util()
self.assertEqual(0, mem_util)
def test_k8s_monitor_compute_cpu_util(self):
test_data = {
'nodes': [
{
'Cpu': 1,
},
],
'pods': [
{
'Cpu': 0.5,
},
],
}
self.k8s_monitor.data = test_data
cpu_util = self.k8s_monitor.compute_cpu_util()
self.assertEqual(50, cpu_util)
test_data = {
'nodes': [],
'pods': [],
}
self.k8s_monitor.data = test_data
cpu_util = self.k8s_monitor.compute_cpu_util()
self.assertEqual(0, cpu_util)
def _test_mesos_monitor_pull_data(
self, mock_url_get, state_json, expected_mem_total,
expected_mem_used, expected_cpu_total, expected_cpu_used):
state_json = jsonutils.dumps(state_json)
mock_url_get.return_value = state_json
self.mesos_monitor.pull_data()
self.assertEqual(self.mesos_monitor.data['mem_total'],
expected_mem_total)
self.assertEqual(self.mesos_monitor.data['mem_used'],
expected_mem_used)
self.assertEqual(self.mesos_monitor.data['cpu_total'],
expected_cpu_total)
self.assertEqual(self.mesos_monitor.data['cpu_used'],
expected_cpu_used)
@mock.patch('magnum.common.urlfetch.get')
def test_mesos_monitor_pull_data_success(self, mock_url_get):
state_json = {
'leader': 'master@10.0.0.6:5050',
'pid': 'master@10.0.0.6:5050',
'slaves': [{
'resources': {
'mem': 100,
'cpus': 1,
},
'used_resources': {
'mem': 50,
'cpus': 0.2,
}
}]
}
self._test_mesos_monitor_pull_data(mock_url_get, state_json,
100, 50, 1, 0.2)
@mock.patch('magnum.common.urlfetch.get')
def test_mesos_monitor_pull_data_success_not_leader(self, mock_url_get):
state_json = {
'leader': 'master@10.0.0.6:5050',
'pid': 'master@1.1.1.1:5050',
'slaves': []
}
self._test_mesos_monitor_pull_data(mock_url_get, state_json,
0, 0, 0, 0)
@mock.patch('magnum.common.urlfetch.get')
def test_mesos_monitor_pull_data_success_no_master(self, mock_url_get):
self.bay.master_addresses = []
self._test_mesos_monitor_pull_data(mock_url_get, {}, 0, 0, 0, 0)
def test_mesos_monitor_get_metric_names(self):
mesos_metric_spec = 'magnum.conductor.mesos_monitor.MesosMonitor.'\
'metrics_spec'
with mock.patch(mesos_metric_spec,
new_callable=mock.PropertyMock) as mock_mesos_metric:
mock_mesos_metric.return_value = self.test_metrics_spec
names = self.mesos_monitor.get_metric_names()
self.assertEqual(sorted(['metric1', 'metric2']), sorted(names))
def test_mesos_monitor_get_metric_unit(self):
mesos_metric_spec = 'magnum.conductor.mesos_monitor.MesosMonitor.' \
'metrics_spec'
with mock.patch(mesos_metric_spec,
new_callable=mock.PropertyMock) as mock_mesos_metric:
mock_mesos_metric.return_value = self.test_metrics_spec
unit = self.mesos_monitor.get_metric_unit('metric1')
self.assertEqual('metric1_unit', unit)
def test_mesos_monitor_compute_memory_util(self):
test_data = {
'mem_total': 100,
'mem_used': 50
}
self.mesos_monitor.data = test_data
mem_util = self.mesos_monitor.compute_memory_util()
self.assertEqual(50, mem_util)
test_data = {
'mem_total': 0,
'pods': 0,
}
self.mesos_monitor.data = test_data
mem_util = self.mesos_monitor.compute_memory_util()
self.assertEqual(0, mem_util)
test_data = {
'mem_total': 100,
'mem_used': 0,
'pods': 0,
}
self.mesos_monitor.data = test_data
mem_util = self.mesos_monitor.compute_memory_util()
self.assertEqual(0, mem_util)
def test_mesos_monitor_compute_cpu_util(self):
test_data = {
'cpu_total': 1,
'cpu_used': 0.2,
}
self.mesos_monitor.data = test_data
cpu_util = self.mesos_monitor.compute_cpu_util()
self.assertEqual(20, cpu_util)
test_data = {
'cpu_total': 100,
'cpu_used': 0,
}
self.mesos_monitor.data = test_data
cpu_util = self.mesos_monitor.compute_cpu_util()
self.assertEqual(0, cpu_util)
|
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Applies an issue from Rietveld.
"""
import getpass
import json
import logging
import optparse
import os
import subprocess
import sys
import urllib2
import annotated_gclient
import auth
import checkout
import fix_encoding
import gclient_utils
import rietveld
import scm
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
RETURN_CODE_OK = 0
RETURN_CODE_OTHER_FAILURE = 1 # any other failure, likely patch apply one.
RETURN_CODE_ARGPARSE_FAILURE = 2 # default in python.
RETURN_CODE_INFRA_FAILURE = 3 # considered as infra failure.
class Unbuffered(object):
"""Disable buffering on a file object."""
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
def _get_arg_parser():
parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
parser.add_option(
'-v', '--verbose', action='count', default=0,
help='Prints debugging infos')
parser.add_option(
'-e', '--email',
help='Email address to access rietveld. If not specified, anonymous '
'access will be used.')
parser.add_option(
'-E', '--email-file',
help='File containing the email address to access rietveld. '
'If not specified, anonymous access will be used.')
parser.add_option(
'-k', '--private-key-file',
help='Path to file containing a private key in p12 format for OAuth2 '
'authentication with "notasecret" password (as generated by Google '
'Cloud Console).')
parser.add_option(
'-i', '--issue', type='int', help='Rietveld issue number')
parser.add_option(
'-p', '--patchset', type='int', help='Rietveld issue\'s patchset number')
parser.add_option(
'-r',
'--root_dir',
default=os.getcwd(),
help='Root directory to apply the patch')
parser.add_option(
'-s',
'--server',
default='http://codereview.chromium.org',
help='Rietveld server')
parser.add_option('--no-auth', action='store_true',
help='Do not attempt authenticated requests.')
parser.add_option('--revision-mapping', default='{}',
help='When running gclient, annotate the got_revisions '
'using the revision-mapping.')
parser.add_option('-f', '--force', action='store_true',
help='Really run apply_issue, even if .update.flag '
'is detected.')
parser.add_option('-b', '--base_ref', help='DEPRECATED do not use.')
parser.add_option('--whitelist', action='append', default=[],
help='Patch only specified file(s).')
parser.add_option('--blacklist', action='append', default=[],
help='Don\'t patch specified file(s).')
parser.add_option('-d', '--ignore_deps', action='store_true',
help='Don\'t run gclient sync on DEPS changes.')
auth.add_auth_options(parser)
return parser
def main():
# TODO(pgervais,tandrii): split this func, it's still too long.
sys.stdout = Unbuffered(sys.stdout)
parser = _get_arg_parser()
options, args = parser.parse_args()
auth_config = auth.extract_auth_config_from_options(options)
if options.whitelist and options.blacklist:
parser.error('Cannot specify both --whitelist and --blacklist')
if options.email and options.email_file:
parser.error('-e and -E options are incompatible')
if (os.path.isfile(os.path.join(os.getcwd(), 'update.flag'))
and not options.force):
print 'update.flag file found: bot_update has run and checkout is already '
print 'in a consistent state. No actions will be performed in this step.'
return 0
logging.basicConfig(
format='%(levelname)5s %(module)11s(%(lineno)4d): %(message)s',
level=[logging.WARNING, logging.INFO, logging.DEBUG][
min(2, options.verbose)])
if args:
parser.error('Extra argument(s) "%s" not understood' % ' '.join(args))
if not options.issue:
parser.error('Require --issue')
options.server = options.server.rstrip('/')
if not options.server:
parser.error('Require a valid server')
options.revision_mapping = json.loads(options.revision_mapping)
# read email if needed
if options.email_file:
if not os.path.exists(options.email_file):
parser.error('file does not exist: %s' % options.email_file)
with open(options.email_file, 'rb') as f:
options.email = f.read().strip()
print('Connecting to %s' % options.server)
# Always try un-authenticated first, except for OAuth2
if options.private_key_file:
# OAuth2 authentication
rietveld_obj = rietveld.JwtOAuth2Rietveld(options.server,
options.email,
options.private_key_file)
try:
properties = rietveld_obj.get_issue_properties(options.issue, False)
except urllib2.URLError:
logging.exception('failed to fetch issue properties')
sys.exit(RETURN_CODE_INFRA_FAILURE)
else:
# Passing None as auth_config disables authentication.
rietveld_obj = rietveld.Rietveld(options.server, None)
properties = None
# Bad except clauses order (HTTPError is an ancestor class of
# ClientLoginError)
# pylint: disable=E0701
try:
properties = rietveld_obj.get_issue_properties(options.issue, False)
except urllib2.HTTPError as e:
if e.getcode() != 302:
raise
if options.no_auth:
exit('FAIL: Login detected -- is issue private?')
# TODO(maruel): A few 'Invalid username or password.' are printed first,
# we should get rid of those.
except urllib2.URLError:
logging.exception('failed to fetch issue properties')
return RETURN_CODE_INFRA_FAILURE
except rietveld.upload.ClientLoginError as e:
# Fine, we'll do proper authentication.
pass
if properties is None:
rietveld_obj = rietveld.Rietveld(options.server, auth_config,
options.email)
try:
properties = rietveld_obj.get_issue_properties(options.issue, False)
except rietveld.upload.ClientLoginError as e:
print('Accessing the issue requires proper credentials.')
return RETURN_CODE_OTHER_FAILURE
except urllib2.URLError:
logging.exception('failed to fetch issue properties')
return RETURN_CODE_INFRA_FAILURE
if not options.patchset:
options.patchset = properties['patchsets'][-1]
print('No patchset specified. Using patchset %d' % options.patchset)
issues_patchsets_to_apply = [(options.issue, options.patchset)]
try:
depends_on_info = rietveld_obj.get_depends_on_patchset(
options.issue, options.patchset)
except urllib2.URLError:
logging.exception('failed to fetch depends_on_patchset')
return RETURN_CODE_INFRA_FAILURE
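  # Walk the chain of dependent patchsets (child -> parent), prepending each
  # parent so issues_patchsets_to_apply ends up ordered root-first.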
while depends_on_info:
depends_on_issue = int(depends_on_info['issue'])
depends_on_patchset = int(depends_on_info['patchset'])
try:
depends_on_info = rietveld_obj.get_depends_on_patchset(depends_on_issue,
depends_on_patchset)
issues_patchsets_to_apply.insert(0, (depends_on_issue,
depends_on_patchset))
except urllib2.HTTPError:
print ('The patchset that was marked as a dependency no longer '
'exists: %s/%d/#ps%d' % (
options.server, depends_on_issue, depends_on_patchset))
print 'Therefore it is likely that this patch will not apply cleanly.'
print
depends_on_info = None
except urllib2.URLError:
logging.exception('failed to fetch dependency issue')
return RETURN_CODE_INFRA_FAILURE
num_issues_patchsets_to_apply = len(issues_patchsets_to_apply)
if num_issues_patchsets_to_apply > 1:
print
print 'apply_issue.py found %d dependent CLs.' % (
num_issues_patchsets_to_apply - 1)
print 'They will be applied in the following order:'
num = 1
for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
print ' #%d %s/%d/#ps%d' % (
num, options.server, issue_to_apply, patchset_to_apply)
num += 1
print
for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
issue_url = '%s/%d/#ps%d' % (options.server, issue_to_apply,
patchset_to_apply)
print('Downloading patch from %s' % issue_url)
try:
patchset = rietveld_obj.get_patch(issue_to_apply, patchset_to_apply)
except urllib2.HTTPError:
      print(
          'Failed to fetch the patch for issue %d, patchset %d.\n'
          'Try visiting %s/%d' % (
              issue_to_apply, patchset_to_apply,
              options.server, issue_to_apply))
# If we got this far, then this is likely missing patchset.
# Thus, it's not infra failure.
return RETURN_CODE_OTHER_FAILURE
except urllib2.URLError:
logging.exception(
'Failed to fetch the patch for issue %d, patchset %d',
issue_to_apply, patchset_to_apply)
return RETURN_CODE_INFRA_FAILURE
if options.whitelist:
patchset.patches = [patch for patch in patchset.patches
if patch.filename in options.whitelist]
if options.blacklist:
patchset.patches = [patch for patch in patchset.patches
if patch.filename not in options.blacklist]
for patch in patchset.patches:
print(patch)
full_dir = os.path.abspath(options.root_dir)
scm_type = scm.determine_scm(full_dir)
if scm_type == 'svn':
scm_obj = checkout.SvnCheckout(full_dir, None, None, None, None)
elif scm_type == 'git':
scm_obj = checkout.GitCheckout(full_dir, None, None, None, None)
  elif scm_type is None:
scm_obj = checkout.RawCheckout(full_dir, None, None)
else:
parser.error('Couldn\'t determine the scm')
# TODO(maruel): HACK, remove me.
# When run a build slave, make sure buildbot knows that the checkout was
# modified.
if options.root_dir == 'src' and getpass.getuser() == 'chrome-bot':
# See sourcedirIsPatched() in:
# http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/
# chromium_commands.py?view=markup
open('.buildbot-patched', 'w').close()
print('\nApplying the patch from %s' % issue_url)
try:
scm_obj.apply_patch(patchset, verbose=True)
except checkout.PatchApplicationFailed as e:
print(str(e))
print('CWD=%s' % os.getcwd())
print('Checkout path=%s' % scm_obj.project_path)
return RETURN_CODE_OTHER_FAILURE
if ('DEPS' in map(os.path.basename, patchset.filenames)
and not options.ignore_deps):
gclient_root = gclient_utils.FindGclientRoot(full_dir)
if gclient_root and scm_type:
print(
'A DEPS file was updated inside a gclient checkout, running gclient '
'sync.')
gclient_path = os.path.join(BASE_DIR, 'gclient')
if sys.platform == 'win32':
gclient_path += '.bat'
with annotated_gclient.temp_filename(suffix='gclient') as f:
cmd = [
gclient_path, 'sync',
'--nohooks',
'--delete_unversioned_trees',
]
if scm_type == 'svn':
cmd.extend(['--revision', 'BASE'])
if options.revision_mapping:
cmd.extend(['--output-json', f])
retcode = subprocess.call(cmd, cwd=gclient_root)
if retcode == 0 and options.revision_mapping:
revisions = annotated_gclient.parse_got_revision(
f, options.revision_mapping)
annotated_gclient.emit_buildprops(revisions)
return retcode
return RETURN_CODE_OK
if __name__ == "__main__":
fix_encoding.fix_encoding()
try:
sys.exit(main())
except KeyboardInterrupt:
sys.stderr.write('interrupted\n')
sys.exit(RETURN_CODE_OTHER_FAILURE)
|
|
# NOTE - Still seems to be a leak here somewhere
# gateway count doesn't hit zero. Hence the print statements!
import sys; sys.coinit_flags=0 # Must be free-threaded!
import win32api, pythoncom, time
import pywintypes
import os
import winerror
import win32com
import win32com.client.connect
from win32com.test.util import CheckClean
from win32com.client import constants, DispatchBaseClass, CastTo, VARIANT
from win32com.test.util import RegisterPythonServer
from pywin32_testutil import str2memory
import datetime
import decimal
import win32timezone
importMsg = "**** PyCOMTest is not installed ***\n PyCOMTest is a Python test specific COM client and server.\n It is likely this server is not installed on this machine\n To install the server, you must get the win32com sources\n and build it using MS Visual C++"
error = Exception
# This test uses a Python implemented COM server - ensure correctly registered.
RegisterPythonServer(os.path.join(os.path.dirname(__file__), '..', "servers", "test_pycomtest.py"),
"Python.Test.PyCOMTest")
from win32com.client import gencache
try:
gencache.EnsureModule('{6BCDCB60-5605-11D0-AE5F-CADD4C000000}', 0, 1, 1)
except pythoncom.com_error:
print("The PyCOMTest module can not be located or generated.")
print(importMsg)
raise RuntimeError(importMsg)
# We had a bug where RegisterInterfaces would fail if gencache had
# already been run - exercise that here
from win32com import universal
universal.RegisterInterfaces('{6BCDCB60-5605-11D0-AE5F-CADD4C000000}', 0, 1, 1)
verbose = 0
# convert a normal int to a long int - used to avoid, eg, '1L' for py3k
# friendliness
def ensure_long(int_val):
if sys.version_info > (3,):
# py3k - no such thing as a 'long'
return int_val
# on py2x, we just use an expression that results in a long
return 0x100000000-0x100000000+int_val
def check_get_set(func, arg):
got = func(arg)
if got != arg:
raise error("%s failed - expected %r, got %r" % (func, arg, got))
def check_get_set_raises(exc, func, arg):
try:
got = func(arg)
except exc as e:
pass # what we expect!
else:
raise error("%s with arg %r didn't raise %s - returned %r" % (func, arg, exc, got))
def progress(*args):
if verbose:
for arg in args:
print(arg, end=' ')
print()
def TestApplyResult(fn, args, result):
try:
fnName = str(fn).split()[1]
except:
fnName = str(fn)
progress("Testing ", fnName)
pref = "function " + fnName
rc = fn(*args)
if rc != result:
raise error("%s failed - result not %r but %r" % (pref, result, rc))
def TestConstant(constName, pyConst):
try:
comConst = getattr(constants, constName)
except:
raise error("Constant %s missing" % (constName,))
if comConst != pyConst:
raise error("Constant value wrong for %s - got %s, wanted %s" % (constName, comConst, pyConst))
# Simple handler class. This demo only fires one event.
class RandomEventHandler:
def _Init(self):
self.fireds = {}
def OnFire(self, no):
try:
self.fireds[no] = self.fireds[no] + 1
except KeyError:
self.fireds[no] = 0
def OnFireWithNamedParams(self, no, a_bool, out1, out2):
# This test exists mainly to help with an old bug, where named
# params would come in reverse.
Missing = pythoncom.Missing
if no is not Missing:
# We know our impl called 'OnFire' with the same ID
assert no in self.fireds
assert no+1==out1, "expecting 'out1' param to be ID+1"
assert no+2==out2, "expecting 'out2' param to be ID+2"
# The middle must be a boolean.
assert a_bool is Missing or type(a_bool)==bool, "middle param not a bool"
return out1+2, out2+2
def _DumpFireds(self):
if not self.fireds:
print("ERROR: Nothing was received!")
for firedId, no in self.fireds.items():
progress("ID %d fired %d times" % (firedId, no))
# A simple handler class that derives from object (ie, a "new style class") -
# only relevant for Python 2.x (ie, the 2 classes should be identical in 3.x)
class NewStyleRandomEventHandler(object):
def _Init(self):
self.fireds = {}
def OnFire(self, no):
try:
self.fireds[no] = self.fireds[no] + 1
except KeyError:
self.fireds[no] = 0
def OnFireWithNamedParams(self, no, a_bool, out1, out2):
# This test exists mainly to help with an old bug, where named
# params would come in reverse.
Missing = pythoncom.Missing
if no is not Missing:
# We know our impl called 'OnFire' with the same ID
assert no in self.fireds
assert no+1==out1, "expecting 'out1' param to be ID+1"
assert no+2==out2, "expecting 'out2' param to be ID+2"
# The middle must be a boolean.
assert a_bool is Missing or type(a_bool)==bool, "middle param not a bool"
return out1+2, out2+2
def _DumpFireds(self):
if not self.fireds:
print("ERROR: Nothing was received!")
for firedId, no in self.fireds.items():
progress("ID %d fired %d times" % (firedId, no))
# Test everything which can be tested using both the "dynamic" and "generated"
# COM objects (or when there are very subtle differences)
def TestCommon(o, is_generated):
progress("Getting counter")
counter = o.GetSimpleCounter()
TestCounter(counter, is_generated)
progress("Checking default args")
rc = o.TestOptionals()
if rc[:-1] != ("def", 0, 1) or abs(rc[-1]-3.14)>.01:
print(rc)
raise error("Did not get the optional values correctly")
rc = o.TestOptionals("Hi", 2, 3, 1.1)
if rc[:-1] != ("Hi", 2, 3) or abs(rc[-1]-1.1)>.01:
print(rc)
raise error("Did not get the specified optional values correctly")
rc = o.TestOptionals2(0)
if rc != (0, "", 1):
print(rc)
raise error("Did not get the optional2 values correctly")
rc = o.TestOptionals2(1.1, "Hi", 2)
if rc[1:] != ("Hi", 2) or abs(rc[0]-1.1)>.01:
print(rc)
raise error("Did not get the specified optional2 values correctly")
progress("Checking getting/passing IUnknown")
check_get_set(o.GetSetUnknown, o)
progress("Checking getting/passing IDispatch")
if not isinstance(o.GetSetDispatch(o), o.__class__):
raise error("GetSetDispatch failed: %r" % (o.GetSetDispatch(o),))
progress("Checking getting/passing IDispatch of known type")
if o.GetSetInterface(o).__class__ != o.__class__:
raise error("GetSetDispatch failed")
progress("Checking misc args")
check_get_set(o.GetSetVariant, 4)
check_get_set(o.GetSetVariant, "foo")
check_get_set(o.GetSetVariant, o)
# signed/unsigned.
check_get_set(o.GetSetInt, 0)
check_get_set(o.GetSetInt, -1)
check_get_set(o.GetSetInt, 1)
check_get_set(o.GetSetUnsignedInt, 0)
check_get_set(o.GetSetUnsignedInt, 1)
check_get_set(o.GetSetUnsignedInt, 0x80000000)
if o.GetSetUnsignedInt(-1) != 0xFFFFFFFF:
# -1 is a special case - we accept a negative int (silently converting to
# unsigned) but when getting it back we convert it to a long.
raise error("unsigned -1 failed")
check_get_set(o.GetSetLong, 0)
check_get_set(o.GetSetLong, -1)
check_get_set(o.GetSetLong, 1)
check_get_set(o.GetSetUnsignedLong, 0)
check_get_set(o.GetSetUnsignedLong, 1)
check_get_set(o.GetSetUnsignedLong, 0x80000000)
# -1 is a special case - see above.
if o.GetSetUnsignedLong(-1) != 0xFFFFFFFF:
raise error("unsigned -1 failed")
# We want to explicitly test > 32 bits. py3k has no 'maxint' and
# 'maxsize+1' is no good on 64bit platforms as it's 65 bits!
big = 2147483647 # sys.maxint on py2k
for l in big, big+1, 1 << 65:
check_get_set(o.GetSetVariant, l)
progress("Checking structs")
r = o.GetStruct()
assert r.int_value == 99 and str(r.str_value)=="Hello from C++"
assert o.DoubleString("foo") == "foofoo"
progress("Checking var args")
o.SetVarArgs("Hi", "There", "From", "Python", 1)
if o.GetLastVarArgs() != ("Hi", "There", "From", "Python", 1):
raise error("VarArgs failed -" + str(o.GetLastVarArgs()))
progress("Checking arrays")
l=[]
TestApplyResult(o.SetVariantSafeArray, (l,), len(l))
l=[1,2,3,4]
TestApplyResult(o.SetVariantSafeArray, (l,), len(l))
TestApplyResult(o.CheckVariantSafeArray, ((1,2,3,4,),), 1)
# and binary
TestApplyResult(o.SetBinSafeArray, (str2memory('foo\0bar'),), 7)
progress("Checking properties")
o.LongProp = 3
if o.LongProp != 3 or o.IntProp != 3:
raise error("Property value wrong - got %d/%d" % (o.LongProp,o.IntProp))
o.LongProp = o.IntProp = -3
if o.LongProp != -3 or o.IntProp != -3:
raise error("Property value wrong - got %d/%d" % (o.LongProp,o.IntProp))
# This number fits in an unsigned long. Attempting to set it to a normal
# long will involve overflow, which is to be expected. But we do
# expect it to work in a property explicitly a VT_UI4.
check = 3 *10 **9
o.ULongProp = check
if o.ULongProp != check:
raise error("Property value wrong - got %d (expected %d)" % (o.ULongProp, check))
TestApplyResult(o.Test, ("Unused", 99), 1) # A bool function
TestApplyResult(o.Test, ("Unused", -1), 1) # A bool function
TestApplyResult(o.Test, ("Unused", 1==1), 1) # A bool function
TestApplyResult(o.Test, ("Unused", 0), 0)
TestApplyResult(o.Test, ("Unused", 1==0), 0)
assert o.DoubleString("foo") == "foofoo"
TestConstant("ULongTest1", ensure_long(0xFFFFFFFF))
TestConstant("ULongTest2", ensure_long(0x7FFFFFFF))
TestConstant("LongTest1", ensure_long(-0x7FFFFFFF))
TestConstant("LongTest2", ensure_long(0x7FFFFFFF))
TestConstant("UCharTest", 255)
TestConstant("CharTest", -1)
# 'Hello Loraine', but the 'r' is the "Registered" sign (\xae)
TestConstant("StringTest", "Hello Lo\xaeaine")
progress("Checking dates and times")
if issubclass(pywintypes.TimeType, datetime.datetime):
# For now *all* times passed must be tz-aware.
now = win32timezone.now()
# but conversion to and from a VARIANT loses sub-second...
now = now.replace(microsecond=0)
later = now + datetime.timedelta(seconds=1)
TestApplyResult(o.EarliestDate, (now, later), now)
else:
# old PyTime object
now = pythoncom.MakeTime(time.gmtime(time.time()))
later = pythoncom.MakeTime(time.gmtime(time.time()+1))
TestApplyResult(o.EarliestDate, (now, later), now)
# But it can still *accept* tz-naive datetime objects...
now = datetime.datetime.now()
expect = pythoncom.MakeTime(now)
TestApplyResult(o.EarliestDate, (now, now), expect)
progress("Checking currency")
# currency.
pythoncom.__future_currency__ = 1
if o.CurrencyProp != 0:
raise error("Expecting 0, got %r" % (o.CurrencyProp,))
for val in ("1234.5678", "1234.56", "1234"):
o.CurrencyProp = decimal.Decimal(val)
if o.CurrencyProp != decimal.Decimal(val):
raise error("%s got %r" % (val, o.CurrencyProp))
v1 = decimal.Decimal("1234.5678")
TestApplyResult(o.DoubleCurrency, (v1,), v1*2)
v2 = decimal.Decimal("9012.3456")
TestApplyResult(o.AddCurrencies, (v1, v2), v1+v2)
TestTrickyTypesWithVariants(o, is_generated)
progress("Checking win32com.client.VARIANT")
TestPyVariant(o, is_generated)
def TestTrickyTypesWithVariants(o, is_generated):
# Test tricky stuff with type handling and generally only works with
# "generated" support but can be worked around using VARIANT.
if is_generated:
got = o.TestByRefVariant(2)
else:
v = VARIANT(pythoncom.VT_BYREF | pythoncom.VT_VARIANT, 2)
o.TestByRefVariant(v)
got = v.value
if got != 4:
raise error("TestByRefVariant failed")
if is_generated:
got = o.TestByRefString("Foo")
else:
v = VARIANT(pythoncom.VT_BYREF | pythoncom.VT_BSTR, "Foo")
o.TestByRefString(v)
got = v.value
if got != "FooFoo":
raise error("TestByRefString failed")
# check we can pass ints as a VT_UI1
vals=[1,2,3,4]
if is_generated:
arg = vals
else:
arg = VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_UI1, vals)
TestApplyResult(o.SetBinSafeArray, (arg,), len(vals))
# safearrays of doubles and floats
vals = [0, 1.1, 2.2, 3.3]
if is_generated:
arg = vals
else:
arg = VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_R8, vals)
TestApplyResult(o.SetDoubleSafeArray, (arg,), len(vals))
if is_generated:
arg = vals
else:
arg = VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_R4, vals)
TestApplyResult(o.SetFloatSafeArray, (arg,), len(vals))
vals=[1.1, 2.2, 3.3, 4.4]
expected = (1.1*2, 2.2*2, 3.3*2, 4.4*2)
if is_generated:
TestApplyResult(o.ChangeDoubleSafeArray, (vals,), expected)
else:
arg = VARIANT(pythoncom.VT_BYREF | pythoncom.VT_ARRAY | pythoncom.VT_R8, vals)
o.ChangeDoubleSafeArray(arg)
if arg.value != expected:
raise error("ChangeDoubleSafeArray got the wrong value")
if is_generated:
got = o.DoubleInOutString("foo")
else:
v = VARIANT(pythoncom.VT_BYREF | pythoncom.VT_BSTR, "foo")
o.DoubleInOutString(v)
got = v.value
assert got == "foofoo", got
val = decimal.Decimal("1234.5678")
if is_generated:
got = o.DoubleCurrencyByVal(val)
else:
v = VARIANT(pythoncom.VT_BYREF | pythoncom.VT_CY, val)
o.DoubleCurrencyByVal(v)
got = v.value
assert got == val * 2
def TestDynamic():
progress("Testing Dynamic")
import win32com.client.dynamic
o = win32com.client.dynamic.DumbDispatch("PyCOMTest.PyCOMTest")
TestCommon(o, False)
counter = win32com.client.dynamic.DumbDispatch("PyCOMTest.SimpleCounter")
TestCounter(counter, False)
# Dynamic doesn't know this should be an int, so we get a COM
# TypeMismatch error.
try:
check_get_set_raises(ValueError, o.GetSetInt, "foo")
raise error("no exception raised")
except pythoncom.com_error as exc:
if exc.hresult != winerror.DISP_E_TYPEMISMATCH:
raise
# damn - props with params don't work for dynamic objects :(
# o.SetParamProp(0, 1)
# if o.ParamProp(0) != 1:
# raise RuntimeError, o.paramProp(0)
def TestGenerated():
# Create an instance of the server.
from win32com.client.gencache import EnsureDispatch
o = EnsureDispatch("PyCOMTest.PyCOMTest")
TestCommon(o, True)
counter = EnsureDispatch("PyCOMTest.SimpleCounter")
TestCounter(counter, True)
# XXX - this is failing in dynamic tests, but should work fine.
i1, i2 = o.GetMultipleInterfaces()
if not isinstance(i1, DispatchBaseClass) or not isinstance(i2, DispatchBaseClass):
# Yay - an instance is now returned!
raise error("GetMultipleInterfaces did not return instances - got '%s', '%s'" % (i1, i2))
del i1
del i2
# Generated knows to only pass a 32bit int, so should fail.
check_get_set_raises(OverflowError, o.GetSetInt, 0x80000000)
check_get_set_raises(OverflowError, o.GetSetLong, 0x80000000)
# Generated knows this should be an int, so raises ValueError
check_get_set_raises(ValueError, o.GetSetInt, "foo")
check_get_set_raises(ValueError, o.GetSetLong, "foo")
# Pass some non-sequence objects to our array decoder, and watch it fail.
try:
o.SetVariantSafeArray("foo")
raise error("Expected a type error")
except TypeError:
pass
try:
o.SetVariantSafeArray(666)
raise error("Expected a type error")
except TypeError:
pass
o.GetSimpleSafeArray(None)
TestApplyResult(o.GetSimpleSafeArray, (None,), tuple(range(10)))
resultCheck = tuple(range(5)), tuple(range(10)), tuple(range(20))
TestApplyResult(o.GetSafeArrays, (None, None, None), resultCheck)
l=[]
TestApplyResult(o.SetIntSafeArray, (l,), len(l))
l=[1,2,3,4]
TestApplyResult(o.SetIntSafeArray, (l,), len(l))
ll=[1,2,3,0x100000000]
TestApplyResult(o.SetLongLongSafeArray, (ll,), len(ll))
TestApplyResult(o.SetULongLongSafeArray, (ll,), len(ll))
# Tell the server to do what it does!
TestApplyResult(o.Test2, (constants.Attr2,), constants.Attr2)
TestApplyResult(o.Test3, (constants.Attr2,), constants.Attr2)
TestApplyResult(o.Test4, (constants.Attr2,), constants.Attr2)
TestApplyResult(o.Test5, (constants.Attr2,), constants.Attr2)
TestApplyResult(o.Test6, (constants.WideAttr1,), constants.WideAttr1)
TestApplyResult(o.Test6, (constants.WideAttr2,), constants.WideAttr2)
TestApplyResult(o.Test6, (constants.WideAttr3,), constants.WideAttr3)
TestApplyResult(o.Test6, (constants.WideAttr4,), constants.WideAttr4)
TestApplyResult(o.Test6, (constants.WideAttr5,), constants.WideAttr5)
o.SetParamProp(0, 1)
if o.ParamProp(0) != 1:
raise RuntimeError(o.paramProp(0))
# Make sure CastTo works - even though it is only casting it to itself!
o2 = CastTo(o, "IPyCOMTest")
if o != o2:
raise error("CastTo should have returned the same object")
# Do the connection point thing...
# Create a connection object.
progress("Testing connection points")
o2 = win32com.client.DispatchWithEvents(o, RandomEventHandler)
TestEvents(o2, o2)
o2 = win32com.client.DispatchWithEvents(o, NewStyleRandomEventHandler)
TestEvents(o2, o2)
# and a plain "WithEvents".
handler = win32com.client.WithEvents(o, RandomEventHandler)
TestEvents(o, handler)
handler = win32com.client.WithEvents(o, NewStyleRandomEventHandler)
TestEvents(o, handler)
progress("Finished generated .py test.")
def TestEvents(o, handler):
sessions = []
handler._Init()
try:
for i in range(3):
session = o.Start()
sessions.append(session)
time.sleep(.5)
finally:
# Stop the servers
for session in sessions:
o.Stop(session)
handler._DumpFireds()
handler.close()
def _TestPyVariant(o, is_generated, val, checker = None):
if is_generated:
vt, got = o.GetVariantAndType(val)
else:
# Gotta supply all 3 args with the last 2 being explicit variants to
# get the byref behaviour.
var_vt = VARIANT(pythoncom.VT_UI2 | pythoncom.VT_BYREF, 0)
var_result = VARIANT(pythoncom.VT_VARIANT | pythoncom.VT_BYREF, 0)
o.GetVariantAndType(val, var_vt, var_result)
vt = var_vt.value
got = var_result.value
if checker is not None:
checker(got)
return
# default checking.
assert vt == val.varianttype, (vt, val.varianttype)
# Handle our safe-array test - if the passed value is a list of variants,
# compare against the actual values.
if type(val.value) in (tuple, list):
check = [v.value if isinstance(v, VARIANT) else v for v in val.value]
# pythoncom always returns arrays as tuples.
got = list(got)
else:
check = val.value
assert type(check) == type(got), (type(check), type(got))
assert check == got, (check, got)
def _TestPyVariantFails(o, is_generated, val, exc):
try:
_TestPyVariant(o, is_generated, val)
raise error("Setting %r didn't raise %s" % (val, exc))
except exc:
pass
def TestPyVariant(o, is_generated):
_TestPyVariant(o, is_generated, VARIANT(pythoncom.VT_UI1, 1))
_TestPyVariant(o, is_generated, VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_UI4, [1,2,3]))
_TestPyVariant(o, is_generated, VARIANT(pythoncom.VT_BSTR, "hello"))
_TestPyVariant(o, is_generated, VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_BSTR, ["hello", "there"]))
def check_dispatch(got):
assert isinstance(got._oleobj_, pythoncom.TypeIIDs[pythoncom.IID_IDispatch])
_TestPyVariant(o, is_generated, VARIANT(pythoncom.VT_DISPATCH, o), check_dispatch)
_TestPyVariant(o, is_generated, VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_DISPATCH, [o]))
# an array of variants each with a specific type.
v = VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_VARIANT,
[VARIANT(pythoncom.VT_UI4, 1),
VARIANT(pythoncom.VT_UI4, 2),
VARIANT(pythoncom.VT_UI4, 3)
]
)
_TestPyVariant(o, is_generated, v)
# and failures
_TestPyVariantFails(o, is_generated, VARIANT(pythoncom.VT_UI1, "foo"), ValueError)
def TestCounter(counter, bIsGenerated):
# Test random access into container
progress("Testing counter", repr(counter))
import random
for i in range(50):
num = int(random.random() * len(counter))
try:
# XXX - this appears broken by commit 08a14d4deb374eaa06378509cf44078ad467b9dc -
# We shouldn't need to do generated differently than dynamic.
if bIsGenerated:
ret = counter.Item(num+1)
else:
ret = counter[num]
if ret != num+1:
raise error("Random access into element %d failed - return was %s" % (num,repr(ret)))
except IndexError:
raise error("** IndexError accessing collection element %d" % num)
num = 0
if bIsGenerated:
counter.SetTestProperty(1)
counter.TestProperty = 1 # Note this has a second, default arg.
counter.SetTestProperty(1,2)
if counter.TestPropertyWithDef != 0:
raise error("Unexpected property set value!")
if counter.TestPropertyNoDef(1) != 1:
raise error("Unexpected property set value!")
else:
pass
# counter.TestProperty = 1
counter.LBound=1
counter.UBound=10
if counter.LBound != 1 or counter.UBound!=10:
print("** Error - counter did not keep its properties")
if bIsGenerated:
bounds = counter.GetBounds()
if bounds[0]!=1 or bounds[1]!=10:
raise error("** Error - counter did not give the same properties back")
counter.SetBounds(bounds[0], bounds[1])
for item in counter:
num = num + 1
if num != len(counter):
raise error("*** Length of counter and loop iterations dont match ***")
if num != 10:
raise error("*** Unexpected number of loop iterations ***")
counter = iter(counter)._iter_.Clone() # Test Clone() and enum directly
counter.Reset()
num = 0
for item in counter:
num = num + 1
if num != 10:
raise error("*** Unexpected number of loop iterations - got %d ***" % num)
progress("Finished testing counter")
def TestLocalVTable(ob):
# Python doesn't fully implement this interface.
if ob.DoubleString("foo") != "foofoo":
raise error("couldn't foofoo")
###############################
##
## Some vtable tests of the interface
##
def TestVTable(clsctx=pythoncom.CLSCTX_ALL):
# Any vtable interfaces marked as dual *should* be able to be
# correctly implemented as IDispatch.
ob = win32com.client.Dispatch("Python.Test.PyCOMTest")
TestLocalVTable(ob)
# Now test it via vtable - use some C++ code to help here as Python can't do it directly yet.
tester = win32com.client.Dispatch("PyCOMTest.PyCOMTest")
testee = pythoncom.CoCreateInstance("Python.Test.PyCOMTest", None, clsctx, pythoncom.IID_IUnknown)
# check we fail gracefully with None passed.
try:
tester.TestMyInterface(None)
except pythoncom.com_error as details:
pass
# and a real object.
tester.TestMyInterface(testee)
def TestVTable2():
# We once crashed creating our object with the native interface as
# the first IID specified. We must do it _after_ the tests, so that
# Python has already had the gateway registered from last run.
ob = win32com.client.Dispatch("Python.Test.PyCOMTest")
iid = pythoncom.InterfaceNames["IPyCOMTest"]
clsid = "Python.Test.PyCOMTest"
clsctx = pythoncom.CLSCTX_SERVER
try:
testee = pythoncom.CoCreateInstance(clsid, None, clsctx, iid)
except TypeError:
# Python can't actually _use_ this interface yet, so this is
# "expected". Any COM error is not.
pass
def TestVTableMI():
clsctx = pythoncom.CLSCTX_SERVER
ob = pythoncom.CoCreateInstance("Python.Test.PyCOMTestMI", None, clsctx, pythoncom.IID_IUnknown)
# This inherits from IStream.
ob.QueryInterface(pythoncom.IID_IStream)
# This implements IStorage, specifying the IID as a string
ob.QueryInterface(pythoncom.IID_IStorage)
# IDispatch should always work
ob.QueryInterface(pythoncom.IID_IDispatch)
iid = pythoncom.InterfaceNames["IPyCOMTest"]
try:
ob.QueryInterface(iid)
except TypeError:
# Python can't actually _use_ this interface yet, so this is
# "expected". Any COM error is not.
pass
def TestQueryInterface(long_lived_server = 0, iterations=5):
tester = win32com.client.Dispatch("PyCOMTest.PyCOMTest")
if long_lived_server:
# Create a local server
t0 = win32com.client.Dispatch("Python.Test.PyCOMTest", clsctx=pythoncom.CLSCTX_LOCAL_SERVER)
# Request custom interfaces a number of times
prompt = [
"Testing QueryInterface without long-lived local-server #%d of %d...",
"Testing QueryInterface with long-lived local-server #%d of %d..."
]
for i in range(iterations):
progress(prompt[long_lived_server!=0] % (i+1, iterations))
tester.TestQueryInterface()
class Tester(win32com.test.util.TestCase):
def testVTableInProc(self):
# We used to crash running this the second time - do it a few times
for i in range(3):
progress("Testing VTables in-process #%d..." % (i+1))
TestVTable(pythoncom.CLSCTX_INPROC_SERVER)
def testVTableLocalServer(self):
for i in range(3):
progress("Testing VTables out-of-process #%d..." % (i+1))
TestVTable(pythoncom.CLSCTX_LOCAL_SERVER)
def testVTable2(self):
for i in range(3):
TestVTable2()
def testVTableMI(self):
for i in range(3):
TestVTableMI()
def testMultiQueryInterface(self):
TestQueryInterface(0,6)
# When we use the custom interface in the presence of a long-lived
# local server, i.e. a local server that is already running when
# we request an instance of our COM object, and remains afterwards,
# then after repeated requests to create an instance of our object
# the custom interface disappears -- i.e. QueryInterface fails with
# E_NOINTERFACE. Set the upper range of the following test to 2 to
# pass this test, i.e. TestQueryInterface(1,2)
TestQueryInterface(1,6)
def testDynamic(self):
TestDynamic()
def testGenerated(self):
TestGenerated()
if __name__=='__main__':
# XXX - todo - Complete hack to crank threading support.
# Should NOT be necessary
def NullThreadFunc():
pass
import _thread
_thread.start_new( NullThreadFunc, () )
if "-v" in sys.argv: verbose = 1
win32com.test.util.testmain()
|
|
# Copyright 2013 Mario Graff Guerrero
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from SimpleGP.simplegp import GP
from SimpleGP.pde import PDE
from SimpleGP.tree import PDEXO
from SimpleGP.Rprop import RPROP2
class GPPDE(GP):
def __init__(self, max_mem=500.0,
update_best_w_rprop=False,
**kwargs):
super(GPPDE, self).__init__(**kwargs)
self._max_mem = max_mem
self._update_best_w_rprop = update_best_w_rprop
self._p_st = np.empty(self._popsize, dtype=np.object)
self._used_mem = 0
def new_best(self, k):
flag = super(GPPDE, self).new_best(k)
if not self._update_best_w_rprop or not flag:
return flag
cons = self._p_constants[k].copy()
fit = self._fitness[k]
self.rprop(k)
flag = super(GPPDE, self).new_best(k)
if flag:
return flag
self._fitness[k] = fit
self._p_constants[k] = cons
return True
def stats(self):
flag = super(GPPDE, self).stats()
self.free_mem()
return flag
def update_mem(self, d, sign=1):
if d is not None:
d = d.nbytes / 1024. / 1024.
self._used_mem += (d * sign)
def max_mem_per_individual(self, xs=None):
if xs is None:
xs = self._x.shape[0]
p_st = np.empty((self._max_length, xs),
dtype=self._dtype, order='C').nbytes
p_der_st = np.ones((self._max_length,
xs),
dtype=self._dtype,
order='C').nbytes
return (p_der_st / 1024. / 1024.,
p_st / 1024. / 1024.)
def train(self, x, f):
super(GPPDE, self).train(x, f)
self.free_mem()
self._p_der = np.empty((self._max_length, self._x.shape[0]),
dtype=self._dtype)
self._pde = PDE(self._tree, self._p_der)
if self._x.shape[1] < 10:
self._tree.set_number_var_pm(self._x.shape[1])
return self
def load_prev_run(self):
r = super(GPPDE, self).load_prev_run()
if r:
self._fitness.fill(-np.inf)
gens_ind = self.gens_ind
for i in range(self._popsize):
self.fitness(i)
self.gens_ind = gens_ind
return r
def mem(self):
"""
Memory used
"""
return self._used_mem
def free_mem(self):
"""
This method frees the memory when the memory used is more than
self._max_mem
"""
if self.mem() < self._max_mem:
return None
for i in range(self._popsize):
self.update_mem(self._p_st[i], -1)
self._p_st[i] = None
if hasattr(self, '_fitness'):
self._fitness[i] = -np.inf
def point_mutation(self, father1):
if self._ppm2 == 0:
self._npmutation = 1
return self.one_point_mutation(father1)
ind = father1.copy()
index = np.zeros_like(ind)
self.set_error_p_der()
c = self._pde.compute_pdepm(ind,
self._p_st[self._xo_father1],
index, self._ppm2,
self._pm_only_functions)
# print c, index
self._npmutation = c
if c == 0:
self._npmutation = 1
return self.one_point_mutation(father1)
constants = np.concatenate((self._p_constants[self._xo_father1],
np.empty(c, dtype=self._dtype)))
ncons = self._p_constants[self._xo_father1].shape[0]
st = self._p_st[self._xo_father1]
for i in index[:c]:
e = np.sign(self._p_der[i])
if ind[i] < self.nfunc:
func = self._tree.pmutation_func_change(ind, i, st,
e, self._eval)
ind[i] = func
else:
ncons += self._tree.pmutation_terminal_change(ind,
i, st,
e,
self._x,
constants,
ncons,
self._eval)
# print ind, "*", index[:c]
ind = self.simplify(ind, constants)
# print ind, "-", index[:c]
if ind.shape[0] > self._max_length or ind.shape[0] < self._min_length:
return self.create_random_ind()
return ind
def one_point_mutation(self, father1):
sel_type = self._tree.get_type_xpoint_selection()
self._tree.set_type_xpoint_selection(1)
p1 = self._tree.father1_crossing_point(father1)
if self._pm_only_functions:
while father1[p1] >= self.nfunc:
p1 = self._tree.father1_crossing_point(father1)
self._tree.set_type_xpoint_selection(sel_type)
ind = father1.copy()
st = self._p_st[self._xo_father1]
e = self.get_error(p1)
if self.isfunc(ind[p1]):
func = self._tree.pmutation_func_change(father1,
p1, st, e, self._eval)
ind[p1] = func
constants = self._p_constants[self._xo_father1].copy()
else:
constants = np.concatenate((self._p_constants[self._xo_father1],
np.empty(1, dtype=self._dtype)))
ncons = self._p_constants[self._xo_father1].shape[0]
ncons += self._tree.pmutation_terminal_change(ind,
p1, st,
e,
self._x,
constants,
ncons,
self._eval)
# print self._func[father1[p1]], self._func[func]
ind = self.simplify(ind,
constants)
if ind.shape[0] > self._max_length or ind.shape[0] < self._min_length:
return self.create_random_ind()
return ind
def mutation(self, father1):
if father1.shape[0] > 1 and np.random.rand() < self._ppm:
return self.point_mutation(father1)
kill = self.tournament(neg=True)
while kill == self._xo_father1 or kill == self._best:
kill = self.tournament(neg=True)
d = np.random.randint(self._min_depth,
self._mutation_depth)
son = self.create_random_ind(depth=d,
first_call=True)
self.kill_ind(kill, son)
self._xo_father2 = kill
self.fitness(kill)
ind = self.crossover(father1, son)
return ind
def tree_params(self, type_xpoint_selection=0):
self._type_xpoint_selection = type_xpoint_selection
self._tree_length = np.empty(self._max_length,
dtype=self._ind_dtype)
self._tree_mask = np.empty(self._max_length,
dtype=self._ind_dtype)
self._tree = PDEXO(self._nop,
self._tree_length,
self._tree_mask,
self._min_length,
self._max_length,
type_xpoint_selection=type_xpoint_selection)
def set_error_p_der(self):
self._computing_fitness = self._xo_father1
e, g = self.compute_error_pr(None)
self._p_der[self._output] = e.T
def get_error(self, p1):
self.set_error_p_der()
self._pde.compute(self._p[self._xo_father1], p1,
self._p_st[self._xo_father1])
e = np.sign(self._p_der[p1])
return e
def crossover(self, father1, father2, p1=-1, p2=-1,
force_xo=False):
if p1 == -1:
p1 = self._tree.father1_crossing_point(father1)
if p2 == -1:
e = self.get_error(p1)
s = self._p_st[self._xo_father2]
p = self._p_st[self._xo_father1][p1]
self._tree.father2_xp_extras(e, p, s)
p2 = self._tree.father2_crossing_point(father1, father2, p1)
return super(GPPDE, self).crossover(father1, father2,
p1, p2)
def get_st(self, ind):
if self._computing_fitness is None:
if self._st is None:
self._st = np.empty((ind.shape[0], self._x.shape[0]),
dtype=self._dtype, order='C')
elif self._st.shape[0] < ind.shape[0]:
self._st.resize((ind.shape[0], self._x.shape[0]))
return self._st
else:
k = self._computing_fitness
l = ind.shape[0]
if self._p_st[k] is None:
self._p_st[k] = np.empty((ind.shape[0], self._x.shape[0]),
dtype=self._dtype, order='C')
self.update_mem(self._p_st[k])
elif self._p_st[k].shape[0] < l:
self.update_mem(self._p_st[k], -1)
self._p_st[k].resize(l, self._x.shape[0])
self.update_mem(self._p_st[k])
return self._p_st[k]
def compute_error_pr(self, ind, pos=0, constants=None, epoch=0):
if epoch == 0:
g = self._p_st[self._computing_fitness][self._output].T
else:
if ind is None:
g = self.eval(self._computing_fitness)
else:
g = self.eval_ind(ind, pos=pos, constants=constants)
# e = - 2 * ( self._f - g)
e = 2 * (g - self._f)
return e, g
def rprop(self, k, epochs=10000):
"""Update the constants of the tree using RPROP"""
self._computing_fitness = k
ind = self._p[k]
constants = self._p_constants[k]
self._computing_fitness = k
if not self.any_constant(ind):
return None
best_cons = constants.copy()
fit_best = self._fitness[k]
epoch_best = 0
rprop = RPROP2(ind, constants,
self._p_der, self._tree)
e, g = self.compute_error_pr(None)
self._p_der[self._output] = e.T
for i in range(epochs):
if i > 0:
self.gens_ind += 1
self._pde.compute_constants(ind, self._p_st[k])
rprop.update_constants_rprop()
e, g = self.compute_error_pr(None, epoch=i)
fit = - self.distance(self._f, g)
if fit > fit_best and not np.isnan(fit):
fit_best = fit
best_cons = constants.copy()
epoch_best = i
if i < epochs - 1:
self._p_der[self._output] = e.T
if i - epoch_best >= self._max_n_worst_epochs:
break
constants[:] = best_cons[:]
self._fitness[k] = fit_best
e, g = self.compute_error_pr(None, epoch=i)
@classmethod
def init_cl(cls, training_size=None,
max_length=1024, max_mem=500, argmax_nargs=2,
func=["+", "-", "*", "/", 'abs', 'exp', 'sqrt', 'sin',
'cos', 'sigmoid', 'if', 'max', 'min', 'ln', 'sq',
'argmax'], seed=0, **kwargs):
ins = cls(max_mem=max_mem, max_length=max_length,
argmax_nargs=argmax_nargs, func=func, seed=seed,
**kwargs)
if training_size is None:
return ins
base, pr = ins.max_mem_per_individual(training_size)
if (pr * ins._popsize) + base > ins._max_mem:
mm = ins._max_mem - base
assert mm > 0
popsize = np.floor(mm / np.float(pr)).astype(np.int)
nind = ins.generations * ins.popsize
popsize = filter(lambda x: (nind % x) == 0,
range(2, popsize+1))[-1]
ins._generations = np.floor(nind / popsize).astype(np.int)
ins._popsize = popsize
return ins
@classmethod
def run_cl(cls, x, f, training_size=None, **kwargs):
"""
Returns a trained system that does not output nan or inf in either
the training set (i.e., x) or the test set (i.e., test).
"""
if training_size is None:
training_size = x.shape[0]
return super(GPPDE, cls).run_cl(x, f,
training_size=training_size, **kwargs)
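# Illustrative usage sketch (not part of the original module). Only the
# arguments shown in run_cl/init_cl above are used; the shapes of x and f are
# assumptions about what GP.train() expects (2-D inputs, 1-D targets).
#
#     import numpy as np
#     x = np.linspace(-1, 1, 100)[:, np.newaxis]   # hypothetical training inputs
#     f = x[:, 0] ** 2 + x[:, 0]                   # hypothetical target values
#     gp = GPPDE.run_cl(x, f)                      # returns a trained GPPDE instance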
|
|
from securityhandlerhelper import securityhandlerhelper
dateTimeFormat = '%Y-%m-%d %H:%M'
import arcrest
from arcrest.agol import FeatureLayer
from arcrest.agol import FeatureService
from arcrest.hostedservice import AdminFeatureService
import datetime, time
import json
import os
import common
import gc
import arcpy
#----------------------------------------------------------------------
def trace():
"""
trace finds the line number, the filename
and the error message, and returns them
to the caller
"""
import traceback, inspect, sys
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
filename = inspect.getfile(inspect.currentframe())
# script name + line number
line = tbinfo.split(", ")[1]
# Get Python syntax error
#
synerror = traceback.format_exc().splitlines()[-1]
return line, filename, synerror
class featureservicetools(securityhandlerhelper):
#----------------------------------------------------------------------
def EnableEditingOnService(self, url, definition = None):
adminFS = AdminFeatureService(url=url, securityHandler=self._securityHandler)
if definition is None:
definition = {}
definition['capabilities'] = "Create,Delete,Query,Update,Editing"
definition['allowGeometryUpdates'] = True
existingDef = {}
existingDef['capabilities'] = adminFS.capabilities
existingDef['allowGeometryUpdates'] = adminFS.allowGeometryUpdates
enableResults = adminFS.updateDefinition(json_dict=definition)
if 'error' in enableResults:
return enableResults['error']
adminFS = None
del adminFS
return existingDef
#----------------------------------------------------------------------
def disableSync(self, url, definition = None):
adminFS = AdminFeatureService(url=url, securityHandler=self._securityHandler)
cap = str(adminFS.capabilities)
existingDef = {}
enableResults = 'skipped'
if 'Sync' in cap:
capItems = cap.split(',')
if 'Sync' in capItems:
capItems.remove('Sync')
existingDef['capabilities'] = ','.join(capItems)
enableResults = adminFS.updateDefinition(json_dict=existingDef)
if 'error' in enableResults:
return enableResults['error']
adminFS = None
del adminFS
return enableResults
def GetFeatureService(self,itemId,returnURLOnly=False):
admin = None
item = None
try:
admin = arcrest.manageorg.Administration(securityHandler=self._securityHandler)
if self._securityHandler.valid == False:
self._valid = self._securityHandler.valid
self._message = self._securityHandler.message
return None
item = admin.content.item(itemId=itemId)
if item.itemType == "Feature Service":
if returnURLOnly:
return item.url
else:
return FeatureService(
url=item.url,
securityHandler=self._securityHandler)
return None
except:
line, filename, synerror = trace()
raise common.ArcRestHelperError({
"function": "GetFeatureService",
"line": line,
"filename": filename,
"synerror": synerror,
}
)
finally:
admin = None
item = None
del item
del admin
gc.collect()
#----------------------------------------------------------------------
def GetLayerFromFeatureServiceByURL(self,url,layerName="",returnURLOnly=False):
fs = None
try:
fs = FeatureService(
url=url,
securityHandler=self._securityHandler)
return self.GetLayerFromFeatureService(fs=fs,layerName=layerName,returnURLOnly=returnURLOnly)
except:
line, filename, synerror = trace()
raise common.ArcRestHelperError({
"function": "GetLayerFromFeatureServiceByURL",
"line": line,
"filename": filename,
"synerror": synerror,
}
)
finally:
fs = None
del fs
gc.collect()
#----------------------------------------------------------------------
def GetLayerFromFeatureService(self,fs,layerName="",returnURLOnly=False):
layers = None
table = None
layer = None
sublayer = None
try:
layers = fs.layers
for layer in layers:
if layer.name == layerName:
if returnURLOnly:
return fs.url + '/' + str(layer.id)
else:
return layer
elif layer.subLayers is not None:
for sublayer in layer.subLayers:
if sublayer == layerName:
return sublayer
for table in fs.tables:
if table.name == layerName:
if returnURLOnly:
return fs.url + '/' + str(table.id)
else:
return table
return None
except:
line, filename, synerror = trace()
raise common.ArcRestHelperError({
"function": "GetLayerFromFeatureService",
"line": line,
"filename": filename,
"synerror": synerror,
}
)
finally:
layers = None
table = None
layer = None
sublayer = None
del layers
del table
del layer
del sublayer
gc.collect()
#----------------------------------------------------------------------
def AddFeaturesToFeatureLayer(self,url,pathToFeatureClass,chunksize=0):
fl = None
try:
fl = FeatureLayer(
url=url,
securityHandler=self._securityHandler)
if chunksize > 0:
messages = {'addResults':[]}
total = arcpy.GetCount_management(pathToFeatureClass)
arcpy.env.overwriteOutput = True
inDesc = arcpy.Describe(pathToFeatureClass)
oidName = arcpy.AddFieldDelimiters(pathToFeatureClass,inDesc.oidFieldName)
sql = '%s = (select min(%s) from %s)' % (oidName,oidName,os.path.basename(pathToFeatureClass))
cur = arcpy.da.SearchCursor(pathToFeatureClass,[inDesc.oidFieldName],sql)
minOID = cur.next()[0]
del cur, sql
sql = '%s = (select max(%s) from %s)' % (oidName,oidName,os.path.basename(pathToFeatureClass))
cur = arcpy.da.SearchCursor(pathToFeatureClass,[inDesc.oidFieldName],sql)
maxOID = cur.next()[0]
del cur, sql
breaks = range(minOID,maxOID)[0:-1:chunksize]
breaks.append(maxOID+1)
exprList = [oidName + ' >= ' + str(breaks[b]) + ' and ' + \
oidName + ' < ' + str(breaks[b+1]) for b in range(len(breaks)-1)]
for expr in exprList:
UploadLayer = arcpy.MakeFeatureLayer_management(pathToFeatureClass, 'TEMPCOPY', expr).getOutput(0)
result = fl.addFeatures(fc=UploadLayer)
if messages is None:
messages = result
else:
if 'addResults' in result:
if 'addResults' in messages:
messages['addResults'] = messages['addResults'] + result['addResults']
print "%s/%s features added" % (len(messages['addResults']),total)
else:
messages['addResults'] = result['addResults']
print "%s/%s features added" % (len(messages['addResults']),total)
else:
messages['errors'] = result
return messages
else:
return fl.addFeatures(fc=pathToFeatureClass)
except arcpy.ExecuteError:
line, filename, synerror = trace()
raise common.ArcRestHelperError({
"function": "create_report_layers_using_config",
"line": line,
"filename": filename,
"synerror": synerror,
"arcpyError": arcpy.GetMessages(2),
}
)
except:
line, filename, synerror = trace()
raise common.ArcRestHelperError({
"function": "AddFeaturesToFeatureLayer",
"line": line,
"filename": filename,
"synerror": synerror,
}
)
finally:
fl = None
del fl
gc.collect()
#----------------------------------------------------------------------
def DeleteFeaturesFromFeatureLayer(self,url,sql,chunksize=0):
fl = None
try:
fl = FeatureLayer(
url=url,
securityHandler=self._securityHandler)
totalDeleted = 0
if chunksize > 0:
qRes = fl.query(where=sql, returnIDsOnly=True)
if 'error' in qRes:
print qRes
return qRes
elif 'objectIds' in qRes:
oids = qRes['objectIds']
total = len(oids)
if total == 0:
return "No features matched the query"
minId = min(oids)
maxId = max(oids)
i = 0
print "%s features to be deleted" % total
while(i <= len(oids)):
oidsDelete = ','.join(str(e) for e in oids[i:i+chunksize])
if oidsDelete == '':
continue
else:
results = fl.deleteFeatures(objectIds=oidsDelete)
if 'deleteResults' in results:
totalDeleted += len(results['deleteResults'])
print "%s%% Completed: %s/%s " % (int(totalDeleted / float(total) *100), totalDeleted, total)
i += chunksize
else:
print results
return "%s deleted" % totalDeleted
qRes = fl.query(where=sql, returnIDsOnly=True)
if 'objectIds' in qRes:
oids = qRes['objectIds']
if len(oids)> 0 :
print "%s features to be deleted" % len(oids)
results = fl.deleteFeatures(where=sql)
if 'deleteResults' in results:
totalDeleted += len(results['deleteResults'])
return "%s deleted" % totalDeleted
else:
return results
return "%s deleted" % totalDeleted
else:
print qRes
else:
results = fl.deleteFeatures(where=sql)
if 'deleteResults' in results:
return totalDeleted + len(results['deleteResults'])
else:
return results
except:
line, filename, synerror = trace()
raise common.ArcRestHelperError({
"function": "DeleteFeaturesFromFeatureLayer",
"line": line,
"filename": filename,
"synerror": synerror,
}
)
finally:
fl = None
del fl
gc.collect()
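# Illustrative usage sketch (not part of the original module). The
# securityinfo keys, item id, layer name and feature-class path below are
# placeholders; securityhandlerhelper is assumed to take a securityinfo
# configuration dict, as in the other ArcRestHelper tools.
#
#     fst = featureservicetools(securityinfo={'security_type': 'Portal',
#                                             'username': '<user>',
#                                             'password': '<pass>',
#                                             'org_url': 'https://www.arcgis.com'})
#     fs_url = fst.GetFeatureService(itemId='<item id>', returnURLOnly=True)
#     layer_url = fst.GetLayerFromFeatureServiceByURL(fs_url, layerName='Parcels',
#                                                     returnURLOnly=True)
#     print fst.AddFeaturesToFeatureLayer(layer_url, r'C:\data\parcels.gdb\Parcels',
#                                         chunksize=500)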
|
|
# postgresql/on_conflict.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from ...sql.elements import ClauseElement, _literal_as_binds
from ...sql.dml import Insert as StandardInsert
from ...sql.expression import alias
from ...sql import schema
from ...util.langhelpers import public_factory
from ...sql.base import _generative
from ... import util
from . import ext
__all__ = ('Insert', 'insert')
class Insert(StandardInsert):
"""Postgresql-specific implementation of INSERT.
Adds methods for PG-specific syntaxes such as ON CONFLICT.
.. versionadded:: 1.1
"""
@util.memoized_property
def excluded(self):
"""Provide the ``excluded`` namespace for an ON CONFLICT statement
PG's ON CONFLICT clause allows reference to the row that would
be inserted, known as ``excluded``. This attribute provides
all columns in this row to be referenceable.
.. seealso::
:ref:`postgresql_insert_on_conflict` - example of how
to use :attr:`.Insert.excluded`
"""
return alias(self.table, name='excluded').columns
@_generative
def on_conflict_do_update(
self,
constraint=None, index_elements=None,
index_where=None, set_=None, where=None):
"""
Specifies a DO UPDATE SET action for ON CONFLICT clause.
Either the ``constraint`` or ``index_elements`` argument is
required, but only one of these can be specified.
:param constraint:
The name of a unique or exclusion constraint on the table,
or the constraint object itself if it has a .name attribute.
:param index_elements:
A sequence consisting of string column names, :class:`.Column`
objects, or other column expression objects that will be used
to infer a target index.
:param index_where:
Additional WHERE criterion that can be used to infer a
conditional target index.
:param set_:
Required argument. A dictionary or other mapping object
with column names as keys and expressions or literals as values,
specifying the ``SET`` actions to take.
.. warning:: This dictionary does **not** take into account
Python-specified default UPDATE values or generation functions,
e.g. those specified using :paramref:`.Column.onupdate`.
These values will not be exercised for an ON CONFLICT style of
UPDATE, unless they are manually specified in the
:paramref:`.Insert.on_conflict_do_update.set_` dictionary.
:param where:
Optional argument. If present, can be a literal SQL
string or an acceptable expression for a ``WHERE`` clause
that restricts the rows affected by ``DO UPDATE SET``. Rows
not meeting the ``WHERE`` condition will not be updated
(effectively a ``DO NOTHING`` for those rows).
.. versionadded:: 1.1
.. seealso::
:ref:`postgresql_insert_on_conflict`
"""
self._post_values_clause = OnConflictDoUpdate(
constraint, index_elements, index_where, set_, where)
return self
@_generative
def on_conflict_do_nothing(
self,
constraint=None, index_elements=None, index_where=None):
"""
Specifies a DO NOTHING action for ON CONFLICT clause.
The ``constraint`` and ``index_elements`` arguments
are optional, but only one of these can be specified.
:param constraint:
The name of a unique or exclusion constraint on the table,
or the constraint object itself if it has a .name attribute.
:param index_elements:
A sequence consisting of string column names, :class:`.Column`
objects, or other column expression objects that will be used
to infer a target index.
:param index_where:
Additional WHERE criterion that can be used to infer a
conditional target index.
.. versionadded:: 1.1
.. seealso::
:ref:`postgresql_insert_on_conflict`
"""
self._post_values_clause = OnConflictDoNothing(
constraint, index_elements, index_where)
return self
insert = public_factory(Insert, '.dialects.postgresql.insert')
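# Illustrative usage sketch (not part of the original module); ``my_table``
# and ``conn`` are assumed to be an existing Table and Connection:
#
#     from sqlalchemy.dialects.postgresql import insert
#
#     stmt = insert(my_table).values(id=1, data='inserted')
#     stmt = stmt.on_conflict_do_update(
#         index_elements=['id'],
#         set_=dict(data=stmt.excluded.data))
#     conn.execute(stmt)
#
#     # or simply skip conflicting rows:
#     conn.execute(insert(my_table).values(id=1).on_conflict_do_nothing(
#         index_elements=['id']))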
class OnConflictClause(ClauseElement):
def __init__(
self,
constraint=None,
index_elements=None,
index_where=None):
if constraint is not None:
if not isinstance(constraint, util.string_types) and \
isinstance(constraint, (
schema.Index, schema.Constraint,
ext.ExcludeConstraint)):
constraint = getattr(constraint, 'name') or constraint
if constraint is not None:
if index_elements is not None:
raise ValueError(
"'constraint' and 'index_elements' are mutually exclusive")
if isinstance(constraint, util.string_types):
self.constraint_target = constraint
self.inferred_target_elements = None
self.inferred_target_whereclause = None
elif isinstance(constraint, schema.Index):
index_elements = constraint.expressions
index_where = \
constraint.dialect_options['postgresql'].get("where")
elif isinstance(constraint, ext.ExcludeConstraint):
index_elements = constraint.columns
index_where = constraint.where
else:
index_elements = constraint.columns
index_where = \
constraint.dialect_options['postgresql'].get("where")
if index_elements is not None:
self.constraint_target = None
self.inferred_target_elements = index_elements
self.inferred_target_whereclause = index_where
elif constraint is None:
self.constraint_target = self.inferred_target_elements = \
self.inferred_target_whereclause = None
class OnConflictDoNothing(OnConflictClause):
__visit_name__ = 'on_conflict_do_nothing'
class OnConflictDoUpdate(OnConflictClause):
__visit_name__ = 'on_conflict_do_update'
def __init__(
self,
constraint=None,
index_elements=None,
index_where=None,
set_=None,
where=None):
super(OnConflictDoUpdate, self).__init__(
constraint=constraint,
index_elements=index_elements,
index_where=index_where)
if self.inferred_target_elements is None and \
self.constraint_target is None:
raise ValueError(
"Either constraint or index_elements, "
"but not both, must be specified unless DO NOTHING")
if (not isinstance(set_, dict) or not set_):
raise ValueError("set parameter must be a non-empty dictionary")
self.update_values_to_set = [
(key, _literal_as_binds(value))
for key, value in set_.items()
]
self.update_whereclause = where
|
|
"""XJPATH simplifies access to the python data structures using relatively
simple path syntax. It doesn't not only lookup value, it also can
validate found data type as well as create value if a target structure is a
dictionary.
The typical scenarios are:
- you need to look up an element from nested dicts.
- you need to look up an element of an array that is a value of a nested dictionary.
- you need to get a list of X values from multiple nested dictionaries.
- you just want to operate on a complex data structure in the way you
operate on a dictionary.
- you want to make sure that the found data has the expected type.
The expression syntax is trivial; it looks like:
'key1.key2.key3'
Each key name is a nested data index/key. A key may refer to a dictionary key,
an array index or an iterator.
To refer to a dictionary key, just use its name as in the example above.
An array index is prepended with '@' symbol:
@2 - Means second element.
@-2 - Means second element from the end.
@last - Means last element.
@first - Means first element of the array.
If a dictionary key contains any reserved symbols, just escape them.
'2.\@2' - will look up key 2 and then key '@2'.
You can also specify the expected value type as a postfix:
'keyname[]', '@last[]', '@first{}', 'data$', 'data#'
[] - Expected value is a list.
() - Expected value is a tuple.
{} - Expected value is a dictionary.
# - Expected value is an integer.
% - Expected value is a float.
$ - Expected value is a string.
Here is a bunch of examples:
>>> d = {'data': {
'a_array': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
'b_dict': {'a': 'xxx', 'b': 'yyy', 'c': 'zzz'},
'c_array': [{'v': 'vdata1'}, {'v': 'vdata2'}]}}
>>> xj = xjpath.XJPath(d)
To get 'a_array' array:
>>> xj['data.a_array']
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
>>> xj['data.a_array{}']
IndexError: ('Path error: data.a_array{}', 'Key a_array expects type "dict", but found value type is "list"')
To get a last element of 'a_array' array:
>>> xj['data.a_array.@last']
10
To get the first element of 'a_array' array:
>>> xj['data.a_array.@first']
0
To get 9th element from 'a_array':
>>> xj['data.a_array.@9']
9
To get third element from the back from 'a_array':
>>> xj['data.a_array.@-3']
8
To get all values that are stored in dictionaries with key 'v' of array c_array:
>>> xj['data.c_array.*.v']
('vdata1', 'vdata2')
To return a frozen copy of a_array:
>>> xj['data.a_array.*']
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
To get all values of b_dict dictionary:
>>> xj['data.b_dict.*']
('zzz', 'yyy', 'xxx')
If you don't like the dictionary-like interface, feel free to use the
path_lookup function instead; it returns the found value as well as a boolean
telling you whether the result was found or not.
Author: vburenin@gmail.com
"""
ESCAPE_STR1 = '111' * 5
ESCAPE_STR2 = '222' * 5
ESCAPE_SEQ = '\\' # '\' character used as an escape sequence in xjpath.
DOUBLE_ESCAPE_SEQ = ESCAPE_SEQ + ESCAPE_SEQ
class XJPathError(Exception):
pass
def split(inp_str, sep_char, maxsplit=-1, escape_char='\\'):
"""Separates a string on a character, taking into account escapes.
:param str inp_str: string to split.
:param str sep_char: separator character.
:param int maxsplit: maximum number of times to split from left.
:param str escape_char: escape character.
:rtype: __generator[str]
:return: sub-strings generator separated on the `sep_char`.
"""
word_chars = []
word_chars_append = word_chars.append
inp_str_iter = iter(inp_str)
for c in inp_str_iter:
word_chars_append(c)
if c == escape_char:
try:
next_char = next(inp_str_iter)
except StopIteration:
continue
if next_char == sep_char:
word_chars[-1] = next_char
else:
word_chars.append(next_char)
elif c == sep_char:
word_chars.pop()
yield ''.join(word_chars)
maxsplit -= 1
if maxsplit == 0:
yield ''.join(inp_str_iter)
return
del word_chars[:]
yield ''.join(word_chars)
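# Examples derived from the implementation above: escaped separators stay
# inside the produced words, and maxsplit limits the number of splits.
#
#     >>> list(split('a.b\\.c.d', '.'))
#     ['a', 'b.c', 'd']
#     >>> list(split('a.b.c', '.', maxsplit=1))
#     ['a', 'b.c']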
def _full_sub_array(data_obj, xj_path, create_dict_path):
"""Retrieves all array or dictionary elements for '*' JSON path marker.
:param dict|list data_obj: The current data object.
:param str xj_path: A json path.
:param bool create_dict_path: create a dict path.
:return: tuple with two values: first is a result and second
a boolean flag telling if this value exists or not.
"""
if isinstance(data_obj, list):
if xj_path:
res = []
for d in data_obj:
val, exists = path_lookup(d, xj_path, create_dict_path)
if exists:
res.append(val)
return tuple(res), True
else:
return tuple(data_obj), True
elif isinstance(data_obj, dict):
if xj_path:
res = []
for d in data_obj.values():
val, exists = path_lookup(d, xj_path, create_dict_path)
if exists:
res.append(val)
return tuple(res), True
else:
return tuple(data_obj.values()), True
else:
return None, False
def _get_array_index(array_path):
"""Translates @first @last @1 @-1 expressions into an actual array index.
:param str array_path: Array path in XJ notation.
:rtype: int
:return: Array index.
"""
if not array_path.startswith('@'):
raise XJPathError('Array index must start from @ symbol.')
array_path = array_path[1:]
if array_path == 'last':
return -1
if array_path == 'first':
return 0
if array_path.isdigit() or (array_path.startswith('-')
and array_path[1:].isdigit()):
return int(array_path)
else:
raise XJPathError('Unknown index reference', (array_path,))
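# Examples that follow directly from the implementation above:
#     _get_array_index('@first') -> 0
#     _get_array_index('@last')  -> -1
#     _get_array_index('@-3')    -> -3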
def _single_array_element(data_obj, xj_path, array_path, create_dict_path):
"""Retrieves a single array for a '@' JSON path marker.
:param list data_obj: The current data object.
:param str xj_path: A json path.
:param str array_path: A lookup key.
:param bool create_dict_path create a dict path.
"""
val_type, array_path = _clean_key_type(array_path)
array_idx = _get_array_index(array_path)
if data_obj and isinstance(data_obj, (list, tuple)):
try:
value = data_obj[array_idx]
if val_type is not None and not isinstance(value, val_type):
raise XJPathError('Index array "%s" of "%s" type does not '
'match expected type "%s"' %
(array_idx, type(value).__name__,
val_type.__name__))
if xj_path:
return path_lookup(value, xj_path, create_dict_path)
else:
return value, True
except IndexError:
return None, False
else:
if val_type is not None:
raise XJPathError('Expected the list element type, but "%s" found' %
type(data_obj).__name__)
return None, False
def _split_path(xj_path):
"""Extract the last piece of XJPath.
:param str xj_path: A XJPath expression.
:rtype: tuple[str|None, str]
:return: A tuple where first element is a root XJPath and the second is
a last piece of key.
"""
res = xj_path.rsplit('.', 1)
root_key = res[0]
if len(res) > 1:
return root_key, res[1]
else:
if root_key and root_key != '.':
return None, root_key
else:
raise XJPathError('Path cannot be empty', (xj_path,))
def validate_path(xj_path):
"""Validates XJ path.
:param str xj_path: XJ Path
:raise: XJPathError if validation fails.
"""
if not isinstance(xj_path, str):
raise XJPathError('XJPath must be a string')
for path in split(xj_path, '.'):
if path == '*':
continue
if path.startswith('@'):
if path == '@first' or path == '@last':
continue
try:
int(path[1:])
except ValueError:
raise XJPathError('Array index must be either integer or '
'@first or @last')
_KEY_SPLIT = {
'$': str,
'#': int,
'%': float,
'{}': dict,
'[]': list,
'()': tuple,
}
def unescape(in_str, escape_char=ESCAPE_SEQ):
str_iter = iter(in_str)
chars = []
chars_append = chars.append
try:
for c in str_iter:
if c == escape_char:
chars_append(next(str_iter))
else:
chars_append(c)
except StopIteration:
pass
return ''.join(chars)
def _clean_key_type(key_name, escape_char=ESCAPE_SEQ):
"""Removes type specifier returning detected type and
a key name without type specifier.
:param str key_name: A key name containing type postfix.
:rtype: tuple[type|None, str]
:returns: Type definition and cleaned key name.
"""
for i in (2, 1):
if len(key_name) < i:
return None, key_name
type_v = key_name[-i:]
if type_v in _KEY_SPLIT:
if len(key_name) <= i:
return _KEY_SPLIT[type_v], ''
esc_cnt = 0
for pos in range(-i - 1, -len(key_name) - 1, -1):
if key_name[pos] == escape_char:
esc_cnt += 1
else:
break
if esc_cnt % 2 == 0:
return _KEY_SPLIT[type_v], key_name[:-i]
else:
return None, key_name
return None, key_name
def path_lookup(data_obj, xj_path, create_dict_path=False):
"""Looks up a xj path in the data_obj.
:param dict|list data_obj: An object to look into.
:param str xj_path: A path to extract data from.
:param bool create_dict_path: Create an element if type is specified.
:return: A tuple where 0 value is an extracted value and a second
field that tells if value either was found or not found.
"""
if not xj_path or xj_path == '.':
return data_obj, True
res = list(split(xj_path, '.', maxsplit=1))
top_key = res[0]
leftover = res[1] if len(res) > 1 else None
if top_key == '*':
return _full_sub_array(data_obj, leftover, create_dict_path)
elif top_key.startswith('@'):
return _single_array_element(data_obj, leftover, top_key,
create_dict_path)
else:
val_type, top_key = _clean_key_type(top_key)
top_key = unescape(top_key)
if top_key in data_obj:
value = data_obj[top_key]
if val_type is not None and not isinstance(value, val_type):
raise XJPathError(
'Key %s expects type "%s", but found value type is "%s"' %
(top_key, val_type.__name__, type(value).__name__))
if leftover:
return path_lookup(value, leftover, create_dict_path)
else:
return value, True
else:
if val_type is not None:
if not isinstance(data_obj, dict):
raise XJPathError('Accessed object must be a dict type '
'for the key: "%s"' % top_key)
if create_dict_path:
data_obj[top_key] = val_type()
else:
return None, False
if leftover:
return path_lookup(data_obj[top_key], leftover,
create_dict_path)
else:
return data_obj[top_key], True
return None, False
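# Example of the create_dict_path behaviour described in the module docstring,
# traced against the implementation above (a typed key that is missing from a
# dict is created with its type's default value):
#
#     >>> data = {}
#     >>> path_lookup(data, 'config{}.retries#', create_dict_path=True)
#     (0, True)
#     >>> data
#     {'config': {'retries': 0}}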
def strict_path_lookup(data_obj, xj_path, force_type=None):
"""Looks up a xj path in the data_obj.
:param dict|list data_obj: An object to look into.
:param str xj_path: A path to extract data from.
:param type force_type: The type the value is expected to be.
:return: Returns result or throws an exception if value is not found.
"""
value, exists = path_lookup(data_obj, xj_path)
if exists:
if force_type is not None:
if not isinstance(value, force_type):
raise XJPathError('Found value is a wrong type',
(xj_path, force_type))
return value
else:
raise XJPathError('Path does not exist', (xj_path,))
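# Example: unlike path_lookup, strict_path_lookup returns the value directly
# and raises XJPathError instead of returning (None, False):
#
#     >>> strict_path_lookup({'a': {'b': 1}}, 'a.b', force_type=int)
#     1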
class XJPath(object):
def __init__(self, data_structure):
self.data_structure = data_structure
def __getitem__(self, item):
try:
value, exists = path_lookup(self.data_structure, item)
except XJPathError as e:
raise IndexError('Path error: %s' % str(item), *e.args)
except TypeError as e:
raise IndexError('Path error: %s' % str(item), *e.args)
if exists:
return value
else:
raise IndexError('Path does not exist %s' % str(item))
def get(self, path, default=None):
try:
return self[path]
except IndexError:
return default
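# Hedged illustration of the XJPath wrapper above (hypothetical data):
# dict-style access raises IndexError for missing paths, while get() falls
# back to a default. Defined only as a sketch, never called.
def _xjpath_wrapper_sketch():
    doc = XJPath({'servers': {'primary': 'db1'}})
    assert doc['servers.primary'] == 'db1'
    assert doc.get('servers.backup', 'none') == 'none'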
if __name__ == '__main__':
import argparse
import json
import sys
parser = argparse.ArgumentParser(
        description='JSON data structure lookup. This utility performs an XJPath'
' lookup on a given data structure and writes the result as JSON.')
parser.add_argument('-i', '--input-file', default=None,
help='Path to JSON data structure. Default is STDIN.')
parser.add_argument('-o', '--output-file', default=None,
help='Where to write XJPath result. Default is STDOUT.')
parser.add_argument('-m', '--multiple-lines', action='store_true',
                        help='Expect multiple newline-delimited JSON objects.')
parser.add_argument('path', type=str,
help='XJPath expression to apply to data structure.')
args = parser.parse_args()
input_file = sys.stdin if args.input_file is None else open(args.input_file)
output_file = (sys.stdout if args.output_file is None
else open(args.output_file, 'w'))
def dump_xjpath(obj):
xj = XJPath(obj)
output_file.write(json.dumps(xj[args.path]))
output_file.write('\n')
with input_file, output_file:
if args.multiple_lines:
for line in input_file:
line = line.strip()
if line:
dump_xjpath(json.loads(line))
else:
dump_xjpath(json.load(input_file))
|
|
# Copyright 2012 Michael Still and Canonical Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Image cache manager.
The cache manager implements the specification at
http://wiki.openstack.org/nova-image-cache-management.
"""
import hashlib
import os
import re
import time
from oslo.config import cfg
from oslo.serialization import jsonutils
from nova.i18n import _LE
from nova.i18n import _LI
from nova.i18n import _LW
from nova.openstack.common import fileutils
from nova.openstack.common import log as logging
from nova.openstack.common import processutils
from nova import utils
from nova.virt import imagecache
from nova.virt.libvirt import utils as libvirt_utils
LOG = logging.getLogger(__name__)
imagecache_opts = [
cfg.StrOpt('image_info_filename_pattern',
default='$instances_path/$image_cache_subdirectory_name/'
'%(image)s.info',
help='Allows image information files to be stored in '
'non-standard locations'),
cfg.BoolOpt('remove_unused_kernels',
default=False,
help='Should unused kernel images be removed? This is only '
'safe to enable if all compute nodes have been updated '
'to support this option. This will be enabled by default '
'in future.'),
cfg.IntOpt('remove_unused_resized_minimum_age_seconds',
default=3600,
help='Unused resized base images younger than this will not be '
'removed'),
cfg.BoolOpt('checksum_base_images',
default=False,
help='Write a checksum for files in _base to disk'),
cfg.IntOpt('checksum_interval_seconds',
default=3600,
help='How frequently to checksum base images'),
]
CONF = cfg.CONF
CONF.register_opts(imagecache_opts, 'libvirt')
CONF.import_opt('instances_path', 'nova.compute.manager')
CONF.import_opt('image_cache_subdirectory_name', 'nova.virt.imagecache')
def get_cache_fname(images, key):
"""Return a filename based on the SHA1 hash of a given image ID.
Image files stored in the _base directory that match this pattern
are considered for cleanup by the image cache manager. The cache
manager considers the file to be in use if it matches an instance's
image_ref, kernel_id or ramdisk_id property.
However, in grizzly-3 and before, only the image_ref property was
considered. This means that it's unsafe to store kernel and ramdisk
images using this pattern until we're sure that all compute nodes
are running a cache manager newer than grizzly-3. For now, we
require admins to confirm that by setting the remove_unused_kernels
boolean but, at some point in the future, we'll be safely able to
assume this.
"""
image_id = str(images[key])
if ((not CONF.libvirt.remove_unused_kernels and
key in ['kernel_id', 'ramdisk_id'])):
return image_id
else:
return hashlib.sha1(image_id).hexdigest()
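# Illustrative sketch (hypothetical image ids; assumes the default
# remove_unused_kernels=False described above):
#   get_cache_fname({'image_ref': '17d1...'}, 'image_ref')  -> sha1 hex of the id
#   get_cache_fname({'kernel_id': 'aki-01'}, 'kernel_id')   -> 'aki-01' (unhashed)
# so kernel and ramdisk images keep their raw names until all nodes support
# the hashed scheme.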
def get_info_filename(base_path):
"""Construct a filename for storing additional information about a base
image.
Returns a filename.
"""
base_file = os.path.basename(base_path)
return (CONF.libvirt.image_info_filename_pattern
% {'image': base_file})
def is_valid_info_file(path):
"""Test if a given path matches the pattern for info files."""
digest_size = hashlib.sha1().digestsize * 2
regexp = (CONF.libvirt.image_info_filename_pattern
% {'image': ('([0-9a-f]{%(digest_size)d}|'
'[0-9a-f]{%(digest_size)d}_sm|'
'[0-9a-f]{%(digest_size)d}_[0-9]+)'
% {'digest_size': digest_size})})
m = re.match(regexp, path)
if m:
return True
return False
def _read_possible_json(serialized, info_file):
try:
d = jsonutils.loads(serialized)
except ValueError as e:
LOG.error(_LE('Error reading image info file %(filename)s: '
'%(error)s'),
{'filename': info_file,
'error': e})
d = {}
return d
def read_stored_info(target, field=None, timestamped=False):
"""Read information about an image.
Returns an empty dictionary if there is no info, just the field value if
a field is requested, or the entire dictionary otherwise.
"""
info_file = get_info_filename(target)
if not os.path.exists(info_file):
# NOTE(mikal): Special case to handle essex checksums being converted.
# There is an assumption here that target is a base image filename.
old_filename = target + '.sha1'
if field == 'sha1' and os.path.exists(old_filename):
hash_file = open(old_filename)
hash_value = hash_file.read()
hash_file.close()
write_stored_info(target, field=field, value=hash_value)
os.remove(old_filename)
d = {field: hash_value}
else:
d = {}
else:
lock_name = 'info-%s' % os.path.split(target)[-1]
lock_path = os.path.join(CONF.instances_path, 'locks')
@utils.synchronized(lock_name, external=True, lock_path=lock_path)
def read_file(info_file):
LOG.debug('Reading image info file: %s', info_file)
with open(info_file, 'r') as f:
return f.read().rstrip()
serialized = read_file(info_file)
d = _read_possible_json(serialized, info_file)
if field:
if timestamped:
return (d.get(field, None), d.get('%s-timestamp' % field, None))
else:
return d.get(field, None)
return d
def write_stored_info(target, field=None, value=None):
"""Write information about an image."""
if not field:
return
info_file = get_info_filename(target)
LOG.info(_LI('Writing stored info to %s'), info_file)
fileutils.ensure_tree(os.path.dirname(info_file))
lock_name = 'info-%s' % os.path.split(target)[-1]
lock_path = os.path.join(CONF.instances_path, 'locks')
@utils.synchronized(lock_name, external=True, lock_path=lock_path)
def write_file(info_file, field, value):
d = {}
if os.path.exists(info_file):
with open(info_file, 'r') as f:
d = _read_possible_json(f.read(), info_file)
d[field] = value
d['%s-timestamp' % field] = time.time()
with open(info_file, 'w') as f:
f.write(jsonutils.dumps(d))
write_file(info_file, field, value)
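# Sketch of the resulting on-disk layout (hypothetical values): the info file
# written above lives alongside the _base image, named via
# image_info_filename_pattern, and holds a flat JSON document with one entry
# per field plus a '<field>-timestamp' entry, e.g.
#   {"sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "sha1-timestamp": 1417305600.0}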
def _hash_file(filename):
"""Generate a hash for the contents of a file."""
checksum = hashlib.sha1()
with open(filename) as f:
for chunk in iter(lambda: f.read(32768), b''):
checksum.update(chunk)
return checksum.hexdigest()
def read_stored_checksum(target, timestamped=True):
"""Read the checksum.
Returns the checksum (as hex) or None.
"""
return read_stored_info(target, field='sha1', timestamped=timestamped)
def write_stored_checksum(target):
"""Write a checksum to disk for a file in _base."""
write_stored_info(target, field='sha1', value=_hash_file(target))
class ImageCacheManager(imagecache.ImageCacheManager):
def __init__(self):
super(ImageCacheManager, self).__init__()
self.lock_path = os.path.join(CONF.instances_path, 'locks')
self._reset_state()
def _reset_state(self):
"""Reset state variables used for each pass."""
self.used_images = {}
self.image_popularity = {}
self.instance_names = set()
self.active_base_files = []
self.corrupt_base_files = []
self.originals = []
self.removable_base_files = []
self.unexplained_images = []
def _store_image(self, base_dir, ent, original=False):
"""Store a base image for later examination."""
entpath = os.path.join(base_dir, ent)
if os.path.isfile(entpath):
self.unexplained_images.append(entpath)
if original:
self.originals.append(entpath)
def _list_base_images(self, base_dir):
"""Return a list of the images present in _base.
Determine what images we have on disk. There will be other files in
this directory so we only grab the ones which are the right length
to be disk images.
"""
digest_size = hashlib.sha1().digestsize * 2
for ent in os.listdir(base_dir):
if len(ent) == digest_size:
self._store_image(base_dir, ent, original=True)
elif (len(ent) > digest_size + 2 and
ent[digest_size] == '_' and
not is_valid_info_file(os.path.join(base_dir, ent))):
self._store_image(base_dir, ent, original=False)
return {'unexplained_images': self.unexplained_images,
'originals': self.originals}
def _list_backing_images(self):
"""List the backing images currently in use."""
inuse_images = []
for ent in os.listdir(CONF.instances_path):
if ent in self.instance_names:
LOG.debug('%s is a valid instance name', ent)
disk_path = os.path.join(CONF.instances_path, ent, 'disk')
if os.path.exists(disk_path):
LOG.debug('%s has a disk file', ent)
try:
backing_file = libvirt_utils.get_disk_backing_file(
disk_path)
except processutils.ProcessExecutionError:
# (for bug 1261442)
if not os.path.exists(disk_path):
LOG.debug('Failed to get disk backing file: %s',
disk_path)
continue
else:
raise
LOG.debug('Instance %(instance)s is backed by '
'%(backing)s',
{'instance': ent,
'backing': backing_file})
if backing_file:
backing_path = os.path.join(
CONF.instances_path,
CONF.image_cache_subdirectory_name,
backing_file)
if backing_path not in inuse_images:
inuse_images.append(backing_path)
if backing_path in self.unexplained_images:
LOG.warn(_LW('Instance %(instance)s is using a '
'backing file %(backing)s which '
'does not appear in the image '
'service'),
{'instance': ent,
'backing': backing_file})
self.unexplained_images.remove(backing_path)
return inuse_images
def _find_base_file(self, base_dir, fingerprint):
"""Find the base file matching this fingerprint.
Yields the name of the base file, a boolean which is True if the image
is "small", and a boolean which indicates if this is a resized image.
Note that it is possible for more than one yield to result from this
check.
If no base file is found, then nothing is yielded.
"""
# The original file from glance
base_file = os.path.join(base_dir, fingerprint)
if os.path.exists(base_file):
yield base_file, False, False
# An older naming style which can be removed sometime after Folsom
base_file = os.path.join(base_dir, fingerprint + '_sm')
if os.path.exists(base_file):
yield base_file, True, False
# Resized images
resize_re = re.compile('.*/%s_[0-9]+$' % fingerprint)
for img in self.unexplained_images:
m = resize_re.match(img)
if m:
yield img, False, True
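    # For a hypothetical fingerprint 'abc1...', the generator above may yield,
    # depending on which files actually exist on disk:
    #   <base_dir>/abc1...              (original image:      small=False, resized=False)
    #   <base_dir>/abc1..._sm           (legacy "small" copy:  small=True,  resized=False)
    #   <base_dir>/abc1..._10737418240  (resized copy:         small=False, resized=True)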
def _verify_checksum(self, img_id, base_file, create_if_missing=True):
"""Compare the checksum stored on disk with the current file.
Note that if the checksum fails to verify this is logged, but no actual
action occurs. This is something sysadmins should monitor for and
handle manually when it occurs.
"""
if not CONF.libvirt.checksum_base_images:
return None
lock_name = 'hash-%s' % os.path.split(base_file)[-1]
# Protect against other nova-computes performing checksums at the same
# time if we are using shared storage
@utils.synchronized(lock_name, external=True, lock_path=self.lock_path)
def inner_verify_checksum():
(stored_checksum, stored_timestamp) = read_stored_checksum(
base_file, timestamped=True)
if stored_checksum:
# NOTE(mikal): Checksums are timestamped. If we have recently
# checksummed (possibly on another compute node if we are using
# shared storage), then we don't need to checksum again.
if (stored_timestamp and
time.time() - stored_timestamp <
CONF.libvirt.checksum_interval_seconds):
return True
# NOTE(mikal): If there is no timestamp, then the checksum was
# performed by a previous version of the code.
if not stored_timestamp:
write_stored_info(base_file, field='sha1',
value=stored_checksum)
current_checksum = _hash_file(base_file)
if current_checksum != stored_checksum:
LOG.error(_LE('image %(id)s at (%(base_file)s): image '
'verification failed'),
{'id': img_id,
'base_file': base_file})
return False
else:
return True
else:
LOG.info(_LI('image %(id)s at (%(base_file)s): image '
'verification skipped, no hash stored'),
{'id': img_id,
'base_file': base_file})
# NOTE(mikal): If the checksum file is missing, then we should
# create one. We don't create checksums when we download images
# from glance because that would delay VM startup.
if CONF.libvirt.checksum_base_images and create_if_missing:
LOG.info(_LI('%(id)s (%(base_file)s): generating '
'checksum'),
{'id': img_id,
'base_file': base_file})
write_stored_checksum(base_file)
return None
return inner_verify_checksum()
def _remove_base_file(self, base_file):
"""Remove a single base file if it is old enough.
Returns nothing.
"""
if not os.path.exists(base_file):
LOG.debug('Cannot remove %s, it does not exist',
base_file)
return
mtime = os.path.getmtime(base_file)
age = time.time() - mtime
maxage = CONF.libvirt.remove_unused_resized_minimum_age_seconds
if base_file in self.originals:
maxage = CONF.remove_unused_original_minimum_age_seconds
if age < maxage:
LOG.info(_LI('Base file too young to remove: %s'),
base_file)
else:
LOG.info(_LI('Removing base file: %s'), base_file)
try:
os.remove(base_file)
signature = get_info_filename(base_file)
if os.path.exists(signature):
os.remove(signature)
except OSError as e:
LOG.error(_LE('Failed to remove %(base_file)s, '
'error was %(error)s'),
{'base_file': base_file,
'error': e})
def _handle_base_image(self, img_id, base_file):
"""Handle the checks for a single base image."""
image_bad = False
image_in_use = False
LOG.info(_LI('image %(id)s at (%(base_file)s): checking'),
{'id': img_id,
'base_file': base_file})
if base_file in self.unexplained_images:
self.unexplained_images.remove(base_file)
if (base_file and os.path.exists(base_file)
and os.path.isfile(base_file)):
# _verify_checksum returns True if the checksum is ok, and None if
# there is no checksum file
checksum_result = self._verify_checksum(img_id, base_file)
if checksum_result is not None:
image_bad = not checksum_result
# Give other threads a chance to run
time.sleep(0)
instances = []
if img_id in self.used_images:
local, remote, instances = self.used_images[img_id]
if local > 0 or remote > 0:
image_in_use = True
LOG.info(_LI('image %(id)s at (%(base_file)s): '
'in use: on this node %(local)d local, '
'%(remote)d on other nodes sharing this instance '
'storage'),
{'id': img_id,
'base_file': base_file,
'local': local,
'remote': remote})
self.active_base_files.append(base_file)
if not base_file:
LOG.warn(_LW('image %(id)s at (%(base_file)s): warning '
'-- an absent base file is in use! '
'instances: %(instance_list)s'),
{'id': img_id,
'base_file': base_file,
'instance_list': ' '.join(instances)})
if image_bad:
self.corrupt_base_files.append(base_file)
if base_file:
if not image_in_use:
LOG.debug('image %(id)s at (%(base_file)s): image is not in '
'use',
{'id': img_id,
'base_file': base_file})
self.removable_base_files.append(base_file)
else:
LOG.debug('image %(id)s at (%(base_file)s): image is in '
'use',
{'id': img_id,
'base_file': base_file})
if os.path.exists(base_file):
libvirt_utils.chown(base_file, os.getuid())
os.utime(base_file, None)
def _age_and_verify_cached_images(self, context, all_instances, base_dir):
LOG.debug('Verify base images')
# Determine what images are on disk because they're in use
for img in self.used_images:
fingerprint = hashlib.sha1(img).hexdigest()
LOG.debug('Image id %(id)s yields fingerprint %(fingerprint)s',
{'id': img,
'fingerprint': fingerprint})
for result in self._find_base_file(base_dir, fingerprint):
base_file, image_small, image_resized = result
self._handle_base_image(img, base_file)
if not image_small and not image_resized:
self.originals.append(base_file)
# Elements remaining in unexplained_images might be in use
inuse_backing_images = self._list_backing_images()
for backing_path in inuse_backing_images:
if backing_path not in self.active_base_files:
self.active_base_files.append(backing_path)
# Anything left is an unknown base image
for img in self.unexplained_images:
LOG.warn(_LW('Unknown base file: %s'), img)
self.removable_base_files.append(img)
# Dump these lists
if self.active_base_files:
LOG.info(_LI('Active base files: %s'),
' '.join(self.active_base_files))
if self.corrupt_base_files:
LOG.info(_LI('Corrupt base files: %s'),
' '.join(self.corrupt_base_files))
if self.removable_base_files:
LOG.info(_LI('Removable base files: %s'),
' '.join(self.removable_base_files))
if self.remove_unused_base_images:
for base_file in self.removable_base_files:
self._remove_base_file(base_file)
# That's it
LOG.debug('Verification complete')
def _get_base(self):
# NOTE(mikal): The new scheme for base images is as follows -- an
# image is streamed from the image service to _base (filename is the
# sha1 hash of the image id). If CoW is enabled, that file is then
# resized to be the correct size for the instance (filename is the
# same as the original, but with an underscore and the resized size
# in bytes). This second file is then CoW'd to the instance disk. If
# CoW is disabled, the resize occurs as part of the copy from the
# cache to the instance directory. Files ending in _sm are no longer
# created, but may remain from previous versions.
base_dir = os.path.join(CONF.instances_path,
CONF.image_cache_subdirectory_name)
if not os.path.exists(base_dir):
LOG.debug('Skipping verification, no base directory at %s',
base_dir)
return
return base_dir
def update(self, context, all_instances):
base_dir = self._get_base()
if not base_dir:
return
# reset the local statistics
self._reset_state()
# read the cached images
self._list_base_images(base_dir)
# read running instances data
running = self._list_running_instances(context, all_instances)
self.used_images = running['used_images']
self.image_popularity = running['image_popularity']
self.instance_names = running['instance_names']
# perform the aging and image verification
self._age_and_verify_cached_images(context, all_instances, base_dir)
|
|
###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from __future__ import division
from PyQt4 import QtCore, QtGui
from itertools import izip
import os
import string
from vistrails.core import debug
from vistrails.core.configuration import get_vistrails_configuration
from vistrails.core.modules.basic_modules import identifier as basic_identifier
from vistrails.core.modules.module_registry import get_module_registry
from vistrails.core.modules.utils import create_port_spec_string
from vistrails.core.vistrail.port_spec import PortSpec
from vistrails.core.system import vistrails_root_directory
from vistrails.gui.modules.utils import get_widget_class
from vistrails.gui.common_widgets import QToolWindowInterface
from vistrails.gui.port_documentation import QPortDocumentation
from vistrails.gui.theme import CurrentTheme
def letterIcon(letter, crossed=False):
""" Creates icon with a specific letter
"""
pixmap = QtGui.QPixmap(48,48)
pixmap.fill(QtCore.Qt.transparent)
painter = QtGui.QPainter(pixmap)
painter.setPen(QtGui.QColor(0, 0, 0, 255))
font = painter.font()
font.setPointSize(40)
painter.setFont(font)
painter.drawText(0, 0, 48, 48, QtCore.Qt.AlignCenter, letter)
if crossed:
painter.drawLine(0,0,48,48)
painter.drawLine(0,48,48,0)
painter.end()
return QtGui.QIcon(pixmap)
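# Usage sketch (needs a live QApplication, so shown only as a comment):
# letterIcon('P') returns a 48x48 transparent QIcon with a centred "P", and
# letterIcon('P', crossed=True) additionally draws both diagonals across it.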
class AliasLabel(QtGui.QLabel):
"""
AliasLabel is a QLabel that supports hover actions similar
to a hot link
"""
def __init__(self, alias='', text='', default_label='', parent=None):
""" AliasLabel(alias:str , text: str, default_label: str,
parent: QWidget) -> QHoverAliasLabel
Initialize the label with a text
"""
QtGui.QLabel.__init__(self, parent)
self.alias = alias
self.caption = text
# catch None
if default_label:
self.default_label = default_label
else:
self.default_label = ""
self.updateText()
self.setAttribute(QtCore.Qt.WA_Hover)
self.setCursor(QtCore.Qt.PointingHandCursor)
self.setToolTip(alias)
self.palette().setColor(QtGui.QPalette.WindowText,
CurrentTheme.HOVER_DEFAULT_COLOR)
def updateText(self):
""" updateText() -> None
Update the label text to contain the alias name when appropriate
"""
if self.alias != '':
self.setText(self.alias + ': ' + self.caption)
elif self.default_label != '':
self.setText(self.default_label + ': ' + self.caption)
else:
self.setText(self.caption)
def event(self, event):
""" event(event: QEvent) -> Event Result
Override to handle hover enter and leave events for hot links
"""
if event.type()==QtCore.QEvent.HoverEnter:
self.palette().setColor(QtGui.QPalette.WindowText,
CurrentTheme.HOVER_SELECT_COLOR)
if event.type()==QtCore.QEvent.HoverLeave:
self.palette().setColor(QtGui.QPalette.WindowText,
CurrentTheme.HOVER_DEFAULT_COLOR)
return QtGui.QLabel.event(self, event)
# return super(QHoverAliasLabel, self).event(event)
def mousePressEvent(self, event):
""" mousePressEvent(event: QMouseEvent) -> None
If mouse click on the label, show up a dialog to change/add
the alias name
"""
if event.button()==QtCore.Qt.LeftButton:
(text, ok) = QtGui.QInputDialog.getText(self,
'Set Parameter Alias',
'Enter the parameter alias',
QtGui.QLineEdit.Normal,
self.alias)
while ok and self.parent().check_alias(str(text)):
msg =" This alias is already being used.\
Please enter a different parameter alias "
(text, ok) = QtGui.QInputDialog.getText(self,
'Set Parameter Alias',
msg,
QtGui.QLineEdit.Normal,
text)
if ok and str(text)!=self.alias:
if not self.parent().check_alias(str(text)):
self.alias = str(text).strip()
self.updateText()
self.parent().updateMethod()
class Parameter(object):
def __init__(self, desc, psi=None):
self.type = desc.name
self.identifier = desc.identifier
self.namespace = None if not desc.namespace else desc.namespace
self.strValue = ''
self.alias = ''
self.queryMethod = None
self.port_spec_item = psi
self.param_exists = False
class Function(object):
def __init__(self, name, params, port_spec=None):
self.name = name
self.parameters = params
self.port_spec = port_spec
def get_spec(self, port_type):
return self.port_spec
class ParameterEntry(QtGui.QTreeWidgetItem):
plus_icon = QtGui.QIcon(os.path.join(vistrails_root_directory(),
'gui/resources/images/plus.png'))
minus_icon = QtGui.QIcon(os.path.join(vistrails_root_directory(),
'gui/resources/images/minus.png'))
def __init__(self, port_spec, function=None, types_visible=True, parent=None):
QtGui.QTreeWidgetItem.__init__(self, parent)
self.setFirstColumnSpanned(True)
self.port_spec = port_spec
self.function = function
self.types_visible = types_visible
def build_widget(self, widget_accessor, with_alias=True):
reg = get_module_registry()
# widget = QtGui.QDockWidget()
# widget.setFeatures(QtGui.QDockWidget.DockWidgetClosable |
# QtGui.QDockWidget.DockWidgetVerticalTitleBar)
widget = QtGui.QWidget()
h_layout = QtGui.QHBoxLayout()
h_layout.insertSpacing(0, 10)
h_layout.setMargin(2)
h_layout.setSpacing(2)
v_layout = QtGui.QVBoxLayout()
v_layout.setAlignment(QtCore.Qt.AlignVCenter)
delete_button = QtGui.QToolButton()
delete_button.setIconSize(QtCore.QSize(8,8))
delete_button.setIcon(ParameterEntry.minus_icon)
def delete_method():
if self.function is not None:
self.group_box.parent().parent().parent().delete_method(
self, self.port_spec.name, self.function.real_id)
else:
self.group_box.parent().parent().parent().delete_method(
self, self.port_spec.name, None)
QtCore.QObject.connect(delete_button, QtCore.SIGNAL("clicked()"),
delete_method)
v_layout.addWidget(delete_button)
add_button = QtGui.QToolButton()
add_button.setIcon(ParameterEntry.plus_icon)
add_button.setIconSize(QtCore.QSize(8,8))
def add_method():
self.group_box.parent().parent().parent().add_method(
self.port_spec.name)
QtCore.QObject.connect(add_button, QtCore.SIGNAL("clicked()"),
add_method)
v_layout.addWidget(add_button)
h_layout.addLayout(v_layout)
self.my_widgets = []
self.my_labels = []
self.group_box = QtGui.QGroupBox()
self.group_box.setContentsMargins(0, 0, 0, 0)
layout = QtGui.QGridLayout()
layout.setMargin(5)
layout.setSpacing(5)
layout.setColumnStretch(1,1)
self.group_box.setFocusPolicy(QtCore.Qt.ClickFocus)
self.group_box.setSizePolicy(QtGui.QSizePolicy.Preferred,
QtGui.QSizePolicy.Fixed)
self.group_box.palette().setColor(QtGui.QPalette.Window,
CurrentTheme.METHOD_SELECT_COLOR)
if self.function is not None:
params = self.function.parameters
else:
params = [None,] * len(self.port_spec.descriptors())
for i, (psi, param) in enumerate(izip(self.port_spec.port_spec_items,
params)):
if psi.entry_type is not None:
# !!only pull off the prefix!! options follow in camelcase
prefix_end = len(psi.entry_type.lstrip(string.lowercase))
if prefix_end == 0:
entry_type = psi.entry_type
else:
entry_type = psi.entry_type[:-prefix_end]
else:
entry_type = None
widget_class = widget_accessor(psi.descriptor, entry_type)
if param is not None:
obj = param
else:
obj = Parameter(psi.descriptor)
obj.port_spec_item = psi
if self.types_visible:
if with_alias:
label = AliasLabel(obj.alias, obj.type, psi.label)
self.my_labels.append(label)
else:
label = QtGui.QLabel(obj.type)
layout.addWidget(label, i, 0)
layout.setAlignment(label, QtCore.Qt.AlignLeft)
param_widget = widget_class(obj, self.group_box)
self.my_widgets.append(param_widget)
layout.addWidget(param_widget, i, 1)
layout.addItem(QtGui.QSpacerItem(0,0, QtGui.QSizePolicy.MinimumExpanding), i, 2)
self.group_box.setLayout(layout)
def updateMethod():
if self.function is not None:
real_id = self.function.real_id
else:
real_id = -1
self.group_box.parent().parent().parent().update_method(
self, self.port_spec.name, self.my_widgets, self.my_labels, real_id)
def check_alias(name):
controller = self.group_box.parent().parent().parent().controller
if controller:
return controller.check_alias(name)
return False
self.group_box.updateMethod = updateMethod
self.group_box.check_alias = check_alias
h_layout.addWidget(self.group_box)
widget.setLayout(h_layout)
return widget
def get_widget(self):
return self.build_widget(get_widget_class, True)
class PortItem(QtGui.QTreeWidgetItem):
edit_show = QtGui.QIcon(os.path.join(vistrails_root_directory(),
'gui/resources/images/pencil.png'))
edit_hide = QtGui.QIcon(os.path.join(vistrails_root_directory(),
'gui/resources/images/pencil-disabled.png'))
eye_open_icon = \
QtGui.QIcon(os.path.join(vistrails_root_directory(),
'gui/resources/images/eye.png'))
eye_closed_icon = \
QtGui.QIcon(os.path.join(vistrails_root_directory(),
'gui/resources/images/eye_closed.png'))
eye_disabled_icon = \
QtGui.QIcon(os.path.join(vistrails_root_directory(),
'gui/resources/images/eye_gray.png'))
conn_icon = \
QtGui.QIcon(os.path.join(vistrails_root_directory(),
'gui/resources/images/connection.png'))
def __init__(self, port_spec, is_connected, is_optional, is_visible,
is_editable=False, parent=None):
QtGui.QTreeWidgetItem.__init__(self, parent)
# self.setFlags(QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsEnabled)
self.setFlags(QtCore.Qt.ItemIsEnabled)
# self.setCheckState(0, QtCore.Qt.Unchecked)
self.port_spec = port_spec
self.is_connected = is_connected
self.is_optional = is_optional
self.is_visible = is_visible
self.is_editable = is_editable
self.is_unset = False
self.build_item(port_spec, is_connected, is_optional, is_visible,
is_editable)
def visible(self):
return not self.is_optional or self.is_visible
def set_visible(self, visible):
self.is_visible = visible
if visible:
self.setIcon(0, PortItem.eye_open_icon)
else:
self.setIcon(0, PortItem.eye_closed_icon)
def set_editable(self, edit):
self.is_editable = edit
if edit:
self.setIcon(0, PortItem.edit_show)
else:
self.setIcon(0, PortItem.edit_hide)
def get_visible(self):
return self.visible_checkbox
def get_connected(self):
return self.connected_checkbox
def is_constant(self):
return (self.port_spec.is_valid and
get_module_registry().is_constant(self.port_spec))
def calcUnset(self):
self.is_unset = self.is_constant() and \
self.port_spec.is_mandatory() and \
not self.is_connected and \
not self.isExpanded()
if self.is_unset:
font = self.font(3)
font.setWeight(QtGui.QFont.Bold)
self.setFont(3, font)
def build_item(self, port_spec, is_connected, is_optional, is_visible, is_editable):
if not is_optional:
self.setIcon(1, PortItem.eye_disabled_icon)
elif is_visible:
self.setIcon(1, PortItem.eye_open_icon)
else:
self.setIcon(1, PortItem.eye_closed_icon)
if is_connected:
self.setIcon(2, PortItem.conn_icon)
self.setText(3, port_spec.name)
if self.is_constant():
if len(self.port_spec.port_spec_items)>0:
if is_editable:
self.setIcon(0, PortItem.edit_show)
else:
self.setIcon(0, PortItem.edit_hide)
else:
# if port_spec is not a method, make it gray
self.setForeground(3,
QtGui.QBrush(QtGui.QColor.fromRgb(128,128,128)))
self.visible_checkbox = QtGui.QCheckBox()
self.connected_checkbox = QtGui.QCheckBox()
def contextMenuEvent(self, event, widget):
if self.port_spec is None:
return
act = QtGui.QAction("View Documentation", widget)
act.setStatusTip("View method documentation")
QtCore.QObject.connect(act,
QtCore.SIGNAL("triggered()"),
self.view_documentation)
menu = QtGui.QMenu(widget)
menu.addAction(act)
menu.exec_(event.globalPos())
def view_documentation(self):
# descriptor = self.treeWidget().module.module_descriptor
module = self.treeWidget().module
port_type = self.treeWidget().port_type
widget = QPortDocumentation(module,
port_type,
self.port_spec.name)
widget.setAttribute(QtCore.Qt.WA_DeleteOnClose)
widget.exec_()
def __lt__(self, other):
# put unset mandatory ports first
if self.is_unset != other.is_unset:
return self.is_unset and not other.is_unset
# put set (expanded) functions first
if self.isExpanded() != other.isExpanded():
return self.isExpanded() and not other.isExpanded()
# otherwise use port name
return self.port_spec.name < other.port_spec.name
class PortsList(QtGui.QTreeWidget):
def __init__(self, port_type, parent=None):
QtGui.QTreeWidget.__init__(self, parent)
self.port_type = port_type
self.setColumnCount(4)
self.setColumnWidth(0,24)
self.setColumnWidth(1,24)
self.setColumnWidth(2,24)
self.setRootIsDecorated(False)
self.setIndentation(0)
self.setHeaderHidden(True)
self.connect(self, QtCore.SIGNAL("itemClicked(QTreeWidgetItem*, int)"),
self.item_clicked)
self.module = None
self.port_spec_items = {}
self.entry_klass = ParameterEntry
self.ports_visible = True
self.types_visible = True
def setReadOnly(self, read_only):
self.setEnabled(not read_only)
def set_entry_klass(self, entry_klass):
        if self.entry_klass != entry_klass:
self.entry_klass = entry_klass
self.update_module(self.module)
def update_module(self, module):
""" update_module(module: Module) -> None
Setup this tree widget to show functions of module
"""
self.setColumnHidden(0, True)
self.setColumnHidden(1, not self.ports_visible)
# this is strange but if you try to clear the widget when the focus is
# in one of the items (after setting a parameter for example),
# VisTrails crashes on a Mac (Emanuele) This is probably a Qt bug
w = QtGui.QApplication.focusWidget()
if self.isAncestorOf(w):
w.clearFocus()
self.clear()
self.module = module
self.port_spec_items = {}
self.function_map = {}
if module and module.is_valid:
reg = get_module_registry()
descriptor = module.module_descriptor
if self.port_type == 'input':
self.setColumnHidden(0,not get_vistrails_configuration(
).check('showInlineParameterWidgets'))
port_specs = module.destinationPorts()
connected_ports = module.connected_input_ports
visible_ports = module.visible_input_ports
elif self.port_type == 'output':
port_specs = module.sourcePorts()
connected_ports = module.connected_output_ports
visible_ports = module.visible_output_ports
else:
raise TypeError("Unknown port type: '%s'" % self.port_type)
for port_spec in sorted(port_specs, key=lambda x: x.name):
connected = port_spec.name in connected_ports and \
connected_ports[port_spec.name] > 0
item = PortItem(port_spec,
connected,
port_spec.optional,
port_spec.name in visible_ports,
port_spec.name in module.editable_input_ports)
self.addTopLevelItem(item)
self.port_spec_items[port_spec.name] = (port_spec, item)
if self.port_type == 'input':
for function in module.functions:
if not function.is_valid:
continue
port_spec, item = self.port_spec_items[function.name]
subitem = self.entry_klass(port_spec, function,
self.types_visible)
self.function_map[function.real_id] = subitem
item.addChild(subitem)
subitem.setFirstColumnSpanned(True)
self.setItemWidget(subitem, 2, subitem.get_widget())
item.setExpanded(True)
# self.setItemWidget(item, 0, item.get_visible())
# self.setItemWidget(item, 1, item.get_connected())
# i = QTreeWidgetItem(self)
# self.addTopLevelItem(i)
# i.setText(2, port_spec.name)
# visible_checkbox = QtGui.QCheckBox()
# self.setItemWidget(i, 0, visible_checkbox)
# connceted_checkbox = QtGui.QCheckBox()
# connected_checkbox.setEnabled(False)
# self.setItemWidget(i, 1, connected_checkbox)
# Highlight unset ports
for _, item in self.port_spec_items.itervalues():
item.calcUnset()
self.sortItems(0, QtCore.Qt.AscendingOrder)
# base_items = {}
# # Create the base widget item for each descriptor
# for descriptor in moduleHierarchy:
# baseName = descriptor.name
# base_package = descriptor.identifier
# baseItem = QMethodTreeWidgetItem(None,
# None,
# self,
# ([]
# << baseName
# << ''))
# base_items[descriptor] = baseItem
# method_specs = {}
# # do this in reverse to ensure proper overloading
# # !!! NOTE: we have to use ***all*** input ports !!!
# # because a subclass can overload a port with a
# # type that isn't a method
# for descriptor in reversed(moduleHierarchy):
# method_specs.update((name, (descriptor, spec))
# for name, spec in \
# registry.module_ports('input',
# descriptor))
# # add local registry last so that it takes precedence
# method_specs.update((spec.name, (descriptor, spec))
# for spec in module.port_spec_list
# if spec.type == 'input')
# for _, (desc, method_spec) in sorted(method_specs.iteritems()):
# if registry.is_method(method_spec):
# baseItem = base_items[desc]
# sig = method_spec.short_sigstring
# QMethodTreeWidgetItem(module,
# method_spec,
# baseItem,
# ([]
# << method_spec.name
# << sig))
# self.expandAll()
# self.resizeColumnToContents(2)
# show invalid module attributes
if module and not module.is_valid and self.port_type == 'input':
for function in module.functions:
if function.name in self.port_spec_items:
port_spec, item = self.port_spec_items[function.name]
else:
sigstring = create_port_spec_string(
[(basic_identifier, "String")
for i in xrange(len(function.parameters))])
port_spec = PortSpec(name=function.name, type='input',
sigstring=sigstring)
item = PortItem(port_spec, False, False, False)
self.addTopLevelItem(item)
self.port_spec_items[port_spec.name] = (port_spec, item)
subitem = self.entry_klass(port_spec, function)
self.function_map[function.real_id] = subitem
item.addChild(subitem)
subitem.setFirstColumnSpanned(True)
self.setItemWidget(subitem, 2, subitem.get_widget())
item.setExpanded(True)
def item_clicked(self, item, col):
if item.parent() is not None:
return
if self.port_type == 'input':
visible_ports = self.module.visible_input_ports
editable_ports = self.module.editable_input_ports
elif self.port_type == 'output':
visible_ports = self.module.visible_output_ports
else:
raise TypeError("Unknown port type: '%s'" % self.port_type)
if col == 0:
if item.is_constant() and len(item.port_spec.port_spec_items)>0:
item.set_editable(not item.is_editable)
if item.is_editable:
editable_ports.add(item.port_spec.name)
else:
editable_ports.discard(item.port_spec.name)
self.controller.flush_delayed_actions()
self.controller.add_annotation((self.module.INLINE_WIDGET_ANNOTATION,
','.join(editable_ports)),
self.module.id)
self.controller.current_pipeline_scene.recreate_module(
self.controller.current_pipeline, self.module.id)
if col == 1:
if item.is_optional and not item.is_connected:
item.set_visible(not item.is_visible)
if item.is_visible:
visible_ports.add(item.port_spec.name)
else:
visible_ports.discard(item.port_spec.name)
self.controller.flush_delayed_actions()
self.controller.current_pipeline_scene.recreate_module(
self.controller.current_pipeline, self.module.id)
if col == 3:
if item.isExpanded():
item.setExpanded(False)
elif item.childCount() > 0:
item.setExpanded(True)
elif item.childCount() == 0 and item.is_constant():
self.do_add_method(item.port_spec, item)
def set_controller(self, controller):
self.controller = controller
def update_method(self, subitem, port_name, widgets, labels, real_id=-1):
#print 'updateMethod called', port_name
if self.controller:
_, item = self.port_spec_items[port_name]
str_values = []
query_methods = []
for w in widgets:
str_values.append(str(w.contents()))
if hasattr(w, 'query_method'):
query_methods.append(w.query_method())
if real_id < 0:
should_replace = False
else:
should_replace = True
self.controller.update_function(self.module,
port_name,
str_values,
real_id,
[str(label.alias)
for label in labels],
query_methods,
should_replace)
# FIXME need to get the function set on the item somehow
# HACK for now
for function in self.module.functions:
if function.real_id not in self.function_map:
self.function_map[function.real_id] = subitem
subitem.function = function
# make the scene display the fact that we have a parameter
# by dimming the port
# self.controller.flush_delayed_actions()
self.controller.current_pipeline_scene.update_module_functions(
self.controller.current_pipeline, self.module.id)
def delete_method(self, subitem, port_name, real_id=None):
_, item = self.port_spec_items[port_name]
item.removeChild(subitem)
if real_id is not None and self.controller:
#print "got to delete"
self.controller.delete_function(real_id, self.module.id)
# make the scene display the fact that we have lost the
# parameter by undimming the port
# self.controller.flush_delayed_actions()
self.controller.current_pipeline_scene.update_module_functions(
self.controller.current_pipeline, self.module.id)
# how to delete items...x
# subitem.deleteLater()
def do_add_method(self, port_spec, item):
"""do_add_method(port_spec: PortSpec,
item: PortItem) -> None
Displays a new method for the port.
"""
subitem = self.entry_klass(port_spec)
item.addChild(subitem)
subitem.setFirstColumnSpanned(True)
self.setItemWidget(subitem, 2, subitem.get_widget())
item.setExpanded(True)
if len(port_spec.descriptors()) == 0:
self.update_method(subitem, port_spec.name, [], [])
def add_method(self, port_name):
port_spec, item = self.port_spec_items[port_name]
self.do_add_method(port_spec, item)
def contextMenuEvent(self, event):
# Just dispatches the menu event to the widget item
item = self.itemAt(event.pos())
if item:
item.contextMenuEvent(event, self)
class QPortsPane(QtGui.QWidget, QToolWindowInterface):
def __init__(self, port_type, parent=None, flags=QtCore.Qt.Widget):
QtGui.QWidget.__init__(self, parent, flags)
self.port_type = port_type
self.build_widget()
self.controller = None
def build_widget(self):
self.tree_widget = PortsList(self.port_type)
layout = QtGui.QHBoxLayout()
layout.setMargin(0)
layout.addWidget(self.tree_widget)
self.setLayout(layout)
self.setWindowTitle('%s Ports' % self.port_type.capitalize())
def set_controller(self, controller):
self.controller = controller
self.tree_widget.set_controller(controller)
def update_module(self, module):
self.tree_widget.update_module(module)
|
|
"""Builds Swagger data model definitions using PAPI source docs."""
from __future__ import print_function
import argparse
import json
import modulefinder
import os
import re
import sys
def find_matching_obj_def(obj_defs, new_obj_def):
"""Find matching object definition."""
for obj_name in obj_defs:
existing_obj_def = obj_defs[obj_name]
if 'properties' in new_obj_def and 'properties' in existing_obj_def:
if new_obj_def['properties'] == existing_obj_def['properties']:
return obj_name
elif 'properties' not in existing_obj_def:
print('**** No properties: {}'.format(existing_obj_def))
return None
def find_or_add_obj_def(obj_defs, new_obj_def, new_obj_name):
"""Reuse existing object definition if exists or add new one."""
matching_obj = find_matching_obj_def(obj_defs, new_obj_def)
if matching_obj is not None:
return matching_obj
obj_defs[new_obj_name] = new_obj_def
return new_obj_name
def add_dependencies(module_dir, filename, modules):
finder = modulefinder.ModuleFinder()
finder.run_script(os.path.join(module_dir, filename))
for module in finder.modules.values():
# if the module comes from the module_dir then process it to get
# its dependencies.
if os.path.dirname(str(module.__file__)) == module_dir:
mod_filename = os.path.basename(module.__file__)
if mod_filename == filename:
continue
# if this module has not already been added then add it.
if (mod_filename.endswith('_types.py') or
(mod_filename.find('_types_v') != -1 and
mod_filename.endswith('.py'))):
if mod_filename not in modules:
# add the modules that this module is dependent on
add_dependencies(module_dir, mod_filename, modules)
modules.append(mod_filename)
def build_module_list(filenames, module_dir, modules):
for filename in filenames:
if (filename.endswith('_types.py') or (
filename.find('_types_v') != -1 and filename.endswith('.py'))):
if filename not in modules:
# add the modules that this module is dependent on
add_dependencies(module_dir, filename, modules)
modules.append(filename)
def find_best_type_for_prop(prop):
multiple_types = prop['type']
# delete it so that we throw an exception if none of types
# are non-'null'
del prop['type']
for one_type in multiple_types:
# sometimes the types are base types and sometimes they
# are sub objects
if isinstance(one_type, dict):
if one_type['type'] == 'null':
continue
if isinstance(one_type['type'], list):
one_type = find_best_type_for_prop(one_type)
prop = one_type
# favor more specific types over 'string'
if prop['type'] != 'string':
break
elif one_type != 'null':
prop['type'] = one_type
break
return prop
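# Hedged sketch of the helper above on hypothetical PAPI fragments: a list of
# candidate types collapses to the first usable (non-'null') entry. Defined
# for illustration only and never called.
def _best_type_sketch():
    assert find_best_type_for_prop({'type': ['null', 'integer']}) == {'type': 'integer'}
    assert (find_best_type_for_prop({'type': [{'type': 'null'}, {'type': 'integer'}]})
            == {'type': 'integer'})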
def plural_obj_name_to_singular(obj_name, post_fix='', post_fix_used=None):
# if it's two 'ss' on the end then don't remove the last one
if obj_name[-1] == 's' and obj_name[-2] != 's':
# if container object ends with 's' then trim off the 's'
# to (hopefully) create the singular version
if obj_name[-3:] == 'ies':
one_obj_name = obj_name[:-3].replace('_', '') + 'y'
else:
one_obj_name = obj_name[:-1].replace('_', '')
else:
one_obj_name = obj_name.replace('_', '') + post_fix
if post_fix_used is not None:
post_fix_used.flag = True
return one_obj_name
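# A few illustrative conversions (hypothetical names) under the rules above;
# never called, kept only as a sketch.
def _plural_to_singular_sketch():
    assert plural_obj_name_to_singular('policies') == 'policy'
    assert plural_obj_name_to_singular('filters') == 'filter'
    # A trailing double 's' is kept and the post_fix is appended instead.
    assert plural_obj_name_to_singular('access', post_fix='Item') == 'accessItem'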
def add_if_new(full_obj_name, properties, prop_name, obj,
isi_obj_names, isi_obj_list):
if full_obj_name not in isi_obj_names:
isi_obj_list.append((full_obj_name, properties, prop_name, obj))
isi_obj_names[full_obj_name] = (properties, prop_name)
def isi_to_swagger_array_prop(prop, properties, prop_name, isi_obj_name,
isi_obj_list, isi_obj_names, obj_defs):
if 'items' not in prop:
if 'item' in prop:
prop['items'] = prop['item']
del prop['item']
else:
print('*** No items: {}_{} = {}'.format(
isi_obj_name, prop_name, properties[prop_name]))
# string will kind of work for anything
prop['items'] = {'type': 'string'}
if 'type' in prop['items'] and prop['items']['type'] == 'object':
item_obj_name = plural_obj_name_to_singular(prop_name, post_fix='Item')
full_obj_name = isi_obj_name + '_' + item_obj_name
add_if_new(
full_obj_name, properties[prop_name], 'items',
prop['items'], isi_obj_names, isi_obj_list)
elif ('type' in prop['items'] and
isinstance(prop['items']['type'], dict) and
'type' in prop['items']['type'] and
prop['items']['type']['type'] == 'object'):
item_obj_name = plural_obj_name_to_singular(prop_name, post_fix='Item')
full_obj_name = isi_obj_name + '_' + item_obj_name
add_if_new(
full_obj_name, properties[prop_name], 'items',
prop['items']['type'], isi_obj_names, isi_obj_list)
elif 'type' in prop['items'] and isinstance(prop['items']['type'], list):
best_prop = find_best_type_for_prop(prop['items'])
if 'type' in best_prop and best_prop['type'] == 'object':
item_obj_name = plural_obj_name_to_singular(
prop_name, post_fix='Item')
full_obj_name = isi_obj_name + '_' + item_obj_name
add_if_new(
full_obj_name, properties[prop_name], 'items',
best_prop, isi_obj_names, isi_obj_list)
else:
properties[prop_name]['items'] = best_prop
elif 'type' in prop['items'] and prop['items']['type'] == 'array':
isi_to_swagger_array_prop(
prop['items'], properties[prop_name], 'items',
isi_obj_name, isi_obj_list, isi_obj_names, obj_defs)
elif 'type' not in prop['items'] and '$ref' not in prop['items']:
print('*** Array with no type or $ref: {}: {}'.format(
isi_obj_name, prop))
# string will kind of work for anything
prop['items'] = {'type': 'string'}
def isi_to_swagger_object_def(isi_obj_name, isi_schema, obj_defs,
isi_obj_list, isi_obj_names):
if 'type' not in isi_schema:
# have seen this for empty responses
return 'Empty'
if isinstance(isi_schema['type'], list):
for schema_list_item in isi_schema['type']:
if 'type' not in schema_list_item:
# hack - just return empty object
return 'Empty'
            # use the first single-object schema in the list; the 'list' form
            # is usually there to allow multiple items to be created with a
            # single call.
if schema_list_item['type'] == 'object':
isi_schema['type'] = 'object'
isi_schema['properties'] = schema_list_item['properties']
break
if isi_schema['type'] != 'object':
raise RuntimeError(
"isi_schema is not type 'object': {}".format(isi_schema))
# found a few empty objects that omit the properties field
if 'properties' not in isi_schema:
if 'settings' in isi_schema:
# saw this with /3/cluster/timezone
isi_schema['properties'] = isi_schema['settings']
del isi_schema['settings']
else:
isi_schema['properties'] = {}
required_props = []
for prop_name in isi_schema['properties']:
prop = isi_schema['properties'][prop_name]
if 'type' not in prop:
continue # must be a $ref
update_props = False
# check if this prop is required
if 'required' in prop:
if prop['required']:
required_props.append(prop_name)
del prop['required']
# check if there are multiple types for this prop
if isinstance(prop['type'], list):
# swagger doesn't like lists for types
# so use the first type that is not 'null'
prop = find_best_type_for_prop(prop)
update_props = True
if prop['type'] == 'object':
# save this object for later
full_obj_name = isi_obj_name + '_' + prop_name
add_if_new(
full_obj_name, isi_schema['properties'], prop_name,
prop, isi_obj_names, isi_obj_list)
elif (isinstance(prop['type'], dict) and
prop['type']['type'] == 'object'):
full_obj_name = isi_obj_name + '_' + prop_name
add_if_new(
full_obj_name, isi_schema['properties'], prop_name,
prop['type'], isi_obj_names, isi_obj_list)
elif prop['type'] == 'array':
isi_to_swagger_array_prop(
prop, isi_schema['properties'], prop_name,
isi_obj_name, isi_obj_list, isi_obj_names, obj_defs)
elif prop['type'] == 'string' and 'enum' in prop:
new_enum = []
for item in prop['enum']:
# swagger doesn't know how to interpret '@DEFAULT' values
if item[0] != '@':
new_enum.append(item)
if new_enum:
prop['enum'] = new_enum
else:
del prop['enum']
update_props = True
elif prop['type'] == 'any':
prop['type'] = 'string'
update_props = True
elif prop['type'] == 'int':
print('*** Invalid prop type in object {} prop {}: {}'.format(
isi_obj_name, prop_name, prop))
prop['type'] = 'integer'
update_props = True
elif prop['type'] == 'bool':
print('*** Invalid prop type in object {} prop {}: {}'.format(
isi_obj_name, prop_name, prop))
prop['type'] = 'boolean'
update_props = True
if update_props is True:
isi_schema['properties'][prop_name] = prop
update_props = False
# attach required props
if required_props:
isi_schema['required'] = required_props
return find_or_add_obj_def(obj_defs, isi_schema, isi_obj_name)
def build_unique_name(module_name, obj_name, isi_obj_names, swag_objs=None):
# check if there is already an object with this name and if so
# use the module_name to make it unique
swag_obj_name = obj_name.title().replace('_', '')
while swag_obj_name in isi_obj_names:
# check if there is a version number on the module
matches = re.search('(.*)(_types_v)(\\d+)', module_name)
if matches is not None:
version = matches.group(3)
# try adding the version number to the end
swag_obj_name += 'V' + version
if swag_obj_name not in isi_obj_names:
break
else:
version = ''
if swag_objs is not None:
# pull out the object whose name matched and update it
existing_mod_name, existing_obj_name = isi_obj_names[swag_obj_name]
existing_new_name = build_unique_name(
existing_mod_name, existing_obj_name, isi_obj_names)
del isi_obj_names[swag_obj_name]
swag_objs[existing_new_name] = swag_objs[swag_obj_name]
del swag_objs[swag_obj_name]
# try prepending the module name
swag_obj_namespace = module_name.replace(
'_types', '').title().replace('_', '')
swag_obj_name = swag_obj_namespace + swag_obj_name
if swag_obj_name not in isi_obj_names:
break
else:
# doesn't seem possible that i would get here, but just in case
raise RuntimeError(
'Unable to build unique name for {}: {} {}.'.format(
module_name, obj_name, swag_obj_name))
isi_obj_names[swag_obj_name] = (module_name, obj_name)
return swag_obj_name
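# Naming sketch (hypothetical module and object names): for module
# 'cluster_types_v3' and object 'cluster_config', a clash on 'ClusterConfig'
# is resolved by appending the module version ('ClusterConfigV3'); modules
# without a version fall back to prepending the module namespace instead.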
def main():
argparser = argparse.ArgumentParser(
description=('Builds Swagger data model definitions '
'using the PAPI source docs.'))
argparser.add_argument(
'papiDocDir',
help='Path to the isilon/lib/isi_platform_api/doc-inc directory.')
argparser.add_argument('outputFile', help='Path to the output file.')
args = argparser.parse_args()
papiDocDir = os.path.abspath(args.papiDocDir)
if os.path.exists(papiDocDir) is False:
print('Invalid path: {}'.format(papiDocDir))
sys.exit(1)
sys.path.append(papiDocDir)
modules = []
build_module_list(os.listdir(papiDocDir), papiDocDir, modules)
swag_objs = {
'Error': {
'type': 'object',
'required': [
'code',
'message'
],
'properties': {
'code': {
'type': 'integer',
'format': 'int32'
},
'message': {
'type': 'string'
}
}
},
'Empty': {
'type': 'object',
'properties': {}
},
'CreateResponse': {
'properties': {
'id': {
'description': ('ID of created item that can be used to '
'refer to item in the collection-item '
'resource path.'),
'type': 'string'
}
},
'required': [
'id'
],
'type': 'object'
}
}
isi_objs = []
# list of unique object names (prevent double processing)
isi_obj_names = dict()
# process top-level objects
for module_filename in modules:
module_name = os.path.splitext(module_filename)[0]
module = __import__(module_name)
for obj_name in dir(module):
obj = getattr(module, obj_name)
if (isinstance(obj, dict) and 'type' in obj and
obj['type'] == 'object'):
# see if this object is already defined
if find_matching_obj_def(swag_objs, obj) is None:
swag_obj_name = build_unique_name(
module_name, obj_name, isi_obj_names, swag_objs)
isi_to_swagger_object_def(
swag_obj_name, obj, swag_objs, isi_objs, isi_obj_names)
# process objects referenced from inside other objects
for obj_value in isi_objs:
obj_name = obj_value[0]
props = obj_value[1]
prop_name = obj_value[2]
obj = obj_value[3]
ref_obj_name = isi_to_swagger_object_def(
obj_name, obj, swag_objs, isi_objs, isi_obj_names)
try:
prop_description = props[prop_name]['description']
except KeyError:
prop_description = ''
if 'description' in obj:
prop_description = obj['description']
elif ref_obj_name != obj_name:
# try to get the description from the ref'ed object
ref_obj = swag_objs[ref_obj_name]
if 'description' in ref_obj:
prop_description = ref_obj['description']
props[prop_name] = {
'description': prop_description,
'$ref': '#/definitions/' + ref_obj_name
}
with open(args.outputFile, 'w') as outputFile:
outputFile.write(json.dumps(swag_objs, indent=4, sort_keys=True))
if __name__ == '__main__':
main()
|
|
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_webfilter_profile
short_description: Configure Web filter profiles in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify webfilter feature and profile category.
      Examples include all parameters, and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
state:
description:
- Indicates whether to create or remove the object.
              This attribute was present already in previous versions at a deeper level.
It has been moved out to this outer level.
type: str
required: false
choices:
- present
- absent
version_added: 2.9
webfilter_profile:
description:
- Configure Web filter profiles.
default: null
type: dict
suboptions:
state:
description:
- B(Deprecated)
- Starting with Ansible 2.9 we recommend using the top-level 'state' parameter.
- HORIZONTALLINE
- Indicates whether to create or remove the object.
type: str
required: false
choices:
- present
- absent
comment:
description:
- Optional comments.
type: str
extended_log:
description:
- Enable/disable extended logging for web filtering.
type: str
choices:
- enable
- disable
ftgd_wf:
description:
- FortiGuard Web Filter settings.
type: dict
suboptions:
exempt_quota:
description:
- Do not stop quota for these categories.
type: str
filters:
description:
- FortiGuard filters.
type: list
suboptions:
action:
description:
- Action to take for matches.
type: str
choices:
- block
- authenticate
- monitor
- warning
auth_usr_grp:
description:
- Groups with permission to authenticate.
type: str
suboptions:
name:
description:
- User group name. Source user.group.name.
required: true
type: str
category:
description:
- Categories and groups the filter examines.
type: int
id:
description:
- ID number.
required: true
type: int
log:
description:
- Enable/disable logging.
type: str
choices:
- enable
- disable
override_replacemsg:
description:
- Override replacement message.
type: str
warn_duration:
description:
- Duration of warnings.
type: str
warning_duration_type:
description:
- Re-display warning after closing browser or after a timeout.
type: str
choices:
- session
- timeout
warning_prompt:
description:
- Warning prompts in each category or each domain.
type: str
choices:
- per-domain
- per-category
max_quota_timeout:
description:
- Maximum FortiGuard quota used by single page view in seconds (excludes streams).
type: int
options:
description:
- Options for FortiGuard Web Filter.
type: str
choices:
- error-allow
- rate-server-ip
- connect-request-bypass
- ftgd-disable
ovrd:
description:
- Allow web filter profile overrides.
type: str
quota:
description:
- FortiGuard traffic quota settings.
type: list
suboptions:
category:
description:
- FortiGuard categories to apply quota to (category action must be set to monitor).
type: str
duration:
description:
- Duration of quota.
type: str
id:
description:
- ID number.
required: true
type: int
override_replacemsg:
description:
- Override replacement message.
type: str
type:
description:
- Quota type.
type: str
choices:
- time
- traffic
unit:
description:
- Traffic quota unit of measurement.
type: str
choices:
- B
- KB
- MB
- GB
value:
description:
- Traffic quota value.
type: int
rate_crl_urls:
description:
- Enable/disable rating CRL by URL.
type: str
choices:
- disable
- enable
rate_css_urls:
description:
- Enable/disable rating CSS by URL.
type: str
choices:
- disable
- enable
rate_image_urls:
description:
- Enable/disable rating images by URL.
type: str
choices:
- disable
- enable
rate_javascript_urls:
description:
- Enable/disable rating JavaScript by URL.
type: str
choices:
- disable
- enable
https_replacemsg:
description:
- Enable replacement messages for HTTPS.
type: str
choices:
- enable
- disable
inspection_mode:
description:
- Web filtering inspection mode.
type: str
choices:
- proxy
- flow-based
log_all_url:
description:
- Enable/disable logging all URLs visited.
type: str
choices:
- enable
- disable
name:
description:
- Profile name.
required: true
type: str
options:
description:
- Options.
type: str
choices:
- activexfilter
- cookiefilter
- javafilter
- block-invalid-url
- jscript
- js
- vbs
- unknown
- intrinsic
- wf-referer
- wf-cookie
- per-user-bwl
override:
description:
- Web Filter override settings.
type: dict
suboptions:
ovrd_cookie:
description:
- Allow/deny browser-based (cookie) overrides.
type: str
choices:
- allow
- deny
ovrd_dur:
description:
- Override duration.
type: str
ovrd_dur_mode:
description:
- Override duration mode.
type: str
choices:
- constant
- ask
ovrd_scope:
description:
- Override scope.
type: str
choices:
- user
- user-group
- ip
- browser
- ask
ovrd_user_group:
description:
- User groups with permission to use the override.
type: str
suboptions:
name:
description:
- User group name. Source user.group.name.
required: true
type: str
profile:
description:
- Web filter profile with permission to create overrides.
type: list
suboptions:
name:
description:
- Web profile. Source webfilter.profile.name.
required: true
type: str
profile_attribute:
description:
- Profile attribute to retrieve from the RADIUS server.
type: str
choices:
- User-Name
- NAS-IP-Address
- Framed-IP-Address
- Framed-IP-Netmask
- Filter-Id
- Login-IP-Host
- Reply-Message
- Callback-Number
- Callback-Id
- Framed-Route
- Framed-IPX-Network
- Class
- Called-Station-Id
- Calling-Station-Id
- NAS-Identifier
- Proxy-State
- Login-LAT-Service
- Login-LAT-Node
- Login-LAT-Group
- Framed-AppleTalk-Zone
- Acct-Session-Id
- Acct-Multi-Session-Id
profile_type:
description:
- Override profile type.
type: str
choices:
- list
- radius
ovrd_perm:
description:
- Permitted override types.
type: str
choices:
- bannedword-override
- urlfilter-override
- fortiguard-wf-override
- contenttype-check-override
post_action:
description:
- Action taken for HTTP POST traffic.
type: str
choices:
- normal
- block
replacemsg_group:
description:
- Replacement message group. Source system.replacemsg-group.name.
type: str
web:
description:
- Web content filtering settings.
type: dict
suboptions:
blacklist:
description:
- Enable/disable automatic addition of URLs detected by FortiSandbox to blacklist.
type: str
choices:
- enable
- disable
bword_table:
description:
- Banned word table ID. Source webfilter.content.id.
type: int
bword_threshold:
description:
- Banned word score threshold.
type: int
content_header_list:
description:
- Content header list. Source webfilter.content-header.id.
type: int
keyword_match:
description:
- Search keywords to log when match is found.
type: str
suboptions:
pattern:
description:
- Pattern/keyword to search for.
required: true
type: str
log_search:
description:
- Enable/disable logging all search phrases.
type: str
choices:
- enable
- disable
safe_search:
description:
- Safe search type.
type: str
choices:
- url
- header
urlfilter_table:
description:
- URL filter table ID. Source webfilter.urlfilter.id.
type: int
whitelist:
description:
- FortiGuard whitelist settings.
type: str
choices:
- exempt-av
- exempt-webcontent
- exempt-activex-java-cookie
- exempt-dlp
- exempt-rangeblock
- extended-log-others
youtube_restrict:
description:
- YouTube EDU filter level.
type: str
choices:
- none
- strict
- moderate
web_content_log:
description:
                    - Enable/disable logging blocked web content.
type: str
choices:
- enable
- disable
web_extended_all_action_log:
description:
                    - Enable/disable extended logging of any filter action for web filtering.
type: str
choices:
- enable
- disable
web_filter_activex_log:
description:
- Enable/disable logging ActiveX.
type: str
choices:
- enable
- disable
web_filter_applet_log:
description:
- Enable/disable logging Java applets.
type: str
choices:
- enable
- disable
web_filter_command_block_log:
description:
- Enable/disable logging blocked commands.
type: str
choices:
- enable
- disable
web_filter_cookie_log:
description:
- Enable/disable logging cookie filtering.
type: str
choices:
- enable
- disable
web_filter_cookie_removal_log:
description:
- Enable/disable logging blocked cookies.
type: str
choices:
- enable
- disable
web_filter_js_log:
description:
- Enable/disable logging Java scripts.
type: str
choices:
- enable
- disable
web_filter_jscript_log:
description:
- Enable/disable logging JScripts.
type: str
choices:
- enable
- disable
web_filter_referer_log:
description:
- Enable/disable logging referrers.
type: str
choices:
- enable
- disable
web_filter_unknown_log:
description:
- Enable/disable logging unknown scripts.
type: str
choices:
- enable
- disable
web_filter_vbs_log:
description:
- Enable/disable logging VBS scripts.
type: str
choices:
- enable
- disable
web_ftgd_err_log:
description:
- Enable/disable logging rating errors.
type: str
choices:
- enable
- disable
web_ftgd_quota_usage:
description:
- Enable/disable logging daily quota usage.
type: str
choices:
- enable
- disable
web_invalid_domain_log:
description:
- Enable/disable logging invalid domain names.
type: str
choices:
- enable
- disable
web_url_log:
description:
- Enable/disable logging URL filtering.
type: str
choices:
- enable
- disable
wisp:
description:
- Enable/disable web proxy WISP.
type: str
choices:
- enable
- disable
wisp_algorithm:
description:
- WISP server selection algorithm.
type: str
choices:
- primary-secondary
- round-robin
- auto-learning
wisp_servers:
description:
- WISP servers.
type: list
suboptions:
name:
description:
- Server name. Source web-proxy.wisp.name.
required: true
type: str
youtube_channel_filter:
description:
- YouTube channel filter.
type: list
suboptions:
channel_id:
description:
- YouTube channel ID to be filtered.
type: str
comment:
description:
- Comment.
type: str
id:
description:
- ID.
required: true
type: int
youtube_channel_status:
description:
- YouTube channel filter status.
type: str
choices:
- disable
- blacklist
- whitelist
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure Web filter profiles.
fortios_webfilter_profile:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
webfilter_profile:
comment: "Optional comments."
extended_log: "enable"
ftgd_wf:
exempt_quota: "<your_own_value>"
filters:
-
action: "block"
auth_usr_grp:
-
name: "default_name_10 (source user.group.name)"
category: "11"
id: "12"
log: "enable"
override_replacemsg: "<your_own_value>"
warn_duration: "<your_own_value>"
warning_duration_type: "session"
warning_prompt: "per-domain"
max_quota_timeout: "18"
options: "error-allow"
ovrd: "<your_own_value>"
quota:
-
category: "<your_own_value>"
duration: "<your_own_value>"
id: "24"
override_replacemsg: "<your_own_value>"
type: "time"
unit: "B"
value: "28"
rate_crl_urls: "disable"
rate_css_urls: "disable"
rate_image_urls: "disable"
rate_javascript_urls: "disable"
https_replacemsg: "enable"
inspection_mode: "proxy"
log_all_url: "enable"
name: "default_name_36"
options: "activexfilter"
override:
ovrd_cookie: "allow"
ovrd_dur: "<your_own_value>"
ovrd_dur_mode: "constant"
ovrd_scope: "user"
ovrd_user_group:
-
name: "default_name_44 (source user.group.name)"
profile:
-
name: "default_name_46 (source webfilter.profile.name)"
profile_attribute: "User-Name"
profile_type: "list"
ovrd_perm: "bannedword-override"
post_action: "normal"
replacemsg_group: "<your_own_value> (source system.replacemsg-group.name)"
web:
blacklist: "enable"
bword_table: "54 (source webfilter.content.id)"
bword_threshold: "55"
content_header_list: "56 (source webfilter.content-header.id)"
keyword_match:
-
pattern: "<your_own_value>"
log_search: "enable"
safe_search: "url"
urlfilter_table: "61 (source webfilter.urlfilter.id)"
whitelist: "exempt-av"
youtube_restrict: "none"
web_content_log: "enable"
web_extended_all_action_log: "enable"
web_filter_activex_log: "enable"
web_filter_applet_log: "enable"
web_filter_command_block_log: "enable"
web_filter_cookie_log: "enable"
web_filter_cookie_removal_log: "enable"
web_filter_js_log: "enable"
web_filter_jscript_log: "enable"
web_filter_referer_log: "enable"
web_filter_unknown_log: "enable"
web_filter_vbs_log: "enable"
web_ftgd_err_log: "enable"
web_ftgd_quota_usage: "enable"
web_invalid_domain_log: "enable"
web_url_log: "enable"
wisp: "enable"
wisp_algorithm: "primary-secondary"
wisp_servers:
-
name: "default_name_83 (source web-proxy.wisp.name)"
youtube_channel_filter:
-
channel_id: "<your_own_value>"
comment: "Comment."
id: "87"
youtube_channel_status: "disable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_webfilter_profile_data(json):
option_list = ['comment', 'extended_log', 'ftgd_wf',
'https_replacemsg', 'inspection_mode', 'log_all_url',
'name', 'options', 'override',
'ovrd_perm', 'post_action', 'replacemsg_group',
'web', 'web_content_log', 'web_extended_all_action_log',
'web_filter_activex_log', 'web_filter_applet_log', 'web_filter_command_block_log',
'web_filter_cookie_log', 'web_filter_cookie_removal_log', 'web_filter_js_log',
'web_filter_jscript_log', 'web_filter_referer_log', 'web_filter_unknown_log',
'web_filter_vbs_log', 'web_ftgd_err_log', 'web_ftgd_quota_usage',
'web_invalid_domain_log', 'web_url_log', 'wisp',
'wisp_algorithm', 'wisp_servers', 'youtube_channel_filter',
'youtube_channel_status']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
    if isinstance(data, list):
        for i, elem in enumerate(data):
            data[i] = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
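# Illustrative sketch (keys below are hypothetical): underscore_to_hyphen recursively
# rewrites dictionary keys, so a payload such as
#     {'extended_log': 'enable', 'ftgd_wf': {'max_quota_timeout': 300}}
# becomes
#     {'extended-log': 'enable', 'ftgd-wf': {'max-quota-timeout': 300}}
# which matches the hyphenated attribute names the FortiOS API expects.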
def webfilter_profile(data, fos):
vdom = data['vdom']
if 'state' in data and data['state']:
state = data['state']
    elif 'state' in data['webfilter_profile'] and data['webfilter_profile']['state']:
state = data['webfilter_profile']['state']
else:
state = True
webfilter_profile_data = data['webfilter_profile']
filtered_data = underscore_to_hyphen(filter_webfilter_profile_data(webfilter_profile_data))
if state == "present":
return fos.set('webfilter',
'profile',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('webfilter',
'profile',
mkey=filtered_data['name'],
vdom=vdom)
def is_successful_status(status):
    return status['status'] == "success" or \
        (status['http_method'] == "DELETE" and status['http_status'] == 404)
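# Note (design rationale as read from the check above): a DELETE that returns
# http_status 404 is still counted as success, so removing an object that is
# already absent keeps the module idempotent.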
def fortios_webfilter(data, fos):
if data['webfilter_profile']:
resp = webfilter_profile(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": False, "type": "str",
"choices": ["present", "absent"]},
"webfilter_profile": {
"required": False, "type": "dict", "default": None,
"options": {
"state": {"required": False, "type": "str",
"choices": ["present", "absent"]},
"comment": {"required": False, "type": "str"},
"extended_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ftgd_wf": {"required": False, "type": "dict",
"options": {
"exempt_quota": {"required": False, "type": "str"},
"filters": {"required": False, "type": "list",
"options": {
"action": {"required": False, "type": "str",
"choices": ["block", "authenticate", "monitor",
"warning"]},
"auth_usr_grp": {"required": False, "type": "str",
"options": {
"name": {"required": True, "type": "str"}
}},
"category": {"required": False, "type": "int"},
"id": {"required": True, "type": "int"},
"log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"override_replacemsg": {"required": False, "type": "str"},
"warn_duration": {"required": False, "type": "str"},
"warning_duration_type": {"required": False, "type": "str",
"choices": ["session", "timeout"]},
"warning_prompt": {"required": False, "type": "str",
"choices": ["per-domain", "per-category"]}
}},
"max_quota_timeout": {"required": False, "type": "int"},
"options": {"required": False, "type": "str",
"choices": ["error-allow", "rate-server-ip", "connect-request-bypass",
"ftgd-disable"]},
"ovrd": {"required": False, "type": "str"},
"quota": {"required": False, "type": "list",
"options": {
"category": {"required": False, "type": "str"},
"duration": {"required": False, "type": "str"},
"id": {"required": True, "type": "int"},
"override_replacemsg": {"required": False, "type": "str"},
"type": {"required": False, "type": "str",
"choices": ["time", "traffic"]},
"unit": {"required": False, "type": "str",
"choices": ["B", "KB", "MB",
"GB"]},
"value": {"required": False, "type": "int"}
}},
"rate_crl_urls": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"rate_css_urls": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"rate_image_urls": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"rate_javascript_urls": {"required": False, "type": "str",
"choices": ["disable", "enable"]}
}},
"https_replacemsg": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"inspection_mode": {"required": False, "type": "str",
"choices": ["proxy", "flow-based"]},
"log_all_url": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"name": {"required": True, "type": "str"},
"options": {"required": False, "type": "str",
"choices": ["activexfilter", "cookiefilter", "javafilter",
"block-invalid-url", "jscript", "js",
"vbs", "unknown", "intrinsic",
"wf-referer", "wf-cookie", "per-user-bwl"]},
"override": {"required": False, "type": "dict",
"options": {
"ovrd_cookie": {"required": False, "type": "str",
"choices": ["allow", "deny"]},
"ovrd_dur": {"required": False, "type": "str"},
"ovrd_dur_mode": {"required": False, "type": "str",
"choices": ["constant", "ask"]},
"ovrd_scope": {"required": False, "type": "str",
"choices": ["user", "user-group", "ip",
"browser", "ask"]},
"ovrd_user_group": {"required": False, "type": "str",
"options": {
"name": {"required": True, "type": "str"}
}},
"profile": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"profile_attribute": {"required": False, "type": "str",
"choices": ["User-Name", "NAS-IP-Address", "Framed-IP-Address",
"Framed-IP-Netmask", "Filter-Id", "Login-IP-Host",
"Reply-Message", "Callback-Number", "Callback-Id",
"Framed-Route", "Framed-IPX-Network", "Class",
"Called-Station-Id", "Calling-Station-Id", "NAS-Identifier",
"Proxy-State", "Login-LAT-Service", "Login-LAT-Node",
"Login-LAT-Group", "Framed-AppleTalk-Zone", "Acct-Session-Id",
"Acct-Multi-Session-Id"]},
"profile_type": {"required": False, "type": "str",
"choices": ["list", "radius"]}
}},
"ovrd_perm": {"required": False, "type": "str",
"choices": ["bannedword-override", "urlfilter-override", "fortiguard-wf-override",
"contenttype-check-override"]},
"post_action": {"required": False, "type": "str",
"choices": ["normal", "block"]},
"replacemsg_group": {"required": False, "type": "str"},
"web": {"required": False, "type": "dict",
"options": {
"blacklist": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"bword_table": {"required": False, "type": "int"},
"bword_threshold": {"required": False, "type": "int"},
"content_header_list": {"required": False, "type": "int"},
"keyword_match": {"required": False, "type": "str",
"options": {
"pattern": {"required": True, "type": "str"}
}},
"log_search": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"safe_search": {"required": False, "type": "str",
"choices": ["url", "header"]},
"urlfilter_table": {"required": False, "type": "int"},
"whitelist": {"required": False, "type": "str",
"choices": ["exempt-av", "exempt-webcontent", "exempt-activex-java-cookie",
"exempt-dlp", "exempt-rangeblock", "extended-log-others"]},
"youtube_restrict": {"required": False, "type": "str",
"choices": ["none", "strict", "moderate"]}
}},
"web_content_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_extended_all_action_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_filter_activex_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_filter_applet_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_filter_command_block_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_filter_cookie_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_filter_cookie_removal_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_filter_js_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_filter_jscript_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_filter_referer_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_filter_unknown_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_filter_vbs_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_ftgd_err_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_ftgd_quota_usage": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_invalid_domain_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"web_url_log": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"wisp": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"wisp_algorithm": {"required": False, "type": "str",
"choices": ["primary-secondary", "round-robin", "auto-learning"]},
"wisp_servers": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"youtube_channel_filter": {"required": False, "type": "list",
"options": {
"channel_id": {"required": False, "type": "str"},
"comment": {"required": False, "type": "str"},
"id": {"required": True, "type": "int"}
}},
"youtube_channel_status": {"required": False, "type": "str",
"choices": ["disable", "blacklist", "whitelist"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_webfilter(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_webfilter(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Tong Zhang <zhangt@frib.msu.edu>
# 2016-10-16 20:20:57 PM EDT
#
from flame import Machine
import numpy as np
import matplotlib.pyplot as plt
lat_fid = open('test.lat', 'r')
m = Machine(lat_fid)
## all BPMs and Correctors (both horizontal and vertical)
bpm_ids, cor_ids = m.find(type='bpm'), m.find(type='orbtrim')
corh_ids = cor_ids[0::2]
corv_ids = cor_ids[1::2]
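# Note (assumption implied by the slicing above): 'orbtrim' correctors are taken to
# alternate horizontal/vertical in the lattice, so even indices drive theta_x and
# odd indices drive theta_y.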
observe_ids = bpm_ids
## before distortion
s = m.allocState({})
r = m.propagate(s, 0, len(m), observe=range(len(m)))
x, y = np.array([[r[i][1].moment0_env[j] for i in range(len(m))] for j in [0,2]])
pos = np.array([r[i][1].pos for i in range(len(m))])
fig1 = plt.figure(figsize=(10, 8), dpi=120)
ax1 = fig1.add_subplot(111)
linex, = ax1.plot(pos[observe_ids], x[observe_ids], 'r-',
alpha=0.6,
label='$\mathrm{ref\;orbit}$')
linex.set_lw(2)
## apply random kicks
N = 1
#corh_ids_enabled = np.random.choice(corh_ids, size=N)
#corh_val_enabled = 5e-3 * (np.random.random(size=N) * (2 - 1) + 1)
corh_ids_enabled = np.array([392])
corh_val_enabled = np.array([0.005])
for i, v in zip(corh_ids_enabled, corh_val_enabled):
m.reconfigure(i, {'theta_x': v})
"""
for i, v in zip(corh_sel, corh_val):
m.reconfigure(i, {'theta_x': v})
for i, v in zip(corv_sel, corv_val):
m.reconfigure(i, {'theta_y': v})
"""
s_tmp = m.allocState({})
r_tmp = m.propagate(s_tmp, 0, len(m), observe=range(len(m)))
x_tmp, y_tmp = np.array([[r_tmp[i][1].moment0_env[j] for i in range(len(m))] for j in [0,2]])
pos = np.array([r_tmp[i][1].pos for i in range(len(m))])
# data plot
linex_tmp, = ax1.plot(pos[observe_ids], x_tmp[observe_ids], 'b--',
alpha=0.8,
label='$\mathrm{kicked\;orbit}$')
linex_tmp.set_lw(2)
## mark the enabled kickers
corr = ax1.scatter(pos[corh_ids_enabled], x_tmp[corh_ids_enabled],
c='m', alpha=0.8, s=100,
label=r"$\mathrm{Kicker}$")
#plt.show()
## correct orbit
# define objective function to minimize
#def obj_func(cor_val, cor_ids):
# """ Objective function for `minimize`, calculate the distance
# to the ideal trajectory
#
# :param cor_val: corrector strength/values, list/array, [rad]
# :param cor_ids: corrector id numbers, list/array
# :return: sum of the square of the deviation between present
# ideal trajectory
# """
# for i, v in zip(cor_ids, cor_val):
# m.reconfigure(i, {'theta_x': v}) # horizontal only
#
# s_tmp = m.allocState({})
# r_tmp = m.propagate(s_tmp, 0, len(m), observe=range(len(m)))
# x_tmp, y_tmp = np.array([[r_tmp[i][1].moment0_env[j]
# for i in observe_ids]
# for j in [0, 2]])
# #return np.sum((x_tmp - x)**2)
# #return np.sum((x_tmp)**2)
# xsq = x_tmp * x_tmp
# return xsq.mean() * xsq.max()
# #return np.sum(xsq)
#
#print obj_func(corh_ids_enabled, corh_val_enabled)
def obj_func(cor_val, cor_ids):
""" Objective function for `minimize`, calculate the distance
to the ideal trajectory
:param cor_val: corrector strength/values, list/array, [rad]
:param cor_ids: corrector id numbers, list/array
    :return: sum of the squared deviations between the present and the
             ideal trajectory
"""
corh_val, corv_val = cor_val[0::2], cor_val[1::2]
corh_ids, corv_ids = cor_ids[0::2], cor_ids[1::2]
for i, v in zip(corh_ids, corh_val):
m.reconfigure(i, {'theta_x': v})
for i, v in zip(corv_ids, corv_val):
m.reconfigure(i, {'theta_y': v})
s_tmp = m.allocState({})
r_tmp = m.propagate(s_tmp, 0, len(m), observe=range(len(m)))
x_tmp, y_tmp = np.array([[r_tmp[i][1].moment0_env[j]
for i in observe_ids]
for j in [0, 2]])
#return np.sum((x_tmp - x)**2)
#return np.sum((x_tmp)**2)
xsq = x_tmp * x_tmp
return np.sum(xsq)
#return xsq.mean() * xsq.max()
# select correctors, H
#NC = 20
#corh_ids_se = np.random.choice(corh_ids, size=NC)
#corh_val_se = 0. * (np.random.random(size=NC) * (2 - 1) + 1)
#corh_ids_se = corh_ids_enabled
#corh_val_se = [0.005]
cor_ids_se = m.find(type='orbtrim')[45:61]
#cor_ids_se = m.find(type='orbtrim')[34:50]
#cor_ids_se = m.find(type='orbtrim')[44:50]
#print cor_ids_se
#import sys
#sys.exit(1)
#corh_ids_se = cor_ids_se[0::2]
#corv_ids_se = cor_ids_se[1::2]
cor_val_se = [1e-4]*len(cor_ids_se)
#corh_val_se = [1e-4] * len(corh_ids_se)
#corv_val_se = [1e-4] * len(corv_ids_se)
from scipy.optimize import minimize
res = minimize(obj_func, cor_val_se, args=(cor_ids_se,),
#method='Nelder-Mead',
method='L-BFGS-B', options={'disp':True}
#method='SLSQP', options={'maxiter':200, 'disp':True}
)
print res.x
cor_val = res.x
# show corrected result
corh_val, corv_val = cor_val[0::2], cor_val[1::2]
corh_ids, corv_ids = cor_ids_se[0::2], cor_ids_se[1::2]
for i, v in zip(corh_ids, corh_val):
m.reconfigure(i, {'theta_x': v})
for i, v in zip(corv_ids, corv_val):
m.reconfigure(i, {'theta_y': v})
s_oc = m.allocState({})
r_oc = m.propagate(s_oc, 0, len(m), observe=range(len(m)))
x_oc, y_oc = np.array([[r_oc[i][1].moment0_env[j]
for i in observe_ids]
for j in [0, 2]])
x_oc_all, y_oc_all = np.array([[r_oc[i][1].moment0_env[j]
for i in range(len(m))]
for j in [0, 2]])
pos_oc = np.array([r_oc[i][1].pos for i in observe_ids])
pos_oc_all = np.array([r_oc[i][1].pos for i in range(len(m))])
linex_oc, = ax1.plot(pos_oc, x_oc, 'g-',
alpha=0.9,
label='$\mathrm{corrected\;orbit}$')
linex_oc.set_lw(2)
# setup ax1
ax1.set_xlim([0,160])
ax1.set_title(r"$\mathrm{kick\;of}\;\theta_x = %.3f\;\mathrm{is\;applied\;at\;corrector\;id:}\;%d$" % (corh_val_enabled[0], corh_ids_enabled[0]), fontsize=18)
ax1.set_xlabel('$z\,\mathrm{[m]}$', fontsize=20)
ax1.set_ylabel('$x_{env}\,\mathrm{[mm]}$', fontsize=20)
ax1.legend(loc=3)
#ax1.text(20, 16,
# r'$\mathrm{Orbit\;is\;corrected\;back\;by\;applying}\;\theta_x=%.4f$' % (corh_val),
# fontdict={'fontsize':18})
corr1 = ax1.scatter(pos_oc_all[cor_ids_se], x_oc_all[cor_ids_se],
c='m', alpha=0.8, s=100,
label=r"$\mathrm{Kicker}$")
np.savetxt('zxy_scipy_3.dat', np.vstack((pos_oc, x_oc, y_oc)).T)
# show
plt.show()
import sys
sys.exit(1)
#corr1 = ax1.scatter(pos[corh_ids_se], x[corh_ids_se],
# c='k', alpha=0.8, s=80)
# show with x-rms
xrms_tmp, yrms_tmp = np.array([[r_tmp[i][1].moment0_rms[j]
                                for i in range(len(m))]
                               for j in [0, 2]])
fig_tmp = plt.figure()
ax_tmp = fig_tmp.add_subplot(111)
linex_tmp, = ax_tmp.plot(pos, x_tmp, 'r', lw=2)
fillx_tmp = ax_tmp.fill_between(pos, x_tmp - xrms_tmp,
x_tmp + xrms_tmp, alpha=0.2, color='b')
plt.show()
|
|
"""Test structuring of collections and primitives."""
from typing import Any, Dict, FrozenSet, List, MutableSet, Optional, Set, Tuple, Union
import attr
from hypothesis import assume, given
from hypothesis.strategies import (
binary,
booleans,
data,
floats,
frozensets,
integers,
just,
lists,
one_of,
sampled_from,
sets,
text,
tuples,
)
from pytest import raises
from cattr._compat import copy_with, is_bare, is_union_type
from cattrs import Converter
from cattrs.errors import IterableValidationError, StructureHandlerNotFoundError
from . import (
dicts_of_primitives,
enums_of_primitives,
lists_of_primitives,
primitive_strategies,
seqs_of_primitives,
)
from ._compat import change_type_param
NoneType = type(None)
ints_and_type = tuples(integers(), just(int))
floats_and_type = tuples(floats(allow_nan=False), just(float))
strs_and_type = tuples(text(), just(str))
bytes_and_type = tuples(binary(), just(bytes))
primitives_and_type = one_of(
ints_and_type, floats_and_type, strs_and_type, bytes_and_type
)
mut_set_types = sampled_from([Set, MutableSet])
set_types = one_of(mut_set_types, just(FrozenSet))
def create_generic_type(generic_types, param_type):
"""Create a strategy for generating parameterized generic types."""
return one_of(
generic_types,
generic_types.map(lambda t: t[Any]),
generic_types.map(lambda t: t[param_type]),
)
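# Note on the strategy above (illustrative): for mut_set_types with param_type int it
# may draw a bare Set/MutableSet, Set[Any]/MutableSet[Any], or Set[int]/MutableSet[int],
# so the set-structuring tests below cover bare, Any-parameterized and concretely
# parameterized generics alike.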
mut_sets_of_primitives = primitive_strategies.flatmap(
lambda e: tuples(sets(e[0]), create_generic_type(mut_set_types, e[1]))
)
frozen_sets_of_primitives = primitive_strategies.flatmap(
lambda e: tuples(frozensets(e[0]), create_generic_type(just(FrozenSet), e[1]))
)
sets_of_primitives = one_of(mut_sets_of_primitives, frozen_sets_of_primitives)
@given(primitives_and_type)
def test_structuring_primitives(primitive_and_type):
"""Test just structuring a primitive value."""
converter = Converter()
val, t = primitive_and_type
assert converter.structure(val, t) == val
assert converter.structure(val, Any) == val
@given(seqs_of_primitives)
def test_structuring_seqs(seq_and_type):
"""Test structuring sequence generic types."""
converter = Converter()
iterable, t = seq_and_type
converted = converter.structure(iterable, t)
for x, y in zip(iterable, converted):
assert x == y
@given(sets_of_primitives, set_types)
def test_structuring_sets(set_and_type, set_type):
"""Test structuring generic sets."""
converter = Converter()
set_, input_set_type = set_and_type
if input_set_type not in (Set, FrozenSet, MutableSet):
set_type = set_type[input_set_type.__args__[0]]
converted = converter.structure(set_, set_type)
assert converted == set_
# Set[int] can't be used with isinstance any more.
non_generic = set_type.__origin__ if set_type.__origin__ is not None else set_type
assert isinstance(converted, non_generic)
converted = converter.structure(set_, Any)
assert converted == set_
assert isinstance(converted, type(set_))
@given(sets_of_primitives)
def test_stringifying_sets(set_and_type):
"""Test structuring generic sets and converting the contents to str."""
converter = Converter()
set_, input_set_type = set_and_type
if is_bare(input_set_type):
input_set_type = input_set_type[str]
else:
input_set_type = copy_with(input_set_type, str)
converted = converter.structure(set_, input_set_type)
assert len(converted) == len(set_)
for e in set_:
assert str(e) in converted
@given(lists(primitives_and_type, min_size=1))
def test_structuring_hetero_tuples(list_of_vals_and_types):
"""Test structuring heterogenous tuples."""
converter = Converter()
types = tuple(e[1] for e in list_of_vals_and_types)
vals = [e[0] for e in list_of_vals_and_types]
t = Tuple[types]
converted = converter.structure(vals, t)
assert isinstance(converted, tuple)
for x, y in zip(vals, converted):
assert x == y
for x, y in zip(types, converted):
assert isinstance(y, x)
@given(lists(primitives_and_type))
def test_stringifying_tuples(list_of_vals_and_types):
"""Stringify all elements of a heterogeneous tuple."""
converter = Converter()
vals = [e[0] for e in list_of_vals_and_types]
t = Tuple[(str,) * len(list_of_vals_and_types)]
converted = converter.structure(vals, t)
assert isinstance(converted, tuple)
for x, y in zip(vals, converted):
assert str(x) == y
for x in converted:
# this should just be unicode, but in python2, '' is not unicode
assert isinstance(x, str)
@given(dicts_of_primitives)
def test_structuring_dicts(dict_and_type):
converter = Converter()
d, t = dict_and_type
converted = converter.structure(d, t)
assert converted == d
assert converted is not d
@given(dicts_of_primitives, data())
def test_structuring_dicts_opts(dict_and_type, data):
"""Structure dicts, but with optional primitives."""
converter = Converter()
d, t = dict_and_type
assume(not is_bare(t))
t.__args__ = (t.__args__[0], Optional[t.__args__[1]])
d = {k: v if data.draw(booleans()) else None for k, v in d.items()}
converted = converter.structure(d, t)
assert converted == d
assert converted is not d
@given(dicts_of_primitives)
def test_stringifying_dicts(dict_and_type):
converter = Converter()
d, t = dict_and_type
converted = converter.structure(d, Dict[str, str])
for k, v in d.items():
assert converted[str(k)] == str(v)
@given(primitives_and_type)
def test_structuring_optional_primitives(primitive_and_type):
"""Test structuring Optional primitive types."""
converter = Converter()
val, type = primitive_and_type
assert converter.structure(val, Optional[type]) == val
assert converter.structure(None, Optional[type]) is None
@given(lists_of_primitives().filter(lambda lp: not is_bare(lp[1])), booleans())
def test_structuring_lists_of_opt(list_and_type, detailed_validation: bool):
"""Test structuring lists of Optional primitive types."""
converter = Converter(detailed_validation=detailed_validation)
l, t = list_and_type
l.append(None)
args = t.__args__
is_optional = args[0] is Optional or (
is_union_type(args[0])
and len(args[0].__args__) == 2
and args[0].__args__[1] is NoneType
)
if not is_bare(t) and (args[0] not in (Any, str) and not is_optional):
with raises(
(TypeError, ValueError)
if not detailed_validation
else IterableValidationError
):
converter.structure(l, t)
optional_t = Optional[args[0]]
# We want to create a generic type annotation with an optional
# type parameter.
t = change_type_param(t, optional_t)
converted = converter.structure(l, t)
for x, y in zip(l, converted):
assert x == y
t.__args__ = args
@given(lists_of_primitives())
def test_stringifying_lists_of_opt(list_and_type):
"""Test structuring Optional primitive types into strings."""
converter = Converter()
l, t = list_and_type
l.append(None)
converted = converter.structure(l, List[Optional[str]])
for x, y in zip(l, converted):
if x is None:
assert x is y
else:
assert str(x) == y
@given(lists(integers()))
def test_structuring_primitive_union_hook(ints):
"""Registering a union loading hook works."""
converter = Converter()
def structure_hook(val, cl):
"""Even ints are passed through, odd are stringified."""
return val if val % 2 == 0 else str(val)
converter.register_structure_hook(Union[str, int], structure_hook)
converted = converter.structure(ints, List[Union[str, int]])
for x, y in zip(ints, converted):
if x % 2 == 0:
assert x == y
else:
assert str(x) == y
def test_structure_hook_func():
"""testing the hook_func method"""
converter = Converter()
def can_handle(cls):
return cls.__name__.startswith("F")
def handle(obj, cls):
return "hi"
class Foo(object):
pass
class Bar(object):
pass
converter.register_structure_hook_func(can_handle, handle)
assert converter.structure(10, Foo) == "hi"
with raises(StructureHandlerNotFoundError) as exc:
converter.structure(10, Bar)
assert exc.value.type_ is Bar
@given(data(), enums_of_primitives())
def test_structuring_enums(data, enum):
"""Test structuring enums by their values."""
converter = Converter()
val = data.draw(sampled_from(list(enum)))
assert converter.structure(val.value, enum) == val
def test_structuring_unsupported():
"""Loading unsupported classes should throw."""
converter = Converter()
with raises(StructureHandlerNotFoundError) as exc:
converter.structure(1, Converter)
assert exc.value.type_ is Converter
with raises(StructureHandlerNotFoundError) as exc:
converter.structure(1, Union[int, str])
assert exc.value.type_ is Union[int, str]
def test_subclass_registration_is_honored():
"""If a subclass is registered after a superclass,
that subclass handler should be dispatched for
structure
"""
converter = Converter()
class Foo(object):
def __init__(self, value):
self.value = value
class Bar(Foo):
pass
converter.register_structure_hook(Foo, lambda obj, cls: cls("foo"))
assert converter.structure(None, Foo).value == "foo"
assert converter.structure(None, Bar).value == "foo"
converter.register_structure_hook(Bar, lambda obj, cls: cls("bar"))
assert converter.structure(None, Foo).value == "foo"
assert converter.structure(None, Bar).value == "bar"
def test_structure_union_edge_case():
converter = Converter()
@attr.s(auto_attribs=True)
class A:
a1: Any
a2: Optional[Any] = None
@attr.s(auto_attribs=True)
class B:
b1: Any
b2: Optional[Any] = None
assert converter.structure([{"a1": "foo"}, {"b1": "bar"}], List[Union[A, B]]) == [
A("foo"),
B("bar"),
]
|
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Normalization layers.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.engine.input_spec import InputSpec
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import distribution_strategy_context
from tensorflow.python.util.tf_export import tf_export
@tf_export('keras.layers.BatchNormalization')
class BatchNormalization(Layer):
"""Batch normalization layer (Ioffe and Szegedy, 2014).
Normalize the activations of the previous layer at each batch,
i.e. applies a transformation that maintains the mean activation
close to 0 and the activation standard deviation close to 1.
Arguments:
axis: Integer, the axis that should be normalized
(typically the features axis).
For instance, after a `Conv2D` layer with
`data_format="channels_first"`,
set `axis=1` in `BatchNormalization`.
momentum: Momentum for the moving average.
epsilon: Small float added to variance to avoid dividing by zero.
center: If True, add offset of `beta` to normalized tensor.
If False, `beta` is ignored.
scale: If True, multiply by `gamma`.
If False, `gamma` is not used.
When the next layer is linear (also e.g. `nn.relu`),
this can be disabled since the scaling
will be done by the next layer.
beta_initializer: Initializer for the beta weight.
gamma_initializer: Initializer for the gamma weight.
moving_mean_initializer: Initializer for the moving mean.
moving_variance_initializer: Initializer for the moving variance.
beta_regularizer: Optional regularizer for the beta weight.
gamma_regularizer: Optional regularizer for the gamma weight.
beta_constraint: Optional constraint for the beta weight.
gamma_constraint: Optional constraint for the gamma weight.
renorm: Whether to use Batch Renormalization
(https://arxiv.org/abs/1702.03275). This adds extra variables during
training. The inference is the same for either value of this parameter.
renorm_clipping: A dictionary that may map keys 'rmax', 'rmin', 'dmax' to
scalar `Tensors` used to clip the renorm correction. The correction
`(r, d)` is used as `corrected_value = normalized_value * r + d`, with
`r` clipped to [rmin, rmax], and `d` to [-dmax, dmax]. Missing rmax, rmin,
dmax are set to inf, 0, inf, respectively.
renorm_momentum: Momentum used to update the moving means and standard
deviations with renorm. Unlike `momentum`, this affects training
and should be neither too small (which would add noise) nor too large
(which would give stale estimates). Note that `momentum` is still applied
to get the means and variances for inference.
fused: if `None` or `True`, use a faster, fused implementation if possible.
If `False`, use the system recommended implementation.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
virtual_batch_size: An `int`. By default, `virtual_batch_size` is `None`,
which means batch normalization is performed across the whole batch. When
`virtual_batch_size` is not `None`, instead perform "Ghost Batch
Normalization", which creates virtual sub-batches which are each
normalized separately (with shared gamma, beta, and moving statistics).
Must divide the actual batch size during execution.
adjustment: A function taking the `Tensor` containing the (dynamic) shape of
the input tensor and returning a pair (scale, bias) to apply to the
normalized values (before gamma and beta), only during training. For
example, if axis==-1,
`adjustment = lambda shape: (
tf.random_uniform(shape[-1:], 0.93, 1.07),
tf.random_uniform(shape[-1:], -0.1, 0.1))`
will scale the normalized value by up to 7% up or down, then shift the
result by up to 0.1 (with independent scaling and bias for each feature
but shared across all examples), and finally apply gamma and/or beta. If
`None`, no adjustment is applied. Cannot be specified if
virtual_batch_size is specified.
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Same shape as input.
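  Example:
    An illustrative usage sketch (the model below is an assumption, not part of
    the original docstring): after a `Conv2D` layer with
    `data_format="channels_first"` the features axis is 1, so the layer would
    typically be used as
      model = tf.keras.Sequential([
          tf.keras.layers.Conv2D(64, 3, data_format='channels_first'),
          tf.keras.layers.BatchNormalization(axis=1),
      ])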
References:
- [Batch Normalization: Accelerating Deep Network Training by Reducing
Internal Covariate Shift](https://arxiv.org/abs/1502.03167)
"""
def __init__(self,
axis=-1,
momentum=0.99,
epsilon=1e-3,
center=True,
scale=True,
beta_initializer='zeros',
gamma_initializer='ones',
moving_mean_initializer='zeros',
moving_variance_initializer='ones',
beta_regularizer=None,
gamma_regularizer=None,
beta_constraint=None,
gamma_constraint=None,
renorm=False,
renorm_clipping=None,
renorm_momentum=0.99,
fused=None,
trainable=True,
virtual_batch_size=None,
adjustment=None,
name=None,
**kwargs):
super(BatchNormalization, self).__init__(
name=name, trainable=trainable, **kwargs)
if isinstance(axis, list):
self.axis = axis[:]
else:
self.axis = axis
self.momentum = momentum
self.epsilon = epsilon
self.center = center
self.scale = scale
self.beta_initializer = initializers.get(beta_initializer)
self.gamma_initializer = initializers.get(gamma_initializer)
self.moving_mean_initializer = initializers.get(moving_mean_initializer)
self.moving_variance_initializer = initializers.get(
moving_variance_initializer)
self.beta_regularizer = regularizers.get(beta_regularizer)
self.gamma_regularizer = regularizers.get(gamma_regularizer)
self.beta_constraint = constraints.get(beta_constraint)
self.gamma_constraint = constraints.get(gamma_constraint)
self.renorm = renorm
self.virtual_batch_size = virtual_batch_size
self.adjustment = adjustment
if fused is None:
fused = True
self.supports_masking = True
self.fused = fused
self._bessels_correction_test_only = True
if renorm:
renorm_clipping = renorm_clipping or {}
keys = ['rmax', 'rmin', 'dmax']
if set(renorm_clipping) - set(keys):
raise ValueError('renorm_clipping %s contains keys not in %s' %
(renorm_clipping, keys))
self.renorm_clipping = renorm_clipping
self.renorm_momentum = renorm_momentum
def build(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape)
if not input_shape.ndims:
raise ValueError('Input has undefined rank:', input_shape)
ndims = len(input_shape)
# Convert axis to list and resolve negatives
if isinstance(self.axis, int):
self.axis = [self.axis]
if not isinstance(self.axis, list):
raise TypeError('axis must be int or list, type given: %s'
% type(self.axis))
for idx, x in enumerate(self.axis):
if x < 0:
self.axis[idx] = ndims + x
# Validate axes
for x in self.axis:
if x < 0 or x >= ndims:
raise ValueError('Invalid axis: %d' % x)
if len(self.axis) != len(set(self.axis)):
raise ValueError('Duplicate axis: %s' % self.axis)
if self.virtual_batch_size is not None:
if self.virtual_batch_size <= 0:
raise ValueError('virtual_batch_size must be a positive integer that '
'divides the true batch size of the input Tensor')
# If using virtual batches, the first dimension must be the batch
# dimension and cannot be the batch norm axis
if 0 in self.axis:
raise ValueError('When using virtual_batch_size, the batch dimension '
'must be 0 and thus axis cannot include 0')
if self.adjustment is not None:
raise ValueError('When using virtual_batch_size, adjustment cannot '
'be specified')
if self.fused:
# Currently fused batch norm doesn't support renorm. It also only supports
# an input tensor of rank 4 and a channel dimension on axis 1 or 3.
# TODO(yaozhang): if input is not 4D, reshape it to 4D and reshape the
# output back to its original shape accordingly.
self.fused = (not self.renorm and
ndims == 4 and
self.axis in [[1], [3]] and
self.virtual_batch_size is None and
self.adjustment is None)
# TODO(chrisying): fused batch norm is currently not supported for
# multi-axis batch norm and by extension virtual batches. In some cases,
# it might be possible to use fused batch norm but would require reshaping
# the Tensor to 4D with the axis in 1 or 3 (preferred 1) which is
# particularly tricky. A compromise might be to just support the most
# common use case (turning 5D w/ virtual batch to NCHW)
if self.fused:
if self.axis == [1]:
self._data_format = 'NCHW'
elif self.axis == [3]:
self._data_format = 'NHWC'
else:
raise ValueError('Unsupported axis, fused batch norm only supports '
'axis == [1] or axis == [3]')
# Raise parameters of fp16 batch norm to fp32
if self.dtype == dtypes.float16 or self.dtype == dtypes.bfloat16:
param_dtype = dtypes.float32
else:
param_dtype = self.dtype or dtypes.float32
axis_to_dim = {x: input_shape.dims[x].value for x in self.axis}
for x in axis_to_dim:
if axis_to_dim[x] is None:
raise ValueError('Input has undefined `axis` dimension. Input shape: ',
input_shape)
self.input_spec = InputSpec(ndim=ndims, axes=axis_to_dim)
if len(axis_to_dim) == 1 and self.virtual_batch_size is None:
# Single axis batch norm (most common/default use-case)
param_shape = (list(axis_to_dim.values())[0],)
else:
# Parameter shape is the original shape but with 1 in all non-axis dims
param_shape = [axis_to_dim[i] if i in axis_to_dim
else 1 for i in range(ndims)]
if self.virtual_batch_size is not None:
# When using virtual batches, add an extra dim at index 1
param_shape.insert(1, 1)
for idx, x in enumerate(self.axis):
self.axis[idx] = x + 1 # Account for added dimension
if self.scale:
self.gamma = self.add_weight(
name='gamma',
shape=param_shape,
dtype=param_dtype,
initializer=self.gamma_initializer,
regularizer=self.gamma_regularizer,
constraint=self.gamma_constraint,
trainable=True)
else:
self.gamma = None
if self.fused:
self._gamma_const = array_ops.constant(
1.0, dtype=param_dtype, shape=param_shape)
if self.center:
self.beta = self.add_weight(
name='beta',
shape=param_shape,
dtype=param_dtype,
initializer=self.beta_initializer,
regularizer=self.beta_regularizer,
constraint=self.beta_constraint,
trainable=True)
else:
self.beta = None
if self.fused:
self._beta_const = array_ops.constant(
0.0, dtype=param_dtype, shape=param_shape)
try:
# Disable variable partitioning when creating the moving mean and variance
if hasattr(self, '_scope') and self._scope:
partitioner = self._scope.partitioner
self._scope.set_partitioner(None)
else:
partitioner = None
self.moving_mean = self.add_weight(
name='moving_mean',
shape=param_shape,
dtype=param_dtype,
initializer=self.moving_mean_initializer,
synchronization=tf_variables.VariableSynchronization.ON_READ,
trainable=False,
aggregation=tf_variables.VariableAggregation.MEAN)
self.moving_variance = self.add_weight(
name='moving_variance',
shape=param_shape,
dtype=param_dtype,
initializer=self.moving_variance_initializer,
synchronization=tf_variables.VariableSynchronization.ON_READ,
trainable=False,
aggregation=tf_variables.VariableAggregation.MEAN)
if self.renorm:
# Create variables to maintain the moving mean and standard deviation.
# These are used in training and thus are different from the moving
# averages above. The renorm variables are colocated with moving_mean
# and moving_variance.
# NOTE: below, the outer `with device` block causes the current device
# stack to be cleared. The nested ones use a `lambda` to set the desired
# device and ignore any devices that may be set by the custom getter.
def _renorm_variable(name, shape):
var = self.add_weight(
name=name,
shape=shape,
dtype=param_dtype,
initializer=init_ops.zeros_initializer(),
synchronization=tf_variables.VariableSynchronization.ON_READ,
trainable=False,
aggregation=tf_variables.VariableAggregation.MEAN)
return var
with distribution_strategy_context.get_distribution_strategy(
).colocate_vars_with(self.moving_mean):
self.renorm_mean = _renorm_variable('renorm_mean', param_shape)
self.renorm_mean_weight = _renorm_variable('renorm_mean_weight', ())
# We initialize renorm_stddev to 0, and maintain the (0-initialized)
# renorm_stddev_weight. This allows us to (1) mix the average
# stddev with the minibatch stddev early in training, and (2) compute
# the unbiased average stddev by dividing renorm_stddev by the weight.
with distribution_strategy_context.get_distribution_strategy(
).colocate_vars_with(self.moving_variance):
self.renorm_stddev = _renorm_variable('renorm_stddev', param_shape)
self.renorm_stddev_weight = _renorm_variable('renorm_stddev_weight',
())
finally:
if partitioner:
self._scope.set_partitioner(partitioner)
self.built = True
def _assign_moving_average(self, variable, value, momentum):
with ops.name_scope(None, 'AssignMovingAvg',
[variable, value, momentum]) as scope:
with ops.colocate_with(variable):
decay = ops.convert_to_tensor(1.0 - momentum, name='decay')
if decay.dtype != variable.dtype.base_dtype:
decay = math_ops.cast(decay, variable.dtype.base_dtype)
update_delta = (variable - math_ops.cast(value, variable.dtype)) * decay
return state_ops.assign_sub(variable, update_delta, name=scope)
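  # Numerical sketch (illustrative): with momentum=0.99 the assignment above computes
  # variable - (variable - value) * 0.01, i.e. the usual exponential moving average
  # update variable <- 0.99 * variable + 0.01 * value.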
def _fused_batch_norm(self, inputs, training):
"""Returns the output of fused batch norm."""
beta = self.beta if self.center else self._beta_const
gamma = self.gamma if self.scale else self._gamma_const
def _fused_batch_norm_training():
return nn.fused_batch_norm(
inputs,
gamma,
beta,
epsilon=self.epsilon,
data_format=self._data_format)
def _fused_batch_norm_inference():
return nn.fused_batch_norm(
inputs,
gamma,
beta,
mean=self.moving_mean,
variance=self.moving_variance,
epsilon=self.epsilon,
is_training=False,
data_format=self._data_format)
output, mean, variance = tf_utils.smart_cond(
training, _fused_batch_norm_training, _fused_batch_norm_inference)
if not self._bessels_correction_test_only:
# Remove Bessel's correction to be consistent with non-fused batch norm.
# Note that the variance computed by fused batch norm is
# with Bessel's correction.
sample_size = math_ops.cast(
array_ops.size(inputs) / array_ops.size(variance), variance.dtype)
factor = (sample_size - math_ops.cast(1.0, variance.dtype)) / sample_size
variance *= factor
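      # Numerical sketch (illustrative shapes): for NHWC inputs of shape (32, 28, 28, 64),
      # sample_size is 32 * 28 * 28 = 25088 per channel, so factor = (25088 - 1) / 25088
      # rescales the fused op's Bessel-corrected variance back to the biased 1/N estimate.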
training_value = tf_utils.constant_value(training)
if training_value is None:
momentum = tf_utils.smart_cond(training,
lambda: self.momentum,
lambda: 1.0)
else:
momentum = ops.convert_to_tensor(self.momentum)
if training_value or training_value is None:
mean_update = self._assign_moving_average(self.moving_mean, mean,
momentum)
variance_update = self._assign_moving_average(self.moving_variance,
variance, momentum)
self.add_update(mean_update, inputs=True)
self.add_update(variance_update, inputs=True)
return output
def _renorm_correction_and_moments(self, mean, variance, training):
"""Returns the correction and update values for renorm."""
stddev = math_ops.sqrt(variance + self.epsilon)
# Compute the average mean and standard deviation, as if they were
# initialized with this batch's moments.
mixed_renorm_mean = (self.renorm_mean +
(1. - self.renorm_mean_weight) * mean)
mixed_renorm_stddev = (self.renorm_stddev +
(1. - self.renorm_stddev_weight) * stddev)
# Compute the corrections for batch renorm.
r = stddev / mixed_renorm_stddev
d = (mean - mixed_renorm_mean) / mixed_renorm_stddev
# Ensure the corrections use pre-update moving averages.
with ops.control_dependencies([r, d]):
mean = array_ops.identity(mean)
stddev = array_ops.identity(stddev)
rmin, rmax, dmax = [self.renorm_clipping.get(key)
for key in ['rmin', 'rmax', 'dmax']]
if rmin is not None:
r = math_ops.maximum(r, rmin)
if rmax is not None:
r = math_ops.minimum(r, rmax)
if dmax is not None:
d = math_ops.maximum(d, -dmax)
d = math_ops.minimum(d, dmax)
# When not training, use r=1, d=0.
r = tf_utils.smart_cond(training, lambda: r, lambda: array_ops.ones_like(r))
d = tf_utils.smart_cond(training,
lambda: d,
lambda: array_ops.zeros_like(d))
def _update_renorm_variable(var, weight, value):
"""Updates a moving average and weight, returns the unbiased value."""
value = array_ops.identity(value)
def _do_update():
"""Updates the var and weight, returns their updated ratio."""
# Update the variables without zero debiasing. The debiasing will be
# accomplished by dividing the exponential moving average by the weight.
# For example, after a single update, the moving average would be
        # (1-decay) * value, and the weight will be 1-decay, with their ratio
# giving the value.
        # Make sure the weight is not updated until r and d have been computed.
with ops.control_dependencies([value]):
weight_value = array_ops.constant(1., dtype=weight.dtype)
new_var = self._assign_moving_average(var, value, self.renorm_momentum)
new_weight = self._assign_moving_average(weight, weight_value,
self.renorm_momentum)
# TODO(yuefengz): the updates to var and weighted can not be batched
# together if we fetch their updated values here. Consider calculating
# new values and delaying the updates.
return new_var / new_weight
def _fake_update():
return array_ops.identity(var)
return tf_utils.smart_cond(training, _do_update, _fake_update)
# TODO(yuefengz): colocate the operations
new_mean = _update_renorm_variable(self.renorm_mean,
self.renorm_mean_weight, mean)
new_stddev = _update_renorm_variable(self.renorm_stddev,
self.renorm_stddev_weight, stddev)
# Make sqrt(moving_variance + epsilon) = new_stddev.
new_variance = math_ops.square(new_stddev) - self.epsilon
return (r, d, new_mean, new_variance)
def call(self, inputs, training=None):
if training is None:
training = K.learning_phase()
in_eager_mode = context.executing_eagerly()
if self.virtual_batch_size is not None:
# Virtual batches (aka ghost batches) can be simulated by reshaping the
# Tensor and reusing the existing batch norm implementation
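      # (e.g. an input of shape [N, d1, ..., dk] is reshaped below to
      # [virtual_batch_size, N // virtual_batch_size, d1, ..., dk]).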
original_shape = [-1] + inputs.shape.as_list()[1:]
expanded_shape = [self.virtual_batch_size, -1] + original_shape[1:]
# Will cause errors if virtual_batch_size does not divide the batch size
inputs = array_ops.reshape(inputs, expanded_shape)
def undo_virtual_batching(outputs):
outputs = array_ops.reshape(outputs, original_shape)
return outputs
if self.fused:
outputs = self._fused_batch_norm(inputs, training=training)
if self.virtual_batch_size is not None:
# Currently never reaches here since fused_batch_norm does not support
# virtual batching
outputs = undo_virtual_batching(outputs)
return outputs
# Compute the axes along which to reduce the mean / variance
input_shape = inputs.get_shape()
ndims = len(input_shape)
reduction_axes = [i for i in range(ndims) if i not in self.axis]
if self.virtual_batch_size is not None:
del reduction_axes[1] # Do not reduce along virtual batch dim
# Broadcasting only necessary for single-axis batch norm where the axis is
# not the last dimension
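    # (e.g. with axis=1 on NCHW inputs, a gamma/beta vector of shape [C] is
    # reshaped to [1, C, 1, 1] so it broadcasts against the 4-D input).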
broadcast_shape = [1] * ndims
broadcast_shape[self.axis[0]] = input_shape.dims[self.axis[0]].value
def _broadcast(v):
if (v is not None and
len(v.get_shape()) != ndims and
reduction_axes != list(range(ndims - 1))):
return array_ops.reshape(v, broadcast_shape)
return v
scale, offset = _broadcast(self.gamma), _broadcast(self.beta)
def _compose_transforms(scale, offset, then_scale, then_offset):
if then_scale is not None:
scale *= then_scale
offset *= then_scale
if then_offset is not None:
offset += then_offset
return (scale, offset)
# Determine a boolean value for `training`: could be True, False, or None.
training_value = tf_utils.constant_value(training)
if training_value is not False:
if self.adjustment:
adj_scale, adj_bias = self.adjustment(array_ops.shape(inputs))
# Adjust only during training.
adj_scale = tf_utils.smart_cond(training,
lambda: adj_scale,
lambda: array_ops.ones_like(adj_scale))
adj_bias = tf_utils.smart_cond(training,
lambda: adj_bias,
lambda: array_ops.zeros_like(adj_bias))
scale, offset = _compose_transforms(adj_scale, adj_bias, scale, offset)
      # Some of the computations here are not necessary when training==False
      # but training is not a constant; keeping them makes the code simpler.
keep_dims = self.virtual_batch_size is not None or len(self.axis) > 1
mean, variance = nn.moments(inputs, reduction_axes, keep_dims=keep_dims)
moving_mean = self.moving_mean
moving_variance = self.moving_variance
mean = tf_utils.smart_cond(training,
lambda: mean,
lambda: moving_mean)
variance = tf_utils.smart_cond(training,
lambda: variance,
lambda: moving_variance)
if self.virtual_batch_size is not None:
# This isn't strictly correct since in ghost batch norm, you are
# supposed to sequentially update the moving_mean and moving_variance
# with each sub-batch. However, since the moving statistics are only
# used during evaluation, it is more efficient to just update in one
# step and should not make a significant difference in the result.
new_mean = math_ops.reduce_mean(mean, axis=1, keepdims=True)
new_variance = math_ops.reduce_mean(variance, axis=1, keepdims=True)
else:
new_mean, new_variance = mean, variance
if self.renorm:
r, d, new_mean, new_variance = self._renorm_correction_and_moments(
new_mean, new_variance, training)
# When training, the normalized values (say, x) will be transformed as
# x * gamma + beta without renorm, and (x * r + d) * gamma + beta
# = x * (r * gamma) + (d * gamma + beta) with renorm.
r = _broadcast(array_ops.stop_gradient(r, name='renorm_r'))
d = _broadcast(array_ops.stop_gradient(d, name='renorm_d'))
scale, offset = _compose_transforms(r, d, scale, offset)
def _do_update(var, value):
if in_eager_mode and not self.trainable:
return
return self._assign_moving_average(var, value, self.momentum)
mean_update = tf_utils.smart_cond(
training,
lambda: _do_update(self.moving_mean, new_mean),
lambda: self.moving_mean)
variance_update = tf_utils.smart_cond(
training,
lambda: _do_update(self.moving_variance, new_variance),
lambda: self.moving_variance)
if not context.executing_eagerly():
self.add_update(mean_update, inputs=True)
self.add_update(variance_update, inputs=True)
else:
mean, variance = self.moving_mean, self.moving_variance
mean = math_ops.cast(mean, inputs.dtype)
variance = math_ops.cast(variance, inputs.dtype)
if offset is not None:
offset = math_ops.cast(offset, inputs.dtype)
outputs = nn.batch_normalization(inputs,
_broadcast(mean),
_broadcast(variance),
offset,
scale,
self.epsilon)
# If some components of the shape got lost due to adjustments, fix that.
outputs.set_shape(input_shape)
if self.virtual_batch_size is not None:
outputs = undo_virtual_batching(outputs)
return outputs
def compute_output_shape(self, input_shape):
return input_shape
def get_config(self):
config = {
'axis': self.axis,
'momentum': self.momentum,
'epsilon': self.epsilon,
'center': self.center,
'scale': self.scale,
'beta_initializer': initializers.serialize(self.beta_initializer),
'gamma_initializer': initializers.serialize(self.gamma_initializer),
'moving_mean_initializer':
initializers.serialize(self.moving_mean_initializer),
'moving_variance_initializer':
initializers.serialize(self.moving_variance_initializer),
'beta_regularizer': regularizers.serialize(self.beta_regularizer),
'gamma_regularizer': regularizers.serialize(self.gamma_regularizer),
'beta_constraint': constraints.serialize(self.beta_constraint),
'gamma_constraint': constraints.serialize(self.gamma_constraint)
}
# Only add TensorFlow-specific parameters if they are set, so as to preserve
# model compatibility with external Keras.
if self.renorm:
config['renorm'] = True
config['renorm_clipping'] = self.renorm_clipping
config['renorm_momentum'] = self.renorm_momentum
if self.virtual_batch_size is not None:
config['virtual_batch_size'] = self.virtual_batch_size
# Note: adjustment is not serializable.
if self.adjustment is not None:
logging.warning('The `adjustment` function of this `BatchNormalization` '
'layer cannot be serialized and has been omitted from '
'the layer config. It will not be included when '
're-creating the layer from the saved config.')
base_config = super(BatchNormalization, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
|
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
import mock
from catapult_base import dependency_manager
from catapult_base import cloud_storage
from catapult_base.dependency_manager import exceptions
class DependencyManagerTest(unittest.TestCase):
def setUp(self):
self.local_paths = ['path0', 'path1', 'path2']
self.cloud_storage_info = dependency_manager.CloudStorageInfo(
'cs_bucket', 'cs_hash', 'download_path', 'cs_remote_path')
self.dep_info = dependency_manager.DependencyInfo(
'dep', 'platform', 'config_file', local_paths=self.local_paths,
cloud_storage_info=self.cloud_storage_info)
  # TODO(nednguyen): add a test that constructs
# dependency_manager.DependencyManager from a list of DependencyInfo.
def testErrorInit(self):
with self.assertRaises(ValueError):
dependency_manager.DependencyManager(None)
with self.assertRaises(ValueError):
dependency_manager.DependencyManager('config_file?')
@mock.patch('catapult_base.support_binaries.FindPath')
@mock.patch(
'catapult_base.dependency_manager.DependencyManager._GetDependencyInfo')
@mock.patch(
'catapult_base.dependency_manager.dependency_info.DependencyInfo.GetRemotePath') # pylint: disable=line-too-long
@mock.patch('catapult_base.dependency_manager.DependencyManager._LocalPath')
  def testFetchPathUninitializedDependency(
self, local_path_mock, cs_path_mock, dep_info_mock, sb_find_path_mock):
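    # Note: mock.patch decorators are applied bottom-up, so the mock arguments
    # above appear in the reverse order of the decorators.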
dep_manager = dependency_manager.DependencyManager([])
self.assertFalse(local_path_mock.call_args)
self.assertFalse(cs_path_mock.call_args)
self.assertFalse(sb_find_path_mock.call_args)
sb_path = 'sb_path'
local_path = 'local_path'
cs_path = 'cs_path'
local_path_mock.return_value = local_path
cs_path_mock.return_value = cs_path
sb_find_path_mock.return_value = sb_path
dep_info_mock.return_value = None
# Empty lookup_dict
with self.assertRaises(exceptions.NoPathFoundError):
dep_manager.FetchPath('dep', 'plat_arch_x86')
dep_info_mock.reset_mock()
found_path = dep_manager.FetchPath(
'dep', 'plat_arch_x86', try_support_binaries=True)
self.assertEqual(sb_path, found_path)
self.assertFalse(local_path_mock.call_args)
self.assertFalse(cs_path_mock.call_args)
dep_info_mock.assert_called_once_with('dep', 'plat_arch_x86')
sb_find_path_mock.assert_called_once_with('dep', 'arch_x86', 'plat')
local_path_mock.reset_mock()
cs_path_mock.reset_mock()
sb_find_path_mock.reset_mock()
dep_info_mock.reset_mock()
# Non-empty lookup dict that doesn't contain the dependency we're looking
# for.
dep_manager._lookup_dict = {'dep1': mock.MagicMock(),
'dep2': mock.MagicMock()}
with self.assertRaises(exceptions.NoPathFoundError):
dep_manager.FetchPath('dep', 'plat_arch_x86')
dep_info_mock.reset_mock()
found_path = dep_manager.FetchPath(
'dep', 'plat_arch_x86', try_support_binaries=True)
self.assertEqual(sb_path, found_path)
self.assertFalse(local_path_mock.call_args)
self.assertFalse(cs_path_mock.call_args)
dep_info_mock.assert_called_once_with('dep', 'plat_arch_x86')
sb_find_path_mock.assert_called_once_with('dep', 'arch_x86', 'plat')
local_path_mock.reset_mock()
cs_path_mock.reset_mock()
sb_find_path_mock.reset_mock()
@mock.patch('os.path')
@mock.patch('catapult_base.support_binaries.FindPath')
@mock.patch(
'catapult_base.dependency_manager.DependencyManager._GetDependencyInfo')
@mock.patch(
'catapult_base.dependency_manager.dependency_info.DependencyInfo.GetRemotePath') # pylint: disable=line-too-long
@mock.patch('catapult_base.dependency_manager.DependencyManager._LocalPath')
def testFetchPathLocalFile(self, local_path_mock, cs_path_mock, dep_info_mock,
sb_find_path_mock, path_mock):
dep_manager = dependency_manager.DependencyManager([])
self.assertFalse(local_path_mock.call_args)
self.assertFalse(cs_path_mock.call_args)
self.assertFalse(sb_find_path_mock.call_args)
sb_path = 'sb_path'
local_path = 'local_path'
cs_path = 'cs_path'
dep_info = self.dep_info
local_path_mock.return_value = local_path
cs_path_mock.return_value = cs_path
sb_find_path_mock.return_value = sb_path
# The DependencyInfo returned should be passed through to LocalPath.
dep_info_mock.return_value = dep_info
# Non-empty lookup dict that contains the dependency we're looking for.
# Local path exists.
dep_manager._lookup_dict = {'dep1': mock.MagicMock(),
'dep2': mock.MagicMock()}
path_mock.exists.return_value = True
found_path = dep_manager.FetchPath('dep1', 'plat')
self.assertEqual(local_path, found_path)
local_path_mock.assert_called_with(self.dep_info)
dep_info_mock.assert_called_once_with('dep1', 'plat')
self.assertFalse(cs_path_mock.call_args)
self.assertFalse(sb_find_path_mock.call_args)
# If the below assert fails, the ordering assumption that determined the
# path_mock return values is incorrect, and should be updated.
path_mock.exists.assert_called_once_with('local_path')
local_path_mock.reset_mock()
cs_path_mock.reset_mock()
sb_find_path_mock.reset_mock()
dep_info_mock.reset_mock()
@mock.patch('os.path')
@mock.patch('catapult_base.support_binaries.FindPath')
@mock.patch(
'catapult_base.dependency_manager.dependency_info.DependencyInfo.GetRemotePath') # pylint: disable=line-too-long
@mock.patch('catapult_base.dependency_manager.DependencyManager._LocalPath')
def testFetchPathRemoteFile(
self, local_path_mock, cs_path_mock, sb_find_path_mock, path_mock):
dep_manager = dependency_manager.DependencyManager([])
self.assertFalse(local_path_mock.call_args)
self.assertFalse(cs_path_mock.call_args)
self.assertFalse(sb_find_path_mock.call_args)
local_path = 'local_path'
cs_path = 'cs_path'
cs_path_mock.return_value = cs_path
# Non-empty lookup dict that contains the dependency we're looking for.
# Local path doesn't exist, but cloud_storage_path is downloaded.
dep_manager._lookup_dict = {'dep': {'platform' : self.dep_info,
'plat1': mock.MagicMock()},
'dep2': {'plat2': mock.MagicMock()}}
path_mock.exists.side_effect = [False, True]
local_path_mock.return_value = local_path
found_path = dep_manager.FetchPath('dep', 'platform')
self.assertEqual(cs_path, found_path)
local_path_mock.assert_called_with(self.dep_info)
self.assertFalse(sb_find_path_mock.call_args)
# If the below assert fails, the ordering assumption that determined the
# path_mock return values is incorrect, and should be updated.
path_mock.exists.assert_has_calls([mock.call(local_path),
mock.call(cs_path)], any_order=False)
local_path_mock.reset_mock()
cs_path_mock.reset_mock()
sb_find_path_mock.reset_mock()
# Non-empty lookup dict that contains the dependency we're looking for.
# Local path isn't found, but cloud_storage_path is downloaded.
dep_manager._lookup_dict = {'dep': {'platform' : self.dep_info,
'plat1': mock.MagicMock()},
'dep2': {'plat2': mock.MagicMock()}}
path_mock.exists.side_effect = [True]
local_path_mock.return_value = None
found_path = dep_manager.FetchPath('dep', 'platform')
self.assertEqual(cs_path, found_path)
local_path_mock.assert_called_with(self.dep_info)
self.assertFalse(sb_find_path_mock.call_args)
# If the below assert fails, the ordering assumption that determined the
# path_mock return values is incorrect, and should be updated.
path_mock.exists.assert_has_calls([mock.call(local_path),
mock.call(cs_path)], any_order=False)
@mock.patch('catapult_base.support_binaries.FindPath')
@mock.patch(
'catapult_base.dependency_manager.dependency_info.DependencyInfo.GetRemotePath') # pylint: disable=line-too-long
@mock.patch('catapult_base.dependency_manager.DependencyManager._LocalPath')
def testFetchPathError(
self, local_path_mock, cs_path_mock, sb_find_path_mock):
dep_manager = dependency_manager.DependencyManager([])
self.assertFalse(local_path_mock.call_args)
self.assertFalse(cs_path_mock.call_args)
self.assertFalse(sb_find_path_mock.call_args)
local_path_mock.return_value = None
cs_path_mock.return_value = None
dep_manager._lookup_dict = {'dep': {'platform' : self.dep_info,
'plat1': mock.MagicMock()},
'dep2': {'plat2': mock.MagicMock()}}
# Non-empty lookup dict that contains the dependency we're looking for.
# Local path doesn't exist, and cloud_storage path wasn't successfully
# found.
self.assertRaises(exceptions.NoPathFoundError,
dep_manager.FetchPath, 'dep', 'platform')
cs_path_mock.side_effect = cloud_storage.CredentialsError
self.assertRaises(cloud_storage.CredentialsError,
dep_manager.FetchPath, 'dep', 'platform')
cs_path_mock.side_effect = cloud_storage.CloudStorageError
self.assertRaises(cloud_storage.CloudStorageError,
dep_manager.FetchPath, 'dep', 'platform')
cs_path_mock.side_effect = cloud_storage.PermissionError
self.assertRaises(cloud_storage.PermissionError,
dep_manager.FetchPath, 'dep', 'platform')
@mock.patch('os.path')
@mock.patch('catapult_base.support_binaries.FindLocallyBuiltPath')
@mock.patch(
'catapult_base.dependency_manager.DependencyManager._GetDependencyInfo')
@mock.patch('catapult_base.dependency_manager.DependencyManager._LocalPath')
def testLocalPath(self, local_path_mock, dep_info_mock, sb_find_path_mock,
path_mock):
dep_manager = dependency_manager.DependencyManager([])
self.assertFalse(local_path_mock.call_args)
self.assertFalse(sb_find_path_mock.call_args)
sb_path = 'sb_path'
local_path = 'local_path'
dep_info = 'dep_info'
local_path_mock.return_value = local_path
sb_find_path_mock.return_value = sb_path
# GetDependencyInfo should return None when missing from the lookup dict.
dep_info_mock.return_value = None
# Empty lookup_dict
with self.assertRaises(exceptions.NoPathFoundError):
dep_manager.LocalPath('dep', 'plat')
dep_info_mock.reset_mock()
found_path = dep_manager.LocalPath(
'dep', 'plat', try_support_binaries=True)
self.assertEqual(sb_path, found_path)
self.assertFalse(local_path_mock.call_args)
sb_find_path_mock.assert_called_once_with('dep')
dep_info_mock.assert_called_once_with('dep', 'plat')
local_path_mock.reset_mock()
sb_find_path_mock.reset_mock()
dep_info_mock.reset_mock()
# Non-empty lookup dict that doesn't contain the dependency we're looking
# for.
dep_manager._lookup_dict = {'dep1': mock.MagicMock(),
'dep2': mock.MagicMock()}
with self.assertRaises(exceptions.NoPathFoundError):
dep_manager.LocalPath('dep', 'plat')
dep_info_mock.reset_mock()
found_path = dep_manager.LocalPath(
'dep', 'plat', try_support_binaries=True)
self.assertEqual(sb_path, found_path)
self.assertFalse(local_path_mock.call_args)
sb_find_path_mock.assert_called_once_with('dep')
dep_info_mock.assert_called_once_with('dep', 'plat')
local_path_mock.reset_mock()
sb_find_path_mock.reset_mock()
dep_info_mock.reset_mock()
# The DependencyInfo returned should be passed through to LocalPath.
dep_info_mock.return_value = dep_info
# Non-empty lookup dict that contains the dependency we're looking for.
# Local path exists.
dep_manager._lookup_dict = {'dep1': mock.MagicMock(),
'dep2': mock.MagicMock()}
path_mock.exists.return_value = True
found_path = dep_manager.LocalPath('dep1', 'plat')
self.assertEqual(local_path, found_path)
local_path_mock.assert_called_with('dep_info')
self.assertFalse(sb_find_path_mock.call_args)
# If the below assert fails, the ordering assumption that determined the
# path_mock return values is incorrect, and should be updated.
path_mock.exists.assert_called_once_with('local_path')
dep_info_mock.assert_called_once_with('dep1', 'plat')
local_path_mock.reset_mock()
sb_find_path_mock.reset_mock()
dep_info_mock.reset_mock()
# Non-empty lookup dict that contains the dependency we're looking for.
# Local path is found but doesn't exist.
dep_manager._lookup_dict = {'dep1': mock.MagicMock(),
'dep2': mock.MagicMock()}
path_mock.exists.return_value = False
local_path_mock.return_value = local_path
self.assertRaises(exceptions.NoPathFoundError,
dep_manager.LocalPath, 'dep1', 'plat')
# Non-empty lookup dict that contains the dependency we're looking for.
# Local path isn't found.
dep_manager._lookup_dict = {'dep1': mock.MagicMock(),
'dep2': mock.MagicMock()}
local_path_mock.return_value = None
self.assertRaises(exceptions.NoPathFoundError,
dep_manager.LocalPath, 'dep1', 'plat')
def testInitialUpdateDependencies(self):
dep_manager = dependency_manager.DependencyManager([])
# Empty BaseConfig.
dep_manager._lookup_dict = {}
base_config_mock = mock.MagicMock(spec=dependency_manager.BaseConfig)
base_config_mock.IterDependencyInfo.return_value = iter([])
dep_manager._UpdateDependencies(base_config_mock)
self.assertFalse(dep_manager._lookup_dict)
# One dependency/platform in a BaseConfig.
dep_manager._lookup_dict = {}
base_config_mock = mock.MagicMock(spec=dependency_manager.BaseConfig)
dep_info = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep = 'dependency'
plat = 'platform'
dep_info.dependency = dep
dep_info.platform = plat
base_config_mock.IterDependencyInfo.return_value = iter([dep_info])
expected_lookup_dict = {dep: {plat: dep_info}}
dep_manager._UpdateDependencies(base_config_mock)
self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
self.assertFalse(dep_info.Update.called)
# One dependency multiple platforms in a BaseConfig.
dep_manager._lookup_dict = {}
base_config_mock = mock.MagicMock(spec=dependency_manager.BaseConfig)
dep = 'dependency'
plat1 = 'platform1'
plat2 = 'platform2'
dep_info1 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info1.dependency = dep
dep_info1.platform = plat1
dep_info2 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info2.dependency = dep
dep_info2.platform = plat2
base_config_mock.IterDependencyInfo.return_value = iter([dep_info1,
dep_info2])
expected_lookup_dict = {dep: {plat1: dep_info1,
plat2: dep_info2}}
dep_manager._UpdateDependencies(base_config_mock)
self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
self.assertFalse(dep_info1.Update.called)
self.assertFalse(dep_info2.Update.called)
# Multiple dependencies, multiple platforms in a BaseConfig.
dep_manager._lookup_dict = {}
base_config_mock = mock.MagicMock(spec=dependency_manager.BaseConfig)
dep1 = 'dependency1'
dep2 = 'dependency2'
plat1 = 'platform1'
plat2 = 'platform2'
dep_info1 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info1.dependency = dep1
dep_info1.platform = plat1
dep_info2 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info2.dependency = dep1
dep_info2.platform = plat2
dep_info3 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info3.dependency = dep2
dep_info3.platform = plat2
base_config_mock.IterDependencyInfo.return_value = iter(
[dep_info1, dep_info2, dep_info3])
expected_lookup_dict = {dep1: {plat1: dep_info1,
plat2: dep_info2},
dep2: {plat2: dep_info3}}
dep_manager._UpdateDependencies(base_config_mock)
self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
self.assertFalse(dep_info1.Update.called)
self.assertFalse(dep_info2.Update.called)
self.assertFalse(dep_info3.Update.called)
def testFollowupUpdateDependenciesNoOverlap(self):
dep_manager = dependency_manager.DependencyManager([])
dep = 'dependency'
dep1 = 'dependency1'
dep2 = 'dependency2'
dep3 = 'dependency3'
plat1 = 'platform1'
plat2 = 'platform2'
plat3 = 'platform3'
dep_info_a = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info_a.dependency = dep1
dep_info_a.platform = plat1
dep_info_b = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info_b.dependency = dep1
dep_info_b.platform = plat2
dep_info_c = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info_c.dependency = dep
dep_info_c.platform = plat1
start_lookup_dict = {dep: {plat1: dep_info_a,
plat2: dep_info_b},
dep1: {plat1: dep_info_c}}
base_config_mock = mock.MagicMock(spec=dependency_manager.BaseConfig)
# Empty BaseConfig.
dep_manager._lookup_dict = start_lookup_dict.copy()
base_config_mock.IterDependencyInfo.return_value = iter([])
dep_manager._UpdateDependencies(base_config_mock)
self.assertEqual(start_lookup_dict, dep_manager._lookup_dict)
# One dependency/platform in a BaseConfig.
dep_manager._lookup_dict = start_lookup_dict.copy()
dep_info = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info.dependency = dep3
    dep_info.platform = plat3
base_config_mock.IterDependencyInfo.return_value = iter([dep_info])
expected_lookup_dict = {dep: {plat1: dep_info_a,
plat2: dep_info_b},
dep1: {plat1: dep_info_c},
dep3: {plat3: dep_info}}
dep_manager._UpdateDependencies(base_config_mock)
self.assertItemsEqual(expected_lookup_dict, dep_manager._lookup_dict)
self.assertFalse(dep_info.Update.called)
self.assertFalse(dep_info_a.Update.called)
self.assertFalse(dep_info_b.Update.called)
self.assertFalse(dep_info_c.Update.called)
# One dependency multiple platforms in a BaseConfig.
dep_manager._lookup_dict = start_lookup_dict.copy()
dep_info1 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info1.dependency = dep2
dep_info1.platform = plat1
dep_info2 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info2.dependency = dep2
dep_info2.platform = plat2
base_config_mock.IterDependencyInfo.return_value = iter([dep_info1,
dep_info2])
expected_lookup_dict = {dep: {plat1: dep_info_a,
plat2: dep_info_b},
dep1: {plat1: dep_info_c},
dep2: {plat1: dep_info1,
plat2: dep_info2}}
dep_manager._UpdateDependencies(base_config_mock)
self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
self.assertFalse(dep_info1.Update.called)
self.assertFalse(dep_info2.Update.called)
self.assertFalse(dep_info_a.Update.called)
self.assertFalse(dep_info_b.Update.called)
self.assertFalse(dep_info_c.Update.called)
# Multiple dependencies, multiple platforms in a BaseConfig.
dep_manager._lookup_dict = start_lookup_dict.copy()
dep1 = 'dependency1'
plat1 = 'platform1'
plat2 = 'platform2'
dep_info1 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info1.dependency = dep2
dep_info1.platform = plat1
dep_info2 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info2.dependency = dep2
dep_info2.platform = plat2
dep_info3 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info3.dependency = dep3
dep_info3.platform = plat2
base_config_mock.IterDependencyInfo.return_value = iter(
[dep_info1, dep_info2, dep_info3])
expected_lookup_dict = {dep: {plat1: dep_info_a,
plat2: dep_info_b},
dep1: {plat1: dep_info_c},
dep2: {plat1: dep_info1,
plat2: dep_info2},
dep3: {plat2: dep_info3}}
dep_manager._UpdateDependencies(base_config_mock)
self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
self.assertFalse(dep_info1.Update.called)
self.assertFalse(dep_info2.Update.called)
self.assertFalse(dep_info3.Update.called)
self.assertFalse(dep_info_a.Update.called)
self.assertFalse(dep_info_b.Update.called)
self.assertFalse(dep_info_c.Update.called)
# Ensure the testing data wasn't corrupted.
self.assertEqual(start_lookup_dict,
{dep: {plat1: dep_info_a,
plat2: dep_info_b},
dep1: {plat1: dep_info_c}})
def testFollowupUpdateDependenciesWithCollisions(self):
dep_manager = dependency_manager.DependencyManager([])
dep = 'dependency'
dep1 = 'dependency1'
dep2 = 'dependency2'
plat1 = 'platform1'
plat2 = 'platform2'
dep_info_a = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info_a.dependency = dep1
dep_info_a.platform = plat1
dep_info_b = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info_b.dependency = dep1
dep_info_b.platform = plat2
dep_info_c = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info_c.dependency = dep
dep_info_c.platform = plat1
start_lookup_dict = {dep: {plat1: dep_info_a,
plat2: dep_info_b},
dep1: {plat1: dep_info_c}}
base_config_mock = mock.MagicMock(spec=dependency_manager.BaseConfig)
# One dependency/platform.
dep_manager._lookup_dict = start_lookup_dict.copy()
dep_info = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info.dependency = dep
dep_info.platform = plat1
base_config_mock.IterDependencyInfo.return_value = iter([dep_info])
expected_lookup_dict = {dep: {plat1: dep_info_a,
plat2: dep_info_b},
dep1: {plat1: dep_info_c}}
dep_manager._UpdateDependencies(base_config_mock)
self.assertItemsEqual(expected_lookup_dict, dep_manager._lookup_dict)
dep_info_a.Update.assert_called_once_with(dep_info)
self.assertFalse(dep_info.Update.called)
self.assertFalse(dep_info_b.Update.called)
self.assertFalse(dep_info_c.Update.called)
dep_info_a.reset_mock()
dep_info_b.reset_mock()
dep_info_c.reset_mock()
# One dependency multiple platforms in a BaseConfig.
dep_manager._lookup_dict = start_lookup_dict.copy()
dep_info1 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info1.dependency = dep1
dep_info1.platform = plat1
dep_info2 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info2.dependency = dep2
dep_info2.platform = plat2
base_config_mock.IterDependencyInfo.return_value = iter([dep_info1,
dep_info2])
expected_lookup_dict = {dep: {plat1: dep_info_a,
plat2: dep_info_b},
dep1: {plat1: dep_info_c},
dep2: {plat2: dep_info2}}
dep_manager._UpdateDependencies(base_config_mock)
self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
self.assertFalse(dep_info1.Update.called)
self.assertFalse(dep_info2.Update.called)
self.assertFalse(dep_info_a.Update.called)
self.assertFalse(dep_info_b.Update.called)
dep_info_c.Update.assert_called_once_with(dep_info1)
dep_info_a.reset_mock()
dep_info_b.reset_mock()
dep_info_c.reset_mock()
# Multiple dependencies, multiple platforms in a BaseConfig.
dep_manager._lookup_dict = start_lookup_dict.copy()
dep1 = 'dependency1'
plat1 = 'platform1'
plat2 = 'platform2'
dep_info1 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info1.dependency = dep
dep_info1.platform = plat1
dep_info2 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info2.dependency = dep1
dep_info2.platform = plat1
dep_info3 = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info3.dependency = dep2
dep_info3.platform = plat2
base_config_mock.IterDependencyInfo.return_value = iter(
[dep_info1, dep_info2, dep_info3])
expected_lookup_dict = {dep: {plat1: dep_info_a,
plat2: dep_info_b},
dep1: {plat1: dep_info_c},
dep2: {plat2: dep_info3}}
dep_manager._UpdateDependencies(base_config_mock)
self.assertEqual(expected_lookup_dict, dep_manager._lookup_dict)
self.assertFalse(dep_info1.Update.called)
self.assertFalse(dep_info2.Update.called)
self.assertFalse(dep_info3.Update.called)
self.assertFalse(dep_info_b.Update.called)
dep_info_a.Update.assert_called_once_with(dep_info1)
dep_info_c.Update.assert_called_once_with(dep_info2)
# Collision error.
dep_manager._lookup_dict = start_lookup_dict.copy()
dep_info = mock.MagicMock(spec=dependency_manager.DependencyInfo)
dep_info.dependency = dep
dep_info.platform = plat1
base_config_mock.IterDependencyInfo.return_value = iter([dep_info])
dep_info_a.Update.side_effect = ValueError
self.assertRaises(ValueError,
dep_manager._UpdateDependencies, base_config_mock)
# Ensure the testing data wasn't corrupted.
self.assertEqual(start_lookup_dict,
{dep: {plat1: dep_info_a,
plat2: dep_info_b},
dep1: {plat1: dep_info_c}})
def testGetDependencyInfo(self):
dep_manager = dependency_manager.DependencyManager([])
self.assertFalse(dep_manager._lookup_dict)
# No dependencies in the dependency manager.
self.assertEqual(None, dep_manager._GetDependencyInfo('missing_dep',
'missing_plat'))
dep_manager._lookup_dict = {'dep1': {'plat1': 'dep_info11',
'plat2': 'dep_info12',
'plat3': 'dep_info13'},
'dep2': {'plat1': 'dep_info11',
'plat2': 'dep_info21',
'plat3': 'dep_info23',
'default': 'dep_info2d'},
'dep3': {'plat1': 'dep_info31',
'plat2': 'dep_info32',
'default': 'dep_info3d'}}
# Dependency not in the dependency manager.
self.assertEqual(None, dep_manager._GetDependencyInfo(
'missing_dep', 'missing_plat'))
# Dependency in the dependency manager, but not the platform. No default.
self.assertEqual(None, dep_manager._GetDependencyInfo(
'dep1', 'missing_plat'))
# Dependency in the dependency manager, but not the platform, but a default
# exists.
self.assertEqual('dep_info2d', dep_manager._GetDependencyInfo(
'dep2', 'missing_plat'))
# Dependency and platform in the dependency manager. A default exists.
self.assertEqual('dep_info23', dep_manager._GetDependencyInfo(
'dep2', 'plat3'))
# Dependency and platform in the dependency manager. No default exists.
self.assertEqual('dep_info12', dep_manager._GetDependencyInfo(
'dep1', 'plat2'))
@mock.patch('os.path.exists')
def testLocalPathHelper(self, exists_mock):
dep_info = mock.MagicMock(spec=dependency_manager.DependencyInfo)
# There is no local path for the given dependency.
    dep_info.local_paths = []
self.assertEqual(None,
dependency_manager.DependencyManager._LocalPath(dep_info))
# There is a local path for the given dependency, but it doesn't exist.
exists_mock.side_effect = [False]
    dep_info.local_paths = ['local_path0']
self.assertEqual(None,
dependency_manager.DependencyManager._LocalPath(dep_info))
exists_mock.assert_called_once_with('local_path0')
exists_mock.reset_mock()
# There is a local path for the given dependency, and it does exist.
exists_mock.side_effect = [True]
    dep_info.local_paths = ['local_path0']
self.assertEqual('local_path0',
dependency_manager.DependencyManager._LocalPath(dep_info))
exists_mock.assert_called_once_with('local_path0')
exists_mock.reset_mock()
# There are multiple local paths for the given dependency, and the first one
# exists.
exists_mock.side_effect = [True]
    dep_info.local_paths = ['local_path0', 'local_path1', 'local_path2']
self.assertEqual('local_path0',
dependency_manager.DependencyManager._LocalPath(dep_info))
exists_mock.assert_called_once_with('local_path0')
exists_mock.reset_mock()
# There are multiple local paths for the given dependency, and the first one
# doesn't exist but the second one does.
exists_mock.side_effect = [False, True]
    dep_info.local_paths = ['local_path0', 'local_path1', 'local_path2']
self.assertEqual('local_path1',
dependency_manager.DependencyManager._LocalPath(dep_info))
expected_calls = [mock.call('local_path0'), mock.call('local_path1')]
exists_mock.assert_has_calls(expected_calls, any_order=False)
exists_mock.reset_mock()
# There are multiple local paths for the given dependency, and the first and
# second ones don't exist but the third one does.
exists_mock.side_effect = [False, False, True]
    dep_info.local_paths = ['local_path0', 'local_path1', 'local_path2']
self.assertEqual('local_path2',
dependency_manager.DependencyManager._LocalPath(dep_info))
expected_calls = [mock.call('local_path0'), mock.call('local_path1'),
mock.call('local_path2')]
exists_mock.assert_has_calls(expected_calls, any_order=False)
exists_mock.reset_mock()
# There are multiple local paths for the given dependency, but none of them
# exist.
exists_mock.side_effect = [False, False, False]
    dep_info.local_paths = ['local_path0', 'local_path1', 'local_path2']
self.assertEqual(None,
dependency_manager.DependencyManager._LocalPath(dep_info))
expected_calls = [mock.call('local_path0'), mock.call('local_path1'),
mock.call('local_path2')]
exists_mock.assert_has_calls(expected_calls, any_order=False)
exists_mock.reset_mock()
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2014 Cloudwatt
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Edouard Thuleau, Cloudwatt.
"""
Script to start or destroy a Linux network namespace plugged
between two virtual networks, so that an application can be
executed in the context of a virtualized network.
"""
from __future__ import print_function
__docformat__ = "restructuredtext en"
import argparse
import netaddr
import subprocess
import sys
import uuid
import requests
import json
from linux import ip_lib
def validate_uuid(val):
    try:
        if str(uuid.UUID(val)) == val:
            return val
    except (TypeError, ValueError, AttributeError):
        pass
    raise ValueError('Invalid UUID format')
class NetnsManager(object):
SNAT_RT_TABLES_ID = 42
DEV_NAME_LEN = 14
NETNS_PREFIX = 'vrouter-'
LEFT_DEV_PREFIX = 'int-'
RIGH_DEV_PREFIX = 'gw-'
TAP_PREFIX = 'veth'
PORT_TYPE = 'NameSpacePort'
LBAAS_PROCESS = 'haproxy'
BASE_URL = "http://localhost:9091/port"
HEADERS = {'content-type': 'application/json'}
def __init__(self, vm_uuid, nic_left, nic_right, other_nics=None,
root_helper='sudo', cfg_file=None, update=False,
pool_id=None, gw_ip=None, namespace_name=None):
self.vm_uuid = vm_uuid
if namespace_name is None:
self.namespace = self.NETNS_PREFIX + self.vm_uuid
else:
self.namespace = namespace_name
if pool_id:
self.namespace = self.namespace + ":" + pool_id
self.nic_left = nic_left
self.nic_right = nic_right
self.root_helper = root_helper
self.nics = other_nics or []
if self.nic_left:
self.nic_left['name'] = (self.LEFT_DEV_PREFIX +
self.nic_left['uuid'])[:self.DEV_NAME_LEN]
self.nics.append(self.nic_left)
if self.nic_right:
self.nic_right['name'] = (self.RIGH_DEV_PREFIX +
self.nic_right['uuid'])[:self.DEV_NAME_LEN]
self.nics.append(self.nic_right)
self.ip_ns = ip_lib.IPWrapper(root_helper=self.root_helper,
namespace=self.namespace)
self.cfg_file = cfg_file
self.update = update
self.gw_ip = gw_ip
def _get_tap_name(self, uuid_str):
return (self.TAP_PREFIX + uuid_str)[:self.DEV_NAME_LEN]
def is_netns_already_exists(self):
return self.ip_ns.netns.exists(self.namespace)
def create(self):
ip = ip_lib.IPWrapper(self.root_helper)
ip.ensure_namespace(self.namespace)
for nic in self.nics:
self._create_interfaces(ip, nic)
def set_snat(self):
if not self.ip_ns.netns.exists(self.namespace):
            raise ValueError('Need to create the network namespace before '
                             'setting up the SNAT')
self.ip_ns.netns.execute(['sysctl', '-w', 'net.ipv4.ip_forward=1'])
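        # Masquerade all traffic leaving through the right (gateway-side)
        # interface, which is also the default route in the main table.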
self.ip_ns.netns.execute(['iptables', '-t', 'nat', '-F'])
self.ip_ns.netns.execute(['iptables', '-t', 'nat', '-A', 'POSTROUTING',
'-s', '0.0.0.0/0', '-o',
self.nic_right['name'], '-j', 'MASQUERADE'])
self.ip_ns.netns.execute(['ip', 'route', 'replace', 'default', 'dev',
self.nic_right['name']])
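        # Traffic entering from the right interface is steered (via the rule
        # added below) to a dedicated routing table whose default route goes
        # back out of the left interface via the gateway IP.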
self.ip_ns.netns.execute(['ip', 'route', 'replace', 'default', 'dev',
self.nic_left['name'], 'table',
self.SNAT_RT_TABLES_ID])
try:
self.ip_ns.netns.execute(['ip', 'rule', 'del', 'iif',
str(self.nic_right['name']), 'table',
self.SNAT_RT_TABLES_ID])
except RuntimeError:
pass
self.ip_ns.netns.execute(['ip', 'rule', 'add', 'iif',
str(self.nic_right['name']), 'table',
self.SNAT_RT_TABLES_ID])
self.ip_ns.netns.execute(['ip', 'route', 'del', 'default', 'table',
self.SNAT_RT_TABLES_ID])
self.ip_ns.netns.execute(['ip', 'route', 'add', 'default', 'table',
self.SNAT_RT_TABLES_ID, 'via', self.gw_ip,
'dev', str(self.nic_left['name'])])
def _get_lbaas_pid(self):
cmd = """ps aux | grep \'%(process)s -f %(file)s\' | grep -v grep
""" % {'process':self.LBAAS_PROCESS, 'file':self.cfg_file}
try:
if "check_output" not in dir(subprocess):
s = _check_output(cmd)
else:
s = subprocess.check_output(cmd, shell=True)
except subprocess.CalledProcessError:
return None
words = s.split()
pid = int(words[1])
return pid
def set_lbaas(self):
if not self.ip_ns.netns.exists(self.namespace):
            raise ValueError('Need to create the network namespace before '
                             'setting up the lbaas')
pid_file = self.cfg_file + ".pid"
pid = self._get_lbaas_pid()
if (self.update is False):
if pid is not None:
self.release_lbaas()
self.ip_ns.netns.execute([self.LBAAS_PROCESS, '-f', self.cfg_file, '-D',
'-p', pid_file])
self.ip_ns.netns.execute(['route', 'add', 'default', 'gw', self.gw_ip])
else:
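            # Reload: start a new haproxy with -sf so the old process (pid)
            # finishes serving its existing connections before exiting.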
if pid is not None:
self.ip_ns.netns.execute([self.LBAAS_PROCESS, '-f', self.cfg_file, '-D', '-p', pid_file, '-sf', pid])
else:
self.ip_ns.netns.execute([self.LBAAS_PROCESS, '-f', self.cfg_file, '-D',
'-p', pid_file])
try:
self.ip_ns.netns.execute(['route', 'add', 'default', 'gw', self.gw_ip])
except RuntimeError:
pass
def release_lbaas(self):
if not self.ip_ns.netns.exists(self.namespace):
raise ValueError('Need to create the network namespace before '
                             'releasing lbaas')
pid = self._get_lbaas_pid()
if pid is not None:
cmd = """kill -9 %(pid)s""" % {'pid':pid}
try:
if "check_output" not in dir(subprocess):
s = _check_output(cmd)
else:
s = subprocess.check_output(cmd, shell=True)
print ("Haproxy process with pid %d config file %s killed" %(pid, self.cfg_file), file=sys.stderr)
except subprocess.CalledProcessError:
print ("SIGKILL Error for pid %d %s" %(pid, self.cfg_file), file=sys.stderr)
try:
self.ip_ns.netns.execute(['route', 'del', 'default'])
except RuntimeError:
pass
def destroy(self):
if not self.ip_ns.netns.exists(self.namespace):
raise ValueError('Namespace %s does not exist' % self.namespace)
for device in self.ip_ns.get_devices(exclude_loopback=True):
ip_lib.IPDevice(device.name,
self.root_helper,
self.namespace).link.delete()
self.ip_ns.netns.delete(self.namespace)
def plug_namespace_interface(self):
for nic in self.nics:
self._add_port_to_agent(nic,
display_name='NetNS-%s-%s-interface'
% (self.vm_uuid, nic['name']))
def unplug_namespace_interface(self):
for nic in self.nics:
self._delete_port_to_agent(nic)
def _create_interfaces(self, ip, nic):
if ip_lib.device_exists(nic['name'],
self.root_helper,
namespace=self.namespace):
ip_lib.IPDevice(nic['name'],
self.root_helper,
self.namespace).link.delete()
root_dev, ns_dev = ip.add_veth(self._get_tap_name(nic['uuid']),
nic['name'],
namespace2=self.namespace)
if nic['mac']:
ns_dev.link.set_address(str(nic['mac']))
ns_dev.link.set_up()
root_dev.link.set_up()
if nic['ip']:
ip = nic['ip']
ns_dev.addr.flush()
ns_dev.addr.add(ip.version, str(ip), str(ip.broadcast))
else:
#TODO(ethuleau): start DHCP client
raise NotImplementedError
# disable reverse path filtering
self.ip_ns.netns.execute(['sysctl', '-w',
'net.ipv4.conf.%s.rp_filter=2' % nic['name']]
)
def _add_port_to_agent(self, nic, display_name=None):
if self.PORT_TYPE == "NovaVMPort":
port_type_value = 0
elif self.PORT_TYPE == "NameSpacePort":
port_type_value = 1
payload = {"ip-address": str(nic['ip'].ip), "vlan-id": -1,
"display-name": display_name, "id": nic['uuid'],
"instance-id": self.vm_uuid, "ip6-address": '',
"isolated-vlan-id": -1,
"system-name": self._get_tap_name(nic['uuid']),
"vn-id": '', "vm-project-id": '',
"type": port_type_value, "mac-address": str(nic['mac'])}
json_dump = json.dumps(payload)
requests.post(self.BASE_URL, data=json_dump, headers=self.HEADERS)
def _delete_port_to_agent(self, nic):
url = self.BASE_URL + "/" + nic['uuid']
        requests.delete(url, data=None, headers=self.HEADERS)
class VRouterNetns(object):
"""Create or destroy a Linux network namespace plug
between two virtual networks.
"""
SOURCE_NAT = 'source-nat'
LOAD_BALANCER = 'loadbalancer'
SERVICE_TYPES = [SOURCE_NAT, LOAD_BALANCER]
def __init__(self, args_str=None):
self.args = None
if not args_str:
args_str = ' '.join(sys.argv[1:])
self._parse_args(args_str)
def _parse_args(self, args_str):
"""Return an argparse.ArgumentParser for me"""
conf_parser = argparse.ArgumentParser(add_help=False)
conf_parser.add_argument("-c", "--root_helper",
help="Helper to execute root commands. "
"Default: 'sudo'", default="sudo")
args, remaining_argv = conf_parser.parse_known_args(args_str.split())
# Override with CLI options
        # Don't suppress add_help here so it will handle -h
parser = argparse.ArgumentParser(
# Inherit options from config_parser
parents=[conf_parser],
# print script description with -h/--help
description=__doc__,
# Don't mess with format of description
formatter_class=argparse.RawDescriptionHelpFormatter,
)
subparsers = parser.add_subparsers()
create_parser = subparsers.add_parser('create')
create_parser.add_argument(
"service_type",
choices=self.SERVICE_TYPES,
help="Service type to run into the namespace")
create_parser.add_argument(
"vm_id",
help="Virtual machine UUID")
create_parser.add_argument(
"vmi_left_id",
help="Left virtual machine interface UUID")
create_parser.add_argument(
"vmi_right_id",
help="Right virtual machine interface UUID")
create_parser.add_argument(
"--vmi-left-mac",
default=None,
help=("Left virtual machine interface MAC. Default: automatically "
"generated by the system"))
create_parser.add_argument(
"--vmi-left-ip",
default=None,
help=("Left virtual machine interface IPv4 and mask "
"(ie: a.a.a.a/bb). Default mask to /32"))
create_parser.add_argument(
"--vmi-right-mac",
default=None,
help=("Right virtual machine interface MAC. Default: "
"automatically generated by the system"))
create_parser.add_argument(
"--vmi-right-ip",
default=None,
help=("Right virtual machine interface IPv4 and mask "
"(ie: a.a.a.a/bb). Default mask to /32"))
create_parser.add_argument(
"--update",
action="store_true",
default=False,
help=("Update a created namespace (do nothing for the moment)"))
create_parser.add_argument(
"--cfg-file",
default=None,
help=("Config file for lbaas"))
create_parser.add_argument(
"--gw-ip",
default=None,
help=("Gateway IP for Virtual Network"))
create_parser.add_argument(
"--pool-id",
default=None,
help=("Loadbalancer Pool"))
create_parser.set_defaults(func=self.create)
destroy_parser = subparsers.add_parser('destroy')
destroy_parser.add_argument(
"service_type",
choices=self.SERVICE_TYPES,
help="Service type to run into the namespace")
destroy_parser.add_argument(
"vm_id",
help="Virtual machine UUID")
destroy_parser.add_argument(
"vmi_left_id",
help="Left virtual machine interface UUID")
destroy_parser.add_argument(
"vmi_right_id",
help="Right virtual machine interface UUID")
destroy_parser.add_argument(
"--cfg-file",
default=None,
help=("config file for lbaas"))
destroy_parser.add_argument(
"--pool-id",
default=None,
help=("Loadbalancer Pool"))
destroy_parser.set_defaults(func=self.destroy)
self.args = parser.parse_args(remaining_argv)
def create(self):
netns_name = validate_uuid(self.args.vm_id)
nic_left = {}
if uuid.UUID(self.args.vmi_left_id).int:
nic_left['uuid'] = validate_uuid(self.args.vmi_left_id)
if self.args.vmi_left_mac:
nic_left['mac'] = netaddr.EUI(self.args.vmi_left_mac,
dialect=netaddr.mac_unix)
else:
nic_left['mac'] = None
if self.args.vmi_left_ip:
nic_left['ip'] = netaddr.IPNetwork(self.args.vmi_left_ip)
else:
nic_left['ip'] = None
nic_right = {}
if uuid.UUID(self.args.vmi_right_id).int:
nic_right['uuid'] = validate_uuid(self.args.vmi_right_id)
if self.args.vmi_right_mac:
nic_right['mac'] = netaddr.EUI(self.args.vmi_right_mac,
dialect=netaddr.mac_unix)
else:
nic_right['mac'] = None
if self.args.vmi_right_ip:
nic_right['ip'] = netaddr.IPNetwork(self.args.vmi_right_ip)
else:
nic_right['ip'] = None
netns_mgr = NetnsManager(netns_name, nic_left, nic_right,
root_helper=self.args.root_helper,
cfg_file=self.args.cfg_file,
update=self.args.update, gw_ip=self.args.gw_ip,
pool_id=self.args.pool_id)
if (self.args.update is False):
if netns_mgr.is_netns_already_exists():
                # If the netns already exists, destroy it so that it is
                # re-created with the new parameters (e.g. another external
                # network).
if self.args.service_type == self.LOAD_BALANCER:
netns_mgr.release_lbaas()
netns_mgr.unplug_namespace_interface()
netns_mgr.destroy()
netns_mgr.create()
if self.args.service_type == self.SOURCE_NAT:
netns_mgr.set_snat()
elif self.args.service_type == self.LOAD_BALANCER:
netns_mgr.set_lbaas()
else:
msg = ('The %s service type is not supported' %
self.args.service_type)
raise NotImplementedError(msg)
netns_mgr.plug_namespace_interface()
def destroy(self):
netns_name = validate_uuid(self.args.vm_id)
nic_left = {}
if uuid.UUID(self.args.vmi_left_id).int:
nic_left = {'uuid': validate_uuid(self.args.vmi_left_id)}
nic_right = {}
if uuid.UUID(self.args.vmi_right_id).int:
nic_right = {'uuid': validate_uuid(self.args.vmi_right_id)}
netns_mgr = NetnsManager(netns_name, nic_left, nic_right,
root_helper=self.args.root_helper,
cfg_file=self.args.cfg_file, gw_ip=None,
pool_id=self.args.pool_id)
netns_mgr.unplug_namespace_interface()
if self.args.service_type == self.SOURCE_NAT:
netns_mgr.destroy()
elif self.args.service_type == self.LOAD_BALANCER:
netns_mgr.release_lbaas()
netns_mgr.destroy()
else:
msg = ('The %s service type is not supported' %
self.args.service_type)
raise NotImplementedError(msg)
def _check_output(cmd, flag=True):
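    # Minimal fallback for subprocess.check_output() on Python versions that
    # lack it (e.g. 2.6); mirrors its raise-on-non-zero-exit behaviour.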
proc = subprocess.Popen(cmd, shell=flag, stdout=subprocess.PIPE)
data, err = proc.communicate()
retcode = proc.poll()
if retcode:
raise subprocess.CalledProcessError(retcode, cmd)
return data
def main(args_str=None):
vrouter_netns = VRouterNetns(args_str)
vrouter_netns.args.func()
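# Entry-point guard (assumed here; not shown in the excerpt above): run main()
# when the script is invoked directly.
if __name__ == '__main__':
    main()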
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Unit tests for local command-line-interface debug wrapper session."""
import os
import tempfile
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.python.client import session
from tensorflow.python.debug.cli import cli_config
from tensorflow.python.debug.cli import cli_shared
from tensorflow.python.debug.cli import debugger_cli_common
from tensorflow.python.debug.cli import ui_factory
from tensorflow.python.debug.wrappers import local_cli_wrapper
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.lib.io import file_io
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
# Import resource_variable_ops for the variables-to-tensor implicit conversion.
from tensorflow.python.ops import resource_variable_ops # pylint: disable=unused-import
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
from tensorflow.python.training import monitored_session
from tensorflow.python.training import session_run_hook
class LocalCLIDebuggerWrapperSessionForTest(
local_cli_wrapper.LocalCLIDebugWrapperSession):
"""Subclasses the wrapper class for testing.
Overrides its CLI-related methods for headless testing environments.
Inserts observer variables for assertions.
"""
def __init__(self,
command_sequence,
sess,
dump_root=None):
"""Constructor of the for-test subclass.
Args:
command_sequence: (list of list of str) A list of command arguments,
        including the command prefix; each element is a full command, e.g.:
["run", "-n"],
["print_feed", "input:0"].
sess: See the doc string of LocalCLIDebugWrapperSession.__init__.
dump_root: See the doc string of LocalCLIDebugWrapperSession.__init__.
"""
local_cli_wrapper.LocalCLIDebugWrapperSession.__init__(
self, sess, dump_root=dump_root, log_usage=False)
self._command_sequence = command_sequence
self._command_pointer = 0
# Observer variables.
self.observers = {
"debug_dumps": [],
"tf_errors": [],
"run_start_cli_run_numbers": [],
"run_end_cli_run_numbers": [],
"print_feed_responses": [],
"profiler_py_graphs": [],
"profiler_run_metadata": [],
}
def _prep_cli_for_run_start(self):
pass
def _prep_debug_cli_for_run_end(self,
debug_dump,
tf_error,
passed_filter,
passed_filter_exclude_op_names):
self.observers["debug_dumps"].append(debug_dump)
self.observers["tf_errors"].append(tf_error)
def _prep_profile_cli_for_run_end(self, py_graph, run_metadata):
self.observers["profiler_py_graphs"].append(py_graph)
self.observers["profiler_run_metadata"].append(run_metadata)
def _launch_cli(self):
if self._is_run_start:
self.observers["run_start_cli_run_numbers"].append(self._run_call_count)
else:
self.observers["run_end_cli_run_numbers"].append(self._run_call_count)
readline_cli = ui_factory.get_ui(
"readline",
config=cli_config.CLIConfig(
config_file_path=os.path.join(tempfile.mkdtemp(), ".tfdbg_config")))
self._register_this_run_info(readline_cli)
while self._command_pointer < len(self._command_sequence):
command = self._command_sequence[self._command_pointer]
self._command_pointer += 1
try:
if command[0] == "run":
self._run_handler(command[1:])
elif command[0] == "print_feed":
self.observers["print_feed_responses"].append(
self._print_feed_handler(command[1:]))
else:
raise ValueError("Unrecognized command prefix: %s" % command[0])
except debugger_cli_common.CommandLineExit as e:
return e.exit_token
@test_util.run_v1_only("b/120545219")
class LocalCLIDebugWrapperSessionTest(test_util.TensorFlowTestCase):
def setUp(self):
self._tmp_dir = tempfile.mkdtemp()
self.v = variables.VariableV1(10.0, name="v")
self.w = variables.VariableV1(21.0, name="w")
self.delta = constant_op.constant(1.0, name="delta")
self.inc_v = state_ops.assign_add(self.v, self.delta, name="inc_v")
self.w_int = control_flow_ops.with_dependencies(
[self.inc_v],
math_ops.cast(self.w, dtypes.int32, name="w_int_inner"),
name="w_int_outer")
self.ph = array_ops.placeholder(dtypes.float32, name="ph")
self.xph = array_ops.transpose(self.ph, name="xph")
self.m = constant_op.constant(
[[0.0, 1.0, 2.0], [-4.0, -1.0, 0.0]], dtype=dtypes.float32, name="m")
self.y = math_ops.matmul(self.m, self.xph, name="y")
self.sparse_ph = array_ops.sparse_placeholder(
dtypes.float32, shape=([5, 5]), name="sparse_placeholder")
self.sparse_add = sparse_ops.sparse_add(self.sparse_ph, self.sparse_ph)
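    # Disable Grappler rewrites below so the graph that actually runs keeps the
    # nodes constructed above, which the debugger assertions rely on.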
rewriter_config = rewriter_config_pb2.RewriterConfig(
disable_model_pruning=True,
arithmetic_optimization=rewriter_config_pb2.RewriterConfig.OFF,
dependency_optimization=rewriter_config_pb2.RewriterConfig.OFF)
graph_options = config_pb2.GraphOptions(rewrite_options=rewriter_config)
config_proto = config_pb2.ConfigProto(graph_options=graph_options)
self.sess = session.Session(config=config_proto)
# Initialize variable.
self.sess.run(variables.global_variables_initializer())
def tearDown(self):
ops.reset_default_graph()
if os.path.isdir(self._tmp_dir):
file_io.delete_recursively(self._tmp_dir)
def testConstructWrapper(self):
local_cli_wrapper.LocalCLIDebugWrapperSession(
session.Session(), log_usage=False)
def testConstructWrapperWithExistingNonEmptyDumpRoot(self):
dir_path = os.path.join(self._tmp_dir, "foo")
os.mkdir(dir_path)
self.assertTrue(os.path.isdir(dir_path))
with self.assertRaisesRegex(
ValueError, "dump_root path points to a non-empty directory"):
local_cli_wrapper.LocalCLIDebugWrapperSession(
session.Session(), dump_root=self._tmp_dir, log_usage=False)
def testConstructWrapperWithExistingFileDumpRoot(self):
file_path = os.path.join(self._tmp_dir, "foo")
open(file_path, "a").close() # Create the file
self.assertTrue(os.path.isfile(file_path))
with self.assertRaisesRegex(ValueError, "dump_root path points to a file"):
local_cli_wrapper.LocalCLIDebugWrapperSession(
session.Session(), dump_root=file_path, log_usage=False)
def testRunsUnderDebugMode(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
# run under debug mode twice.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
# Verify that the assign_add op did take effect.
self.assertAllClose(12.0, self.sess.run(self.v))
# Assert correct run call numbers for which the CLI has been launched at
# run-start and run-end.
self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
self.assertEqual([1, 2], wrapped_sess.observers["run_end_cli_run_numbers"])
# Verify that the dumps have been generated and picked up during run-end.
self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
# Verify that the TensorFlow runtime errors are picked up and in this case,
# they should be both None.
self.assertEqual([None, None], wrapped_sess.observers["tf_errors"])
def testRunsWithEmptyStringDumpRootWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess, dump_root="")
# run under debug mode.
wrapped_sess.run(self.inc_v)
self.assertAllClose(11.0, self.sess.run(self.v))
def testRunInfoOutputAtRunEndIsCorrect(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
wrapped_sess.run(self.inc_v)
run_info_output = wrapped_sess._run_info_handler([])
tfdbg_logo = cli_shared.get_tfdbg_logo()
# The run_info output in the first run() call should contain the tfdbg logo.
self.assertEqual(tfdbg_logo.lines,
run_info_output.lines[:len(tfdbg_logo.lines)])
menu = run_info_output.annotations[debugger_cli_common.MAIN_MENU_KEY]
self.assertIn("list_tensors", menu.captions())
wrapped_sess.run(self.inc_v)
run_info_output = wrapped_sess._run_info_handler([])
# The run_info output in the second run() call should NOT contain the logo.
self.assertNotEqual(tfdbg_logo.lines,
run_info_output.lines[:len(tfdbg_logo.lines)])
menu = run_info_output.annotations[debugger_cli_common.MAIN_MENU_KEY]
self.assertIn("list_tensors", menu.captions())
def testRunsUnderNonDebugMode(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "-n"], ["run", "-n"], ["run", "-n"]],
self.sess, dump_root=self._tmp_dir)
# run three times.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
self.assertAllClose(13.0, self.sess.run(self.v))
self.assertEqual([1, 2, 3],
wrapped_sess.observers["run_start_cli_run_numbers"])
self.assertEqual([], wrapped_sess.observers["run_end_cli_run_numbers"])
def testRunningWithSparsePlaceholderFeedWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
sparse_feed = ([[0, 1], [0, 2]], [10.0, 20.0])
sparse_result = wrapped_sess.run(
self.sparse_add, feed_dict={self.sparse_ph: sparse_feed})
self.assertAllEqual([[0, 1], [0, 2]], sparse_result.indices)
self.assertAllClose([20.0, 40.0], sparse_result.values)
def testRunsUnderNonDebugThenDebugMode(self):
# Do two NON_DEBUG_RUNs, followed by DEBUG_RUNs.
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "-n"], ["run", "-n"], ["run"], ["run"]],
self.sess, dump_root=self._tmp_dir)
# run three times.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
self.assertAllClose(13.0, self.sess.run(self.v))
self.assertEqual([1, 2, 3],
wrapped_sess.observers["run_start_cli_run_numbers"])
# Here, the CLI should have been launched only under the third run,
# because the first and second runs are NON_DEBUG.
self.assertEqual([3], wrapped_sess.observers["run_end_cli_run_numbers"])
self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
self.assertEqual([None], wrapped_sess.observers["tf_errors"])
def testRunMultipleTimesWithinLimit(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "-t", "3"], ["run"]],
self.sess, dump_root=self._tmp_dir)
# run three times.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
self.assertAllClose(13.0, self.sess.run(self.v))
self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
self.assertEqual([3], wrapped_sess.observers["run_end_cli_run_numbers"])
self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
self.assertEqual([None], wrapped_sess.observers["tf_errors"])
def testRunMultipleTimesOverLimit(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "-t", "3"]], self.sess, dump_root=self._tmp_dir)
# run twice, which is less than the number of times specified by the
# command.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
self.assertAllClose(12.0, self.sess.run(self.v))
self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
self.assertEqual([], wrapped_sess.observers["run_end_cli_run_numbers"])
self.assertEqual(0, len(wrapped_sess.observers["debug_dumps"]))
self.assertEqual([], wrapped_sess.observers["tf_errors"])
def testRunMixingDebugModeAndMultipleTimes(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "-n"], ["run", "-t", "2"], ["run"], ["run"]],
self.sess, dump_root=self._tmp_dir)
# run four times.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
self.assertAllClose(14.0, self.sess.run(self.v))
self.assertEqual([1, 2],
wrapped_sess.observers["run_start_cli_run_numbers"])
self.assertEqual([3, 4], wrapped_sess.observers["run_end_cli_run_numbers"])
self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
self.assertEqual([None, None], wrapped_sess.observers["tf_errors"])
def testDebuggingMakeCallableTensorRunnerWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
v = variables.VariableV1(42)
tensor_runner = wrapped_sess.make_callable(v)
self.sess.run(v.initializer)
self.assertAllClose(42, tensor_runner())
self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
def testDebuggingMakeCallableTensorRunnerWithCustomRunOptionsWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
a = constant_op.constant(42)
tensor_runner = wrapped_sess.make_callable(a)
run_options = config_pb2.RunOptions(
trace_level=config_pb2.RunOptions.FULL_TRACE)
run_metadata = config_pb2.RunMetadata()
self.assertAllClose(
42, tensor_runner(options=run_options, run_metadata=run_metadata))
self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
self.assertGreater(len(run_metadata.step_stats.dev_stats), 0)
def testDebuggingMakeCallableOperationRunnerWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
v = variables.VariableV1(10.0)
inc_v = state_ops.assign_add(v, 1.0)
op_runner = wrapped_sess.make_callable(inc_v.op)
self.sess.run(v.initializer)
op_runner()
self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
self.assertEqual(11.0, self.sess.run(v))
def testDebuggingMakeCallableRunnerWithFeedListWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
ph1 = array_ops.placeholder(dtypes.float32)
ph2 = array_ops.placeholder(dtypes.float32)
a = math_ops.add(ph1, ph2)
tensor_runner = wrapped_sess.make_callable(a, feed_list=[ph1, ph2])
self.assertAllClose(42.0, tensor_runner(41.0, 1.0))
self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
def testDebuggingMakeCallableFromOptionsWithZeroFeedWorks(self):
variable_1 = variables.VariableV1(
10.5, dtype=dtypes.float32, name="variable_1")
a = math_ops.add(variable_1, variable_1, "callable_a")
math_ops.add(a, a, "callable_b")
self.sess.run(variable_1.initializer)
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"]] * 3, self.sess, dump_root=self._tmp_dir)
callable_options = config_pb2.CallableOptions()
callable_options.fetch.append("callable_b")
sess_callable = wrapped_sess._make_callable_from_options(callable_options)
for _ in range(2):
callable_output = sess_callable()
self.assertAllClose(np.array(42.0, dtype=np.float32), callable_output[0])
debug_dumps = wrapped_sess.observers["debug_dumps"]
self.assertEqual(2, len(debug_dumps))
for debug_dump in debug_dumps:
node_names = [datum.node_name for datum in debug_dump.dumped_tensor_data]
self.assertItemsEqual(
["callable_a", "callable_b", "variable_1", "variable_1/read"],
node_names)
def testDebuggingMakeCallableFromOptionsWithOneFeedWorks(self):
ph1 = array_ops.placeholder(dtypes.float32, name="callable_ph1")
a = math_ops.add(ph1, ph1, "callable_a")
math_ops.add(a, a, "callable_b")
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"]] * 3, self.sess, dump_root=self._tmp_dir)
callable_options = config_pb2.CallableOptions()
callable_options.feed.append("callable_ph1")
callable_options.fetch.append("callable_b")
sess_callable = wrapped_sess._make_callable_from_options(callable_options)
ph1_value = np.array([10.5, -10.5], dtype=np.float32)
for _ in range(2):
callable_output = sess_callable(ph1_value)
self.assertAllClose(
np.array([42.0, -42.0], dtype=np.float32), callable_output[0])
debug_dumps = wrapped_sess.observers["debug_dumps"]
self.assertEqual(2, len(debug_dumps))
for debug_dump in debug_dumps:
node_names = [datum.node_name for datum in debug_dump.dumped_tensor_data]
self.assertIn("callable_a", node_names)
self.assertIn("callable_b", node_names)
def testDebuggingMakeCallableFromOptionsWithTwoFeedsWorks(self):
ph1 = array_ops.placeholder(dtypes.float32, name="callable_ph1")
ph2 = array_ops.placeholder(dtypes.float32, name="callable_ph2")
a = math_ops.add(ph1, ph2, "callable_a")
math_ops.add(a, a, "callable_b")
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"]] * 3, self.sess, dump_root=self._tmp_dir)
callable_options = config_pb2.CallableOptions()
callable_options.feed.append("callable_ph1")
callable_options.feed.append("callable_ph2")
callable_options.fetch.append("callable_b")
sess_callable = wrapped_sess._make_callable_from_options(callable_options)
ph1_value = np.array(5.0, dtype=np.float32)
ph2_value = np.array(16.0, dtype=np.float32)
for _ in range(2):
callable_output = sess_callable(ph1_value, ph2_value)
self.assertAllClose(np.array(42.0, dtype=np.float32), callable_output[0])
debug_dumps = wrapped_sess.observers["debug_dumps"]
self.assertEqual(2, len(debug_dumps))
for debug_dump in debug_dumps:
node_names = [datum.node_name for datum in debug_dump.dumped_tensor_data]
self.assertIn("callable_a", node_names)
self.assertIn("callable_b", node_names)
def testDebugMakeCallableFromOptionsWithCustomOptionsAndMetadataWorks(self):
variable_1 = variables.VariableV1(
10.5, dtype=dtypes.float32, name="variable_1")
a = math_ops.add(variable_1, variable_1, "callable_a")
math_ops.add(a, a, "callable_b")
self.sess.run(variable_1.initializer)
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
callable_options = config_pb2.CallableOptions()
callable_options.fetch.append("callable_b")
callable_options.run_options.trace_level = config_pb2.RunOptions.FULL_TRACE
sess_callable = wrapped_sess._make_callable_from_options(callable_options)
run_metadata = config_pb2.RunMetadata()
# Call the callable with a custom run_metadata.
callable_output = sess_callable(run_metadata=run_metadata)
# Verify that step_stats is populated in the custom run_metadata.
self.assertTrue(run_metadata.step_stats)
self.assertAllClose(np.array(42.0, dtype=np.float32), callable_output[0])
debug_dumps = wrapped_sess.observers["debug_dumps"]
self.assertEqual(1, len(debug_dumps))
debug_dump = debug_dumps[0]
node_names = [datum.node_name for datum in debug_dump.dumped_tensor_data]
self.assertItemsEqual(
["callable_a", "callable_b", "variable_1", "variable_1/read"],
node_names)
def testRuntimeErrorShouldBeCaught(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
    # Do a run that should lead to a TensorFlow runtime error.
wrapped_sess.run(self.y, feed_dict={self.ph: [[0.0], [1.0], [2.0]]})
self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
self.assertEqual([1], wrapped_sess.observers["run_end_cli_run_numbers"])
self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
# Verify that the runtime error is caught by the wrapped session properly.
self.assertEqual(1, len(wrapped_sess.observers["tf_errors"]))
tf_error = wrapped_sess.observers["tf_errors"][0]
self.assertEqual("y", tf_error.op.name)
def testRunTillFilterPassesShouldLaunchCLIAtCorrectRun(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "-f", "v_greater_than_twelve"],
["run", "-f", "v_greater_than_twelve"],
["run"]],
self.sess,
dump_root=self._tmp_dir)
def v_greater_than_twelve(datum, tensor):
return datum.node_name == "v" and tensor > 12.0
# Verify that adding the same tensor filter more than once is tolerated
# (i.e., as if it were added only once).
wrapped_sess.add_tensor_filter("v_greater_than_twelve",
v_greater_than_twelve)
wrapped_sess.add_tensor_filter("v_greater_than_twelve",
v_greater_than_twelve)
# run five times.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
self.assertAllClose(15.0, self.sess.run(self.v))
self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
    # run-end CLI should NOT have been launched for runs #2 and #3, because v
    # does not become greater than 12.0 until run #4.
self.assertEqual([4, 5], wrapped_sess.observers["run_end_cli_run_numbers"])
self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
self.assertEqual([None, None], wrapped_sess.observers["tf_errors"])
def testRunTillFilterPassesWithExcludeOpNames(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "-f", "greater_than_twelve",
"--filter_exclude_node_names", "inc_v.*"],
["run"], ["run"]],
self.sess,
dump_root=self._tmp_dir)
def greater_than_twelve(datum, tensor):
del datum # Unused.
return tensor > 12.0
    # Register the tensor filter referenced by the -f flag above.
wrapped_sess.add_tensor_filter("greater_than_twelve", greater_than_twelve)
    # run four times.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
self.assertAllClose(14.0, self.sess.run(self.v))
self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
    # Due to the --filter_exclude_node_names flag, the run-end CLI should show
    # up not after run 3, but after run 4.
self.assertEqual([4], wrapped_sess.observers["run_end_cli_run_numbers"])
def testRunTillFilterPassesWorksInConjunctionWithOtherNodeNameFilter(self):
"""Test that --.*_filter flags work in conjunction with -f.
In other words, test that you can use a tensor filter on a subset of
the tensors.
"""
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "-f", "v_greater_than_twelve", "--node_name_filter", "v$"],
["run", "-f", "v_greater_than_twelve", "--node_name_filter", "v$"],
["run"]],
self.sess,
dump_root=self._tmp_dir)
def v_greater_than_twelve(datum, tensor):
return datum.node_name == "v" and tensor > 12.0
wrapped_sess.add_tensor_filter("v_greater_than_twelve",
v_greater_than_twelve)
# run five times.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
self.assertAllClose(15.0, self.sess.run(self.v))
self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
    # run-end CLI should NOT have been launched for runs #2 and #3, because v
    # does not become greater than 12.0 until run #4.
self.assertEqual([4, 5], wrapped_sess.observers["run_end_cli_run_numbers"])
debug_dumps = wrapped_sess.observers["debug_dumps"]
self.assertEqual(2, len(debug_dumps))
self.assertEqual(1, len(debug_dumps[0].dumped_tensor_data))
self.assertEqual("v:0", debug_dumps[0].dumped_tensor_data[0].tensor_name)
self.assertEqual(1, len(debug_dumps[1].dumped_tensor_data))
self.assertEqual("v:0", debug_dumps[1].dumped_tensor_data[0].tensor_name)
def testRunsUnderDebugModeWithWatchFnFilteringNodeNames(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "--node_name_filter", "inc.*"],
["run", "--node_name_filter", "delta"],
["run"]],
self.sess, dump_root=self._tmp_dir)
# run under debug mode twice.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
# Verify that the assign_add op did take effect.
self.assertAllClose(12.0, self.sess.run(self.v))
# Verify that the dumps have been generated and picked up during run-end.
self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
dumps = wrapped_sess.observers["debug_dumps"][0]
self.assertEqual(1, dumps.size)
self.assertEqual("inc_v", dumps.dumped_tensor_data[0].node_name)
dumps = wrapped_sess.observers["debug_dumps"][1]
self.assertEqual(1, dumps.size)
self.assertEqual("delta", dumps.dumped_tensor_data[0].node_name)
def testRunsUnderDebugModeWithWatchFnFilteringOpTypes(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "--node_name_filter", "delta"],
["run", "--op_type_filter", "AssignAdd"],
["run"]],
self.sess, dump_root=self._tmp_dir)
# run under debug mode twice.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
# Verify that the assign_add op did take effect.
self.assertAllClose(12.0, self.sess.run(self.v))
# Verify that the dumps have been generated and picked up during run-end.
self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
dumps = wrapped_sess.observers["debug_dumps"][0]
self.assertEqual(1, dumps.size)
self.assertEqual("delta", dumps.dumped_tensor_data[0].node_name)
dumps = wrapped_sess.observers["debug_dumps"][1]
self.assertEqual(1, dumps.size)
self.assertEqual("inc_v", dumps.dumped_tensor_data[0].node_name)
def testRunsUnderDebugModeWithWatchFnFilteringTensorDTypes(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "--op_type_filter", "Variable.*"],
["run", "--tensor_dtype_filter", "int32"],
["run"]],
self.sess, dump_root=self._tmp_dir)
# run under debug mode twice.
wrapped_sess.run(self.w_int)
wrapped_sess.run(self.w_int)
# Verify that the dumps have been generated and picked up during run-end.
self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
dumps = wrapped_sess.observers["debug_dumps"][0]
self.assertEqual(2, dumps.size)
self.assertItemsEqual(
["v", "w"], [dumps.dumped_tensor_data[i].node_name for i in [0, 1]])
dumps = wrapped_sess.observers["debug_dumps"][1]
self.assertEqual(2, dumps.size)
self.assertEqual(
["w_int_inner", "w_int_outer"],
[dumps.dumped_tensor_data[i].node_name for i in [0, 1]])
def testRunsUnderDebugModeWithWatchFnFilteringOpTypesAndTensorDTypes(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "--op_type_filter", "Cast", "--tensor_dtype_filter", "int32"],
["run"]],
self.sess, dump_root=self._tmp_dir)
    # run under debug mode once.
wrapped_sess.run(self.w_int)
# Verify that the dumps have been generated and picked up during run-end.
self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
dumps = wrapped_sess.observers["debug_dumps"][0]
self.assertEqual(1, dumps.size)
self.assertEqual("w_int_inner", dumps.dumped_tensor_data[0].node_name)
def testPrintFeedPrintsFeedValueForTensorFeedKey(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["print_feed", "ph:0"], ["run"], ["run"]], self.sess)
self.assertAllClose(
[[5.0], [-1.0]],
wrapped_sess.run(self.y, feed_dict={self.ph: [[0.0, 1.0, 2.0]]}))
print_feed_responses = wrapped_sess.observers["print_feed_responses"]
self.assertEqual(1, len(print_feed_responses))
self.assertEqual(
["Tensor \"ph:0 (feed)\":", "", "[[0.0, 1.0, 2.0]]"],
print_feed_responses[0].lines)
def testPrintFeedPrintsFeedValueForTensorNameFeedKey(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["print_feed", "ph:0"], ["run"], ["run"]], self.sess)
self.assertAllClose(
[[5.0], [-1.0]],
wrapped_sess.run(self.y, feed_dict={"ph:0": [[0.0, 1.0, 2.0]]}))
print_feed_responses = wrapped_sess.observers["print_feed_responses"]
self.assertEqual(1, len(print_feed_responses))
self.assertEqual(
["Tensor \"ph:0 (feed)\":", "", "[[0.0, 1.0, 2.0]]"],
print_feed_responses[0].lines)
def testPrintFeedPrintsErrorForInvalidFeedKey(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["print_feed", "spam"], ["run"], ["run"]], self.sess)
self.assertAllClose(
[[5.0], [-1.0]],
wrapped_sess.run(self.y, feed_dict={"ph:0": [[0.0, 1.0, 2.0]]}))
print_feed_responses = wrapped_sess.observers["print_feed_responses"]
self.assertEqual(1, len(print_feed_responses))
self.assertEqual(
["ERROR: The feed_dict of the current run does not contain the key "
"spam"], print_feed_responses[0].lines)
def testPrintFeedPrintsErrorWhenFeedDictIsNone(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["print_feed", "spam"], ["run"], ["run"]], self.sess)
wrapped_sess.run(self.w_int)
print_feed_responses = wrapped_sess.observers["print_feed_responses"]
self.assertEqual(1, len(print_feed_responses))
self.assertEqual(
["ERROR: The feed_dict of the current run is None or empty."],
print_feed_responses[0].lines)
def testRunUnderProfilerModeWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "-p"], ["run"]], self.sess)
wrapped_sess.run(self.w_int)
self.assertEqual(1, len(wrapped_sess.observers["profiler_run_metadata"]))
self.assertTrue(
wrapped_sess.observers["profiler_run_metadata"][0].step_stats)
self.assertEqual(1, len(wrapped_sess.observers["profiler_py_graphs"]))
self.assertIsInstance(
wrapped_sess.observers["profiler_py_graphs"][0], ops.Graph)
def testCallingHookDelBeforeAnyRun(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess)
del wrapped_sess
def testCallingShouldStopMethodOnNonWrappedNonMonitoredSessionErrors(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess)
with self.assertRaisesRegex(
ValueError,
r"The wrapped session .* does not have a method .*should_stop.*"):
wrapped_sess.should_stop()
def testLocalCLIDebugWrapperSessionWorksOnMonitoredSession(self):
monitored_sess = monitored_session.MonitoredSession()
wrapped_monitored_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], monitored_sess)
self.assertFalse(wrapped_monitored_sess.should_stop())
def testRunsWithEmptyFetchWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"]], self.sess, dump_root="")
run_output = wrapped_sess.run([])
self.assertEqual([], run_output)
def testRunsWithEmptyNestedFetchWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"]], self.sess, dump_root="")
run_output = wrapped_sess.run({"foo": {"baz": []}, "bar": ()})
self.assertEqual({"foo": {"baz": []}, "bar": ()}, run_output)
def testSessionRunHook(self):
a = array_ops.placeholder(dtypes.float32, [10])
b = a + 1
c = b * 2
class Hook(session_run_hook.SessionRunHook):
def before_run(self, _):
return session_run_hook.SessionRunArgs(fetches=c)
class Hook2(session_run_hook.SessionRunHook):
def before_run(self, _):
return session_run_hook.SessionRunArgs(fetches=b)
sess = session.Session()
sess = LocalCLIDebuggerWrapperSessionForTest([["run"], ["run"]], sess)
class SessionCreator(object):
def create_session(self):
return sess
final_sess = monitored_session.MonitoredSession(
session_creator=SessionCreator(), hooks=[Hook(), Hook2()])
final_sess.run(b, feed_dict={a: np.arange(10)})
debug_dumps = sess.observers["debug_dumps"]
self.assertEqual(1, len(debug_dumps))
debug_dump = debug_dumps[0]
node_names = [datum.node_name for datum in debug_dump.dumped_tensor_data]
self.assertIn(b.op.name, node_names)
if __name__ == "__main__":
googletest.main()
|
|
## @file
# This file contains unit tests for CommentParsing
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
import unittest
import Logger.Log as Logger
from Library.CommentParsing import ParseHeaderCommentSection, \
ParseGenericComment, \
ParseDecPcdGenericComment, \
ParseDecPcdTailComment
from Library.CommentParsing import _IsCopyrightLine
from Library.StringUtils import GetSplitValueList
from Library.DataType import TAB_SPACE_SPLIT
from Library.DataType import TAB_LANGUAGE_EN_US
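# The tests below repeatedly convert a multi-line comment string into the
# [(CommentLine, LineNum), ...] list expected by the parsers. A small helper
# doing the same thing would look like this (illustrative sketch only; the
# tests keep their inline loops):
#
#   def _MakeCommentLinesList(Text):
#       return [(Line, Num + 1)
#               for Num, Line in enumerate(GetSplitValueList(Text, "\n"))]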
#
# Test ParseHeaderCommentSection
#
class ParseHeaderCommentSectionTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# Normal case1: have license/copyright/license above @file
#
def testNormalCase1(self):
TestCommentLines1 = \
'''# License1
# License2
#
## @file
# example abstract
#
# example description
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License3
#'''
CommentList = GetSplitValueList(TestCommentLines1, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = 'example abstract'
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = 'example description'
self.assertEqual(Description, ExpectedDescription)
        ExpectedCopyright = \
            'Copyright (c) 2007 - 2018,'\
            ' Intel Corporation. All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = 'License1\nLicense2\n\nLicense3'
self.assertEqual(License, ExpectedLicense)
#
# Normal case2: have license/copyright above @file, but no copyright after
#
def testNormalCase2(self):
TestCommentLines2 = \
''' # License1
# License2
#
## @file
# example abstract
#
# example description
#
#Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
##'''
CommentList = GetSplitValueList(TestCommentLines2, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = 'example abstract'
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = 'example description'
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = 'License1\nLicense2'
self.assertEqual(License, ExpectedLicense)
#
    # Normal case3: have license/copyright/license above @file,
# but no abstract/description
#
def testNormalCase3(self):
TestCommentLines3 = \
''' # License1
# License2
#
## @file
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License3 Line1
# License3 Line2
##'''
CommentList = GetSplitValueList(TestCommentLines3, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = ''
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = ''
self.assertEqual(Description, ExpectedDescription)
        ExpectedCopyright = \
            'Copyright (c) 2007 - 2018,'\
            ' Intel Corporation. All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = \
'License1\n' \
'License2\n\n' \
'License3 Line1\n' \
'License3 Line2'
self.assertEqual(License, ExpectedLicense)
#
# Normal case4: format example in spec
#
def testNormalCase4(self):
TestCommentLines = \
'''
## @file
# Abstract
#
# Description
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = 'Abstract'
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = 'Description'
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
#
# Normal case5: other line between copyright
#
def testNormalCase5(self):
TestCommentLines = \
'''
## @file
# Abstract
#
# Description
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# other line
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = 'Abstract'
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = 'Description'
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
#
# Normal case6: multiple lines of copyright
#
def testNormalCase6(self):
TestCommentLines = \
'''
## @file
# Abstract
#
# Description
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2010, FOO1 Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2010, FOO2 Corporation. All rights reserved.<BR>
#
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = 'Abstract'
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = 'Description'
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2010, FOO1 Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2010, FOO2 Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
#
# Normal case7: Abstract not present
#
def testNormalCase7(self):
TestCommentLines = \
'''
## @file
#
# Description
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2010, FOO1 Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2010, FOO2 Corporation. All rights reserved.<BR>
#
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = ''
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = 'Description'
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2010, FOO1 Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2010, FOO2 Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
#
# Normal case8: Description not present
#
def testNormalCase8(self):
TestCommentLines = \
'''
## @file
        # Abstract
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
        ExpectedAbstract = 'Abstract'
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = ''
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
#
# Error case1: No copyright found
#
def testErrorCase1(self):
TestCommentLines = \
'''
## @file
# Abstract
#
# Description
#
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
self.assertRaises(Logger.FatalError,
ParseHeaderCommentSection,
TestCommentLinesList,
"PhonyFile")
#
# Error case2: non-empty non-comment lines passed in
#
def testErrorCase2(self):
TestCommentLines = \
'''
## @file
# Abstract
#
this is invalid line
# Description
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
self.assertRaises(Logger.FatalError,
ParseHeaderCommentSection,
TestCommentLinesList,
"PhonyFile")
#
# Test ParseGenericComment
#
class ParseGenericCommentTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# Normal case1: one line of comment
#
def testNormalCase1(self):
TestCommentLines = \
'''# hello world'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
HelptxtObj = ParseGenericComment(TestCommentLinesList, 'testNormalCase1')
self.failIf(not HelptxtObj)
self.assertEqual(HelptxtObj.GetString(), 'hello world')
self.assertEqual(HelptxtObj.GetLang(), TAB_LANGUAGE_EN_US)
#
# Normal case2: multiple lines of comment
#
def testNormalCase2(self):
TestCommentLines = \
'''## hello world
# second line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
HelptxtObj = ParseGenericComment(TestCommentLinesList, 'testNormalCase2')
self.failIf(not HelptxtObj)
self.assertEqual(HelptxtObj.GetString(),
'hello world\n' + 'second line')
self.assertEqual(HelptxtObj.GetLang(), TAB_LANGUAGE_EN_US)
#
    # Normal case3: multiple lines of comment, non-comment lines will be skipped
#
def testNormalCase3(self):
TestCommentLines = \
'''## hello world
This is not comment line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
HelptxtObj = ParseGenericComment(TestCommentLinesList, 'testNormalCase3')
self.failIf(not HelptxtObj)
self.assertEqual(HelptxtObj.GetString(),
'hello world\n\n')
self.assertEqual(HelptxtObj.GetLang(), TAB_LANGUAGE_EN_US)
#
# Test ParseDecPcdGenericComment
#
class ParseDecPcdGenericCommentTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# Normal case1: comments with no special comment
#
def testNormalCase1(self):
TestCommentLines = \
'''## hello world
# second line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'testNormalCase1')
self.failIf(not HelpTxt)
self.failIf(PcdErr)
self.assertEqual(HelpTxt,
'hello world\n' + 'second line')
#
# Normal case2: comments with valid list
#
def testNormalCase2(self):
TestCommentLines = \
'''## hello world
# second line
# @ValidList 1, 2, 3
# other line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpTxt)
self.failIf(not PcdErr)
self.assertEqual(HelpTxt,
'hello world\n' + 'second line\n' + 'other line')
ExpectedList = GetSplitValueList('1 2 3', TAB_SPACE_SPLIT)
ActualList = [item for item in \
GetSplitValueList(PcdErr.GetValidValue(), TAB_SPACE_SPLIT) if item]
self.assertEqual(ExpectedList, ActualList)
self.failIf(PcdErr.GetExpression())
self.failIf(PcdErr.GetValidValueRange())
#
# Normal case3: comments with valid range
#
def testNormalCase3(self):
TestCommentLines = \
'''## hello world
# second line
# @ValidRange LT 1 AND GT 2
# other line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpTxt)
self.failIf(not PcdErr)
self.assertEqual(HelpTxt,
'hello world\n' + 'second line\n' + 'other line')
self.assertEqual(PcdErr.GetValidValueRange().strip(), 'LT 1 AND GT 2')
self.failIf(PcdErr.GetExpression())
self.failIf(PcdErr.GetValidValue())
#
# Normal case4: comments with valid expression
#
def testNormalCase4(self):
TestCommentLines = \
'''## hello world
# second line
# @Expression LT 1 AND GT 2
# other line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpTxt)
self.failIf(not PcdErr)
self.assertEqual(HelpTxt,
'hello world\n' + 'second line\n' + 'other line')
self.assertEqual(PcdErr.GetExpression().strip(), 'LT 1 AND GT 2')
self.failIf(PcdErr.GetValidValueRange())
self.failIf(PcdErr.GetValidValue())
#
# Normal case5: comments with valid expression and no generic comment
#
def testNormalCase5(self):
TestCommentLines = \
'''# @Expression LT 1 AND GT 2'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.failIf(HelpTxt)
self.failIf(not PcdErr)
self.assertEqual(PcdErr.GetExpression().strip(), 'LT 1 AND GT 2')
self.failIf(PcdErr.GetValidValueRange())
self.failIf(PcdErr.GetValidValue())
#
# Normal case6: comments with only generic help text
#
def testNormalCase6(self):
TestCommentLines = \
'''#'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.assertEqual(HelpTxt, '\n')
self.failIf(PcdErr)
#
    # Error case1: comments with both expression and valid list; the latter is
    # used, the former is ignored, and a warning message is issued
#
def testErrorCase1(self):
TestCommentLines = \
'''## hello world
# second line
# @ValidList 1, 2, 3
# @Expression LT 1 AND GT 2
# other line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
try:
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
except Logger.FatalError:
pass
#
# Test ParseDecPcdTailComment
#
class ParseDecPcdTailCommentTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# Normal case1: comments with no SupModeList
#
def testNormalCase1(self):
TestCommentLines = \
'''## #hello world'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpStr)
self.failIf(SupModeList)
self.assertEqual(HelpStr,
'hello world')
#
# Normal case2: comments with one SupMode
#
def testNormalCase2(self):
TestCommentLines = \
'''## BASE #hello world'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpStr)
self.failIf(not SupModeList)
self.assertEqual(HelpStr,
'hello world')
self.assertEqual(SupModeList,
['BASE'])
#
# Normal case3: comments with more than one SupMode
#
def testNormalCase3(self):
TestCommentLines = \
'''## BASE UEFI_APPLICATION #hello world'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpStr)
self.failIf(not SupModeList)
self.assertEqual(HelpStr,
'hello world')
self.assertEqual(SupModeList,
['BASE', 'UEFI_APPLICATION'])
#
# Normal case4: comments with more than one SupMode, no help text
#
def testNormalCase4(self):
TestCommentLines = \
'''## BASE UEFI_APPLICATION'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(HelpStr)
self.failIf(not SupModeList)
self.assertEqual(SupModeList,
['BASE', 'UEFI_APPLICATION'])
#
    # Normal case5: general comments with no SupModeList, extracted from a real case
#
def testNormalCase5(self):
TestCommentLines = \
''' # 1 = 128MB, 2 = 256MB, 3 = MAX'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpStr)
self.assertEqual(HelpStr,
'1 = 128MB, 2 = 256MB, 3 = MAX')
self.failIf(SupModeList)
#
    # Error case2: comments whose SupModeList contains both valid and invalid
    # module types
#
def testErrorCase2(self):
TestCommentLines = \
'''## BASE INVALID_MODULE_TYPE #hello world'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
try:
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
except Logger.FatalError:
pass
#
# Test _IsCopyrightLine
#
class _IsCopyrightLineTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# Normal case
#
def testCase1(self):
Line = 'this is a copyright ( line'
Result = _IsCopyrightLine(Line)
self.failIf(not Result)
#
# Normal case
#
def testCase2(self):
Line = 'this is a Copyright ( line'
Result = _IsCopyrightLine(Line)
self.failIf(not Result)
#
# Normal case
#
def testCase3(self):
Line = 'this is not aCopyright ( line'
Result = _IsCopyrightLine(Line)
self.failIf(Result)
#
# Normal case
#
def testCase4(self):
Line = 'this is Copyright( line'
Result = _IsCopyrightLine(Line)
self.failIf(not Result)
#
# Normal case
#
def testCase5(self):
Line = 'this is Copyright (line'
Result = _IsCopyrightLine(Line)
self.failIf(not Result)
#
# Normal case
#
def testCase6(self):
Line = 'this is not Copyright line'
Result = _IsCopyrightLine(Line)
self.failIf(Result)
#
# Normal case
#
def testCase7(self):
Line = 'Copyright (c) line'
Result = _IsCopyrightLine(Line)
self.failIf(not Result)
#
# Normal case
#
def testCase8(self):
Line = ' Copyright (c) line'
Result = _IsCopyrightLine(Line)
self.failIf(not Result)
#
# Normal case
#
def testCase9(self):
Line = 'not a Copyright '
Result = _IsCopyrightLine(Line)
self.failIf(Result)
if __name__ == '__main__':
Logger.Initialize()
unittest.main()
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import ServiceClient
from msrest import Configuration, Serializer, Deserializer
from .version import VERSION
from msrest.pipeline import ClientRawResponse
from msrest.exceptions import HttpOperationError
from . import models
class AutoRestValidationTestConfiguration(Configuration):
"""Configuration for AutoRestValidationTest
Note that all parameters used to create this instance are saved as instance
attributes.
:param subscription_id: Subscription ID.
:type subscription_id: str
:param api_version: Required string following pattern \\d{2}-\\d{2}-\\d{4}
:type api_version: str
:param str base_url: Service URL
:param str filepath: Existing config
"""
def __init__(
self, subscription_id, api_version, base_url=None, filepath=None):
if subscription_id is None:
raise ValueError('subscription_id must not be None.')
if api_version is None:
raise ValueError('api_version must not be None.')
if not base_url:
base_url = 'http://localhost'
super(AutoRestValidationTestConfiguration, self).__init__(base_url, filepath)
self.add_user_agent('autorestvalidationtest/{}'.format(VERSION))
self.subscription_id = subscription_id
self.api_version = api_version
class AutoRestValidationTest(object):
"""Test Infrastructure for AutoRest. No server backend exists for these tests.
:param config: Configuration for client.
:type config: AutoRestValidationTestConfiguration
"""
def __init__(self, config):
self._client = ServiceClient(None, config)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer()
self._deserialize = Deserializer(client_models)
self.config = config
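    # Illustrative usage (not part of the generated code; the values below are
    # made up and only need to satisfy the documented constraints):
    #
    #   config = AutoRestValidationTestConfiguration("abc123", "12-34-5678")
    #   client = AutoRestValidationTest(config)
    #   product = client.validation_of_method_parameters("group1", 150)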
def validation_of_method_parameters(
self, resource_group_name, id, custom_headers={}, raw=False, **operation_config):
"""
Validates input parameters on the method. See swagger for details.
:param resource_group_name: Required string between 3 and 10 chars
with pattern [a-zA-Z0-9]+.
:type resource_group_name: str
:param id: Required int multiple of 10 from 100 to 1000.
:type id: int
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`Product
<fixtures.acceptancetestsvalidation.models.Product>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/fakepath/{subscriptionId}/{resourceGroupName}/{id}'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=10, min_length=3, pattern='[a-zA-Z0-9]+'),
'id': self._serialize.url("id", id, 'int', maximum=1000, minimum=100, multiple=10)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
        query_parameters['apiVersion'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str', pattern=r'\d{2}-\d{2}-\d{4}')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Product', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def validation_of_body(
self, resource_group_name, id, body=None, custom_headers={}, raw=False, **operation_config):
"""
Validates body parameters on the method. See swagger for details.
:param resource_group_name: Required string between 3 and 10 chars
with pattern [a-zA-Z0-9]+.
:type resource_group_name: str
:param id: Required int multiple of 10 from 100 to 1000.
:type id: int
:param body:
:type body: :class:`Product
<fixtures.acceptancetestsvalidation.models.Product>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`Product
<fixtures.acceptancetestsvalidation.models.Product>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/fakepath/{subscriptionId}/{resourceGroupName}/{id}'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=10, min_length=3, pattern='[a-zA-Z0-9]+'),
'id': self._serialize.url("id", id, 'int', maximum=1000, minimum=100, multiple=10)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
        query_parameters['apiVersion'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str', pattern=r'\d{2}-\d{2}-\d{4}')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
if body is not None:
body_content = self._serialize.body(body, 'Product')
else:
body_content = None
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Product', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_with_constant_in_path(
self, constant_param="constant", custom_headers={}, raw=False, **operation_config):
"""
:param constant_param:
:type constant_param: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/validation/constantsInPath/{constantParam}/value'
path_format_arguments = {
'constantParam': self._serialize.url("constant_param", constant_param, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise HttpOperationError(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def post_with_constant_in_body(
self, constant_param="constant", body=None, custom_headers={}, raw=False, **operation_config):
"""
:param constant_param:
:type constant_param: str
:param body:
:type body: :class:`Product
<fixtures.acceptancetestsvalidation.models.Product>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`Product
<fixtures.acceptancetestsvalidation.models.Product>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/validation/constantsInPath/{constantParam}/value'
path_format_arguments = {
'constantParam': self._serialize.url("constant_param", constant_param, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
if body is not None:
body_content = self._serialize.body(body, 'Product')
else:
body_content = None
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise HttpOperationError(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Product', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
|
|
"""
Module for managing Windows systems and getting Windows system information.
Supports reboot, shutdown, joining a domain, and renaming.
:depends:
- pywintypes
- win32api
- win32con
- win32net
- wmi
"""
import ctypes
import logging
import platform
import time
from datetime import datetime
import salt.utils.functools
import salt.utils.locales
import salt.utils.platform
import salt.utils.win_system
import salt.utils.winapi
from salt.exceptions import CommandExecutionError
try:
import pywintypes
import win32api
import win32con
import win32net
import wmi
from ctypes import windll
HAS_WIN32NET_MODS = True
except ImportError:
HAS_WIN32NET_MODS = False
# Set up logging
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = "system"
def __virtual__():
"""
Only works on Windows Systems with Win32 Modules
"""
if not salt.utils.platform.is_windows():
return False, "Module win_system: Requires Windows"
if not HAS_WIN32NET_MODS:
return False, "Module win_system: Missing win32 modules"
return __virtualname__
def _convert_minutes_seconds(timeout, in_seconds=False):
"""
    Convert timeout to seconds. The value is treated as minutes unless
    in_seconds is True.
"""
return timeout if in_seconds else timeout * 60
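# Illustrative behaviour: _convert_minutes_seconds(5) returns 300 (minutes are
# converted to seconds), while _convert_minutes_seconds(5, in_seconds=True)
# returns 5 unchanged.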
def _convert_date_time_string(dt_string):
"""
    Convert a WMI date/time string to a formatted date/time string
"""
dt_string = dt_string.split(".")[0]
dt_obj = datetime.strptime(dt_string, "%Y%m%d%H%M%S")
return dt_obj.strftime("%Y-%m-%d %H:%M:%S")
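# Illustrative behaviour: a WMI-style timestamp such as
# "20180913164923.500000-300" is truncated at the "." and reparsed, so
# _convert_date_time_string("20180913164923.500000-300") returns
# "2018-09-13 16:49:23".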
def halt(timeout=5, in_seconds=False):
"""
Halt a running system.
Args:
timeout (int):
Number of seconds before halting the system. Default is 5 seconds.
in_seconds (bool):
Whether to treat timeout as seconds or minutes.
.. versionadded:: 2015.8.0
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.halt 5 True
"""
return shutdown(timeout=timeout, in_seconds=in_seconds)
def init(runlevel): # pylint: disable=unused-argument
"""
Change the system runlevel on sysV compatible systems. Not applicable to
Windows
CLI Example:
.. code-block:: bash
salt '*' system.init 3
"""
# cmd = ['init', runlevel]
# ret = __salt__['cmd.run'](cmd, python_shell=False)
# return ret
# TODO: Create a mapping of runlevels to # pylint: disable=fixme
# corresponding Windows actions
return "Not implemented on Windows at this time."
def poweroff(timeout=5, in_seconds=False):
"""
Power off a running system.
Args:
timeout (int):
Number of seconds before powering off the system. Default is 5
seconds.
in_seconds (bool):
Whether to treat timeout as seconds or minutes.
.. versionadded:: 2015.8.0
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.poweroff 5
"""
return shutdown(timeout=timeout, in_seconds=in_seconds)
def reboot(
timeout=5,
in_seconds=False,
wait_for_reboot=False, # pylint: disable=redefined-outer-name
only_on_pending_reboot=False,
):
"""
Reboot a running system.
Args:
timeout (int):
The number of minutes/seconds before rebooting the system. Use of
minutes or seconds depends on the value of ``in_seconds``. Default
is 5 minutes.
in_seconds (bool):
``True`` will cause the ``timeout`` parameter to be in seconds.
``False`` will be in minutes. Default is ``False``.
.. versionadded:: 2015.8.0
        wait_for_reboot (bool):
``True`` will sleep for timeout + 30 seconds after reboot has been
initiated. This is useful for use in a highstate. For example, you
may have states that you want to apply only after the reboot.
Default is ``False``.
.. versionadded:: 2015.8.0
only_on_pending_reboot (bool):
If this is set to ``True``, then the reboot will only proceed
if the system reports a pending reboot. Setting this parameter to
``True`` could be useful when calling this function from a final
housekeeping state intended to be executed at the end of a state run
(using *order: last*). Default is ``False``.
Returns:
bool: ``True`` if successful (a reboot will occur), otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.reboot 5
salt '*' system.reboot 5 True
Invoking this function from a final housekeeping state:
.. code-block:: yaml
final_housekeeping:
module.run:
- name: system.reboot
- only_on_pending_reboot: True
- order: last
"""
ret = shutdown(
timeout=timeout,
reboot=True,
in_seconds=in_seconds,
only_on_pending_reboot=only_on_pending_reboot,
)
if wait_for_reboot:
seconds = _convert_minutes_seconds(timeout, in_seconds)
time.sleep(seconds + 30)
return ret
def shutdown(
message=None,
timeout=5,
force_close=True,
reboot=False, # pylint: disable=redefined-outer-name
in_seconds=False,
only_on_pending_reboot=False,
):
"""
Shutdown a running system.
Args:
message (str):
The message to display to the user before shutting down.
timeout (int):
The length of time (in seconds) that the shutdown dialog box should
be displayed. While this dialog box is displayed, the shutdown can
be aborted using the ``system.shutdown_abort`` function.
If timeout is not zero, InitiateSystemShutdown displays a dialog box
on the specified computer. The dialog box displays the name of the
user who called the function, the message specified by the lpMessage
parameter, and prompts the user to log off. The dialog box beeps
when it is created and remains on top of other windows (system
modal). The dialog box can be moved but not closed. A timer counts
down the remaining time before the shutdown occurs.
If timeout is zero, the computer shuts down immediately without
displaying the dialog box and cannot be stopped by
``system.shutdown_abort``.
            Default is 5 minutes.
in_seconds (bool):
``True`` will cause the ``timeout`` parameter to be in seconds.
``False`` will be in minutes. Default is ``False``.
.. versionadded:: 2015.8.0
force_close (bool):
``True`` will force close all open applications. ``False`` will
display a dialog box instructing the user to close open
applications. Default is ``True``.
reboot (bool):
``True`` restarts the computer immediately after shutdown. ``False``
powers down the system. Default is ``False``.
        only_on_pending_reboot (bool):
            If ``True``, the shutdown will only proceed if the system reports a
            pending reboot. Default is ``False``. To optionally shutdown in a
            highstate, consider using the shutdown state instead of this
            module.
Returns:
bool:
``True`` if successful (a shutdown or reboot will occur), otherwise
``False``
CLI Example:
.. code-block:: bash
salt '*' system.shutdown "System will shutdown in 5 minutes"
"""
timeout = _convert_minutes_seconds(timeout, in_seconds)
if only_on_pending_reboot and not get_pending_reboot():
return False
if message and not isinstance(message, str):
message = message.decode("utf-8")
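    # "127.0.0.1" targets the local machine with InitiateSystemShutdown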
try:
win32api.InitiateSystemShutdown(
"127.0.0.1", message, timeout, force_close, reboot
)
return True
except pywintypes.error as exc:
(number, context, message) = exc.args
log.error("Failed to shutdown the system")
log.error("nbr: %s", number)
log.error("ctx: %s", context)
log.error("msg: %s", message)
return False
def shutdown_hard():
"""
Shutdown a running system with no timeout or warning.
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.shutdown_hard
"""
return shutdown(timeout=0)
def shutdown_abort():
"""
Abort a shutdown. Only available while the dialog box is being
displayed to the user. Once the shutdown has initiated, it cannot be
aborted.
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt 'minion-id' system.shutdown_abort
"""
try:
win32api.AbortSystemShutdown("127.0.0.1")
return True
except pywintypes.error as exc:
(number, context, message) = exc.args
log.error("Failed to abort system shutdown")
log.error("nbr: %s", number)
log.error("ctx: %s", context)
log.error("msg: %s", message)
return False
def lock():
"""
Lock the workstation.
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt 'minion-id' system.lock
"""
return windll.user32.LockWorkStation()
def set_computer_name(name):
"""
Set the Windows computer name
Args:
name (str):
The new name to give the computer. Requires a reboot to take effect.
Returns:
dict:
Returns a dictionary containing the old and new names if successful.
``False`` if not.
CLI Example:
.. code-block:: bash
salt 'minion-id' system.set_computer_name 'DavesComputer'
"""
if windll.kernel32.SetComputerNameExW(
win32con.ComputerNamePhysicalDnsHostname, name
):
ret = {"Computer Name": {"Current": get_computer_name()}}
pending = get_pending_computer_name()
if pending not in (None, False):
ret["Computer Name"]["Pending"] = pending
return ret
return False
def get_pending_computer_name():
"""
Get a pending computer name. If the computer name has been changed, and the
change is pending a system reboot, this function will return the pending
computer name. Otherwise, ``None`` will be returned. If there was an error
retrieving the pending computer name, ``False`` will be returned, and an
error message will be logged to the minion log.
Returns:
str:
Returns the pending name if pending restart. Returns ``None`` if not
pending restart.
CLI Example:
.. code-block:: bash
salt 'minion-id' system.get_pending_computer_name
"""
return salt.utils.win_system.get_pending_computer_name()
def get_computer_name():
"""
Get the Windows computer name
Returns:
str: Returns the computer name if found. Otherwise returns ``False``.
CLI Example:
.. code-block:: bash
salt 'minion-id' system.get_computer_name
"""
return salt.utils.win_system.get_computer_name()
def set_computer_desc(desc=None):
"""
Set the Windows computer description
Args:
desc (str):
The computer description
Returns:
str: Description if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt 'minion-id' system.set_computer_desc 'This computer belongs to Dave!'
"""
# Make sure the system exists
# Return an object containing current information array for the computer
system_info = win32net.NetServerGetInfo(None, 101)
    # A description is required; bail out if one was not passed
if desc is None:
return False
system_info["comment"] = desc
# Apply new settings
try:
win32net.NetServerSetInfo(None, 101, system_info)
except win32net.error as exc:
(number, context, message) = exc.args
log.error("Failed to update system")
log.error("nbr: %s", number)
log.error("ctx: %s", context)
log.error("msg: %s", message)
return False
return {"Computer Description": get_computer_desc()}
# pylint: disable=invalid-name
set_computer_description = salt.utils.functools.alias_function(
set_computer_desc, "set_computer_description"
)
# pylint: enable=invalid-name
def get_system_info():
"""
Get system information.
.. note::
Not all system info is available across all versions of Windows. If it
is not available on an older version, it will be skipped
Returns:
dict: Dictionary containing information about the system to include
name, description, version, etc...
CLI Example:
.. code-block:: bash
salt 'minion-id' system.get_system_info
"""
def byte_calc(val):
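        # Convert a raw byte count into a human-readable string (B/KB/MB/GB/TB)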
val = float(val)
if val < 2 ** 10:
return "{:.3f}B".format(val)
elif val < 2 ** 20:
return "{:.3f}KB".format(val / 2 ** 10)
elif val < 2 ** 30:
return "{:.3f}MB".format(val / 2 ** 20)
elif val < 2 ** 40:
return "{:.3f}GB".format(val / 2 ** 30)
else:
return "{:.3f}TB".format(val / 2 ** 40)
# Lookup dicts for Win32_OperatingSystem
os_type = {1: "Work Station", 2: "Domain Controller", 3: "Server"}
# lookup dicts for Win32_ComputerSystem
domain_role = {
0: "Standalone Workstation",
1: "Member Workstation",
2: "Standalone Server",
3: "Member Server",
4: "Backup Domain Controller",
5: "Primary Domain Controller",
}
warning_states = {
1: "Other",
2: "Unknown",
3: "Safe",
4: "Warning",
5: "Critical",
6: "Non-recoverable",
}
pc_system_types = {
0: "Unspecified",
1: "Desktop",
2: "Mobile",
3: "Workstation",
4: "Enterprise Server",
5: "SOHO Server",
6: "Appliance PC",
7: "Performance Server",
8: "Maximum",
}
# Connect to WMI
with salt.utils.winapi.Com():
conn = wmi.WMI()
system = conn.Win32_OperatingSystem()[0]
ret = {
"name": get_computer_name(),
"description": system.Description,
"install_date": system.InstallDate,
"last_boot": system.LastBootUpTime,
"os_manufacturer": system.Manufacturer,
"os_name": system.Caption,
"users": system.NumberOfUsers,
"organization": system.Organization,
"os_architecture": system.OSArchitecture,
"primary": system.Primary,
"os_type": os_type[system.ProductType],
"registered_user": system.RegisteredUser,
"system_directory": system.SystemDirectory,
"system_drive": system.SystemDrive,
"os_version": system.Version,
"windows_directory": system.WindowsDirectory,
}
# Must get chassis_sku_number this way for backwards compatibility
# system.ChassisSKUNumber is only available on Windows 10/2016 and newer
product = conn.Win32_ComputerSystemProduct()[0]
ret.update({"chassis_sku_number": product.SKUNumber})
system = conn.Win32_ComputerSystem()[0]
# Get pc_system_type depending on Windows version
if platform.release() in ["Vista", "7", "8"]:
# Types for Vista, 7, and 8
pc_system_type = pc_system_types[system.PCSystemType]
else:
# New types were added with 8.1 and newer
pc_system_types.update({8: "Slate", 9: "Maximum"})
pc_system_type = pc_system_types[system.PCSystemType]
ret.update(
{
"bootup_state": system.BootupState,
"caption": system.Caption,
"chassis_bootup_state": warning_states[system.ChassisBootupState],
"dns_hostname": system.DNSHostname,
"domain": system.Domain,
"domain_role": domain_role[system.DomainRole],
"hardware_manufacturer": system.Manufacturer,
"hardware_model": system.Model,
"network_server_mode_enabled": system.NetworkServerModeEnabled,
"part_of_domain": system.PartOfDomain,
"pc_system_type": pc_system_type,
"power_state": system.PowerState,
"status": system.Status,
"system_type": system.SystemType,
"total_physical_memory": byte_calc(system.TotalPhysicalMemory),
"total_physical_memory_raw": system.TotalPhysicalMemory,
"thermal_state": warning_states[system.ThermalState],
"workgroup": system.Workgroup,
}
)
# Get processor information
processors = conn.Win32_Processor()
ret["processors"] = 0
ret["processors_logical"] = 0
ret["processor_cores"] = 0
ret["processor_cores_enabled"] = 0
ret["processor_manufacturer"] = processors[0].Manufacturer
ret["processor_max_clock_speed"] = str(processors[0].MaxClockSpeed) + "MHz"
for processor in processors:
ret["processors"] += 1
ret["processors_logical"] += processor.NumberOfLogicalProcessors
ret["processor_cores"] += processor.NumberOfCores
# Older versions of Windows do not have the NumberOfEnabledCore
# property. In that case, we'll just skip it
try:
ret["processor_cores_enabled"] += processor.NumberOfEnabledCore
except (AttributeError, TypeError):
pass
if ret["processor_cores_enabled"] == 0:
ret.pop("processor_cores_enabled", False)
bios = conn.Win32_BIOS()[0]
ret.update(
{
"hardware_serial": bios.SerialNumber,
"bios_manufacturer": bios.Manufacturer,
"bios_version": bios.Version,
"bios_details": bios.BIOSVersion,
"bios_caption": bios.Caption,
"bios_description": bios.Description,
}
)
ret["install_date"] = _convert_date_time_string(ret["install_date"])
ret["last_boot"] = _convert_date_time_string(ret["last_boot"])
return ret
def get_computer_desc():
"""
Get the Windows computer description
Returns:
str: Returns the computer description if found. Otherwise returns
``False``.
CLI Example:
.. code-block:: bash
salt 'minion-id' system.get_computer_desc
"""
desc = get_system_info()["description"]
return False if desc is None else desc
# pylint: disable=invalid-name
get_computer_description = salt.utils.functools.alias_function(
get_computer_desc, "get_computer_description"
)
# pylint: enable=invalid-name
def get_hostname():
"""
Get the hostname of the windows minion
.. versionadded:: 2016.3.0
Returns:
str: Returns the hostname of the windows minion
CLI Example:
.. code-block:: bash
salt 'minion-id' system.get_hostname
"""
cmd = "hostname"
ret = __salt__["cmd.run"](cmd=cmd)
return ret
def set_hostname(hostname):
"""
Set the hostname of the windows minion, requires a restart before this will
be updated.
.. versionadded:: 2016.3.0
Args:
hostname (str): The hostname to set
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt 'minion-id' system.set_hostname newhostname
"""
with salt.utils.winapi.Com():
conn = wmi.WMI()
comp = conn.Win32_ComputerSystem()[0]
return comp.Rename(Name=hostname)
def join_domain(
domain,
username=None,
password=None,
account_ou=None,
account_exists=False,
restart=False,
):
"""
Join a computer to an Active Directory domain. Requires a reboot.
Args:
domain (str):
The domain to which the computer should be joined, e.g.
``example.com``
username (str):
Username of an account which is authorized to join computers to the
specified domain. Needs to be either fully qualified like
``user@domain.tld`` or simply ``user``
password (str):
Password of the specified user
account_ou (str):
The DN of the OU below which the account for this computer should be
created when joining the domain, e.g.
``ou=computers,ou=departm_432,dc=my-company,dc=com``
account_exists (bool):
If set to ``True`` the computer will only join the domain if the
account already exists. If set to ``False`` the computer account
will be created if it does not exist, otherwise it will use the
existing account. Default is ``False``
restart (bool):
``True`` will restart the computer after a successful join. Default
is ``False``
.. versionadded:: 2015.8.2/2015.5.7
Returns:
dict: Returns a dictionary if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt 'minion-id' system.join_domain domain='domain.tld' \\
username='joinuser' password='joinpassword' \\
account_ou='ou=clients,ou=org,dc=domain,dc=tld' \\
account_exists=False, restart=True
"""
status = get_domain_workgroup()
if "Domain" in status:
if status["Domain"] == domain:
return "Already joined to {}".format(domain)
if username and "\\" not in username and "@" not in username:
username = "{}@{}".format(username, domain)
if username and password is None:
return "Must specify a password if you pass a username"
# remove any escape characters
if isinstance(account_ou, str):
account_ou = account_ou.split("\\")
account_ou = "".join(account_ou)
err = _join_domain(
domain=domain,
username=username,
password=password,
account_ou=account_ou,
account_exists=account_exists,
)
if not err:
ret = {"Domain": domain, "Restart": False}
if restart:
ret["Restart"] = reboot()
return ret
raise CommandExecutionError(win32api.FormatMessage(err).rstrip())
def _join_domain(
domain, username=None, password=None, account_ou=None, account_exists=False
):
"""
Helper function to join the domain.
Args:
domain (str): The domain to which the computer should be joined, e.g.
``example.com``
username (str): Username of an account which is authorized to join
computers to the specified domain. Need to be either fully qualified
like ``user@domain.tld`` or simply ``user``
password (str): Password of the specified user
account_ou (str): The DN of the OU below which the account for this
computer should be created when joining the domain, e.g.
``ou=computers,ou=departm_432,dc=my-company,dc=com``
account_exists (bool): If set to ``True`` the computer will only join
the domain if the account already exists. If set to ``False`` the
computer account will be created if it does not exist, otherwise it
will use the existing account. Default is False.
Returns:
int:
"""
NETSETUP_JOIN_DOMAIN = 0x1 # pylint: disable=invalid-name
NETSETUP_ACCOUNT_CREATE = 0x2 # pylint: disable=invalid-name
NETSETUP_DOMAIN_JOIN_IF_JOINED = 0x20 # pylint: disable=invalid-name
NETSETUP_JOIN_WITH_NEW_NAME = 0x400 # pylint: disable=invalid-name
join_options = 0x0
join_options |= NETSETUP_JOIN_DOMAIN
join_options |= NETSETUP_DOMAIN_JOIN_IF_JOINED
join_options |= NETSETUP_JOIN_WITH_NEW_NAME
if not account_exists:
join_options |= NETSETUP_ACCOUNT_CREATE
with salt.utils.winapi.Com():
conn = wmi.WMI()
comp = conn.Win32_ComputerSystem()[0]
# Return the results of the command as an error
# JoinDomainOrWorkgroup returns a strangely formatted value that looks like
# (0,) so return the first item
return comp.JoinDomainOrWorkgroup(
Name=domain,
Password=password,
UserName=username,
AccountOU=account_ou,
FJoinOptions=join_options,
)[0]
def unjoin_domain(
username=None,
password=None,
domain=None,
workgroup="WORKGROUP",
disable=False,
restart=False,
):
# pylint: disable=anomalous-backslash-in-string
"""
Unjoin a computer from an Active Directory Domain. Requires a restart.
Args:
username (str):
Username of an account which is authorized to manage computer
accounts on the domain. Needs to be a fully qualified name like
``user@domain.tld`` or ``domain.tld\\user``. If the domain is not
specified, the passed domain will be used. If the computer account
doesn't need to be disabled after the computer is unjoined, this can
be ``None``.
password (str):
The password of the specified user
domain (str):
The domain from which to unjoin the computer. Can be ``None``
workgroup (str):
The workgroup to join the computer to. Default is ``WORKGROUP``
.. versionadded:: 2015.8.2/2015.5.7
disable (bool):
``True`` to disable the computer account in Active Directory.
Default is ``False``
restart (bool):
``True`` will restart the computer after successful unjoin. Default
is ``False``
.. versionadded:: 2015.8.2/2015.5.7
Returns:
dict: Returns a dictionary if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt 'minion-id' system.unjoin_domain restart=True
salt 'minion-id' system.unjoin_domain username='unjoinuser' \\
password='unjoinpassword' disable=True \\
restart=True
"""
# pylint: enable=anomalous-backslash-in-string
status = get_domain_workgroup()
if "Workgroup" in status:
if status["Workgroup"] == workgroup:
return "Already joined to {}".format(workgroup)
if username and "\\" not in username and "@" not in username:
if domain:
username = "{}@{}".format(username, domain)
else:
return "Must specify domain if not supplied in username"
if username and password is None:
return "Must specify a password if you pass a username"
NETSETUP_ACCT_DELETE = 0x4 # pylint: disable=invalid-name
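    # Per the NetUnjoinDomain API, NETSETUP_ACCT_DELETE disables (rather than
    # deletes) the computer account in the domain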
unjoin_options = 0x0
if disable:
unjoin_options |= NETSETUP_ACCT_DELETE
with salt.utils.winapi.Com():
conn = wmi.WMI()
comp = conn.Win32_ComputerSystem()[0]
err = comp.UnjoinDomainOrWorkgroup(
Password=password, UserName=username, FUnjoinOptions=unjoin_options
)
# you have to do this because UnjoinDomainOrWorkgroup returns a
# strangely formatted value that looks like (0,)
if not err[0]:
err = comp.JoinDomainOrWorkgroup(Name=workgroup)
if not err[0]:
ret = {"Workgroup": workgroup, "Restart": False}
if restart:
ret["Restart"] = reboot()
return ret
else:
log.error(win32api.FormatMessage(err[0]).rstrip())
log.error("Failed to unjoin the computer from %s", workgroup)
return False
else:
log.error(win32api.FormatMessage(err[0]).rstrip())
log.error("Failed to unjoin computer from %s", status["Domain"])
return False
def get_domain_workgroup():
"""
Get the domain or workgroup the computer belongs to.
.. versionadded:: 2015.5.7
.. versionadded:: 2015.8.2
Returns:
        dict: A dictionary containing the name of the domain or workgroup
            under the key ``Domain`` or ``Workgroup``
CLI Example:
.. code-block:: bash
salt 'minion-id' system.get_domain_workgroup
"""
with salt.utils.winapi.Com():
conn = wmi.WMI()
for computer in conn.Win32_ComputerSystem():
if computer.PartOfDomain:
return {"Domain": computer.Domain}
else:
return {"Workgroup": computer.Domain}
def set_domain_workgroup(workgroup):
"""
Set the domain or workgroup the computer belongs to.
.. versionadded:: 3001
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt 'minion-id' system.set_domain_workgroup LOCAL
"""
# Initialize COM
with salt.utils.winapi.Com():
# Grab the first Win32_ComputerSystem object from wmi
conn = wmi.WMI()
comp = conn.Win32_ComputerSystem()[0]
# Now we can join the new workgroup
res = comp.JoinDomainOrWorkgroup(Name=workgroup.upper())
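        # JoinDomainOrWorkgroup returns a tuple like (return_code,); 0 means success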
return True if not res[0] else False
def _try_parse_datetime(time_str, fmts):
"""
A helper function that attempts to parse the input time_str as a date.
Args:
time_str (str): A string representing the time
fmts (list): A list of date format strings
Returns:
datetime: Returns a datetime object if parsed properly, otherwise None
"""
result = None
for fmt in fmts:
try:
result = datetime.strptime(time_str, fmt)
break
except ValueError:
pass
return result
def get_system_time():
"""
Get the system time.
Returns:
str: Returns the system time in HH:MM:SS AM/PM format.
CLI Example:
.. code-block:: bash
salt 'minion-id' system.get_system_time
"""
now = win32api.GetLocalTime()
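    # GetLocalTime() returns a tuple of
    # (year, month, day_of_week, day, hour, minute, second, millisecond)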
meridian = "AM"
hours = int(now[4])
if hours == 12:
meridian = "PM"
elif hours == 0:
hours = 12
elif hours > 12:
hours = hours - 12
meridian = "PM"
return "{:02d}:{:02d}:{:02d} {}".format(hours, now[5], now[6], meridian)
def set_system_time(newtime):
"""
Set the system time.
Args:
newtime (str):
The time to set. Can be any of the following formats:
- HH:MM:SS AM/PM
- HH:MM AM/PM
- HH:MM:SS (24 hour)
- HH:MM (24 hour)
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt 'minion-id' system.set_system_time 12:01
"""
# Get date/time object from newtime
fmts = ["%I:%M:%S %p", "%I:%M %p", "%H:%M:%S", "%H:%M"]
dt_obj = _try_parse_datetime(newtime, fmts)
if dt_obj is None:
return False
# Set time using set_system_date_time()
return set_system_date_time(
hours=dt_obj.hour, minutes=dt_obj.minute, seconds=dt_obj.second
)
def set_system_date_time(
years=None, months=None, days=None, hours=None, minutes=None, seconds=None
):
"""
    Set the system date and time. Each argument is an element of the date/time
    and is optional. If an element is not passed, the current system value for
    that element will be used. For example, if you don't pass the year, the
    current system year will be used. (Used by set_system_date and
    set_system_time)
Args:
        years (int): Years digit, e.g.: 2015
months (int): Months digit: 1 - 12
days (int): Days digit: 1 - 31
hours (int): Hours digit: 0 - 23
minutes (int): Minutes digit: 0 - 59
seconds (int): Seconds digit: 0 - 59
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
        salt '*' system.set_system_date_time 2015 5 12 11 37 53
"""
# Get the current date/time
try:
date_time = win32api.GetLocalTime()
except win32api.error as exc:
(number, context, message) = exc.args
log.error("Failed to get local time")
log.error("nbr: %s", number)
log.error("ctx: %s", context)
log.error("msg: %s", message)
return False
# Check for passed values. If not passed, use current values
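    # GetLocalTime() tuple layout:
    # (year, month, day_of_week, day, hour, minute, second, millisecond)
    # Index 2 (day_of_week) is intentionally skipped below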
if years is None:
years = date_time[0]
if months is None:
months = date_time[1]
if days is None:
days = date_time[3]
if hours is None:
hours = date_time[4]
if minutes is None:
minutes = date_time[5]
if seconds is None:
seconds = date_time[6]
try:
class SYSTEMTIME(ctypes.Structure):
_fields_ = [
("wYear", ctypes.c_int16),
("wMonth", ctypes.c_int16),
("wDayOfWeek", ctypes.c_int16),
("wDay", ctypes.c_int16),
("wHour", ctypes.c_int16),
("wMinute", ctypes.c_int16),
("wSecond", ctypes.c_int16),
("wMilliseconds", ctypes.c_int16),
]
system_time = SYSTEMTIME()
# pylint: disable=invalid-name
system_time.wYear = int(years)
system_time.wMonth = int(months)
system_time.wDay = int(days)
system_time.wHour = int(hours)
system_time.wMinute = int(minutes)
system_time.wSecond = int(seconds)
# pylint: enable=invalid-name
system_time_ptr = ctypes.pointer(system_time)
succeeded = ctypes.windll.kernel32.SetLocalTime(system_time_ptr)
if succeeded != 0:
return True
else:
log.error("Failed to set local time")
raise CommandExecutionError(win32api.FormatMessage(succeeded).rstrip())
except OSError as err:
log.error("Failed to set local time")
raise CommandExecutionError(err)
def get_system_date():
"""
Get the Windows system date
Returns:
str: Returns the system date
CLI Example:
.. code-block:: bash
salt '*' system.get_system_date
"""
now = win32api.GetLocalTime()
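    # Format as MM/DD/YYYY: now[1] = month, now[3] = day, now[0] = year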
return "{:02d}/{:02d}/{:04d}".format(now[1], now[3], now[0])
def set_system_date(newdate):
"""
    Set the Windows system date. The date can be in any of the formats listed
    below.
Args:
newdate (str):
The date to set. Can be any of the following formats
- YYYY-MM-DD
- MM-DD-YYYY
- MM-DD-YY
- MM/DD/YYYY
- MM/DD/YY
- YYYY/MM/DD
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.set_system_date '03-28-13'
"""
fmts = ["%Y-%m-%d", "%m-%d-%Y", "%m-%d-%y", "%m/%d/%Y", "%m/%d/%y", "%Y/%m/%d"]
# Get date/time object from newdate
dt_obj = _try_parse_datetime(newdate, fmts)
if dt_obj is None:
return False
# Set time using set_system_date_time()
return set_system_date_time(years=dt_obj.year, months=dt_obj.month, days=dt_obj.day)
def start_time_service():
"""
Start the Windows time service
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.start_time_service
"""
return __salt__["service.start"]("w32time")
def stop_time_service():
"""
Stop the Windows time service
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.stop_time_service
"""
return __salt__["service.stop"]("w32time")
def get_pending_component_servicing():
"""
Determine whether there are pending Component Based Servicing tasks that
require a reboot.
.. versionadded:: 2016.11.0
Returns:
bool: ``True`` if there are pending Component Based Servicing tasks,
otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.get_pending_component_servicing
"""
return salt.utils.win_system.get_pending_component_servicing()
def get_pending_domain_join():
"""
Determine whether there is a pending domain join action that requires a
reboot.
.. versionadded:: 2016.11.0
Returns:
bool: ``True`` if there is a pending domain join action, otherwise
``False``
CLI Example:
.. code-block:: bash
salt '*' system.get_pending_domain_join
"""
return salt.utils.win_system.get_pending_domain_join()
def get_pending_file_rename():
"""
Determine whether there are pending file rename operations that require a
reboot.
.. versionadded:: 2016.11.0
Returns:
bool: ``True`` if there are pending file rename operations, otherwise
``False``
CLI Example:
.. code-block:: bash
salt '*' system.get_pending_file_rename
"""
return salt.utils.win_system.get_pending_file_rename()
def get_pending_servermanager():
"""
Determine whether there are pending Server Manager tasks that require a
reboot.
.. versionadded:: 2016.11.0
Returns:
bool: ``True`` if there are pending Server Manager tasks, otherwise
``False``
CLI Example:
.. code-block:: bash
salt '*' system.get_pending_servermanager
"""
return salt.utils.win_system.get_pending_servermanager()
def get_pending_update():
"""
Determine whether there are pending updates that require a reboot.
.. versionadded:: 2016.11.0
Returns:
bool: ``True`` if there are pending updates, otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.get_pending_update
"""
return salt.utils.win_system.get_pending_update()
def set_reboot_required_witnessed():
r"""
This function is used to remember that an event indicating that a reboot is
required was witnessed. This function relies on the salt-minion's ability to
create the following volatile registry key in the *HKLM* hive:
*SYSTEM\\CurrentControlSet\\Services\\salt-minion\\Volatile-Data*
Because this registry key is volatile, it will not persist beyond the
current boot session. Also, in the scope of this key, the name *'Reboot
required'* will be assigned the value of *1*.
For the time being, this function is being used whenever an install
completes with exit code 3010 and can be extended where appropriate in the
future.
.. versionadded:: 2016.11.0
Returns:
bool: ``True`` if successful, otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.set_reboot_required_witnessed
"""
return salt.utils.win_system.set_reboot_required_witnessed()
def get_reboot_required_witnessed():
"""
Determine if at any time during the current boot session the salt minion
witnessed an event indicating that a reboot is required.
This function will return ``True`` if an install completed with exit
code 3010 during the current boot session and can be extended where
appropriate in the future.
.. versionadded:: 2016.11.0
Returns:
        bool: ``True`` if the ``Reboot required`` registry value is set to ``1``,
otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.get_reboot_required_witnessed
"""
return salt.utils.win_system.get_reboot_required_witnessed()
def get_pending_reboot():
"""
Determine whether there is a reboot pending.
.. versionadded:: 2016.11.0
Returns:
bool: ``True`` if the system is pending reboot, otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.get_pending_reboot
"""
return salt.utils.win_system.get_pending_reboot()
def get_pending_reboot_details():
"""
Determine which check is signalling that the system is pending a reboot.
Useful in determining why your system is signalling that it needs a reboot.
.. versionadded:: 3001
Returns:
dict: A dictionary of the results of each system that would indicate a
pending reboot
CLI Example:
.. code-block:: bash
salt '*' system.get_pending_reboot_details
"""
return salt.utils.win_system.get_pending_reboot_details()
def get_pending_windows_update():
"""
Check the Windows Update system for a pending reboot state.
This leverages the Windows Update System to determine if the system is
pending a reboot.
.. versionadded:: 3001
Returns:
bool: ``True`` if the Windows Update system reports a pending update,
otherwise ``False``
CLI Example:
.. code-block:: bash
salt '*' system.get_pending_windows_update
"""
return salt.utils.win_system.get_pending_windows_update()
|
|
import json
import os
from django.core.cache import cache
from django.core.files.storage import default_storage as storage
from django.db.models import Q
import mock
from pyquery import PyQuery as pq
from olympia import amo
from olympia.activity.models import ActivityLog
from olympia.addons.forms import AddonFormBasic
from olympia.addons.models import (
Addon, AddonCategory, AddonDependency, Category)
from olympia.amo.templatetags.jinja_helpers import user_media_path
from olympia.amo.tests import (
TestCase, addon_factory, formset, initial, req_factory_factory)
from olympia.amo.tests.test_helpers import get_image_path
from olympia.amo.urlresolvers import reverse
from olympia.amo.utils import image_size
from olympia.bandwagon.models import (
Collection, CollectionAddon, FeaturedCollection)
from olympia.constants.categories import CATEGORIES_BY_ID
from olympia.devhub.views import edit_theme
from olympia.tags.models import AddonTag, Tag
from olympia.users.models import UserProfile
from olympia.versions.models import VersionPreview
class BaseTestEdit(TestCase):
fixtures = ['base/users', 'base/addon_3615',
'base/addon_5579', 'base/addon_3615_categories']
listed = True
__test__ = False # this is an abstract test case
def setUp(self):
super(BaseTestEdit, self).setUp()
assert self.client.login(email='del@icio.us')
addon = self.get_addon()
if self.listed:
self.make_addon_listed(addon)
ac = AddonCategory.objects.filter(addon=addon, category__id=22)[0]
ac.feature = False
ac.save()
AddonCategory.objects.filter(addon=addon,
category__id__in=[1, 71]).delete()
cache.clear()
self.tags = ['tag3', 'tag2', 'tag1']
for t in self.tags:
Tag(tag_text=t).save_tag(addon)
else:
self.make_addon_unlisted(addon)
addon.save()
self.user = UserProfile.objects.get(pk=55021)
self.addon = self.get_addon()
self.url = self.addon.get_dev_url()
def get_addon(self):
return Addon.objects.no_cache().get(id=3615)
def get_url(self, section, edit=False):
args = [self.addon.slug, section]
if edit:
args.append('edit')
return reverse('devhub.addons.section', args=args)
class BaseTestEditBasic(BaseTestEdit):
__test__ = False # this is an abstract test case
def setUp(self):
super(BaseTestEditBasic, self).setUp()
self.basic_edit_url = self.get_url('basic', edit=True)
if self.listed:
ctx = self.client.get(self.basic_edit_url).context
self.cat_initial = initial(ctx['cat_form'].initial_forms[0])
def get_dict(self, **kw):
result = {'name': 'new name', 'slug': 'test_slug',
'summary': 'new summary'}
if self.listed:
fs = formset(self.cat_initial, initial_count=1)
result.update({'is_experimental': True,
'requires_payment': True,
'tags': ', '.join(self.tags)})
result.update(fs)
result.update(**kw)
return result
def test_edit_page_not_editable(self):
# The /edit page is the entry point for the individual edit sections,
# and should never display the actual forms, so it should always pass
# editable=False to the templates it renders.
# See https://github.com/mozilla/addons-server/issues/6208
response = self.client.get(self.url)
assert response.context['editable'] is False
def test_redirect(self):
# /addon/:id => /addon/:id/edit
response = self.client.get(
'/en-US/developers/addon/3615/', follow=True)
self.assert3xx(response, self.url, 301)
def test_edit(self):
old_name = self.addon.name
data = self.get_dict()
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
addon = self.get_addon()
assert unicode(addon.name) == data['name']
assert addon.name.id == old_name.id
assert unicode(addon.summary) == data['summary']
assert unicode(addon.slug) == data['slug']
if self.listed:
assert [unicode(t) for t in addon.tags.all()] == sorted(self.tags)
def test_edit_check_description(self):
# Make sure bug 629779 doesn't return.
old_desc = self.addon.description
data = self.get_dict()
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
addon = self.get_addon()
assert addon.description == old_desc
def test_edit_slug_invalid(self):
old_edit = self.basic_edit_url
data = self.get_dict(name='', slug='invalid')
response = self.client.post(self.basic_edit_url, data)
doc = pq(response.content)
assert doc('form').attr('action') == old_edit
def test_edit_slug_valid(self):
old_edit = self.basic_edit_url
data = self.get_dict(slug='valid')
response = self.client.post(self.basic_edit_url, data)
doc = pq(response.content)
assert doc('form').attr('action') != old_edit
def test_edit_summary_escaping(self):
data = self.get_dict()
data['summary'] = '<b>oh my</b>'
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
addon = self.get_addon()
# Fetch the page so the LinkifiedTranslation gets in cache.
response = self.client.get(
reverse('devhub.addons.edit', args=[addon.slug]))
assert pq(response.content)('[data-name=summary]').html().strip() == (
'<span lang="en-us"><b>oh my</b></span>')
# Now make sure we don't have escaped content in the rendered form.
form = AddonFormBasic(instance=addon, request=req_factory_factory('/'))
html = pq('<body>%s</body>' % form['summary'])('[lang="en-us"]').html()
assert html.strip() == '<b>oh my</b>'
def test_edit_as_developer(self):
self.login('regular@mozilla.com')
data = self.get_dict()
response = self.client.post(self.basic_edit_url, data)
# Make sure we get errors when they are just regular users.
        assert response.status_code == (403 if self.listed else 404)
devuser = UserProfile.objects.get(pk=999)
self.get_addon().addonuser_set.create(
user=devuser, role=amo.AUTHOR_ROLE_DEV)
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
addon = self.get_addon()
assert unicode(addon.name) == data['name']
assert unicode(addon.summary) == data['summary']
assert unicode(addon.slug) == data['slug']
if self.listed:
assert [unicode(t) for t in addon.tags.all()] == sorted(self.tags)
def test_edit_name_required(self):
data = self.get_dict(name='', slug='test_addon')
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
self.assertFormError(
response, 'form', 'name', 'This field is required.')
def test_edit_name_spaces(self):
data = self.get_dict(name=' ', slug='test_addon')
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
self.assertFormError(
response, 'form', 'name', 'This field is required.')
def test_edit_slugs_unique(self):
Addon.objects.get(id=5579).update(slug='test_slug')
data = self.get_dict()
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
self.assertFormError(
response, 'form', 'slug',
'This slug is already in use. Please choose another.')
def test_edit_name_not_empty(self):
data = self.get_dict(name='', slug=self.addon.slug,
summary=self.addon.summary)
response = self.client.post(self.basic_edit_url, data)
self.assertFormError(
response, 'form', 'name', 'This field is required.')
def test_edit_name_max_length(self):
data = self.get_dict(name='xx' * 70, slug=self.addon.slug,
summary=self.addon.summary)
response = self.client.post(self.basic_edit_url, data)
self.assertFormError(response, 'form', 'name',
'Ensure this value has at most 50 '
'characters (it has 140).')
def test_edit_summary_max_length(self):
data = self.get_dict(name=self.addon.name, slug=self.addon.slug,
summary='x' * 251)
response = self.client.post(self.basic_edit_url, data)
self.assertFormError(response, 'form', 'summary',
'Ensure this value has at most 250 '
'characters (it has 251).')
def test_nav_links(self, show_compat_reporter=True):
if self.listed:
links = [
self.addon.get_dev_url('edit'), # Edit Information
self.addon.get_dev_url('owner'), # Manage Authors
self.addon.get_dev_url('versions'), # Manage Status & Versions
self.addon.get_url_path(), # View Listing
reverse('devhub.feed', args=[self.addon.slug]), # View Recent
reverse('stats.overview', args=[self.addon.slug]), # Stats
]
else:
links = [
self.addon.get_dev_url('edit'), # Edit Information
self.addon.get_dev_url('owner'), # Manage Authors
self.addon.get_dev_url('versions'), # Manage Status & Versions
reverse('devhub.feed', args=[self.addon.slug]), # View Recent
]
if show_compat_reporter:
# Compatibility Reporter. Only shown for legacy extensions.
links.append(
reverse('compat.reporter_detail', args=[self.addon.guid]))
response = self.client.get(self.url)
doc_links = [
unicode(a.attrib['href'])
for a in pq(response.content)('#edit-addon-nav').find('li a')]
assert links == doc_links
def test_nav_links_webextensions(self):
self.addon.find_latest_version(None).files.update(is_webextension=True)
self.test_nav_links(show_compat_reporter=False)
def _feature_addon(self, addon_id=3615):
c_addon = CollectionAddon.objects.create(
addon_id=addon_id, collection=Collection.objects.create())
FeaturedCollection.objects.create(collection=c_addon.collection,
application=amo.FIREFOX.id)
cache.clear()
class TagTestsMixin(object):
def test_edit_add_tag(self):
count = ActivityLog.objects.all().count()
self.tags.insert(0, 'tag4')
data = self.get_dict()
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
result = pq(response.content)('#addon_tags_edit').eq(0).text()
assert result == ', '.join(sorted(self.tags))
html = ('<a href="/en-US/firefox/tag/tag4">tag4</a> added to '
'<a href="/en-US/firefox/addon/test_slug/">new name</a>.')
assert ActivityLog.objects.for_addons(self.addon).get(
action=amo.LOG.ADD_TAG.id).to_string() == html
assert ActivityLog.objects.filter(
action=amo.LOG.ADD_TAG.id).count() == count + 1
def test_edit_denied_tag(self):
Tag.objects.get_or_create(tag_text='blue', denied=True)
data = self.get_dict(tags='blue')
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
error = 'Invalid tag: blue'
self.assertFormError(response, 'form', 'tags', error)
def test_edit_denied_tags_2(self):
Tag.objects.get_or_create(tag_text='blue', denied=True)
Tag.objects.get_or_create(tag_text='darn', denied=True)
data = self.get_dict(tags='blue, darn, swearword')
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
error = 'Invalid tags: blue, darn'
self.assertFormError(response, 'form', 'tags', error)
def test_edit_denied_tags_3(self):
Tag.objects.get_or_create(tag_text='blue', denied=True)
Tag.objects.get_or_create(tag_text='darn', denied=True)
Tag.objects.get_or_create(tag_text='swearword', denied=True)
data = self.get_dict(tags='blue, darn, swearword')
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
error = 'Invalid tags: blue, darn, swearword'
self.assertFormError(response, 'form', 'tags', error)
def test_edit_remove_tag(self):
self.tags.remove('tag2')
count = ActivityLog.objects.all().count()
data = self.get_dict()
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
result = pq(response.content)('#addon_tags_edit').eq(0).text()
assert result == ', '.join(sorted(self.tags))
assert ActivityLog.objects.filter(
action=amo.LOG.REMOVE_TAG.id).count() == count + 1
def test_edit_minlength_tags(self):
tags = self.tags
tags.append('a' * (amo.MIN_TAG_LENGTH - 1))
data = self.get_dict()
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
self.assertFormError(response, 'form', 'tags',
'All tags must be at least %d characters.' %
amo.MIN_TAG_LENGTH)
def test_edit_max_tags(self):
tags = self.tags
for i in range(amo.MAX_TAGS + 1):
tags.append('test%d' % i)
data = self.get_dict()
response = self.client.post(self.basic_edit_url, data)
self.assertFormError(
response, 'form', 'tags',
'You have %d too many tags.' % (len(tags) - amo.MAX_TAGS))
def test_edit_tag_empty_after_slug(self):
start = Tag.objects.all().count()
data = self.get_dict(tags='>>')
self.client.post(self.basic_edit_url, data)
# Check that the tag did not get created.
assert start == Tag.objects.all().count()
def test_edit_tag_slugified(self):
data = self.get_dict(tags='<script>alert("foo")</script>')
self.client.post(self.basic_edit_url, data)
tag = Tag.objects.all().order_by('-pk')[0]
assert tag.tag_text == 'scriptalertfooscript'
def test_edit_restricted_tags(self):
addon = self.get_addon()
tag = Tag.objects.create(
tag_text='i_am_a_restricted_tag', restricted=True)
AddonTag.objects.create(tag=tag, addon=addon)
res = self.client.get(self.basic_edit_url)
divs = pq(res.content)('#addon_tags_edit .edit-addon-details')
assert len(divs) == 2
assert 'i_am_a_restricted_tag' in divs.eq(1).text()
class ContributionsTestsMixin(object):
def test_contributions_url_not_url(self):
data = self.get_dict(name='blah', slug='test_addon',
contributions='foooo')
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
self.assertFormError(
response, 'form', 'contributions', 'Enter a valid URL.')
def test_contributions_url_not_valid_domain(self):
data = self.get_dict(name='blah', slug='test_addon',
contributions='http://foo.baa/')
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
self.assertFormError(
response, 'form', 'contributions',
'URL domain must be one of [%s], or a subdomain.' %
', '.join(amo.VALID_CONTRIBUTION_DOMAINS))
def test_contributions_url_valid_domain(self):
assert 'paypal.me' in amo.VALID_CONTRIBUTION_DOMAINS
data = self.get_dict(name='blah', slug='test_addon',
contributions='http://paypal.me/')
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
assert self.addon.reload().contributions == 'http://paypal.me/'
def test_contributions_url_valid_domain_sub(self):
assert 'paypal.me' in amo.VALID_CONTRIBUTION_DOMAINS
        assert 'sub.paypal.me' not in amo.VALID_CONTRIBUTION_DOMAINS
data = self.get_dict(name='blah', slug='test_addon',
contributions='http://sub.paypal.me/random/?path')
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
assert self.addon.reload().contributions == (
'http://sub.paypal.me/random/?path')
class L10nTestsMixin(object):
def get_l10n_urls(self):
paths = ('devhub.addons.edit', 'devhub.addons.owner')
return [reverse(p, args=['a3615']) for p in paths]
def test_l10n(self):
Addon.objects.get(id=3615).update(default_locale='en-US')
for url in self.get_l10n_urls():
response = self.client.get(url)
assert pq(
response.content)('#l10n-menu').attr('data-default') == 'en-us'
def test_l10n_not_us(self):
Addon.objects.get(id=3615).update(default_locale='fr')
for url in self.get_l10n_urls():
response = self.client.get(url)
assert pq(
response.content)('#l10n-menu').attr('data-default') == 'fr'
def test_l10n_not_us_id_url(self):
Addon.objects.get(id=3615).update(default_locale='fr')
for url in self.get_l10n_urls():
url = '/id' + url[6:]
response = self.client.get(url)
assert pq(
response.content)('#l10n-menu').attr('data-default') == 'fr'
class TestEditBasicListed(BaseTestEditBasic, TagTestsMixin,
ContributionsTestsMixin, L10nTestsMixin):
__test__ = True
def test_edit_categories_add(self):
assert [c.id for c in self.get_addon().all_categories] == [22]
self.cat_initial['categories'] = [22, 1]
self.client.post(self.basic_edit_url, self.get_dict())
addon_cats = self.get_addon().categories.values_list('id', flat=True)
assert sorted(addon_cats) == [1, 22]
def test_edit_categories_add_featured(self):
"""Ensure that categories cannot be changed for featured add-ons."""
self._feature_addon()
self.cat_initial['categories'] = [22, 1]
response = self.client.post(self.basic_edit_url, self.get_dict())
addon_cats = self.get_addon().categories.values_list('id', flat=True)
assert response.context['cat_form'].errors[0]['categories'] == (
['Categories cannot be changed while your add-on is featured for '
'this application.'])
# This add-on's categories should not change.
assert sorted(addon_cats) == [22]
def test_edit_categories_add_new_creatured_admin(self):
"""Ensure that admins can change categories for creatured add-ons."""
assert self.client.login(email='admin@mozilla.com')
self._feature_addon()
response = self.client.get(self.basic_edit_url)
doc = pq(response.content)
assert doc('#addon-categories-edit div.addon-app-cats').length == 1
assert doc('#addon-categories-edit > p').length == 0
self.cat_initial['categories'] = [22, 1]
response = self.client.post(self.basic_edit_url, self.get_dict())
addon_cats = self.get_addon().categories.values_list('id', flat=True)
assert 'categories' not in response.context['cat_form'].errors[0]
# This add-on's categories should change.
assert sorted(addon_cats) == [1, 22]
def test_edit_categories_disable_creatured(self):
"""Ensure that other forms are okay when disabling category changes."""
self._feature_addon()
self.cat_initial['categories'] = [22, 1]
data = self.get_dict()
self.client.post(self.basic_edit_url, data)
assert unicode(self.get_addon().name) == data['name']
def test_edit_categories_no_disclaimer(self):
"""Ensure that there is a not disclaimer for non-creatured add-ons."""
response = self.client.get(self.basic_edit_url)
doc = pq(response.content)
assert doc('#addon-categories-edit div.addon-app-cats').length == 1
assert doc('#addon-categories-edit > p').length == 0
def test_edit_no_previous_categories(self):
AddonCategory.objects.filter(addon=self.addon).delete()
response = self.client.get(self.basic_edit_url)
assert response.status_code == 200
self.cat_initial['categories'] = [22, 71]
response = self.client.post(self.basic_edit_url, self.get_dict())
self.addon = self.get_addon()
addon_cats = self.addon.categories.values_list('id', flat=True)
assert sorted(addon_cats) == [22, 71]
# Make sure the categories list we display to the user in the response
# has been updated.
assert set(response.context['addon'].all_categories) == set(
self.addon.all_categories)
def test_edit_categories_addandremove(self):
AddonCategory(addon=self.addon, category_id=1).save()
assert sorted(
[c.id for c in self.get_addon().all_categories]) == [1, 22]
self.cat_initial['categories'] = [22, 71]
response = self.client.post(self.basic_edit_url, self.get_dict())
self.addon = self.get_addon()
addon_cats = self.addon.categories.values_list('id', flat=True)
assert sorted(addon_cats) == [22, 71]
# Make sure the categories list we display to the user in the response
# has been updated.
assert set(response.context['addon'].all_categories) == set(
self.addon.all_categories)
def test_edit_categories_xss(self):
category = Category.objects.get(id=22)
category.db_name = '<script>alert("test");</script>'
category.slug = 'xssattempt'
category.save()
self.cat_initial['categories'] = [22, 71]
response = self.client.post(
self.basic_edit_url, formset(self.cat_initial, initial_count=1))
assert '<script>alert' not in response.content
        assert '&lt;script&gt;alert' in response.content
def test_edit_categories_remove(self):
category = Category.objects.get(id=1)
AddonCategory(addon=self.addon, category=category).save()
assert sorted(
[cat.id for cat in self.get_addon().all_categories]) == [1, 22]
self.cat_initial['categories'] = [22]
response = self.client.post(self.basic_edit_url, self.get_dict())
self.addon = self.get_addon()
addon_cats = self.addon.categories.values_list('id', flat=True)
assert sorted(addon_cats) == [22]
# Make sure the categories list we display to the user in the response
# has been updated.
assert set(response.context['addon'].all_categories) == set(
self.addon.all_categories)
def test_edit_categories_required(self):
del self.cat_initial['categories']
response = self.client.post(
self.basic_edit_url, formset(self.cat_initial, initial_count=1))
assert response.context['cat_form'].errors[0]['categories'] == (
['This field is required.'])
def test_edit_categories_max(self):
assert amo.MAX_CATEGORIES == 2
self.cat_initial['categories'] = [22, 1, 71]
response = self.client.post(
self.basic_edit_url, formset(self.cat_initial, initial_count=1))
assert response.context['cat_form'].errors[0]['categories'] == (
['You can have only 2 categories.'])
def test_edit_categories_other_failure(self):
Category.objects.get(id=22).update(misc=True)
self.cat_initial['categories'] = [22, 1]
response = self.client.post(
self.basic_edit_url, formset(self.cat_initial, initial_count=1))
assert response.context['cat_form'].errors[0]['categories'] == (
['The miscellaneous category cannot be combined with additional '
'categories.'])
def test_edit_categories_nonexistent(self):
self.cat_initial['categories'] = [100]
response = self.client.post(
self.basic_edit_url, formset(self.cat_initial, initial_count=1))
assert response.context['cat_form'].errors[0]['categories'] == (
['Select a valid choice. 100 is not one of the available '
'choices.'])
def test_text_not_none_when_has_flags(self):
response = self.client.get(self.url)
doc = pq(response.content)
assert doc('#addon-flags').text() == (
'This add-on requires external software.')
def test_text_none_when_no_flags(self):
addon = self.get_addon()
addon.update(external_software=False)
response = self.client.get(self.url)
doc = pq(response.content)
assert doc('#addon-flags').text() == 'None'
def test_nav_links_admin(self):
assert self.client.login(email='admin@mozilla.com')
response = self.client.get(self.url)
doc = pq(response.content)('#edit-addon-nav')
links = doc('ul:last').find('li a')
assert links.eq(1).attr('href') == reverse(
'reviewers.review', args=[self.addon.slug])
assert links.eq(2).attr('href') == reverse(
'reviewers.review', args=['unlisted', self.addon.slug])
assert links.eq(3).attr('href') == reverse(
'zadmin.addon_manage', args=[self.addon.slug])
def test_not_experimental_flag(self):
response = self.client.get(self.url)
doc = pq(response.content)
assert doc('#experimental-edit').text() == (
'This add-on is ready for general use.')
def test_experimental_flag(self):
self.get_addon().update(is_experimental=True)
response = self.client.get(self.url)
doc = pq(response.content)
assert doc('#experimental-edit').text() == (
'This add-on is experimental.')
def test_not_requires_payment_flag(self):
response = self.client.get(self.url)
doc = pq(response.content)
assert doc('#requires-payment-edit').text() == (
'This add-on doesn\'t require any additional payments, '
'paid services or software, or additional hardware.')
def test_requires_payment_flag(self):
self.get_addon().update(requires_payment=True)
response = self.client.get(self.url)
doc = pq(response.content)
assert doc('#requires-payment-edit').text() == (
'This add-on requires payment, non-free services or '
'software, or additional hardware.')
class TestEditMedia(BaseTestEdit):
__test__ = True
def setUp(self):
super(TestEditMedia, self).setUp()
self.media_edit_url = self.get_url('media', True)
self.icon_upload = reverse('devhub.addons.upload_icon',
args=[self.addon.slug])
self.preview_upload = reverse('devhub.addons.upload_preview',
args=[self.addon.slug])
def formset_new_form(self, *args, **kw):
ctx = self.client.get(self.media_edit_url).context
blank = initial(ctx['preview_form'].forms[-1])
blank.update(**kw)
return blank
def formset_media(self, *args, **kw):
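        # Build formset POST data from the supplied preview forms plus one
        # blank "new upload" form, replacing None values with empty strings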
kw.setdefault('initial_count', 0)
kw.setdefault('prefix', 'files')
fs = formset(*[a for a in args] + [self.formset_new_form()], **kw)
return {k: '' if v is None else v for k, v in fs.items()}
def test_icon_upload_attributes(self):
doc = pq(self.client.get(self.media_edit_url).content)
field = doc('input[name=icon_upload]')
assert field.length == 1
assert sorted(field.attr('data-allowed-types').split('|')) == (
['image/jpeg', 'image/png'])
assert field.attr('data-upload-url') == self.icon_upload
def test_edit_media_defaulticon(self):
data = {'icon_type': ''}
data_formset = self.formset_media(**data)
response = self.client.post(self.media_edit_url, data_formset)
assert response.context['form'].errors == {}
addon = self.get_addon()
assert addon.get_icon_url(64).endswith('icons/default-64.png')
for k in data:
assert unicode(getattr(addon, k)) == data[k]
def test_edit_media_preuploadedicon(self):
data = {'icon_type': 'icon/appearance'}
data_formset = self.formset_media(**data)
response = self.client.post(self.media_edit_url, data_formset)
assert response.context['form'].errors == {}
addon = self.get_addon()
assert addon.get_icon_url(64).endswith('icons/appearance-64.png')
for k in data:
assert unicode(getattr(addon, k)) == data[k]
def test_edit_media_uploadedicon(self):
img = get_image_path('mozilla.png')
src_image = open(img, 'rb')
data = {'upload_image': src_image}
response = self.client.post(self.icon_upload, data)
response_json = json.loads(response.content)
addon = self.get_addon()
# Now, save the form so it gets moved properly.
data = {
'icon_type': 'image/png',
'icon_upload_hash': response_json['upload_hash']
}
data_formset = self.formset_media(**data)
response = self.client.post(self.media_edit_url, data_formset)
assert response.context['form'].errors == {}
addon = self.get_addon()
# Unfortunate hardcoding of URL
url = addon.get_icon_url(64)
assert ('addon_icons/3/%s' % addon.id) in url, (
'Unexpected path: %r' % url)
assert data['icon_type'] == 'image/png'
# Check that it was actually uploaded
dirname = os.path.join(user_media_path('addon_icons'),
'%s' % (addon.id / 1000))
dest = os.path.join(dirname, '%s-32.png' % addon.id)
assert storage.exists(dest)
assert image_size(dest) == (32, 12)
assert addon.icon_type == 'image/png'
assert addon.icon_hash == 'bb362450'
def test_edit_media_icon_log(self):
self.test_edit_media_uploadedicon()
log = ActivityLog.objects.all()
assert log.count() == 1
assert log[0].action == amo.LOG.CHANGE_ICON.id
def test_edit_media_uploadedicon_noresize(self):
img = "static/img/notifications/error.png"
src_image = open(img, 'rb')
data = {'upload_image': src_image}
response = self.client.post(self.icon_upload, data)
response_json = json.loads(response.content)
addon = self.get_addon()
# Now, save the form so it gets moved properly.
data = {
'icon_type': 'image/png',
'icon_upload_hash': response_json['upload_hash']
}
data_formset = self.formset_media(**data)
response = self.client.post(self.media_edit_url, data_formset)
assert response.context['form'].errors == {}
addon = self.get_addon()
# Unfortunate hardcoding of URL
addon_url = addon.get_icon_url(64).split('?')[0]
assert addon_url.endswith('addon_icons/3/%s-64.png' % addon.id), (
'Unexpected path: %r' % addon_url)
assert data['icon_type'] == 'image/png'
# Check that it was actually uploaded
dirname = os.path.join(user_media_path('addon_icons'),
'%s' % (addon.id / 1000))
dest = os.path.join(dirname, '%s-64.png' % addon.id)
assert storage.exists(dest)
assert image_size(dest) == (48, 48)
assert addon.icon_type == 'image/png'
assert addon.icon_hash == 'f02063c9'
def check_image_type(self, url, msg):
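        # Upload a non-image file and verify the expected validation error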
img = 'static/js/zamboni/devhub.js'
src_image = open(img, 'rb')
res = self.client.post(url, {'upload_image': src_image})
response_json = json.loads(res.content)
assert response_json['errors'][0] == msg
def test_edit_media_icon_wrong_type(self):
self.check_image_type(self.icon_upload,
'Icons must be either PNG or JPG.')
def test_edit_media_screenshot_wrong_type(self):
self.check_image_type(self.preview_upload,
'Images must be either PNG or JPG.')
def setup_image_status(self):
addon = self.get_addon()
self.icon_dest = os.path.join(addon.get_icon_dir(),
'%s-32.png' % addon.id)
os.makedirs(os.path.dirname(self.icon_dest))
with storage.open(self.icon_dest, 'w') as f:
f.write('some icon data\n')
self.preview = addon.previews.create()
self.preview.save()
os.makedirs(os.path.dirname(self.preview.thumbnail_path))
with storage.open(self.preview.thumbnail_path, 'w') as f:
f.write('some icon data\n')
self.url = reverse('devhub.ajax.image.status', args=[addon.slug])
def test_image_status_no_choice(self):
addon = self.get_addon()
addon.update(icon_type='')
url = reverse('devhub.ajax.image.status', args=[addon.slug])
result = json.loads(self.client.get(url).content)
assert result['icons']
def test_image_status_works(self):
self.setup_image_status()
result = json.loads(self.client.get(self.url).content)
assert result['icons']
def test_image_status_fails(self):
self.setup_image_status()
storage.delete(self.icon_dest)
result = json.loads(self.client.get(self.url).content)
assert not result['icons']
def test_preview_status_works(self):
self.setup_image_status()
result = json.loads(self.client.get(self.url).content)
assert result['previews']
# No previews means that all the images are done.
self.addon.previews.all().delete()
result = json.loads(self.client.get(self.url).content)
assert result['previews']
def test_preview_status_fails(self):
self.setup_image_status()
storage.delete(self.preview.thumbnail_path)
result = json.loads(self.client.get(self.url).content)
assert not result['previews']
def test_image_status_persona(self):
self.setup_image_status()
storage.delete(self.icon_dest)
self.get_addon().update(type=amo.ADDON_PERSONA)
result = json.loads(self.client.get(self.url).content)
assert result['icons']
def test_image_status_default(self):
self.setup_image_status()
storage.delete(self.icon_dest)
self.get_addon().update(icon_type='icon/photos')
result = json.loads(self.client.get(self.url).content)
assert result['icons']
def check_image_animated(self, url, msg):
filehandle = open(get_image_path('animated.png'), 'rb')
res = self.client.post(url, {'upload_image': filehandle})
response_json = json.loads(res.content)
assert response_json['errors'][0] == msg
def test_icon_animated(self):
self.check_image_animated(self.icon_upload,
'Icons cannot be animated.')
def test_screenshot_animated(self):
self.check_image_animated(self.preview_upload,
'Images cannot be animated.')
def preview_add(self, amount=1):
img = get_image_path('mozilla.png')
src_image = open(img, 'rb')
data = {'upload_image': src_image}
data_formset = self.formset_media(**data)
url = self.preview_upload
response = self.client.post(url, data_formset)
details = json.loads(response.content)
upload_hash = details['upload_hash']
# Create and post with the formset.
fields = []
for i in range(amount):
fields.append(self.formset_new_form(caption='hi',
upload_hash=upload_hash,
position=i))
data_formset = self.formset_media(*fields)
self.client.post(self.media_edit_url, data_formset)
def test_edit_media_preview_add(self):
self.preview_add()
assert str(self.get_addon().previews.all()[0].caption) == 'hi'
def test_edit_media_preview_edit(self):
self.preview_add()
preview = self.get_addon().previews.all()[0]
edited = {'caption': 'bye',
'upload_hash': '',
'id': preview.id,
'position': preview.position,
'file_upload': None}
data_formset = self.formset_media(edited, initial_count=1)
self.client.post(self.media_edit_url, data_formset)
assert str(self.get_addon().previews.all()[0].caption) == 'bye'
assert len(self.get_addon().previews.all()) == 1
def test_edit_media_preview_reorder(self):
self.preview_add(3)
previews = self.get_addon().previews.all()
base = {'upload_hash': '', 'file_upload': None}
# Three preview forms were generated; mix them up here.
one = {'caption': 'first', 'position': 1, 'id': previews[2].id}
two = {'caption': 'second', 'position': 2, 'id': previews[0].id}
three = {'caption': 'third', 'position': 3, 'id': previews[1].id}
one.update(base)
two.update(base)
three.update(base)
# Add them in backwards ("third", "second", "first")
data_formset = self.formset_media(three, two, one, initial_count=3)
assert data_formset['files-0-caption'] == 'third'
assert data_formset['files-1-caption'] == 'second'
assert data_formset['files-2-caption'] == 'first'
self.client.post(self.media_edit_url, data_formset)
# They should come out "first", "second", "third"
assert str(self.get_addon().previews.all()[0].caption) == 'first'
assert str(self.get_addon().previews.all()[1].caption) == 'second'
assert str(self.get_addon().previews.all()[2].caption) == 'third'
def test_edit_media_preview_delete(self):
self.preview_add()
preview = self.get_addon().previews.get()
edited = {'DELETE': 'checked',
'upload_hash': '',
'id': preview.id,
'position': 0,
'file_upload': None}
data_formset = self.formset_media(edited, initial_count=1)
self.client.post(self.media_edit_url, data_formset)
assert len(self.get_addon().previews.all()) == 0
def test_edit_media_preview_add_another(self):
self.preview_add()
self.preview_add()
assert len(self.get_addon().previews.all()) == 2
def test_edit_media_preview_add_two(self):
self.preview_add(2)
assert len(self.get_addon().previews.all()) == 2
class BaseTestEditDetails(BaseTestEdit):
__test__ = True
def setUp(self):
super(BaseTestEditDetails, self).setUp()
self.details_url = self.get_url('details')
self.details_edit_url = self.get_url('details', edit=True)
def test_edit(self):
data = {
'description': 'New description with <em>html</em>!',
'default_locale': 'en-US',
'homepage': 'http://twitter.com/fligtarsmom'
}
response = self.client.post(self.details_edit_url, data)
assert response.context['form'].errors == {}
addon = self.get_addon()
for k in data:
assert unicode(getattr(addon, k)) == data[k]
def test_edit_xss(self):
"""
        Try to put XSS and some safe HTML in our description, and verify
        that only the safe HTML survives unescaped.
"""
self.addon.description = ("This\n<b>IS</b>"
"<script>alert('awesome')</script>")
self.addon.save()
response = self.client.get(self.url)
doc = pq(response.content)
assert doc('#edit-addon-details span[lang]').html() == (
"This<br/><b>IS</b><script>alert('awesome')</script>")
def test_edit_homepage_optional(self):
data = {
'description': 'New description with <em>html</em>!',
'default_locale': 'en-US',
'homepage': ''
}
response = self.client.post(self.details_edit_url, data)
assert response.context['form'].errors == {}
addon = self.get_addon()
for k in data:
assert unicode(getattr(addon, k)) == data[k]
class TestEditDetailsListed(BaseTestEditDetails):
def test_edit_default_locale_required_trans(self):
# name, summary, and description are required in the new locale.
description, homepage = map(unicode, [self.addon.description,
self.addon.homepage])
# TODO: description should get fixed up with the form.
error = ('Before changing your default locale you must have a name, '
'summary, and description in that locale. '
'You are missing ')
data = {
'description': description,
'homepage': homepage,
'default_locale': 'fr'
}
response = self.client.post(self.details_edit_url, data)
# We can't use assertFormError here, because the missing fields are
# stored in a dict, which isn't ordered.
form_error = response.context['form'].non_field_errors()[0]
assert form_error.startswith(error)
assert "'description'" in form_error
assert "'name'" in form_error
assert "'summary'" in form_error
# Now we have a name.
self.addon.name = {'fr': 'fr name'}
self.addon.save()
response = self.client.post(self.details_edit_url, data)
form_error = response.context['form'].non_field_errors()[0]
assert form_error.startswith(error)
assert "'description'" in form_error
assert "'summary'" in form_error
# Now we have a summary.
self.addon.summary = {'fr': 'fr summary'}
self.addon.save()
response = self.client.post(self.details_edit_url, data)
form_error = response.context['form'].non_field_errors()[0]
assert form_error.startswith(error)
assert "'description'" in form_error
# Now we're sending an fr description with the form.
data['description_fr'] = 'fr description'
response = self.client.post(self.details_edit_url, data)
assert response.context['form'].errors == {}
def test_edit_default_locale_frontend_error(self):
data = {
'description': 'xx',
'homepage': 'https://staticfil.es/',
'default_locale': 'fr'
}
response = self.client.post(self.details_edit_url, data)
self.assertContains(
response, 'Before changing your default locale you must')
def test_edit_locale(self):
addon = self.get_addon()
addon.update(default_locale='en-US')
response = self.client.get(self.details_url)
assert pq(response.content)('.addon_edit_locale').eq(0).text() == (
'English (US)')
class TestEditSupport(BaseTestEdit):
__test__ = True
def setUp(self):
super(TestEditSupport, self).setUp()
self.support_url = self.get_url('support')
self.support_edit_url = self.get_url('support', edit=True)
def test_edit_support(self):
data = {
'support_email': 'sjobs@apple.com',
'support_url': 'http://apple.com/'
}
response = self.client.post(self.support_edit_url, data)
assert response.context['form'].errors == {}
addon = self.get_addon()
for k in data:
assert unicode(getattr(addon, k)) == data[k]
def test_edit_support_optional_url(self):
data = {
'support_email': 'sjobs@apple.com',
'support_url': ''
}
response = self.client.post(self.support_edit_url, data)
assert response.context['form'].errors == {}
addon = self.get_addon()
for k in data:
assert unicode(getattr(addon, k)) == data[k]
def test_edit_support_optional_email(self):
data = {
'support_email': '',
'support_url': 'http://apple.com/'
}
response = self.client.post(self.support_edit_url, data)
assert response.context['form'].errors == {}
addon = self.get_addon()
for k in data:
assert unicode(getattr(addon, k)) == data[k]
class TestEditTechnical(BaseTestEdit):
__test__ = True
fixtures = BaseTestEdit.fixtures + [
'addons/persona', 'base/addon_40', 'base/addon_1833_yoono',
'base/addon_4664_twitterbar.json',
'base/addon_5299_gcal', 'base/addon_6113']
def setUp(self):
super(TestEditTechnical, self).setUp()
self.dependent_addon = Addon.objects.get(id=5579)
AddonDependency.objects.create(addon=self.addon,
dependent_addon=self.dependent_addon)
self.technical_url = self.get_url('technical')
self.technical_edit_url = self.get_url('technical', edit=True)
ctx = self.client.get(self.technical_edit_url).context
self.dep = initial(ctx['dependency_form'].initial_forms[0])
self.dep_initial = formset(self.dep, prefix='dependencies',
initial_count=1)
def dep_formset(self, *args, **kw):
kw.setdefault('initial_count', 1)
kw.setdefault('prefix', 'dependencies')
return formset(self.dep, *args, **kw)
def formset(self, data):
return self.dep_formset(**data)
def test_log(self):
data = self.formset({'developer_comments': 'This is a test'})
assert ActivityLog.objects.count() == 0
response = self.client.post(self.technical_edit_url, data)
assert response.context['form'].errors == {}
assert ActivityLog.objects.filter(
action=amo.LOG.EDIT_PROPERTIES.id).count() == 1
def test_technical_on(self):
# Turn everything on
data = {
'developer_comments': 'Test comment!',
'external_software': 'on',
'view_source': 'on',
'whiteboard-public': 'Whiteboard info.'
}
response = self.client.post(
self.technical_edit_url, self.formset(data))
assert response.context['form'].errors == {}
addon = self.get_addon()
for k in data:
if k == 'developer_comments':
assert unicode(getattr(addon, k)) == unicode(data[k])
elif k == 'whiteboard-public':
assert unicode(addon.whiteboard.public) == unicode(data[k])
else:
assert getattr(addon, k) == (data[k] == 'on')
        # And now turn everything back off.
data = {'developer_comments': 'Test comment!'}
response = self.client.post(
self.technical_edit_url, self.formset(data))
addon = self.get_addon()
assert not addon.external_software
assert not addon.view_source
def test_technical_devcomment_notrequired(self):
data = {
'developer_comments': '',
'external_software': 'on',
'view_source': 'on'
}
response = self.client.post(
self.technical_edit_url, self.formset(data))
assert response.context['form'].errors == {}
addon = self.get_addon()
for k in data:
if k == 'developer_comments':
assert unicode(getattr(addon, k)) == unicode(data[k])
else:
assert getattr(addon, k) == (data[k] == 'on')
def test_auto_repackage_not_shown(self):
file_ = self.addon.current_version.all_files[0]
file_.jetpack_version = None
file_.save()
response = self.client.get(self.technical_edit_url)
self.assertNotContains(response, 'Upgrade SDK?')
def test_auto_repackage_shown(self):
file_ = self.addon.current_version.all_files[0]
file_.jetpack_version = '1.0'
file_.save()
response = self.client.get(self.technical_edit_url)
self.assertContains(response, 'Upgrade SDK?')
def test_dependencies_none(self):
AddonDependency.objects.all().delete()
assert list(self.addon.all_dependencies) == []
response = self.client.get(self.technical_url)
assert pq(response.content)('#required-addons .empty').length == 1
def test_dependencies_overview(self):
assert [d.id for d in self.addon.all_dependencies] == [5579]
response = self.client.get(self.technical_url)
req = pq(response.content)('#required-addons')
assert req.length == 1
assert req.attr('data-src') == (
reverse('devhub.ajax.dependencies', args=[self.addon.slug]))
assert req.find('li').length == 1
link = req.find('a')
assert link.attr('href') == self.dependent_addon.get_url_path()
assert link.text() == unicode(self.dependent_addon.name)
def test_dependencies_initial(self):
response = self.client.get(self.technical_edit_url)
form = pq(response.content)(
'#required-addons .dependencies li[data-addonid]')
assert form.length == 1
assert form.find('input[id$=-dependent_addon]').val() == (
str(self.dependent_addon.id))
div = form.find('div')
assert div.attr('style') == (
'background-image:url(%s)' % self.dependent_addon.icon_url)
link = div.find('a')
assert link.attr('href') == self.dependent_addon.get_url_path()
assert link.text() == unicode(self.dependent_addon.name)
def test_dependencies_add(self):
addon = Addon.objects.get(id=5299)
assert addon.type == amo.ADDON_EXTENSION
assert addon in list(Addon.objects.public())
data = self.dep_formset({'dependent_addon': addon.id})
response = self.client.post(self.technical_edit_url, data)
assert not any(response.context['dependency_form'].errors)
self.check_dep_ids([self.dependent_addon.id, addon.id])
response = self.client.get(self.technical_edit_url)
reqs = pq(response.content)('#required-addons .dependencies')
assert reqs.find('li[data-addonid]').length == 2
req = reqs.find('li[data-addonid="5299"]')
assert req.length == 1
link = req.find('div a')
assert link.attr('href') == addon.get_url_path()
assert link.text() == unicode(addon.name)
def test_dependencies_limit(self):
deps = Addon.objects.public().exclude(
Q(id__in=[self.addon.id, self.dependent_addon.id]) |
Q(type=amo.ADDON_PERSONA))
args = []
assert deps.count() > 3 # The limit is 3.
for dep in deps:
args.append({'dependent_addon': dep.id})
data = self.dep_formset(*args)
response = self.client.post(self.technical_edit_url, data)
assert response.context['dependency_form'].non_form_errors() == (
['There cannot be more than 3 required add-ons.'])
def test_dependencies_limit_with_deleted_form(self):
deps = Addon.objects.public().exclude(
Q(id__in=[self.addon.id, self.dependent_addon.id]) |
Q(type=amo.ADDON_PERSONA))[:3]
args = []
for dep in deps:
args.append({'dependent_addon': dep.id})
# If we delete one form and add three, everything should be A-OK.
self.dep['DELETE'] = True
data = self.dep_formset(*args)
response = self.client.post(self.technical_edit_url, data)
assert not any(response.context['dependency_form'].errors)
self.check_dep_ids(deps.values_list('id', flat=True))
def check_dep_ids(self, expected=None):
if expected is None:
expected = []
ids = AddonDependency.objects.values_list(
'dependent_addon__id', flat=True)
assert sorted(list(ids)) == sorted(expected)
def check_bad_dep(self, r):
"""This helper checks that bad dependency data doesn't go through."""
assert r.context['dependency_form'].errors[1]['dependent_addon'] == (
['Select a valid choice. That choice is not one of the available '
'choices.'])
self.check_dep_ids([self.dependent_addon.id])
def test_dependencies_add_reviewed(self):
"""Ensure that reviewed add-ons can be made as dependencies."""
addon = Addon.objects.get(id=40)
for status in amo.REVIEWED_STATUSES:
addon.update(status=status)
assert addon in list(Addon.objects.public())
data = self.dep_formset({'dependent_addon': addon.id})
response = self.client.post(self.technical_edit_url, data)
assert not any(response.context['dependency_form'].errors)
self.check_dep_ids([self.dependent_addon.id, addon.id])
AddonDependency.objects.get(dependent_addon=addon).delete()
def test_dependencies_no_add_unreviewed(self):
"""Ensure that unreviewed add-ons cannot be made as dependencies."""
addon = Addon.objects.get(id=40)
for status in amo.UNREVIEWED_ADDON_STATUSES:
addon.update(status=status)
assert addon not in list(Addon.objects.public())
data = self.dep_formset({'dependent_addon': addon.id})
response = self.client.post(self.technical_edit_url, data)
self.check_bad_dep(response)
def test_dependencies_no_add_reviewed_persona(self):
"""Ensure that reviewed Personas cannot be made as dependencies."""
addon = Addon.objects.get(id=15663)
assert addon.type == amo.ADDON_PERSONA
assert addon in list(Addon.objects.public())
data = self.dep_formset({'dependent_addon': addon.id})
response = self.client.post(self.technical_edit_url, data)
self.check_bad_dep(response)
def test_dependencies_no_add_unreviewed_persona(self):
"""Ensure that unreviewed Personas cannot be made as dependencies."""
addon = Addon.objects.get(id=15663)
addon.update(status=amo.STATUS_PENDING)
assert addon.status == amo.STATUS_PENDING
assert addon not in list(Addon.objects.public())
data = self.dep_formset({'dependent_addon': addon.id})
response = self.client.post(self.technical_edit_url, data)
self.check_bad_dep(response)
def test_dependencies_add_self(self):
"""Ensure that an add-on cannot be made dependent on itself."""
data = self.dep_formset({'dependent_addon': self.addon.id})
response = self.client.post(self.technical_edit_url, data)
self.check_bad_dep(response)
def test_dependencies_add_invalid(self):
"""Ensure that a non-existent add-on cannot be a dependency."""
data = self.dep_formset({'dependent_addon': 9999})
response = self.client.post(self.technical_edit_url, data)
self.check_bad_dep(response)
def test_dependencies_add_duplicate(self):
"""Ensure that an add-on cannot be made dependent more than once."""
data = self.dep_formset({'dependent_addon': self.dependent_addon.id})
response = self.client.post(self.technical_edit_url, data)
assert (
response.context['dependency_form'].forms[1].non_field_errors() ==
['Addon dependency with this Addon and Dependent addon already '
'exists.'])
self.check_dep_ids([self.dependent_addon.id])
def test_dependencies_delete(self):
self.dep['DELETE'] = True
data = self.dep_formset(total_count=1, initial_count=1)
response = self.client.post(self.technical_edit_url, data)
assert not any(response.context['dependency_form'].errors)
self.check_dep_ids()
def test_dependencies_add_delete(self):
"""Ensure that we can both delete a dependency and add another."""
self.dep['DELETE'] = True
data = self.dep_formset({'dependent_addon': 5299})
response = self.client.post(self.technical_edit_url, data)
assert not any(response.context['dependency_form'].errors)
self.check_dep_ids([5299])
class TestEditBasicUnlisted(BaseTestEditBasic):
listed = False
__test__ = True
class TestEditDetailsUnlisted(BaseTestEditDetails):
listed = False
class TestEditTechnicalUnlisted(BaseTestEdit):
__test__ = True
listed = False
def test_whiteboard(self):
edit_url = self.get_url('technical', edit=True)
# It's okay to post empty whiteboard instructions.
response = self.client.post(edit_url, {'whiteboard-public': ''})
assert response.context['form'].errors == {}
# Let's update it.
response = self.client.post(
edit_url, {'whiteboard-public': 'important stuff'})
assert response.context['form'].errors == {}
addon = self.get_addon()
assert addon.whiteboard.public == 'important stuff'
# And clear it again.
response = self.client.post(edit_url, {'whiteboard-public': ''})
assert response.context['form'].errors == {}
addon = self.get_addon()
assert addon.whiteboard.public == ''
class StaticMixin(object):
def setUp(self):
super(StaticMixin, self).setUp()
addon = self.get_addon()
addon.update(type=amo.ADDON_STATICTHEME)
if self.listed:
AddonCategory.objects.filter(addon=addon).delete()
cache.clear()
Category.from_static_category(CATEGORIES_BY_ID[300], save=True)
Category.from_static_category(CATEGORIES_BY_ID[308], save=True)
VersionPreview.objects.create(version=addon.current_version)
class TestEditBasicStaticThemeListed(StaticMixin, BaseTestEditBasic,
TagTestsMixin, ContributionsTestsMixin,
L10nTestsMixin):
__test__ = True
def get_dict(self, **kw):
result = {'name': 'new name', 'slug': 'test_slug',
'summary': 'new summary', 'category': 300,
'tags': ', '.join(self.tags)}
result.update(**kw)
return result
def test_edit_categories_set(self):
assert [cat.id for cat in self.get_addon().all_categories] == []
response = self.client.post(
self.basic_edit_url, self.get_dict(category=308))
assert set(response.context['addon'].all_categories) == set(
self.get_addon().all_categories)
addon_cats = self.get_addon().categories.values_list('id', flat=True)
assert sorted(addon_cats) == [308]
def test_edit_categories_change(self):
category = Category.objects.get(id=300)
AddonCategory(addon=self.addon, category=category).save()
assert sorted(
[cat.id for cat in self.get_addon().all_categories]) == [300]
self.client.post(self.basic_edit_url, self.get_dict(category=308))
category_ids_new = [cat.id for cat in self.get_addon().all_categories]
# Only ever one category for Static Themes
assert category_ids_new == [308]
# Check we didn't delete the Category object too!
assert category.reload()
def test_edit_categories_required(self):
data = self.get_dict(category='')
response = self.client.post(self.basic_edit_url, data)
assert response.status_code == 200
self.assertFormError(
response, 'cat_form', 'category', 'This field is required.')
def test_edit_categories_add_featured(self):
"""Ensure that categories cannot be changed for featured add-ons."""
category = Category.objects.get(id=308)
AddonCategory(addon=self.addon, category=category).save()
self._feature_addon(self.addon.id)
response = self.client.post(self.basic_edit_url, self.get_dict())
addon_cats = self.get_addon().categories.values_list('id', flat=True)
# This add-on's categories should not change.
assert sorted(addon_cats) == [308]
self.assertFormError(
response, 'cat_form', 'category',
'Categories cannot be changed while your add-on is featured.')
def test_edit_categories_add_new_creatured_admin(self):
"""Ensure that admins can change categories for creatured add-ons."""
assert self.client.login(email='admin@mozilla.com')
category = Category.objects.get(id=308)
AddonCategory(addon=self.addon, category=category).save()
self._feature_addon(self.addon.id)
response = self.client.get(self.basic_edit_url)
doc = pq(response.content)
assert doc('#addon-categories-edit').length == 1
assert doc('#addon-categories-edit > p').length == 0
response = self.client.post(self.basic_edit_url, self.get_dict())
addon_cats = self.get_addon().categories.values_list('id', flat=True)
assert 'category' not in response.context['cat_form'].errors
# This add-on's categories should change.
assert sorted(addon_cats) == [300]
def test_edit_categories_disable_creatured(self):
"""Ensure that other forms are okay when disabling category changes."""
self._feature_addon()
data = self.get_dict()
self.client.post(self.basic_edit_url, data)
assert unicode(self.get_addon().name) == data['name']
def test_theme_preview_shown(self):
response = self.client.get(self.url)
doc = pq(response.content)
assert 'Preview' in doc('h3').text()
assert doc('img')[0].attrib['src'] == (
self.addon.current_version.previews.first().image_url)
class TestEditBasicStaticThemeUnlisted(StaticMixin, TestEditBasicUnlisted):
def get_dict(self, **kw):
result = {'name': 'new name', 'slug': 'test_slug',
'summary': 'new summary'}
result.update(**kw)
return result
def test_theme_preview_not_shown(self):
response = self.client.get(self.url)
doc = pq(response.content)
assert 'Preview' not in doc('h3').text()
class TestEditDetailsStaticThemeListed(StaticMixin, TestEditDetailsListed):
pass
class TestEditDetailsStaticThemeUnlisted(StaticMixin, TestEditDetailsUnlisted):
pass
class TestEditTechnicalStaticThemeListed(StaticMixin,
TestEditTechnicalUnlisted):
# Using the Unlisted test case because it's got the right tests for us.
listed = True
class TestEditTechnicalStaticThemeUnlisted(StaticMixin,
TestEditTechnicalUnlisted):
pass
class TestAdmin(TestCase):
fixtures = ['base/users', 'base/addon_3615']
def login_admin(self):
assert self.client.login(email='admin@mozilla.com')
def login_user(self):
assert self.client.login(email='del@icio.us')
def test_show_admin_settings_admin(self):
self.login_admin()
url = reverse('devhub.addons.edit', args=['a3615'])
response = self.client.get(url)
assert response.status_code == 200
self.assertContains(response, 'Admin Settings')
assert 'admin_form' in response.context
def test_show_admin_settings_nonadmin(self):
self.login_user()
url = reverse('devhub.addons.edit', args=['a3615'])
response = self.client.get(url)
assert response.status_code == 200
self.assertNotContains(response, 'Admin Settings')
assert 'admin_form' not in response.context, (
'AdminForm not expected in context.')
def test_post_as_admin(self):
self.login_admin()
url = reverse('devhub.addons.admin', args=['a3615'])
response = self.client.post(url)
assert response.status_code == 200
def test_post_as_nonadmin(self):
self.login_user()
url = reverse('devhub.addons.admin', args=['a3615'])
response = self.client.post(url)
assert response.status_code == 403
def test_change_reputation_and_type(self):
addon = Addon.objects.get(pk=3615)
self.login_admin()
url = reverse('devhub.addons.admin', args=['a3615'])
data = {
'reputation': 3,
'type': amo.ADDON_THEME,
}
response = self.client.post(url, data)
assert response.status_code == 200
assert response.context['admin_form'].is_valid()
addon.reload()
assert addon.reputation == 3
assert addon.type == amo.ADDON_THEME
class TestThemeEdit(TestCase):
fixtures = ['base/user_999']
def setUp(self):
super(TestThemeEdit, self).setUp()
self.addon = addon_factory(type=amo.ADDON_PERSONA)
self.user = UserProfile.objects.get()
self.addon.addonuser_set.create(user=self.user)
@mock.patch('olympia.amo.messages.error')
def test_desc_too_long_error(self, message_mock):
data = {'description': 'a' * 501}
req = req_factory_factory(
self.addon.get_dev_url('edit'),
user=self.user, post=True, data=data, session={})
response = edit_theme(req, self.addon.slug)
doc = pq(response.content)
assert 'characters' in doc('#trans-description + ul li').text()
def test_no_reupload_on_pending(self):
self.addon.update(status=amo.STATUS_PENDING)
req = req_factory_factory(
self.addon.get_dev_url('edit'), user=self.user, session={})
response = edit_theme(req, self.addon.slug)
doc = pq(response.content)
assert not doc('a.reupload')
self.addon.update(status=amo.STATUS_PUBLIC)
req = req_factory_factory(
self.addon.get_dev_url('edit'), user=self.user, session={})
response = edit_theme(req, self.addon.slug)
doc = pq(response.content)
assert doc('a.reupload')
def test_color_input_is_empty_at_creation(self):
self.client.login(email='regular@mozilla.com')
response = self.client.get(reverse('devhub.themes.submit'))
doc = pq(response.content)
el = doc('input.color-picker')
assert el.attr('type') == 'text'
assert not el.attr('value')
def test_color_input_is_not_empty_at_edit(self):
color = "123456"
self.addon.persona.accentcolor = color
self.addon.persona.save()
self.client.login(email='regular@mozilla.com')
url = reverse('devhub.themes.edit', args=(self.addon.slug, ))
response = self.client.get(url)
doc = pq(response.content)
el = doc('input#id_accentcolor')
assert el.attr('type') == 'text'
assert el.attr('value') == "#" + color
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
from typing import Dict, List, Tuple
from urllib.request import getproxies_environment # type: ignore
import attr
from synapse.config.server import DEFAULT_IP_RANGE_BLACKLIST, generate_ip_set
from synapse.types import JsonDict
from synapse.util.check_dependencies import DependencyException, check_requirements
from synapse.util.module_loader import load_module
from ._base import Config, ConfigError
logger = logging.getLogger(__name__)
DEFAULT_THUMBNAIL_SIZES = [
{"width": 32, "height": 32, "method": "crop"},
{"width": 96, "height": 96, "method": "crop"},
{"width": 320, "height": 240, "method": "scale"},
{"width": 640, "height": 480, "method": "scale"},
{"width": 800, "height": 600, "method": "scale"},
]
THUMBNAIL_SIZE_YAML = """\
# - width: %(width)i
# height: %(height)i
# method: %(method)s
"""
HTTP_PROXY_SET_WARNING = """\
The Synapse config url_preview_ip_range_blacklist will be ignored as an HTTP(s) proxy is configured."""
@attr.s(frozen=True, slots=True, auto_attribs=True)
class ThumbnailRequirement:
width: int
height: int
method: str
media_type: str
@attr.s(frozen=True, slots=True, auto_attribs=True)
class MediaStorageProviderConfig:
store_local: bool # Whether to store newly uploaded local files
store_remote: bool # Whether to store newly downloaded remote files
store_synchronous: bool # Whether to wait for successful storage for local uploads
def parse_thumbnail_requirements(
thumbnail_sizes: List[JsonDict],
) -> Dict[str, Tuple[ThumbnailRequirement, ...]]:
"""Takes a list of dictionaries with "width", "height", and "method" keys
    and creates a map from image media types to the thumbnail size, thumbnailing
    method, and thumbnail media type to precalculate.
    Args:
        thumbnail_sizes: List of dicts with "width", "height", and "method" keys.
    Returns:
        Dictionary mapping from media type string to a tuple of ThumbnailRequirement.
"""
requirements: Dict[str, List[ThumbnailRequirement]] = {}
for size in thumbnail_sizes:
width = size["width"]
height = size["height"]
method = size["method"]
jpeg_thumbnail = ThumbnailRequirement(width, height, method, "image/jpeg")
png_thumbnail = ThumbnailRequirement(width, height, method, "image/png")
requirements.setdefault("image/jpeg", []).append(jpeg_thumbnail)
requirements.setdefault("image/jpg", []).append(jpeg_thumbnail)
requirements.setdefault("image/webp", []).append(jpeg_thumbnail)
requirements.setdefault("image/gif", []).append(png_thumbnail)
requirements.setdefault("image/png", []).append(png_thumbnail)
return {
media_type: tuple(thumbnails) for media_type, thumbnails in requirements.items()
}
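# Illustrative sketch (not part of the original module): with the default sizes,
# parse_thumbnail_requirements(DEFAULT_THUMBNAIL_SIZES)["image/png"][0] is
# ThumbnailRequirement(width=32, height=32, method="crop", media_type="image/png");
# PNG and GIF sources get PNG thumbnails, while JPEG/JPG/WebP sources get JPEG
# thumbnails.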
class ContentRepositoryConfig(Config):
section = "media"
def read_config(self, config, **kwargs):
# Only enable the media repo if either the media repo is enabled or the
# current worker app is the media repo.
if (
self.root.server.enable_media_repo is False
and config.get("worker_app") != "synapse.app.media_repository"
):
self.can_load_media_repo = False
return
else:
self.can_load_media_repo = True
# Whether this instance should be the one to run the background jobs to
        # e.g. clean up old URL previews.
self.media_instance_running_background_jobs = config.get(
"media_instance_running_background_jobs",
)
self.max_upload_size = self.parse_size(config.get("max_upload_size", "50M"))
self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M"))
self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M"))
self.media_store_path = self.ensure_directory(
config.get("media_store_path", "media_store")
)
backup_media_store_path = config.get("backup_media_store_path")
synchronous_backup_media_store = config.get(
"synchronous_backup_media_store", False
)
storage_providers = config.get("media_storage_providers", [])
if backup_media_store_path:
if storage_providers:
raise ConfigError(
"Cannot use both 'backup_media_store_path' and 'storage_providers'"
)
storage_providers = [
{
"module": "file_system",
"store_local": True,
"store_synchronous": synchronous_backup_media_store,
"store_remote": True,
"config": {"directory": backup_media_store_path},
}
]
        # This is a list of configs that can be used to create the storage
        # providers. The entries are tuples of (Class, class_config,
        # MediaStorageProviderConfig), where Class is the class of the provider,
        # class_config is the config to pass to it, and MediaStorageProviderConfig
        # holds the options for StorageProviderWrapper.
#
# We don't create the storage providers here as not all workers need
# them to be started.
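        # Illustrative shape of one entry (a sketch, not taken from the original
        # code), assuming a single "file_system" provider:
        #
        #   (FileStorageProviderBackend,
        #    <parsed provider config>,
        #    MediaStorageProviderConfig(store_local=True,
        #                               store_remote=True,
        #                               store_synchronous=False))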
self.media_storage_providers: List[tuple] = []
for i, provider_config in enumerate(storage_providers):
# We special case the module "file_system" so as not to need to
# expose FileStorageProviderBackend
if provider_config["module"] == "file_system":
provider_config["module"] = (
"synapse.rest.media.v1.storage_provider"
".FileStorageProviderBackend"
)
provider_class, parsed_config = load_module(
provider_config, ("media_storage_providers", "<item %i>" % i)
)
wrapper_config = MediaStorageProviderConfig(
provider_config.get("store_local", False),
provider_config.get("store_remote", False),
provider_config.get("store_synchronous", False),
)
self.media_storage_providers.append(
(provider_class, parsed_config, wrapper_config)
)
self.dynamic_thumbnails = config.get("dynamic_thumbnails", False)
self.thumbnail_requirements = parse_thumbnail_requirements(
config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES)
)
self.url_preview_enabled = config.get("url_preview_enabled", False)
if self.url_preview_enabled:
try:
check_requirements("url_preview")
except DependencyException as e:
raise ConfigError(
e.message # noqa: B306, DependencyException.message is a property
)
proxy_env = getproxies_environment()
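            # Without an explicit blacklist, previewing is only allowed when both
            # an HTTP and an HTTPS proxy are configured (the proxy then restricts
            # outbound requests); if a blacklist is configured while a proxy is
            # also set, warn that the blacklist will be ignored.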
if "url_preview_ip_range_blacklist" not in config:
if "http" not in proxy_env or "https" not in proxy_env:
raise ConfigError(
"For security, you must specify an explicit target IP address "
"blacklist in url_preview_ip_range_blacklist for url previewing "
"to work"
)
else:
if "http" in proxy_env or "https" in proxy_env:
logger.warning("".join(HTTP_PROXY_SET_WARNING))
# we always blacklist '0.0.0.0' and '::', which are supposed to be
# unroutable addresses.
self.url_preview_ip_range_blacklist = generate_ip_set(
config["url_preview_ip_range_blacklist"],
["0.0.0.0", "::"],
config_path=("url_preview_ip_range_blacklist",),
)
self.url_preview_ip_range_whitelist = generate_ip_set(
config.get("url_preview_ip_range_whitelist", ()),
config_path=("url_preview_ip_range_whitelist",),
)
self.url_preview_url_blacklist = config.get("url_preview_url_blacklist", ())
self.url_preview_accept_language = config.get(
"url_preview_accept_language"
) or ["en"]
def generate_config_section(self, data_dir_path, **kwargs):
media_store = os.path.join(data_dir_path, "media_store")
formatted_thumbnail_sizes = "".join(
THUMBNAIL_SIZE_YAML % s for s in DEFAULT_THUMBNAIL_SIZES
)
# strip final NL
formatted_thumbnail_sizes = formatted_thumbnail_sizes[:-1]
ip_range_blacklist = "\n".join(
" # - '%s'" % ip for ip in DEFAULT_IP_RANGE_BLACKLIST
)
return (
r"""
## Media Store ##
# Enable the media store service in the Synapse master. Uncomment the
# following if you are using a separate media store worker.
#
#enable_media_repo: false
# Directory where uploaded images and attachments are stored.
#
media_store_path: "%(media_store)s"
# Media storage providers allow media to be stored in different
# locations.
#
#media_storage_providers:
# - module: file_system
# # Whether to store newly uploaded local files
# store_local: false
# # Whether to store newly downloaded remote files
# store_remote: false
# # Whether to wait for successful storage for local uploads
# store_synchronous: false
# config:
# directory: /mnt/some/other/directory
# The largest allowed upload size in bytes
#
# If you are using a reverse proxy you may also need to set this value in
# your reverse proxy's config. Notably Nginx has a small max body size by default.
# See https://matrix-org.github.io/synapse/latest/reverse_proxy.html.
#
#max_upload_size: 50M
# Maximum number of pixels that will be thumbnailed
#
#max_image_pixels: 32M
# Whether to generate new thumbnails on the fly to precisely match
# the resolution requested by the client. If true then whenever
# a new resolution is requested by the client the server will
# generate a new thumbnail. If false the server will pick a thumbnail
# from a precalculated list.
#
#dynamic_thumbnails: false
# List of thumbnails to precalculate when an image is uploaded.
#
#thumbnail_sizes:
%(formatted_thumbnail_sizes)s
# Is the preview URL API enabled?
#
# 'false' by default: uncomment the following to enable it (and specify a
# url_preview_ip_range_blacklist blacklist).
#
#url_preview_enabled: true
# List of IP address CIDR ranges that the URL preview spider is denied
# from accessing. There are no defaults: you must explicitly
# specify a list for URL previewing to work. You should specify any
# internal services in your network that you do not want synapse to try
# to connect to, otherwise anyone in any Matrix room could cause your
# synapse to issue arbitrary GET requests to your internal services,
# causing serious security issues.
#
# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
# listed here, since they correspond to unroutable addresses.)
#
# This must be specified if url_preview_enabled is set. It is recommended that
# you uncomment the following list as a starting point.
#
# Note: The value is ignored when an HTTP proxy is in use
#
#url_preview_ip_range_blacklist:
%(ip_range_blacklist)s
# List of IP address CIDR ranges that the URL preview spider is allowed
# to access even if they are specified in url_preview_ip_range_blacklist.
# This is useful for specifying exceptions to wide-ranging blacklisted
# target IP ranges - e.g. for enabling URL previews for a specific private
# website only visible in your network.
#
#url_preview_ip_range_whitelist:
# - '192.168.1.1'
# Optional list of URL matches that the URL preview spider is
# denied from accessing. You should use url_preview_ip_range_blacklist
# in preference to this, otherwise someone could define a public DNS
# entry that points to a private IP address and circumvent the blacklist.
            # This is more useful if you know there is an entire shape of URL
            # that you will never want synapse to try to spider.
#
# Each list entry is a dictionary of url component attributes as returned
# by urlparse.urlsplit as applied to the absolute form of the URL. See
# https://docs.python.org/2/library/urlparse.html#urlparse.urlsplit
            # The values of the dictionary are treated as a filename match pattern
# applied to that component of URLs, unless they start with a ^ in which
# case they are treated as a regular expression match. If all the
# specified component matches for a given list item succeed, the URL is
# blacklisted.
#
#url_preview_url_blacklist:
# # blacklist any URL with a username in its URI
# - username: '*'
#
# # blacklist all *.google.com URLs
# - netloc: 'google.com'
# - netloc: '*.google.com'
#
# # blacklist all plain HTTP URLs
# - scheme: 'http'
#
# # blacklist http(s)://www.acme.com/foo
# - netloc: 'www.acme.com'
# path: '/foo'
#
# # blacklist any URL with a literal IPv4 address
# - netloc: '^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$'
# The largest allowed URL preview spidering size in bytes
#
#max_spider_size: 10M
# A list of values for the Accept-Language HTTP header used when
# downloading webpages during URL preview generation. This allows
# Synapse to specify the preferred languages that URL previews should
# be in when communicating with remote servers.
#
            # Each value is an IETF language tag; a 2-3 letter identifier for a
# language, optionally followed by subtags separated by '-', specifying
# a country or region variant.
#
# Multiple values can be provided, and a weight can be added to each by
# using quality value syntax (;q=). '*' translates to any language.
#
# Defaults to "en".
#
# Example:
#
# url_preview_accept_language:
# - en-UK
# - en-US;q=0.9
# - fr;q=0.8
# - *;q=0.7
#
url_preview_accept_language:
# - en
"""
% locals()
)
from google.protobuf import text_format
from .caffe import get_caffe_resolver
from .errors import KaffeError, print_stderr
from .layers import LayerAdapter, LayerType, NodeKind, NodeDispatch
from .shapes import make_tensor
class Node(object):
def __init__(self, name, kind, layer=None):
self.name = name
self.kind = kind
self.layer = LayerAdapter(layer, kind) if layer else None
self.parents = []
self.children = []
        self.data = None  # parameters of this node
        self.output_shape = None  # output shape of this node
self.metadata = {}
def add_parent(self, parent_node):
assert parent_node not in self.parents
self.parents.append(parent_node)
if self not in parent_node.children:
parent_node.children.append(self)
def add_child(self, child_node):
assert child_node not in self.children
self.children.append(child_node)
if self not in child_node.parents:
child_node.parents.append(self)
def get_only_parent(self):
if len(self.parents) != 1:
raise KaffeError('Node (%s) expected to have 1 parent. Found %s.' %
(self, len(self.parents)))
return self.parents[0]
@property
def parameters(self):
""" get parameters stored in a protobuf object
"""
if self.layer is not None:
return self.layer.parameters
return None
@property
def params(self):
""" get parameters stored in a dict
"""
from .protobuf_to_dict import protobuf_to_dict
p = self.parameters
if p is not None:
return protobuf_to_dict(p)
else:
return None
def __str__(self):
return '[%s] %s' % (self.kind, self.name)
def __repr__(self):
return '%s (0x%x)' % (self.name, id(self))
class Graph(object):
    def __init__(self, nodes=None, name=None, trace=None):
        self.nodes = nodes or []
        self.node_lut = {node.name: node for node in self.nodes}
        # Avoid sharing a mutable default dict between Graph instances.
        self.output_trace = trace if trace is not None else {}
if name is None or name == '':
self.name = 'MyNet'
else:
self.name = name
def add_node(self, node):
self.nodes.append(node)
self.node_lut[node.name] = node
def get_node(self, name):
try:
return self.node_lut[name]
except KeyError:
raise KaffeError('Layer not found: %s' % name)
def add_name_trace(self, trace, which='caffe'):
self.output_trace[which] = trace
def get_name_trace(self, which=None):
if which is not None:
return self.output_trace[which]
else:
return self.output_trace
def get_input_nodes(self):
return [node for node in self.nodes if len(node.parents) == 0]
def get_output_nodes(self):
return [node for node in self.nodes if len(node.children) == 0]
def topologically_sorted(self):
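        # Depth-first search with temporary/permanent marks: each node is
        # prepended to the result only after all of its children have been
        # visited, so every node ends up before its children in the returned
        # order. Hitting a temporarily-marked node again means the graph has
        # a cycle.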
sorted_nodes = []
unsorted_nodes = list(self.nodes)
temp_marked = set()
perm_marked = set()
def visit(node):
if node in temp_marked:
raise KaffeError('Graph is not a DAG.')
if node in perm_marked:
return
temp_marked.add(node)
for child in node.children:
visit(child)
perm_marked.add(node)
temp_marked.remove(node)
sorted_nodes.insert(0, node)
while len(unsorted_nodes):
visit(unsorted_nodes.pop())
return sorted_nodes
def compute_output_shapes(self):
sorted_nodes = self.topologically_sorted()
for node in sorted_nodes:
node.output_shape = make_tensor(
*NodeKind.compute_output_shape(node))
def replaced(self, new_nodes):
return Graph(nodes=new_nodes, name=self.name, trace=self.output_trace)
def transformed(self, transformers):
graph = self
for transformer in transformers:
graph = transformer(graph)
if graph is None:
raise KaffeError('Transformer failed: {}'.format(transformer))
assert isinstance(graph, Graph)
return graph
def __contains__(self, key):
return key in self.node_lut
def __str__(self):
hdr = '{:<20} {:<30} {:>20} {:>20}'.format('Type', 'Name', 'Param',
'Output')
s = [hdr, '-' * 94]
for node in self.topologically_sorted():
# If the node has learned parameters, display the first one's shape.
# In case of convolutions, this corresponds to the weights.
if node.data is None:
data_shape = '--'
out_shape = node.output_shape or '--'
s.append('{:<20} {:<30} {:>20} {:>20}'.format(
node.kind, node.name, data_shape, tuple(out_shape)))
else:
for d in node.data:
#data_shape = node.data[0].shape if node.data else '--'
data_shape = d.shape
out_shape = node.output_shape or '--'
s.append('{:<20} {:<30} {:>20} {:>20}'.format(
node.kind, node.name, data_shape, tuple(out_shape)))
return '\n'.join(s)
class GraphBuilder(object):
'''Constructs a model graph from a Caffe protocol buffer definition.'''
def __init__(self, def_path, phase='test'):
'''
def_path: Path to the model definition (.prototxt)
phase: Either 'test' or 'train'. Used for filtering phase-specific nodes.
'''
self.def_path = def_path
self.phase = phase
self.load()
def load(self):
'''Load the layer definitions from the prototxt.'''
self.params = get_caffe_resolver().NetParameter()
with open(self.def_path, 'rb') as def_file:
text_format.Merge(def_file.read(), self.params)
def filter_layers(self, layers):
'''Filter out layers based on the current phase.'''
phase_map = {0: 'train', 1: 'test'}
filtered_layer_names = set()
filtered_layers = []
for layer in layers:
phase = self.phase
if len(layer.include):
phase = phase_map[layer.include[0].phase]
if len(layer.exclude):
phase = phase_map[1 - layer.include[0].phase]
exclude = (phase != self.phase)
# Dropout layers appear in a fair number of Caffe
# test-time networks. These are just ignored. We'll
# filter them out here.
if (not exclude) and (phase == 'test'):
exclude = (layer.type == LayerType.Dropout)
if not exclude:
filtered_layers.append(layer)
# Guard against dupes.
assert layer.name not in filtered_layer_names
filtered_layer_names.add(layer.name)
return filtered_layers
def make_node(self, layer):
'''Create a graph node for the given layer.'''
kind = NodeKind.map_raw_kind(layer.type)
if kind is None:
raise KaffeError('Unknown layer type encountered: %s' % layer.type)
# We want to use the layer's top names (the "output" names), rather than the
        # name attribute, which is more of a readability thing than a functional one.
# Other layers will refer to a node by its "top name".
return Node(layer.name, kind, layer=layer)
def make_input_nodes(self):
'''
Create data input nodes.
This method is for old-style inputs, where the input specification
        was not treated as a first-class layer in the prototxt.
Newer models use the "Input layer" type.
'''
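        # Illustrative old-style prototxt input spec (a sketch for context,
        # not taken from the original model files):
        #
        #   input: "data"
        #   input_dim: 1
        #   input_dim: 3
        #   input_dim: 224
        #   input_dim: 224
        #
        # which produces a single Data node whose output_shape becomes
        # (1, 3, 224, 224).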
nodes = [Node(name, NodeKind.Data) for name in self.params.input]
inputs_num = len(nodes)
if inputs_num > 0:
input_dims_num = len(self.params.input_dim)
if input_dims_num > 0 and input_dims_num != inputs_num * 4:
raise KaffeError('invalid input_dim[%d] param in prototxt' %
(input_dims_num))
            input_dims = [[] for _ in range(inputs_num)]  # independent list per input
            for i in range(input_dims_num):
                dim = self.params.input_dim[i]
                which = i // 4
                input_dims[which].append(int(dim))
for i in range(inputs_num):
if len(self.params.input_shape) == inputs_num:
input_dim = map(int, self.params.input_shape[i].dim)
input_dims[i] = input_dim
nodes[i].output_shape = tuple(input_dims[i])
return nodes
def build(self):
'''
Builds the graph from the Caffe layer definitions.
'''
# Get the layers
layers = self.params.layers or self.params.layer
# Filter out phase-excluded layers
layers = self.filter_layers(layers)
# Get any separately-specified input layers
nodes = self.make_input_nodes()
nodes += [self.make_node(layer) for layer in layers]
# Initialize the graph
graph = Graph(nodes=nodes, name=self.params.name)
# Connect the nodes
#
# A note on layers and outputs:
# In Caffe, each layer can produce multiple outputs ("tops") from a set of inputs
# ("bottoms"). The bottoms refer to other layers' tops. The top can rewrite a bottom
# (in case of in-place operations). Note that the layer's name is not used for establishing
# any connectivity. It's only used for data association. By convention, a layer with a
# single top will often use the same name (although this is not required).
#
# The current implementation only supports single-output nodes (note that a node can still
# have multiple children, since multiple child nodes can refer to the single top's name).
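        #
        # Illustrative prototxt fragment (a sketch, not taken from the parsed
        # model) showing an in-place top:
        #
        #   layer { name: "conv1" type: "Convolution" bottom: "data"  top: "conv1" }
        #   layer { name: "relu1" type: "ReLU"        bottom: "conv1" top: "conv1" }
        #
        # Here "relu1" re-uses the name of an existing node as its top, so later
        # layers whose bottom is "conv1" must be re-routed to the "relu1" node
        # (Case 1 below).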
node_outputs = {}
output_trace = {}
for layer in layers:
node = graph.get_node(layer.name)
for input_name in layer.bottom:
assert input_name != layer.name
parent_node = node_outputs.get(input_name)
if (parent_node is None) or (parent_node == node):
parent_node = graph.get_node(input_name)
node.add_parent(parent_node)
if len(layer.top) > 1:
raise KaffeError('Multiple top nodes are not supported.')
for output_name in layer.top:
if output_name == layer.name:
# Output is named the same as the node. No further action required.
continue
# There are two possibilities here:
#
# Case 1: output_name refers to another node in the graph.
# This is an "in-place operation" that overwrites an existing node.
# This would create a cycle in the graph. We'll undo the in-placing
# by substituting this node wherever the overwritten node is referenced.
#
# Case 2: output_name violates the convention layer.name == output_name.
                # Since we are working in the single-output regime, we can simply rename
                # it to match the layer name.
                #
                # For both cases, future references to this top are re-routed to this node.
node_outputs[output_name] = node
if output_name in output_trace:
output_trace[output_name].append(node.name)
else:
output_trace[output_name] = [output_name, node.name]
        # Build a mapping from real name to changed name (for Caffe's in-place inference).
real2chg = {}
deleted = {}
for k, v in output_trace.items():
real2chg[v[-1]] = k
for n in v:
if n in real2chg:
continue
if n not in deleted:
deleted[n] = '%s.%s' % (k, v[-1])
graph.add_name_trace({
'real2chg': real2chg,
'deleted': deleted
}, 'caffe')
graph.compute_output_shapes()
return graph
class NodeMapper(NodeDispatch):
def __init__(self, graph):
self.graph = graph
def map(self):
nodes = self.graph.topologically_sorted()
# Remove input nodes - we'll handle them separately.
input_nodes = self.graph.get_input_nodes()
nodes = [t for t in nodes if t not in input_nodes]
# Decompose DAG into chains.
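        # Illustrative decomposition (a sketch, not taken from the original
        # module): for a graph where an input node "data" feeds two branches,
        #
        #   data -> conv1 -> relu1
        #   data -> pool1
        #
        # the input node is dropped above and we end up with two chains,
        # [conv1, relu1] and [pool1], each mapped independently below.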
chains = []
for node in nodes:
attach_to_chain = None
if len(node.parents) == 1:
parent = node.get_only_parent()
for chain in chains:
if chain[-1] == parent:
# Node is part of an existing chain.
attach_to_chain = chain
break
if attach_to_chain is None:
# Start a new chain for this node.
attach_to_chain = []
chains.append(attach_to_chain)
attach_to_chain.append(node)
# Map each chain.
mapped_chains = []
for chain in chains:
mapped_chains.append(self.map_chain(chain))
return self.commit(mapped_chains)
def map_chain(self, chain):
return [self.map_node(node) for node in chain]
def map_node(self, node):
map_func = self.get_handler(node.kind, 'map')
mapped_node = map_func(node)
assert mapped_node is not None
mapped_node.node = node
return mapped_node
def commit(self, mapped_chains):
raise NotImplementedError('Must be implemented by subclass.')
from __future__ import print_function, division, absolute_import
import sys
# unittest.TestCase.subTest() was only added in Python 3.4.
if sys.version_info[0] < 3 or sys.version_info[1] < 4:
import unittest2 as unittest
else:
import unittest
# unittest.mock is not available in 2.7 (though unittest2 might contain it?)
try:
import unittest.mock as mock
except ImportError:
import mock
import functools
import numpy as np
import imgaug as ia
from imgaug import augmenters as iaa
from imgaug import random as iarandom
from imgaug import parameters as iap
from imgaug.testutils import runtest_pickleable_uint8_img
# imagecorruptions cannot be installed in <=3.4 due to their
# scikit-image requirement
SUPPORTS_LIBRARY = (sys.version_info[0] == 3 and sys.version_info[1] >= 5)
if SUPPORTS_LIBRARY:
import imagecorruptions
from imagecorruptions import corrupt
class Test_get_imgcorrupt_subset(unittest.TestCase):
@unittest.skipUnless(SUPPORTS_LIBRARY,
"imagecorruptions can only be tested for python 3.5+")
def test_by_comparison_with_imagecorruptions(self):
subset_names = ["common", "validation", "all"]
for subset in subset_names:
with self.subTest(subset=subset):
func_names, funcs = iaa.imgcorruptlike.get_corruption_names(
subset)
func_names_exp = imagecorruptions.get_corruption_names(subset)
assert func_names == func_names_exp
for func_name, func in zip(func_names, funcs):
assert getattr(
iaa.imgcorruptlike, "apply_%s" % (func_name,)
) is func
@unittest.skipUnless(SUPPORTS_LIBRARY,
"imagecorruptions can only be tested for python 3.5+")
def test_subset_functions(self):
subset_names = ["common", "validation", "all"]
for subset in subset_names:
func_names, funcs = iaa.imgcorruptlike.get_corruption_names(subset)
image = np.mod(
np.arange(32*32*3), 256
).reshape((32, 32, 3)).astype(np.uint8)
for func_name, func in zip(func_names, funcs):
with self.subTest(subset=subset, name=func_name):
# don't verify here whether e.g. only seed 2 produces
# different results from seed 1, because some methods
# are only dependent on the severity
image_aug1 = func(image, severity=5, seed=1)
image_aug2 = func(image, severity=5, seed=1)
image_aug3 = func(image, severity=1, seed=2)
assert not np.array_equal(image, image_aug1)
assert not np.array_equal(image, image_aug2)
assert not np.array_equal(image_aug2, image_aug3)
assert np.array_equal(image_aug1, image_aug2)
class _CompareFuncWithImageCorruptions(unittest.TestCase):
def _test_by_comparison_with_imagecorruptions(
self,
fname,
shapes=((64, 64), (64, 64, 1), (64, 64, 3)),
dtypes=("uint8",),
severities=(1, 2, 3, 4, 5),
seeds=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)):
for shape in shapes:
for dtype in dtypes:
for severity in severities:
for seed in seeds:
with self.subTest(shape=shape, severity=severity,
seed=seed):
image_imgaug = self.create_image_imgaug(
shape, dtype, 1000 + seed)
image_imgcor = np.copy(image_imgaug)
self._run_single_comparison_test(
fname, image_imgaug, image_imgcor, severity,
seed)
@classmethod
def create_image_imgaug(cls, shape, dtype, seed, tile=None):
rng = iarandom.RNG(1000 + seed)
if dtype.startswith("uint"):
image = rng.integers(0, 256, size=shape, dtype=dtype)
else:
assert dtype.startswith("float")
image = rng.uniform(0.0, 1.0, size=shape)
image = image.astype(dtype)
if tile is not None:
image = np.tile(image, tile)
return image
@classmethod
def _run_single_comparison_test(cls, fname, image_imgaug, image_imgcor,
severity, seed):
image_imgaug_sum = np.sum(image_imgaug)
image_imgcor_sum = np.sum(image_imgcor)
image_aug, image_aug_exp = cls._generate_augmented_images(
fname, image_imgaug, image_imgcor, severity, seed)
# assert that the original image is unchanged,
# i.e. it was not augmented in-place
assert np.isclose(np.sum(image_imgcor), image_imgcor_sum, rtol=0,
atol=1e-4)
assert np.isclose(np.sum(image_imgaug), image_imgaug_sum, rtol=0,
atol=1e-4)
# assert that the functions returned numpy arrays and not PIL images
assert ia.is_np_array(image_aug_exp)
assert ia.is_np_array(image_aug)
assert image_aug.shape == image_imgaug.shape
assert image_aug.dtype.name == image_aug_exp.dtype.name
atol = 1e-4 # set this to 0.5+1e-4 if output is converted to uint8
assert np.allclose(image_aug, image_aug_exp, rtol=0, atol=atol)
@classmethod
def _generate_augmented_images(cls, fname, image_imgaug, image_imgcor,
severity, seed):
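        # Runs the reference imagecorruptions function under a temporary numpy
        # seed and the imgaug wrapper with the same seed, then normalizes the
        # reference output to match the input's channel layout.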
func_imgaug = getattr(
iaa.imgcorruptlike,
"apply_%s" % (fname,))
func_imagecor = functools.partial(corrupt, corruption_name=fname)
with iarandom.temporary_numpy_seed(seed):
image_aug_exp = func_imagecor(image_imgcor, severity=severity)
if not ia.is_np_array(image_aug_exp):
image_aug_exp = np.asarray(image_aug_exp)
if image_imgcor.ndim == 2:
image_aug_exp = image_aug_exp[:, :, 0]
elif image_imgcor.shape[-1] == 1:
image_aug_exp = image_aug_exp[:, :, 0:1]
image_aug = func_imgaug(image_imgaug, severity=severity,
seed=seed)
return image_aug, image_aug_exp
@unittest.skipUnless(SUPPORTS_LIBRARY,
"imagecorruptions can only be tested for python 3.5+")
class Test_apply_functions(_CompareFuncWithImageCorruptions):
def test_apply_gaussian_noise(self):
self._test_by_comparison_with_imagecorruptions("gaussian_noise")
def test_apply_shot_noise(self):
self._test_by_comparison_with_imagecorruptions("shot_noise")
def test_apply_impulse_noise(self):
self._test_by_comparison_with_imagecorruptions("impulse_noise")
def test_apply_speckle_noise(self):
self._test_by_comparison_with_imagecorruptions("speckle_noise")
def test_apply_gaussian_blur(self):
self._test_by_comparison_with_imagecorruptions("gaussian_blur")
def test_apply_glass_blur(self):
# glass_blur() is extremely slow, so we run only a reduced set
# of tests here
self._test_by_comparison_with_imagecorruptions(
"glass_blur",
shapes=[(32, 32), (32, 32, 1), (32, 32, 3)],
severities=[1, 4],
seeds=[1, 2, 3])
def test_apply_defocus_blur(self):
self._test_by_comparison_with_imagecorruptions(
"defocus_blur")
def test_apply_motion_blur(self):
self._test_by_comparison_with_imagecorruptions(
"motion_blur")
def test_apply_zoom_blur(self):
self._test_by_comparison_with_imagecorruptions(
"zoom_blur")
def test_apply_fog(self):
self._test_by_comparison_with_imagecorruptions(
"fog")
def test_apply_frost(self):
self._test_by_comparison_with_imagecorruptions(
"frost",
severities=[1, 5],
seeds=[1, 5, 10])
def test_apply_snow(self):
self._test_by_comparison_with_imagecorruptions(
"snow")
def test_apply_spatter(self):
self._test_by_comparison_with_imagecorruptions(
"spatter")
def test_apply_contrast(self):
self._test_by_comparison_with_imagecorruptions("contrast")
def test_apply_brightness(self):
self._test_by_comparison_with_imagecorruptions("brightness")
def test_apply_saturate(self):
self._test_by_comparison_with_imagecorruptions(
"saturate")
def test_apply_jpeg_compression(self):
self._test_by_comparison_with_imagecorruptions(
"jpeg_compression")
def test_apply_pixelate(self):
self._test_by_comparison_with_imagecorruptions(
"pixelate")
def test_apply_elastic_transform(self):
self._test_by_comparison_with_imagecorruptions(
"elastic_transform")
@unittest.skipUnless(SUPPORTS_LIBRARY,
"imagecorruptions can only be tested for python 3.5+")
class TestAugmenters(unittest.TestCase):
@classmethod
def _test_augmenter(cls, augmenter_name, func_expected,
dependent_on_seed):
# this test verifies:
# - called function seems to be the expected function
# - images produced by augmenter match images produced by function
# - a different seed (and sometimes severity) will lead to a
# different image
# - augmenter can be pickled
severity = 5
aug_cls = getattr(iaa.imgcorruptlike, augmenter_name)
image = np.mod(
np.arange(32*32*3), 256
).reshape((32, 32, 3)).astype(np.uint8)
with iap.no_prefetching():
rng = iarandom.RNG(1)
# Replay sampling of severities.
# Even for deterministic values this is required as currently
# there is an advance() at the end of each draw_samples().
_ = iap.Deterministic(1).draw_samples((1,), rng)
            # As for the functions above, we can't just change the seed value
            # to get different augmentations, as many functions depend only on
            # the severity. So for some functions we change only the seed and
            # for the others both severity and seed.
image_aug1 = aug_cls(severity=severity, seed=1)(image=image)
image_aug2 = aug_cls(severity=severity, seed=1)(image=image)
if dependent_on_seed:
image_aug3 = aug_cls(severity=severity, seed=2)(
image=image)
else:
image_aug3 = aug_cls(severity=severity-1, seed=2)(
image=image)
image_aug_exp = func_expected(
image,
severity=severity,
seed=rng.generate_seed_())
assert aug_cls(severity=severity).func is func_expected
assert np.array_equal(image_aug1, image_aug_exp)
assert np.array_equal(image_aug2, image_aug_exp)
assert not np.array_equal(image_aug3, image_aug2)
# pickling test
aug = aug_cls(severity=(1, 5))
runtest_pickleable_uint8_img(aug, shape=(32, 32, 3))
def test_gaussian_noise(self):
self._test_augmenter("GaussianNoise",
iaa.imgcorruptlike.apply_gaussian_noise,
True)
def test_shot_noise(self):
self._test_augmenter("ShotNoise",
iaa.imgcorruptlike.apply_shot_noise,
True)
def test_impulse_noise(self):
self._test_augmenter("ImpulseNoise",
iaa.imgcorruptlike.apply_impulse_noise,
True)
def test_speckle_noise(self):
self._test_augmenter("SpeckleNoise",
iaa.imgcorruptlike.apply_speckle_noise,
True)
def test_gaussian_blur(self):
self._test_augmenter("GaussianBlur",
iaa.imgcorruptlike.apply_gaussian_blur,
False)
def test_glass_blur(self):
self._test_augmenter("GlassBlur",
iaa.imgcorruptlike.apply_glass_blur,
False)
def test_defocus_blur(self):
self._test_augmenter("DefocusBlur",
iaa.imgcorruptlike.apply_defocus_blur,
False)
def test_motion_blur(self):
self._test_augmenter("MotionBlur",
iaa.imgcorruptlike.apply_motion_blur,
False)
def test_zoom_blur(self):
self._test_augmenter("ZoomBlur",
iaa.imgcorruptlike.apply_zoom_blur,
False)
def test_fog(self):
self._test_augmenter("Fog",
iaa.imgcorruptlike.apply_fog,
True)
def test_frost(self):
self._test_augmenter("Frost",
iaa.imgcorruptlike.apply_frost,
False)
def test_snow(self):
self._test_augmenter("Snow",
iaa.imgcorruptlike.apply_snow,
True)
def test_spatter(self):
self._test_augmenter("Spatter",
iaa.imgcorruptlike.apply_spatter,
True)
def test_contrast(self):
self._test_augmenter("Contrast",
iaa.imgcorruptlike.apply_contrast,
False)
def test_brightness(self):
self._test_augmenter("Brightness",
iaa.imgcorruptlike.apply_brightness,
False)
def test_saturate(self):
self._test_augmenter("Saturate",
iaa.imgcorruptlike.apply_saturate,
False)
def test_jpeg_compression(self):
self._test_augmenter("JpegCompression",
iaa.imgcorruptlike.apply_jpeg_compression,
False)
def test_pixelate(self):
self._test_augmenter("Pixelate",
iaa.imgcorruptlike.apply_pixelate,
False)
def test_elastic_transform(self):
self._test_augmenter("ElasticTransform",
iaa.imgcorruptlike.apply_elastic_transform,
True)
|
|
# Copyright (c) 2020 krrr
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import logging
import socket
import struct
import hashlib
import asyncio
import base64
import sys
import os
import re
from binascii import Error as Base64Error
from configparser import ConfigParser, ParsingError
from collections import deque
__version__ = '0.4.1'
# Don't use "super().__init__()" in constructor of classes of this package (all libraries
# used are using old style)
# global variables shared between modules
config = loop = None
_http_req = re.compile(rb'^(GET|POST|HEAD|CONNECT|OPTIONS|PUT|DELETE|TRACE|PATCH) ')
_accept_html = re.compile(rb'^Accept:[^\r]*text/html', re.IGNORECASE)
_keep_alive = re.compile(rb'^Connection:[^\r]*keep-alive$', re.IGNORECASE)
_error_page = '''<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>wstan error</title>
<style type="text/css">
body {{
font-family: sans-serif;
font-size: 12pt;
height: 100%;
}}
h1 {{
font-size: 18pt;
color: #333;
}}
#frame {{
margin: 0 auto;
margin-top: 80px;
width: 80%;
color: #444;
}}
hr {{ color: #BBB }}
</style>
</head>
<body>
<div id="frame">
<h1>wstan error: {title}</h1>
<hr />
<p>{detail}</p>
</div>
</body>
</html>
'''
async def my_sock_connect(host=None, port=None, *, family=0, proto=0, flags=0):
"""Modified version of BaseEventLoop.create_connection: this function returns sock object.
And it resolve names for Py 3.4- capability."""
assert (host and port)
infos = await loop.getaddrinfo(
host, port, family=family,
type=socket.SOCK_STREAM, proto=proto, flags=flags)
if not infos:
raise OSError('getaddrinfo() returned empty list')
exceptions = []
sock = None
for family, type_, proto, cname, address in infos:
try:
sock = socket.socket(family=family, type=type_, proto=proto)
sock.setblocking(False)
await loop.sock_connect(sock, address)
except OSError as exc:
if sock is not None:
sock.close()
exceptions.append(exc)
except Exception:
if sock is not None:
sock.close()
raise
else:
break
else:
if len(exceptions) == 1:
raise exceptions[0]
else:
model = str(exceptions[0])
if all(str(exc) == model for exc in exceptions): # If they all have the same str(), raise one.
raise exceptions[0]
raise OSError('Multiple exceptions: {}'.format(', '.join(map(str, exceptions))))
return sock
def make_socks_addr(host, port):
return b'\x00\x03' + bytes([len(host)]) + host + struct.pack('>H', port)
def parse_socks_addr(dat, allow_remain=False):
"""Extract address and port from SOCKS request header (only 4 parts:
RSV(0x00) | ATYP | DST.ADDR | DST.PORT). The header will be reused in tunnel server."""
if not dat or dat[0] != 0x00:
raise ValueError
try:
atyp = dat[1]
if atyp == 0x01: # IPv4
port_idx = 6
target_addr = socket.inet_ntoa(dat[2:port_idx])
elif atyp == 0x03: # domain name
port_idx = 3 + dat[2]
target_addr = dat[3:port_idx].decode('ascii')
elif atyp == 0x04: # IPv6
port_idx = 18
target_addr = socket.inet_ntop(socket.AF_INET6, dat[2:port_idx])
else:
raise ValueError("unknown address type")
target_port = struct.unpack('>H', dat[port_idx:port_idx+2])[0]
if allow_remain:
return target_addr, target_port, port_idx + 2
else:
if dat[port_idx+2:]:
raise ValueError
return target_addr, target_port
except (IndexError, struct.error):
raise ValueError
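# Illustrative round-trip of the two helpers above (a sketch, not part of the
# original module); the domain-name branch (ATYP 0x03) is shown:
#   >>> hdr = make_socks_addr(b'example.com', 80)
#   >>> hdr
#   b'\x00\x03\x0bexample.com\x00P'
#   >>> parse_socks_addr(hdr)
#   ('example.com', 80)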
def die(reason):
print(reason, file=sys.stderr)
sys.exit(1)
def load_ini(ini_path):
"""Read config from ini file."""
ini = ConfigParser()
try:
# utf-8 with BOM will kill ConfigParser
with open(ini_path, encoding='utf-8-sig') as f:
ini.read_string('[DEFAULT]\n' + f.read())
except (ParsingError, FileNotFoundError) as e:
die('error reading config file: %s' % e)
ini = ini['DEFAULT']
ret = {}
ret.update(ini)
# fix types
for i in ('port', 'tun-port'):
if i in ini:
ret[i] = ini.getint(i)
for i in ('client', 'server', 'debug', 'compatible'):
if i in ini:
ret[i] = ini.getboolean(i)
    for i in list(ret):  # iterate over a copy; keys are replaced in-place below
if '-' in i:
ret[i.replace('-', '_')] = ret.pop(i)
return ret.items()
def load_config():
import argparse
from wstan.autobahn.websocket.protocol import parseWsUrl
parser = argparse.ArgumentParser(
description='Ver %s | Tunneling TCP in WebSocket' % __version__)
# common config
parser.add_argument('-g', '--gen-key', help='generate a key and exit', action='store_true')
parser.add_argument('uri', help='URI of server', nargs='?')
parser.add_argument('key', help='base64 encoded 16-byte key', nargs='?')
g = parser.add_mutually_exclusive_group()
g.add_argument('-c', '--client', help='run as client (default, also act as SOCKS5/HTTP(S) server)',
default=True, action='store_true')
g.add_argument('-s', '--server', help='run as server', action='store_true')
parser.add_argument('-d', '--debug', action='store_true')
parser.add_argument('-z', '--compatible', help='useful when server is behind WS proxy', action='store_true')
parser.add_argument('-i', '--ini', help='load config file')
# client config
parser.add_argument('-y', '--proxy', help='let client use a HTTPS proxy (host:port)')
parser.add_argument('-p', '--port', help='listen port of SOCKS5/HTTP(S) server at localhost (defaults 1080)',
type=int, default=1080)
# server config
parser.add_argument('-t', '--tun-addr', help='listen address of server, overrides URI')
parser.add_argument('-r', '--tun-port', help='listen port of server, overrides URI', type=int)
parser.add_argument('--x-forward', help='Use X-Forwarded-For as client IP address when behind proxy',
default=False, action='store_true')
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
if args.gen_key: # option -g can be used without URI and key, just like -h
return args
if args.ini:
for k, v in load_ini(args.ini):
setattr(args, k, v) # file config will override args
for i in ['uri', 'key']:
if not getattr(args, i):
die('%s not specified' % i)
if '?' in args.uri:
die('URI should not contain query')
try:
args.key = base64.b64decode(args.key)
assert len(args.key) == 16
except (Base64Error, AssertionError):
die('invalid key')
args.tun_ssl, args.uri_addr, args.uri_port = parseWsUrl(args.uri)[:3]
if args.proxy and args.client:
try:
args.proxy_host, port = args.proxy.split(':')
args.proxy_port = int(port)
except ValueError:
            die('invalid proxy format')
if args.compatible:
d = get_sha1(args.key)[-1]
args.cookie_key = '_' + chr((d % 26) + 65) # an upper case character
return args
def http_die_soon(req):
"""Disable keep-alive to make HTTP proxy act like SOCKS. By doing this
wstan server can remain unchanged, but it will increase latency."""
dropped = [i for i in req.split(b'\r\n') if not _keep_alive.match(i)]
end = dropped.index(b'')
return b'\r\n'.join(dropped[:end] + [b'Connection: close'] + dropped[end:])
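# A minimal sketch of what http_die_soon() does to a request (hypothetical
# values, not from the original module): the keep-alive Connection header is
# dropped and "Connection: close" is inserted before the blank line.
#   >>> http_die_soon(b'GET / HTTP/1.1\r\nHost: x\r\nConnection: keep-alive\r\n\r\n')
#   b'GET / HTTP/1.1\r\nHost: x\r\nConnection: close\r\n\r\n'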
def is_http_req(dat):
return bool(_http_req.match(dat))
def can_return_error_page(dat):
return dat and bool(_http_req.match(dat) and any(map(_accept_html.match, dat.split(b'\r\n'))))
def gen_error_page(title, detail):
body = _error_page.format(title=title, detail=detail).encode()
header = '\r\n'.join(
['HTTP/1.1 599 WSTAN ERROR', 'Content-Type: text/html; charset=UTF-8',
'Content-Length: %d' % len(body), '', '']).encode()
return header + body
def get_sha1(dat):
sha1 = hashlib.sha1()
sha1.update(dat)
return sha1.digest()
class InMemoryLogHandler(logging.Handler):
logs = deque(maxlen=200)
def emit(self, record):
self.logs.append(self.format(record))
def main_entry():
if not sys.version_info >= (3, 3):
die('Python 3.3 or higher required')
global config, loop
config = load_config()
if config.gen_key:
return print('A fresh random key:', base64.b64encode(os.urandom(16)).decode())
logging.basicConfig(level=logging.DEBUG if config.debug else logging.INFO,
format='%(asctime)s %(levelname).1s: %(message)s',
datefmt='%m-%d %H:%M:%S')
try:
loop = asyncio.get_event_loop()
except Exception as e:
logging.warning(e)
        loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
if config.client:
h = InMemoryLogHandler()
logging.getLogger().addHandler(h)
h.setFormatter(logging.Formatter('%(asctime)s %(levelname).1s: %(message)s', '%H:%M:%S'))
h.setLevel(logging.DEBUG if config.debug else logging.INFO)
if config.debug and hasattr(loop, 'set_debug'):
loop.set_debug(True)
logging.getLogger('asyncio').setLevel(logging.WARNING)
if config.server:
from wstan.server import main
else:
from wstan.client import main
main()
|
|
"""The flunearyou component."""
import asyncio
from datetime import timedelta
from pyflunearyou import Client
from pyflunearyou.errors import FluNearYouError
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from .const import (
CATEGORY_CDC_REPORT,
CATEGORY_USER_REPORT,
DATA_CLIENT,
DOMAIN,
LOGGER,
SENSORS,
TOPIC_UPDATE,
)
DATA_LISTENER = "listener"
DEFAULT_SCAN_INTERVAL = timedelta(minutes=30)
CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(DOMAIN): vol.Schema(
{
vol.Optional(CONF_LATITUDE): cv.latitude,
vol.Optional(CONF_LONGITUDE): cv.longitude,
}
)
},
extra=vol.ALLOW_EXTRA,
)
@callback
def async_get_api_category(sensor_type):
"""Get the category that a particular sensor type belongs to."""
try:
return next(
(
category
for category, sensors in SENSORS.items()
for sensor in sensors
if sensor[0] == sensor_type
)
)
except StopIteration as err:
raise ValueError(f"Can't find category sensor type: {sensor_type}") from err
async def async_setup(hass, config):
"""Set up the Flu Near You component."""
hass.data[DOMAIN] = {DATA_CLIENT: {}, DATA_LISTENER: {}}
if DOMAIN not in config:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_LATITUDE: config[DOMAIN].get(CONF_LATITUDE, hass.config.latitude),
CONF_LONGITUDE: config[DOMAIN].get(
                    CONF_LONGITUDE, hass.config.longitude
),
},
)
)
return True
async def async_setup_entry(hass, config_entry):
"""Set up Flu Near You as config entry."""
websession = aiohttp_client.async_get_clientsession(hass)
fny = FluNearYouData(
hass,
Client(websession),
config_entry.data.get(CONF_LATITUDE, hass.config.latitude),
config_entry.data.get(CONF_LONGITUDE, hass.config.longitude),
)
await fny.async_update()
hass.data[DOMAIN][DATA_CLIENT][config_entry.entry_id] = fny
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, "sensor")
)
async def refresh(event_time):
"""Refresh data from Flu Near You."""
await fny.async_update()
hass.data[DOMAIN][DATA_LISTENER][config_entry.entry_id] = async_track_time_interval(
hass, refresh, DEFAULT_SCAN_INTERVAL
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload an Flu Near You config entry."""
hass.data[DOMAIN][DATA_CLIENT].pop(config_entry.entry_id)
remove_listener = hass.data[DOMAIN][DATA_LISTENER].pop(config_entry.entry_id)
remove_listener()
await hass.config_entries.async_forward_entry_unload(config_entry, "sensor")
return True
class FluNearYouData:
"""Define a data object to retrieve info from Flu Near You."""
def __init__(self, hass, client, latitude, longitude):
"""Initialize."""
self._async_cancel_time_interval_listener = None
self._client = client
self._hass = hass
self.data = {}
self.latitude = latitude
self.longitude = longitude
self._api_category_count = {
CATEGORY_CDC_REPORT: 0,
CATEGORY_USER_REPORT: 0,
}
self._api_category_locks = {
CATEGORY_CDC_REPORT: asyncio.Lock(),
CATEGORY_USER_REPORT: asyncio.Lock(),
}
async def _async_get_data_from_api(self, api_category):
"""Update and save data for a particular API category."""
if self._api_category_count[api_category] == 0:
return
if api_category == CATEGORY_CDC_REPORT:
api_coro = self._client.cdc_reports.status_by_coordinates(
self.latitude, self.longitude
)
else:
api_coro = self._client.user_reports.status_by_coordinates(
self.latitude, self.longitude
)
try:
self.data[api_category] = await api_coro
except FluNearYouError as err:
LOGGER.error("Unable to get %s data: %s", api_category, err)
self.data[api_category] = None
async def _async_update_listener_action(self, now):
"""Define an async_track_time_interval action to update data."""
await self.async_update()
@callback
def async_deregister_api_interest(self, sensor_type):
"""Decrement the number of entities with data needs from an API category."""
        api_category = async_get_api_category(sensor_type)
        self._api_category_count[api_category] -= 1
        # If this deregistration leaves us with no registrations at all, remove the
        # time interval listener:
        if sum(self._api_category_count.values()) == 0:
            if self._async_cancel_time_interval_listener:
                self._async_cancel_time_interval_listener()
                self._async_cancel_time_interval_listener = None
async def async_register_api_interest(self, sensor_type):
"""Increment the number of entities with data needs from an API category."""
# If this is the first registration we have, start a time interval:
if not self._async_cancel_time_interval_listener:
self._async_cancel_time_interval_listener = async_track_time_interval(
self._hass,
self._async_update_listener_action,
DEFAULT_SCAN_INTERVAL,
)
api_category = async_get_api_category(sensor_type)
self._api_category_count[api_category] += 1
# If a sensor registers interest in a particular API call and the data doesn't
# exist for it yet, make the API call and grab the data:
async with self._api_category_locks[api_category]:
if api_category not in self.data:
await self._async_get_data_from_api(api_category)
async def async_update(self):
"""Update Flu Near You data."""
tasks = [
self._async_get_data_from_api(api_category)
for api_category in self._api_category_count
]
await asyncio.gather(*tasks)
LOGGER.debug("Received new data")
async_dispatcher_send(self._hass, TOPIC_UPDATE)
|
|
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo.config import cfg
from neutron.plugins.ml2.drivers.arista import arista_l3_driver as arista
from neutron.tests import base
def setup_arista_config(value='', vrf=False, mlag=False):
cfg.CONF.set_override('primary_l3_host', value, "l3_arista")
cfg.CONF.set_override('primary_l3_host_username', value, "l3_arista")
if vrf:
cfg.CONF.set_override('use_vrf', value, "l3_arista")
if mlag:
cfg.CONF.set_override('secondary_l3_host', value, "l3_arista")
cfg.CONF.set_override('mlag_config', value, "l3_arista")
class AristaL3DriverTestCasesDefaultVrf(base.BaseTestCase):
"""Test cases to test the RPC between Arista Driver and EOS.
Tests all methods used to send commands between Arista L3 Driver and EOS
to program routing functions in Default VRF.
"""
def setUp(self):
super(AristaL3DriverTestCasesDefaultVrf, self).setUp()
setup_arista_config('value')
self.drv = arista.AristaL3Driver()
self.drv._servers = []
self.drv._servers.append(mock.MagicMock())
def test_no_exception_on_correct_configuration(self):
self.assertIsNotNone(self.drv)
def test_create_router_on_eos(self):
router_name = 'test-router-1'
route_domain = '123:123'
self.drv.create_router_on_eos(router_name, route_domain,
self.drv._servers[0])
cmds = ['enable', 'configure', 'exit']
self.drv._servers[0].runCmds.assert_called_once_with(version=1,
cmds=cmds)
def test_delete_router_from_eos(self):
router_name = 'test-router-1'
self.drv.delete_router_from_eos(router_name, self.drv._servers[0])
cmds = ['enable', 'configure', 'exit']
self.drv._servers[0].runCmds.assert_called_once_with(version=1,
cmds=cmds)
def test_add_interface_to_router_on_eos(self):
router_name = 'test-router-1'
segment_id = '123'
router_ip = '10.10.10.10'
gw_ip = '10.10.10.1'
mask = '255.255.255.0'
self.drv.add_interface_to_router(segment_id, router_name, gw_ip,
router_ip, mask, self.drv._servers[0])
cmds = ['enable', 'configure', 'ip routing',
'vlan %s' % segment_id, 'exit',
'interface vlan %s' % segment_id,
'ip address %s/%s' % (gw_ip, mask), 'exit']
self.drv._servers[0].runCmds.assert_called_once_with(version=1,
cmds=cmds)
def test_delete_interface_from_router_on_eos(self):
router_name = 'test-router-1'
segment_id = '123'
self.drv.delete_interface_from_router(segment_id, router_name,
self.drv._servers[0])
cmds = ['enable', 'configure', 'no interface vlan %s' % segment_id,
'exit']
self.drv._servers[0].runCmds.assert_called_once_with(version=1,
cmds=cmds)
class AristaL3DriverTestCasesUsingVRFs(base.BaseTestCase):
"""Test cases to test the RPC between Arista Driver and EOS.
Tests all methods used to send commands between Arista L3 Driver and EOS
to program routing functions using multiple VRFs.
Note that the configuration commands are different when VRFs are used.
"""
def setUp(self):
super(AristaL3DriverTestCasesUsingVRFs, self).setUp()
setup_arista_config('value', vrf=True)
self.drv = arista.AristaL3Driver()
self.drv._servers = []
self.drv._servers.append(mock.MagicMock())
def test_no_exception_on_correct_configuration(self):
self.assertIsNotNone(self.drv)
def test_create_router_on_eos(self):
max_vrfs = 5
routers = ['testRouter-%s' % n for n in range(max_vrfs)]
domains = ['10%s' % n for n in range(max_vrfs)]
for (r, d) in zip(routers, domains):
self.drv.create_router_on_eos(r, d, self.drv._servers[0])
cmds = ['enable', 'configure',
'vrf definition %s' % r,
'rd %(rd)s:%(rd)s' % {'rd': d}, 'exit', 'exit']
self.drv._servers[0].runCmds.assert_called_with(version=1,
cmds=cmds)
def test_delete_router_from_eos(self):
max_vrfs = 5
routers = ['testRouter-%s' % n for n in range(max_vrfs)]
for r in routers:
self.drv.delete_router_from_eos(r, self.drv._servers[0])
cmds = ['enable', 'configure', 'no vrf definition %s' % r,
'exit']
self.drv._servers[0].runCmds.assert_called_with(version=1,
cmds=cmds)
def test_add_interface_to_router_on_eos(self):
router_name = 'test-router-1'
segment_id = '123'
router_ip = '10.10.10.10'
gw_ip = '10.10.10.1'
mask = '255.255.255.0'
self.drv.add_interface_to_router(segment_id, router_name, gw_ip,
router_ip, mask, self.drv._servers[0])
cmds = ['enable', 'configure',
'ip routing vrf %s' % router_name,
'vlan %s' % segment_id, 'exit',
'interface vlan %s' % segment_id,
'vrf forwarding %s' % router_name,
'ip address %s/%s' % (gw_ip, mask), 'exit']
self.drv._servers[0].runCmds.assert_called_once_with(version=1,
cmds=cmds)
def test_delete_interface_from_router_on_eos(self):
router_name = 'test-router-1'
segment_id = '123'
self.drv.delete_interface_from_router(segment_id, router_name,
self.drv._servers[0])
cmds = ['enable', 'configure', 'no interface vlan %s' % segment_id,
'exit']
self.drv._servers[0].runCmds.assert_called_once_with(version=1,
cmds=cmds)
class AristaL3DriverTestCasesMlagConfig(base.BaseTestCase):
"""Test cases to test the RPC between Arista Driver and EOS.
Tests all methods used to send commands between Arista L3 Driver and EOS
to program routing functions in Default VRF using MLAG configuration.
MLAG configuration means that the commands will be sent to both
primary and secondary Arista Switches.
"""
def setUp(self):
super(AristaL3DriverTestCasesMlagConfig, self).setUp()
setup_arista_config('value', mlag=True)
self.drv = arista.AristaL3Driver()
self.drv._servers = []
self.drv._servers.append(mock.MagicMock())
self.drv._servers.append(mock.MagicMock())
def test_no_exception_on_correct_configuration(self):
self.assertIsNotNone(self.drv)
def test_create_router_on_eos(self):
router_name = 'test-router-1'
route_domain = '123:123'
router_mac = '00:11:22:33:44:55'
for s in self.drv._servers:
self.drv.create_router_on_eos(router_name, route_domain, s)
cmds = ['enable', 'configure',
'ip virtual-router mac-address %s' % router_mac, 'exit']
s.runCmds.assert_called_with(version=1, cmds=cmds)
def test_delete_router_from_eos(self):
router_name = 'test-router-1'
for s in self.drv._servers:
self.drv.delete_router_from_eos(router_name, s)
cmds = ['enable', 'configure',
'no ip virtual-router mac-address', 'exit']
s.runCmds.assert_called_once_with(version=1, cmds=cmds)
def test_add_interface_to_router_on_eos(self):
router_name = 'test-router-1'
segment_id = '123'
router_ip = '10.10.10.10'
gw_ip = '10.10.10.1'
mask = '255.255.255.0'
for s in self.drv._servers:
self.drv.add_interface_to_router(segment_id, router_name, gw_ip,
router_ip, mask, s)
cmds = ['enable', 'configure', 'ip routing',
'vlan %s' % segment_id, 'exit',
'interface vlan %s' % segment_id,
'ip address %s' % router_ip,
'ip virtual-router address %s' % gw_ip, 'exit']
s.runCmds.assert_called_once_with(version=1, cmds=cmds)
def test_delete_interface_from_router_on_eos(self):
router_name = 'test-router-1'
segment_id = '123'
for s in self.drv._servers:
self.drv.delete_interface_from_router(segment_id, router_name, s)
cmds = ['enable', 'configure', 'no interface vlan %s' % segment_id,
'exit']
s.runCmds.assert_called_once_with(version=1, cmds=cmds)
class AristaL3DriverTestCases_v4(base.BaseTestCase):
"""Test cases to test the RPC between Arista Driver and EOS.
Tests all methods used to send commands between Arista L3 Driver and EOS
to program routing functions in Default VRF using IPv4.
"""
def setUp(self):
super(AristaL3DriverTestCases_v4, self).setUp()
setup_arista_config('value')
self.drv = arista.AristaL3Driver()
self.drv._servers = []
self.drv._servers.append(mock.MagicMock())
def test_no_exception_on_correct_configuration(self):
self.assertIsNotNone(self.drv)
def test_add_v4_interface_to_router(self):
gateway_ip = '10.10.10.1'
cidrs = ['10.10.10.0/24', '10.11.11.0/24']
        # Add a couple of IPv4 subnets to the router
for cidr in cidrs:
router = {'name': 'test-router-1',
'tenant_id': 'ten-a',
'seg_id': '123',
'cidr': "%s" % cidr,
'gip': "%s" % gateway_ip,
'ip_version': 4}
self.assertFalse(self.drv.add_router_interface(None, router))
def test_delete_v4_interface_from_router(self):
gateway_ip = '10.10.10.1'
cidrs = ['10.10.10.0/24', '10.11.11.0/24']
        # Remove a couple of IPv4 subnets from the router
for cidr in cidrs:
router = {'name': 'test-router-1',
'tenant_id': 'ten-a',
'seg_id': '123',
'cidr': "%s" % cidr,
'gip': "%s" % gateway_ip,
'ip_version': 4}
self.assertFalse(self.drv.remove_router_interface(None, router))
class AristaL3DriverTestCases_v6(base.BaseTestCase):
"""Test cases to test the RPC between Arista Driver and EOS.
Tests all methods used to send commands between Arista L3 Driver and EOS
to program routing functions in Default VRF using IPv6.
"""
def setUp(self):
super(AristaL3DriverTestCases_v6, self).setUp()
setup_arista_config('value')
self.drv = arista.AristaL3Driver()
self.drv._servers = []
self.drv._servers.append(mock.MagicMock())
def test_no_exception_on_correct_configuration(self):
self.assertIsNotNone(self.drv)
def test_add_v6_interface_to_router(self):
gateway_ip = '3FFE::1'
cidrs = ['3FFE::/16', '2001::/16']
        # Add a couple of IPv6 subnets to the router
for cidr in cidrs:
router = {'name': 'test-router-1',
'tenant_id': 'ten-a',
'seg_id': '123',
'cidr': "%s" % cidr,
'gip': "%s" % gateway_ip,
'ip_version': 6}
self.assertFalse(self.drv.add_router_interface(None, router))
def test_delete_v6_interface_from_router(self):
gateway_ip = '3FFE::1'
cidrs = ['3FFE::/16', '2001::/16']
        # Remove a couple of IPv6 subnets from the router
for cidr in cidrs:
router = {'name': 'test-router-1',
'tenant_id': 'ten-a',
'seg_id': '123',
'cidr': "%s" % cidr,
'gip': "%s" % gateway_ip,
'ip_version': 6}
self.assertFalse(self.drv.remove_router_interface(None, router))
class AristaL3DriverTestCases_MLAG_v6(base.BaseTestCase):
"""Test cases to test the RPC between Arista Driver and EOS.
Tests all methods used to send commands between Arista L3 Driver and EOS
to program routing functions in Default VRF on MLAG'ed switches using IPv6.
"""
def setUp(self):
super(AristaL3DriverTestCases_MLAG_v6, self).setUp()
setup_arista_config('value', mlag=True)
self.drv = arista.AristaL3Driver()
self.drv._servers = []
self.drv._servers.append(mock.MagicMock())
self.drv._servers.append(mock.MagicMock())
def test_no_exception_on_correct_configuration(self):
self.assertIsNotNone(self.drv)
def test_add_v6_interface_to_router(self):
gateway_ip = '3FFE::1'
cidrs = ['3FFE::/16', '2001::/16']
        # Add a couple of IPv6 subnets to the router
for cidr in cidrs:
router = {'name': 'test-router-1',
'tenant_id': 'ten-a',
'seg_id': '123',
'cidr': "%s" % cidr,
'gip': "%s" % gateway_ip,
'ip_version': 6}
self.assertFalse(self.drv.add_router_interface(None, router))
def test_delete_v6_interface_from_router(self):
gateway_ip = '3FFE::1'
cidrs = ['3FFE::/16', '2001::/16']
        # Remove a couple of IPv6 subnets from the router
for cidr in cidrs:
router = {'name': 'test-router-1',
'tenant_id': 'ten-a',
'seg_id': '123',
'cidr': "%s" % cidr,
'gip': "%s" % gateway_ip,
'ip_version': 6}
self.assertFalse(self.drv.remove_router_interface(None, router))
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2013 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
from ..describe import Description, autoDescribeRoute
from ..rest import Resource, RestException
from girder import events
from girder.constants import AccessType, AssetstoreType, TokenScope
from girder.api import access
from girder.utility.progress import ProgressContext
from girder.utility.s3_assetstore_adapter import DEFAULT_REGION
class Assetstore(Resource):
"""
API Endpoint for managing assetstores. Requires admin privileges.
"""
def __init__(self):
super(Assetstore, self).__init__()
self.resourceName = 'assetstore'
self.route('GET', (), self.find)
self.route('GET', (':id',), self.getAssetstore)
self.route('POST', (), self.createAssetstore)
self.route('POST', (':id', 'import'), self.importData)
self.route('PUT', (':id',), self.updateAssetstore)
self.route('DELETE', (':id',), self.deleteAssetstore)
self.route('GET', (':id', 'files'), self.getAssetstoreFiles)
@access.admin
@autoDescribeRoute(
Description('Get information about an assetstore.')
.modelParam('id', model='assetstore')
.errorResponse()
.errorResponse('You are not an administrator.', 403)
)
def getAssetstore(self, assetstore):
self.model('assetstore').addComputedInfo(assetstore)
return assetstore
@access.admin
@autoDescribeRoute(
Description('List assetstores.')
.pagingParams(defaultSort='name')
.errorResponse()
.errorResponse('You are not an administrator.', 403)
)
def find(self, limit, offset, sort):
return list(self.model('assetstore').list(offset=offset, limit=limit, sort=sort))
@access.admin
@autoDescribeRoute(
Description('Create a new assetstore.')
.responseClass('Assetstore')
.notes('You must be an administrator to call this.')
.param('name', 'Unique name for the assetstore.')
.param('type', 'Type of the assetstore.', dataType='integer')
.param('root', 'Root path on disk (for filesystem type).',
required=False)
.param('perms', 'File creation permissions (for filesystem type).',
required=False)
.param('db', 'Database name (for GridFS type)', required=False)
.param('mongohost', 'Mongo host URI (for GridFS type)', required=False)
.param('replicaset', 'Replica set name (for GridFS type)',
required=False)
.param('shard', 'Shard the collection (for GridFS type). Set to '
'"auto" to set up sharding.', required=False)
.param('bucket', 'The S3 bucket to store data in (for S3 type).',
required=False)
.param('prefix', 'Optional path prefix within the bucket under which '
'files will be stored (for S3 type).', required=False, default='')
.param('accessKeyId', 'The AWS access key ID to use for authentication '
'(for S3 type).', required=False)
.param('secret', 'The AWS secret key to use for authentication (for '
'S3 type).', required=False)
.param('service', 'The S3 service host (for S3 type). Default is '
's3.amazonaws.com. This can be used to specify a protocol and '
'port as well using the form [http[s]://](host domain)[:(port)]. '
'Do not include the bucket name here.', required=False, default='')
.param('readOnly', 'If this assetstore is read-only, set this to true.',
required=False, dataType='boolean', default=False)
.param('region', 'The AWS region to which the S3 bucket belongs.', required=False,
default=DEFAULT_REGION)
.param('inferCredentials', 'The credentials for connecting to S3 will be inferred '
'by Boto rather than explicitly passed. Inferring credentials will '
'ignore accessKeyId and secret.', dataType='boolean', required=False)
.errorResponse()
.errorResponse('You are not an administrator.', 403)
)
def createAssetstore(self, name, type, root, perms, db, mongohost, replicaset, shard, bucket,
prefix, accessKeyId, secret, service, readOnly, region, inferCredentials):
if type == AssetstoreType.FILESYSTEM:
self.requireParams({'root': root})
return self.model('assetstore').createFilesystemAssetstore(
name=name, root=root, perms=perms)
elif type == AssetstoreType.GRIDFS:
self.requireParams({'db': db})
return self.model('assetstore').createGridFsAssetstore(
name=name, db=db, mongohost=mongohost, replicaset=replicaset, shard=shard)
elif type == AssetstoreType.S3:
self.requireParams({'bucket': bucket})
return self.model('assetstore').createS3Assetstore(
name=name, bucket=bucket, prefix=prefix, secret=secret,
accessKeyId=accessKeyId, service=service, readOnly=readOnly, region=region,
inferCredentials=inferCredentials)
else:
raise RestException('Invalid type parameter')
@access.admin(scope=TokenScope.DATA_WRITE)
@autoDescribeRoute(
Description('Import existing data into an assetstore.')
.notes('This does not move or copy the existing data, it just creates '
'references to it in the Girder data hierarchy. Deleting '
'those references will not delete the underlying data. This '
'operation is currently only supported for S3 assetstores.')
.modelParam('id', model='assetstore')
.param('importPath', 'Root path within the underlying storage system '
'to import.', required=False)
.param('destinationId', 'ID of a folder, collection, or user in Girder '
'under which the data will be imported.')
.param('destinationType', 'Type of the destination resource.',
enum=('folder', 'collection', 'user'))
.param('progress', 'Whether to record progress on the import.',
dataType='boolean', default=False, required=False)
.param('leafFoldersAsItems', 'Whether folders containing only files should be '
'imported as items.', dataType='boolean', required=False, default=False)
.param('fileIncludeRegex', 'If set, only filenames matching this regular '
'expression will be imported.', required=False)
.param('fileExcludeRegex', 'If set, only filenames that do not match this regular '
'expression will be imported. If a file matches both the include and exclude regex, '
'it will be excluded.', required=False)
.errorResponse()
.errorResponse('You are not an administrator.', 403)
)
def importData(self, assetstore, importPath, destinationId, destinationType, progress,
leafFoldersAsItems, fileIncludeRegex, fileExcludeRegex):
user = self.getCurrentUser()
parent = self.model(destinationType).load(
destinationId, user=user, level=AccessType.ADMIN, exc=True)
with ProgressContext(progress, user=user, title='Importing data') as ctx:
return self.model('assetstore').importData(
assetstore, parent=parent, parentType=destinationType, params={
'fileIncludeRegex': fileIncludeRegex,
'fileExcludeRegex': fileExcludeRegex,
'importPath': importPath,
}, progress=ctx, user=user, leafFoldersAsItems=leafFoldersAsItems)
@access.admin
@autoDescribeRoute(
Description('Update an existing assetstore.')
.responseClass('Assetstore')
.modelParam('id', model='assetstore')
.param('name', 'Unique name for the assetstore.', strip=True)
.param('root', 'Root path on disk (for Filesystem type)', required=False)
.param('perms', 'File creation permissions (for Filesystem type)', required=False)
.param('db', 'Database name (for GridFS type)', required=False)
.param('mongohost', 'Mongo host URI (for GridFS type)', required=False)
.param('replicaset', 'Replica set name (for GridFS type)', required=False)
.param('shard', 'Shard the collection (for GridFS type). Set to '
'"auto" to set up sharding.', required=False)
.param('bucket', 'The S3 bucket to store data in (for S3 type).', required=False)
.param('prefix', 'Optional path prefix within the bucket under which '
'files will be stored (for S3 type).', required=False, default='')
.param('accessKeyId', 'The AWS access key ID to use for authentication '
'(for S3 type).', required=False)
.param('secret', 'The AWS secret key to use for authentication (for '
'S3 type).', required=False)
.param('service', 'The S3 service host (for S3 type). Default is '
's3.amazonaws.com. This can be used to specify a protocol and '
'port as well using the form [http[s]://](host domain)[:(port)]. '
'Do not include the bucket name here.', required=False, default='')
.param('readOnly', 'If this assetstore is read-only, set this to true.',
required=False, dataType='boolean')
.param('region', 'The AWS region to which the S3 bucket belongs.', required=False,
default=DEFAULT_REGION)
.param('current', 'Whether this is the current assetstore', dataType='boolean')
.param('inferCredentials', 'The credentials for connecting to S3 will be inferred '
'by Boto rather than explicitly passed. Inferring credentials will '
'ignore accessKeyId and secret.', dataType='boolean', required=False)
.errorResponse()
.errorResponse('You are not an administrator.', 403)
)
def updateAssetstore(self, assetstore, name, root, perms, db, mongohost, replicaset, shard,
bucket, prefix, accessKeyId, secret, service, readOnly, region, current,
inferCredentials, params):
assetstore['name'] = name
assetstore['current'] = current
if assetstore['type'] == AssetstoreType.FILESYSTEM:
self.requireParams({'root': root})
assetstore['root'] = root
if perms is not None:
assetstore['perms'] = perms
elif assetstore['type'] == AssetstoreType.GRIDFS:
self.requireParams({'db': db})
assetstore['db'] = db
if mongohost is not None:
assetstore['mongohost'] = mongohost
if replicaset is not None:
assetstore['replicaset'] = replicaset
if shard is not None:
assetstore['shard'] = shard
elif assetstore['type'] == AssetstoreType.S3:
self.requireParams({
'bucket': bucket
})
assetstore['bucket'] = bucket
assetstore['prefix'] = prefix
assetstore['accessKeyId'] = accessKeyId
assetstore['secret'] = secret
assetstore['service'] = service
assetstore['region'] = region
assetstore['inferCredentials'] = inferCredentials
if readOnly is not None:
assetstore['readOnly'] = readOnly
else:
event = events.trigger('assetstore.update', info={
'assetstore': assetstore,
'params': dict(
name=name, current=current, readOnly=readOnly, root=root, perms=perms,
db=db, mongohost=mongohost, replicaset=replicaset, shard=shard, bucket=bucket,
prefix=prefix, accessKeyId=accessKeyId, secret=secret, service=service,
region=region, **params
)
})
if event.defaultPrevented:
return
return self.model('assetstore').save(assetstore)
@access.admin
@autoDescribeRoute(
Description('Delete an assetstore.')
.notes('This will fail if there are any files in the assetstore.')
.modelParam('id', model='assetstore')
.errorResponse(('A parameter was invalid.',
'The assetstore is not empty.'))
.errorResponse('You are not an administrator.', 403)
)
def deleteAssetstore(self, assetstore):
self.model('assetstore').remove(assetstore)
return {'message': 'Deleted assetstore %s.' % assetstore['name']}
@access.admin
@autoDescribeRoute(
Description('Get a list of files controlled by an assetstore.')
.modelParam('id', model='assetstore')
.pagingParams(defaultSort='_id')
.errorResponse()
.errorResponse('You are not an administrator.', 403)
)
def getAssetstoreFiles(self, assetstore, limit, offset, sort):
return list(self.model('file').find(
query={'assetstoreId': assetstore['_id']},
offset=offset, limit=limit, sort=sort))
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import datetime
from typing import Any, Dict, Optional, Union
from croniter import CroniterBadCronError, CroniterBadDateError, croniter
from dateutil.relativedelta import relativedelta
from pendulum import DateTime
from pendulum.tz.timezone import Timezone
from airflow.compat.functools import cached_property
from airflow.exceptions import AirflowTimetableInvalid
from airflow.timetables.base import DagRunInfo, DataInterval, TimeRestriction, Timetable
from airflow.utils.dates import cron_presets
from airflow.utils.timezone import convert_to_utc, make_aware, make_naive
Delta = Union[datetime.timedelta, relativedelta]
class _DataIntervalTimetable(Timetable):
"""Basis for timetable implementations that schedule data intervals.
This kind of timetable classes create periodic data intervals from an
underlying schedule representation (e.g. a cron expression, or a timedelta
instance), and schedule a DagRun at the end of each interval.
"""
def _skip_to_latest(self, earliest: Optional[DateTime]) -> DateTime:
"""Bound the earliest time a run can be scheduled.
This is called when ``catchup=False``. See docstring of subclasses for
exact skipping behaviour of a schedule.
"""
raise NotImplementedError()
def _align(self, current: DateTime) -> DateTime:
"""Align given time to the scheduled.
For fixed schedules (e.g. every midnight); this finds the next time that
aligns to the declared time, if the given time does not align. If the
schedule is not fixed (e.g. every hour), the given time is returned.
"""
raise NotImplementedError()
def _get_next(self, current: DateTime) -> DateTime:
"""Get the first schedule after the current time."""
raise NotImplementedError()
def _get_prev(self, current: DateTime) -> DateTime:
"""Get the last schedule before the current time."""
raise NotImplementedError()
def next_dagrun_info(
self,
*,
last_automated_data_interval: Optional[DataInterval],
restriction: TimeRestriction,
) -> Optional[DagRunInfo]:
earliest = restriction.earliest
if not restriction.catchup:
earliest = self._skip_to_latest(earliest)
elif earliest is not None:
earliest = self._align(earliest)
if last_automated_data_interval is None:
# First run; schedule the run at the first available time matching
# the schedule, and retrospectively create a data interval for it.
if earliest is None:
return None
start = earliest
else: # There's a previous run.
if earliest is not None:
# Catchup is False or DAG has new start date in the future.
# Make sure we get the later one.
start = max(last_automated_data_interval.end, earliest)
else:
# Data interval starts from the end of the previous interval.
start = last_automated_data_interval.end
if restriction.latest is not None and start > restriction.latest:
return None
end = self._get_next(start)
return DagRunInfo.interval(start=start, end=end)
def _is_schedule_fixed(expression: str) -> bool:
"""Figures out if the schedule has a fixed time (e.g. 3 AM every day).
:return: True if the schedule has a fixed time, False if not.
Detection is done by "peeking" the next two cron trigger time; if the
two times have the same minute and hour value, the schedule is fixed,
and we *don't* need to perform the DST fix.
This assumes DST happens on whole minute changes (e.g. 12:59 -> 12:00).
"""
cron = croniter(expression)
next_a = cron.get_next(datetime.datetime)
next_b = cron.get_next(datetime.datetime)
return next_b.minute == next_a.minute and next_b.hour == next_a.hour
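# Illustrative sketch (not part of the original module) of the "peek two
# triggers" heuristic above, assuming standard five-field cron expressions:
#   _is_schedule_fixed("0 3 * * *")    -> True  (both triggers fall at 03:00)
#   _is_schedule_fixed("*/30 * * * *") -> False (minute differs between triggers)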
class CronDataIntervalTimetable(_DataIntervalTimetable):
"""Timetable that schedules data intervals with a cron expression.
This corresponds to ``schedule_interval=<cron>``, where ``<cron>`` is either
a five/six-segment representation, or one of ``cron_presets``.
The implementation extends on croniter to add timezone awareness. This is
because croniter works only with naive timestamps, and cannot consider DST
when determining the next/previous time.
Don't pass ``@once`` in here; use ``OnceTimetable`` instead.
"""
def __init__(self, cron: str, timezone: Timezone) -> None:
self._expression = cron_presets.get(cron, cron)
self._timezone = timezone
@classmethod
def deserialize(cls, data: Dict[str, Any]) -> "Timetable":
from airflow.serialization.serialized_objects import decode_timezone
return cls(data["expression"], decode_timezone(data["timezone"]))
def __eq__(self, other: Any) -> bool:
"""Both expression and timezone should match.
This is only for testing purposes and should not be relied on otherwise.
"""
if not isinstance(other, CronDataIntervalTimetable):
return NotImplemented
return self._expression == other._expression and self._timezone == other._timezone
@property
def summary(self) -> str:
return self._expression
def serialize(self) -> Dict[str, Any]:
from airflow.serialization.serialized_objects import encode_timezone
return {"expression": self._expression, "timezone": encode_timezone(self._timezone)}
def validate(self) -> None:
try:
croniter(self._expression)
except (CroniterBadCronError, CroniterBadDateError) as e:
raise AirflowTimetableInvalid(str(e))
@cached_property
def _should_fix_dst(self) -> bool:
# This is lazy so instantiating a schedule does not immediately raise
# an exception. Validity is checked with validate() during DAG-bagging.
return not _is_schedule_fixed(self._expression)
def _get_next(self, current: DateTime) -> DateTime:
"""Get the first schedule after specified time, with DST fixed."""
naive = make_naive(current, self._timezone)
cron = croniter(self._expression, start_time=naive)
scheduled = cron.get_next(datetime.datetime)
if not self._should_fix_dst:
return convert_to_utc(make_aware(scheduled, self._timezone))
delta = scheduled - naive
return convert_to_utc(current.in_timezone(self._timezone) + delta)
def _get_prev(self, current: DateTime) -> DateTime:
"""Get the first schedule before specified time, with DST fixed."""
naive = make_naive(current, self._timezone)
cron = croniter(self._expression, start_time=naive)
scheduled = cron.get_prev(datetime.datetime)
if not self._should_fix_dst:
return convert_to_utc(make_aware(scheduled, self._timezone))
delta = naive - scheduled
return convert_to_utc(current.in_timezone(self._timezone) - delta)
def _align(self, current: DateTime) -> DateTime:
"""Get the next scheduled time.
This is ``current + interval``, unless ``current`` falls right on the
interval boundary, when ``current`` is returned.
"""
next_time = self._get_next(current)
if self._get_prev(next_time) != current:
return next_time
return current
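    # Illustrative sketch of _align() (assumed values, not from the original
    # module), for a daily "0 0 * * *" schedule in UTC:
    #   _align(2021-01-01T06:00) -> 2021-01-02T00:00  (moved to the next boundary)
    #   _align(2021-01-02T00:00) -> 2021-01-02T00:00  (already on a boundary)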
def _skip_to_latest(self, earliest: Optional[DateTime]) -> DateTime:
"""Bound the earliest time a run can be scheduled.
The logic is that we move start_date up until one period before, so the
current time is AFTER the period end, and the job can be created...
This is slightly different from the delta version at terminal values.
If the next schedule should start *right now*, we want the data interval
        that starts now, not the one that ends now.
"""
current_time = DateTime.utcnow()
last_start = self._get_prev(current_time)
next_start = self._get_next(last_start)
if next_start == current_time: # Current time is on interval boundary.
new_start = last_start
elif next_start > current_time: # Current time is between boundaries.
new_start = self._get_prev(last_start)
else:
raise AssertionError("next schedule shouldn't be earlier")
if earliest is None:
return new_start
return max(new_start, earliest)
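    # Sketch of _skip_to_latest() with catchup=False (illustrative values, not
    # from the original module), again assuming a daily "0 0 * * *" schedule:
    # at 2021-06-25T06:00 UTC the last boundary is 2021-06-25T00:00, so the
    # earliest allowed start is one period before it, 2021-06-24T00:00, whose
    # interval has already ended and can therefore be scheduled immediately.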
def infer_manual_data_interval(self, *, run_after: DateTime) -> DataInterval:
# Get the last complete period before run_after, e.g. if a DAG run is
# scheduled at each midnight, the data interval of a manually triggered
# run at 1am 25th is between 0am 24th and 0am 25th.
end = self._get_prev(self._align(run_after))
return DataInterval(start=self._get_prev(end), end=end)
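# A short usage sketch for CronDataIntervalTimetable (assumed values, not part
# of the original module), matching the comment in infer_manual_data_interval:
#   timetable = CronDataIntervalTimetable("0 0 * * *", Timezone("UTC"))
#   timetable.infer_manual_data_interval(run_after=<2021-06-25T01:00 UTC>)
#   # -> DataInterval(start=2021-06-24T00:00, end=2021-06-25T00:00)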
class DeltaDataIntervalTimetable(_DataIntervalTimetable):
"""Timetable that schedules data intervals with a time delta.
This corresponds to ``schedule_interval=<delta>``, where ``<delta>`` is
either a ``datetime.timedelta`` or ``dateutil.relativedelta.relativedelta``
instance.
"""
def __init__(self, delta: Delta) -> None:
self._delta = delta
@classmethod
def deserialize(cls, data: Dict[str, Any]) -> "Timetable":
from airflow.serialization.serialized_objects import decode_relativedelta
delta = data["delta"]
if isinstance(delta, dict):
return cls(decode_relativedelta(delta))
return cls(datetime.timedelta(seconds=delta))
def __eq__(self, other: Any) -> bool:
"""The offset should match.
This is only for testing purposes and should not be relied on otherwise.
"""
if not isinstance(other, DeltaDataIntervalTimetable):
return NotImplemented
return self._delta == other._delta
@property
def summary(self) -> str:
return str(self._delta)
def serialize(self) -> Dict[str, Any]:
from airflow.serialization.serialized_objects import encode_relativedelta
if isinstance(self._delta, datetime.timedelta):
delta = self._delta.total_seconds()
else:
delta = encode_relativedelta(self._delta)
return {"delta": delta}
def validate(self) -> None:
        # relativedelta has no total_seconds(), so check positivity by applying
        # the delta to a reference time instead.
        now = datetime.datetime.now()
        if (now + self._delta) <= now:
raise AirflowTimetableInvalid("schedule interval must be positive")
def _get_next(self, current: DateTime) -> DateTime:
return convert_to_utc(current + self._delta)
def _get_prev(self, current: DateTime) -> DateTime:
return convert_to_utc(current - self._delta)
def _align(self, current: DateTime) -> DateTime:
return current
def _skip_to_latest(self, earliest: Optional[DateTime]) -> DateTime:
"""Bound the earliest time a run can be scheduled.
The logic is that we move start_date up until one period before, so the
current time is AFTER the period end, and the job can be created...
This is slightly different from the cron version at terminal values.
"""
new_start = self._get_prev(DateTime.utcnow())
if earliest is None:
return new_start
return max(new_start, earliest)
    def infer_manual_data_interval(self, *, run_after: DateTime) -> DataInterval:
return DataInterval(start=self._get_prev(run_after), end=run_after)
|
|
from PyQt4 import QtCore, QtGui
import matplotlib.pyplot as plt
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
import networkx as nx
import random
import sys
import NetworkModels as NM
import strutral_controllability as LBSC # short for Liu & Barabasi Structural Controllability
import exact_controllability as EC # short for 'Exact Controllability'
GLOBAL_NETWORK = nx.Graph()
class Dialog_ER(QtGui.QDialog):
def __init__(self, parent=None, name='title'):
super(Dialog_ER, self).__init__(parent)
self.resize(300, 200)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Node Number N:', parent=self), 0, 0, 1, 1)
self.number_of_nodes = QtGui.QLineEdit(parent=self)
grid.addWidget(self.number_of_nodes, 0, 1, 1, 1)
grid.addWidget(QtGui.QLabel('Connection Probability p:', parent=self), 1, 0, 1, 1)
self.connect_probability = QtGui.QLineEdit(parent=self)
grid.addWidget(self.connect_probability, 1, 1, 1, 1)
buttonBox = QtGui.QDialogButtonBox(parent=self)
buttonBox.setOrientation(QtCore.Qt.Horizontal)
buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
buttonBox.accepted.connect(self.accept)
buttonBox.rejected.connect(self.reject)
layout = QtGui.QVBoxLayout()
layout.addLayout(grid)
spacerItem = QtGui.QSpacerItem(10, 14, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
layout.addItem(spacerItem)
layout.addWidget(buttonBox)
self.setLayout(layout)
def NumberofNodes(self):
(node_num, ok) = self.number_of_nodes.text().toInt()
return node_num
def ConnectProbability(self):
(prob, ok) = self.connect_probability.text().toFloat()
return prob
class Dialog_WS(QtGui.QDialog):
def __init__(self, parent=None, name='title'):
super(Dialog_WS, self).__init__(parent)
self.resize(300, 200)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Node Number N:', parent=self), 0, 0, 1, 1)
self.number_of_nodes = QtGui.QLineEdit(parent=self)
grid.addWidget(self.number_of_nodes, 0, 1, 1, 1)
grid.addWidget(QtGui.QLabel('Nearest Neighbors k:', parent=self), 1,0,1,1)
self.number_of_neighbors = QtGui.QLineEdit(parent=self)
grid.addWidget(self.number_of_neighbors, 1, 1, 1,1)
grid.addWidget(QtGui.QLabel('Connection Probability p:', parent=self), 2, 0, 1, 1)
self.connect_probability = QtGui.QLineEdit(parent=self)
grid.addWidget(self.connect_probability, 2, 1, 1, 1)
buttonBox = QtGui.QDialogButtonBox(parent=self)
buttonBox.setOrientation(QtCore.Qt.Horizontal)
buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
buttonBox.accepted.connect(self.accept)
buttonBox.rejected.connect(self.reject)
layout = QtGui.QVBoxLayout()
layout.addLayout(grid)
spacerItem = QtGui.QSpacerItem(10, 14, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
layout.addItem(spacerItem)
layout.addWidget(buttonBox)
self.setLayout(layout)
def NumberofNodes(self):
(node_num, ok) = self.number_of_nodes.text().toInt()
return node_num
def NumberofNeighbors(self):
(k, ok) = self.number_of_neighbors.text().toInt()
return k
def ConnectProbability(self):
(prob, ok) = self.connect_probability.text().toFloat()
return prob
class Dialog_NW(QtGui.QDialog):
def __init__(self, parent=None, name='title'):
super(Dialog_NW, self).__init__(parent)
self.resize(300, 200)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Node Number N:', parent=self), 0, 0, 1, 1)
self.number_of_nodes = QtGui.QLineEdit(parent=self)
grid.addWidget(self.number_of_nodes, 0, 1, 1, 1)
grid.addWidget(QtGui.QLabel('Nearest Neighbors k:', parent=self), 1,0,1,1)
self.number_of_neighbors = QtGui.QLineEdit(parent=self)
grid.addWidget(self.number_of_neighbors, 1, 1, 1,1)
grid.addWidget(QtGui.QLabel('Connection Probability p:', parent=self), 2, 0, 1, 1)
self.connect_probability = QtGui.QLineEdit(parent=self)
grid.addWidget(self.connect_probability, 2, 1, 1, 1)
buttonBox = QtGui.QDialogButtonBox(parent=self)
buttonBox.setOrientation(QtCore.Qt.Horizontal)
buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
buttonBox.accepted.connect(self.accept)
buttonBox.rejected.connect(self.reject)
layout = QtGui.QVBoxLayout()
layout.addLayout(grid)
spacerItem = QtGui.QSpacerItem(10, 14, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
layout.addItem(spacerItem)
layout.addWidget(buttonBox)
self.setLayout(layout)
def NumberofNodes(self):
(node_num, ok) = self.number_of_nodes.text().toInt()
return node_num
def NumberofNeighbors(self):
(k, ok) = self.number_of_neighbors.text().toInt()
return k
def ConnectProbability(self):
(prob, ok) = self.connect_probability.text().toFloat()
return prob
class Dialog_BA(QtGui.QDialog):
def __init__(self, parent=None, name='title'):
super(Dialog_BA, self).__init__(parent)
self.resize(300, 200)
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel('Node Number N:', parent=self), 0, 0, 1, 1)
self.number_of_nodes = QtGui.QLineEdit(parent=self)
grid.addWidget(self.number_of_nodes, 0, 1, 1, 1)
grid.addWidget(QtGui.QLabel('Added Nodes m (m0=m) :', parent=self), 1, 0, 1, 1)
self.added_nodes_num = QtGui.QLineEdit(parent=self)
grid.addWidget(self.added_nodes_num, 1, 1, 1, 1)
buttonBox = QtGui.QDialogButtonBox(parent=self)
buttonBox.setOrientation(QtCore.Qt.Horizontal)
buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
buttonBox.accepted.connect(self.accept)
buttonBox.rejected.connect(self.reject)
layout = QtGui.QVBoxLayout()
layout.addLayout(grid)
spacerItem = QtGui.QSpacerItem(10, 14, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
layout.addItem(spacerItem)
layout.addWidget(buttonBox)
self.setLayout(layout)
def NumberofNodes(self):
(node_num, ok) = self.number_of_nodes.text().toInt()
return node_num
def NumberofAddedNodes(self):
(m, ok) = self.added_nodes_num.text().toInt()
return m
class Dialog_CentralityDisplayResult(QtGui.QDialog):
def __init__(self, parent=None, name='title'):
super(Dialog_CentralityDisplayResult, self).__init__(parent)
self.resize(400, 500)
grid = QtGui.QGridLayout()
self.edit = QtGui.QTextEdit(self)
self.buttonBox = QtGui.QDialogButtonBox(parent=self)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.accepted.connect(self.accept)
self.buttonBox.rejected.connect(self.reject)
btn = QtGui.QPushButton('Plot')
self.buttonBox.addButton(btn, QtGui.QDialogButtonBox.ActionRole)
QtCore.QObject.connect(btn, QtCore.SIGNAL("clicked()"), self.plot_function)
self.column1 = []
self.column2 = []
grid.addWidget(self.edit, 0, 0, 1, 1)
grid.addWidget(self.buttonBox, 1, 0, 1, 1)
layout = QtGui.QVBoxLayout()
layout.addLayout(grid)
self.setLayout(layout)
def add_contents(self, label1, label2, data_col1, data_col2):
self.edit.append(label1+'\t'+label2)
n = len(data_col1)
for i in range(n):
self.edit.append("%s\t%f"%(data_col1[i], data_col2[i]))
def plot_function(self):
plt.plot(self.column1, self.column2, '-bo')
plt.show()
class Dialog_EdgeBetCentralityDisplayResult(QtGui.QDialog):
def __init__(self, parent=None, name='title'):
super(Dialog_EdgeBetCentralityDisplayResult, self).__init__(parent)
self.resize(400, 500)
grid = QtGui.QGridLayout()
self.edit = QtGui.QTextEdit(self)
self.buttonBox = QtGui.QDialogButtonBox(parent=self)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.accepted.connect(self.accept)
self.buttonBox.rejected.connect(self.reject)
btn = QtGui.QPushButton('Plot')
self.buttonBox.addButton(btn, QtGui.QDialogButtonBox.ActionRole)
QtCore.QObject.connect(btn, QtCore.SIGNAL("clicked()"), self.plot_function)
self.column1 = []
self.column2 = []
grid.addWidget(self.edit, 0, 0, 1, 1)
grid.addWidget(self.buttonBox, 1, 0, 1, 1)
layout = QtGui.QVBoxLayout()
layout.addLayout(grid)
self.setLayout(layout)
def add_contents(self, label1, label2, data_col1, data_col2):
self.edit.append(label1+'\t'+label2)
n = len(data_col1)
for i in range(n):
self.edit.append("%s-%s\t%f"%(data_col1[i][0], data_col1[i][1], data_col2[i]))
def plot_function(self):
sz = len(self.column2)
x = range(sz)
plt.plot(x, self.column2, '-bo')
plt.show()
class Dialog_DriverNodes(QtGui.QDialog):
def __init__(self, parent=None):
super(Dialog_DriverNodes, self).__init__(parent)
self.resize(400, 500)
self.drivers = [] # driver nodes
self.number_of_drivers = None # number of driver nodes
self.density_of_drivers = None # density of driver nodes (N_D/N)
grid = QtGui.QGridLayout()
self.edit = QtGui.QTextEdit(self)
self.buttonBox = QtGui.QDialogButtonBox(parent=self)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.button(QtGui.QDialogButtonBox.Ok).setText('Save')
self.buttonBox.button(QtGui.QDialogButtonBox.Cancel).setText('Close')
self.buttonBox.accepted.connect(self.accept)
self.buttonBox.rejected.connect(self.reject)
grid.addWidget(self.edit, 0, 0, 1, 1)
grid.addWidget(self.buttonBox, 1, 0, 1, 1)
layout = QtGui.QVBoxLayout()
layout.addLayout(grid)
self.setLayout(layout)
def add_contents(self, driverNodes):
self.drivers = driverNodes
self.number_of_drivers = len(driverNodes)
self.edit.append('%d driver nodes as following:'%self.number_of_drivers)
for x in self.drivers:
self.edit.append('%s'%x)
class MyMplCanvas(FigureCanvas):
"""Ultimately, this is a QWidget (as well as a FigureCanvasAgg, etc.)."""
def __init__(self, parent=None, width=5, height=4, dpi=100):
fig = Figure(figsize=(width, height), dpi=dpi)
self.axes = fig.add_subplot(111)
self.axes.hold(False)
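# Note: Axes.hold() only exists up to matplotlib 2.x; on matplotlib >= 3.0 this
# call raises AttributeError, and the usual replacement is to call
# self.axes.clear() before each redraw (e.g. at the top of draw_network()).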
self.compute_initial_figure()
FigureCanvas.__init__(self, fig)
self.setParent(parent)
FigureCanvas.setSizePolicy(self,
QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
def compute_initial_figure(self):
pass
class MyStaticMplCanvas(MyMplCanvas):
def compute_initial_figure(self):
G=nx.path_graph(10)
pos=nx.spring_layout(G)
nx.draw(G,pos,ax=self.axes)
class MyDynamicMplCanvas(MyMplCanvas):
"""A canvas that updates itself every second with a new plot."""
def __init__(self, *args, **kwargs):
MyMplCanvas.__init__(self, *args, **kwargs)
#timer = QtCore.QTimer(self)
#timer.timeout.connect(self.update_figure)
#timer.start(1000)
#self.update_figure()
#self.draw_network()
def compute_initial_figure(self):
#self.axes.plot([0, 1, 2, 3], [1, 2, 0, 4], 'r')
pass
def update_figure(self):
# Build a list of 4 random integers between 0 and 10 (both inclusive)
l = [random.randint(0, 10) for i in range(4)]
self.axes.plot([0, 1, 2, 3], l, 'r')
self.draw()
def draw_network(self, G, pos):
nx.draw(G,pos,ax=self.axes, with_labels=True, font_color='w')
#nx.draw_networkx_labels(G, pos, ax=self.axes)
self.draw()
class MyCentralWidget(QtGui.QWidget):
def __init__(self, parent=None):
super(MyCentralWidget,self).__init__(parent)
self.sc = MyDynamicMplCanvas(self, width=5, height=4, dpi=100)
self.toolbar = NavigationToolbar(self.sc, self)
layout = QtGui.QVBoxLayout(self)
layout.addWidget(self.toolbar)
layout.addWidget(self.sc)
self.setLayout(layout)
def update_centralWidget(self, G, pos):
self.sc.draw_network(G, pos)
class MainWindow(QtGui.QMainWindow):
def __init__(self):
super(MainWindow,self).__init__()
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.setWindowTitle('Network Controllability Analysis Software')
self.resize(1000, 800)
#######################################################################################
# file menu
#######################################################################################
self.file_menu = QtGui.QMenu('&File', self)
self.file_menu_open_submenu = QtGui.QMenu("Open")
self.file_menu_save_submenu = QtGui.QMenu("Save Net As")
self.file_menu.addMenu(self.file_menu_open_submenu)
self.file_menu.addMenu(self.file_menu_save_submenu)
self.file_menu.addAction("Quit", self.file_menu_quit_action)
self.file_menu_open_submenu.addAction("Pajek File (.net)", self.file_menu_open_pajeknetwork_action)
self.file_menu_open_submenu.addAction("Graphviz File (.dot)", self.file_menu_open_dotNetwork_action)
self.file_menu_save_submenu.addAction("Pajek Net (.net)", self.file_menu_save_pajeknetwork_action)
self.file_menu_save_submenu.addAction("Graphviz File (.dot)", self.file_menu_save_dotNetwork_action)
self.menuBar().addMenu(self.file_menu)
#######################################################################################
# network menu
#######################################################################################
self.network_menu = QtGui.QMenu('&Networks', self)
# regular network
self.network_menu.addAction('Complete Network', self.network_menu_complete_graph_action)
# sub-random-network menu
self.network_menu_random_network_menu = QtGui.QMenu("Random Networks", self.network_menu)
self.network_menu.addMenu(self.network_menu_random_network_menu)
self.network_menu_random_network_menu.addAction('ER Network', self.network_menu_ERNetwork_action)
self.network_menu_random_network_menu.addAction('Directed ER Network', self.directed_network_menu_ERNetwork_action)
# sub-small-world menu
self.network_menu_smallworld_menu = QtGui.QMenu("Small World Networks", self.network_menu)
self.network_menu.addMenu(self.network_menu_smallworld_menu)
self.network_menu_smallworld_menu.addAction('WS Small World', self.network_menu_WSNetwork_action)
self.network_menu_smallworld_menu.addAction('Directed WS Small World', self.network_menu_directed_WSNetwork_action)
self.network_menu_smallworld_menu.addAction('NW Small World', self.network_menu_NWNetwork_action)
self.network_menu_smallworld_menu.addAction('Directed NW Small World', self.network_menu_directed_NWNetwork_action)
# sub-scale-free menu
self.network_menu_scale_free_menu = QtGui.QMenu('Scale-Free Networks', self.network_menu)
self.network_menu.addMenu(self.network_menu_scale_free_menu)
self.network_menu_scale_free_menu.addAction('BA Scale Free Network', self.network_menu_BANetwork_action)
self.network_menu_scale_free_menu.addAction('Directed BA Scale Free Network',self.network_menu_directed_BANetwork_action)
self.network_menu_scale_free_menu.addAction('Parametric Scale Free Network', self.network_menu_SFNetwork_action)
# sub-real-network menu
self.network_menu_real_network_menu = QtGui.QMenu("Real Networks", self.network_menu)
self.network_menu.addMenu(self.network_menu_real_network_menu)
self.network_menu_real_network_menu.addAction('Karate Club Network', self.network_menu_karate_club_network_action)
self.menuBar().addMenu(self.network_menu)
###############################################################################################
# Features menu
###############################################################################################
self.feature_menu = QtGui.QMenu("Features", self)
self.feature_menu.addAction("Degree Distribution", self.feature_menu_degree_action)
self.feature_menu.addAction("Clustering Coefficients", self.feature_menu_clusteringcoefficient_action)
self.feature_menu.addAction("Diameter", self.feature_menu_diameter_action)
self.menuBar().addMenu(self.feature_menu)
###############################################################################################
# centrality menu
###############################################################################################
self.centrality_menu = QtGui.QMenu('&Centrality', self)
self.centrality_menu.addAction('Degree Centrality', self.centrality_menu_NodeDegree_action)
self.centrality_menu.addAction('Betweenness Centrality', self.centrality_menu_NodeBetweenness_action)
self.centrality_menu.addAction('Edge Betweenness Centrality', self.centrality_menu_EdgeBetweenness_action)
self.centrality_menu.addAction('Closeness Centrality', self.centrality_menu_ClosenessBetweenness_action)
self.centrality_menu.addAction('Eigenvector Centrality', self.centrality_menu_EigenvectorBetweenness_action)
self.centrality_menu.addSeparator()
self.centrality_menu.addAction('Current-flow Betweenness Centrality', self.centrality_menu_CurrentFlowBetweennessCentrality_action)
self.centrality_menu.addAction('Current-flow Closeness Centrality', self.centrality_menu_CurrentFlowClosenessCentrality_action)
self.centrality_menu.addSeparator()
self.centrality_menu.addAction('Katz Centrality', self.centrality_menu_KatzCentrality_action)
self.centrality_menu.addSeparator()
self.centrality_menu.addAction('Load Centrality', self.centrality_menu_LoadCentrality_action)
self.menuBar().addMenu(self.centrality_menu)
##############################################################################################
# controllability menu
##############################################################################################
self.controllability_menu = QtGui.QMenu('&Controllability', self)
self.controllability_menu.addAction('Structral Controllability', self.controllability_menu_StructralControllability_action)
self.controllability_menu.addSeparator()
self.controllability_menu.addAction('Exact Controllability', self.controllability_menu_ExactControllability_action)
self.controllability_menu.addAction('Number of Drivers', self.controllability_menu_NumberOfDrivers_action)
self.controllability_menu.addAction('Number of Drivers (Faster Algo.)', self.controllability_menu_FasterExactControllability_action)
self.menuBar().addMenu(self.controllability_menu)
###############################################################################################
# Robustness menu
###############################################################################################
self.robustness_menu = QtGui.QMenu('&Robustness', self)
self.robustness_menu.addAction('Random Attack', self.robustness_menu_RondomAttack_action)
self.robustness_menu.addAction('Recalculated Max-Degree Attack', self.robustness_menu_RecaculatedMaxDegree_action)
self.robustness_menu.addAction('Recalculated Max-Betweenness Attack', self.robustness_menu_RecaculatedMaxBetweenness_action)
self.robustness_menu.addAction('Cascaded Attack based on Node-Capacity', self.robustness_menu_CascadeBasedonNodeCapacity_action)
self.menuBar().addMenu(self.robustness_menu)
##############################################################################################
# Draw Menu
##############################################################################################
self.draw_menu = QtGui.QMenu("&Draw", self)
self.draw_menu_layout_submenu = QtGui.QMenu("Layouts", self.draw_menu)
self.draw_menu.addMenu(self.draw_menu_layout_submenu)
self.draw_menu_layout_submenu.addAction("Circle Layout", self.draw_menu_circleLayout_action)
self.draw_menu_layout_submenu.addAction("Random Layout", self.draw_menu_randomLayout_action)
self.draw_menu_layout_submenu.addAction("Shell Layout", self.draw_menu_shellLayout_action)
self.draw_menu_layout_submenu.addAction("Spring Layout", self.draw_menu_springLayout_action)
self.draw_menu_layout_submenu.addAction("Spectral Layout", self.draw_menu_spectralLayout_action)
self.menuBar().addMenu(self.draw_menu)
# about menu
self.about_menu = QtGui.QMenu('&About', self)
self.about_menu.addAction('Network Info', self.about_menu_aboutNetwork_action)
self.about_menu.addAction('About', self.about_menu_About_action)
self.menuBar().addMenu(self.about_menu)
# status bar
#self.statusbar = QtGui.QStatusBar(self)
self.statusBar().showMessage(nx.info(GLOBAL_NETWORK))
# central Widget
self.main_widget = MyCentralWidget(self)
self.main_widget.setFocus()
self.setCentralWidget(self.main_widget)
G=nx.path_graph(10)
pos=nx.spring_layout(G)
self.main_widget.update_centralWidget(G, pos)
##################################################################
#
# File Menu Actions
#
##################################################################
# open pajek
def file_menu_open_pajeknetwork_action(self):
filename = QtGui.QFileDialog.getOpenFileName(self, "Open Pajek (.net) file", "./Nets", "Pajek Files (*.net)")
filename = str(filename.toUtf8())
if filename:
global GLOBAL_NETWORK
GLOBAL_NETWORK.clear()
GLOBAL_NETWORK = nx.read_pajek(filename)
if GLOBAL_NETWORK.is_multigraph():
if GLOBAL_NETWORK.is_directed():
GLOBAL_NETWORK = nx.DiGraph(GLOBAL_NETWORK)
else:
GLOBAL_NETWORK = nx.Graph(GLOBAL_NETWORK)
pos = nx.layout.circular_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
else:
return
# save pajek
def file_menu_save_pajeknetwork_action(self):
# validity check: an empty network does not need to be saved
global GLOBAL_NETWORK
if (not GLOBAL_NETWORK.nodes()) and (not GLOBAL_NETWORK.edges()):
QtGui.QMessageBox.warning(self, "Warning", "There is no Networks to Save !")
return
# save files
filename = QtGui.QFileDialog.getSaveFileName(self, "Save as...", "./Nets", "Pajek Files (*.net)")
filename = str(filename.toUtf8())
if filename:
nx.write_pajek(GLOBAL_NETWORK, filename)
QtGui.QMessageBox.information(self, "title", "Save Net Files Successfully !")
else:
pass
# open graphviz net
def file_menu_open_dotNetwork_action(self):
QtGui.QMessageBox.information(self, "Info", "Developing..., will come back soon")
# save graphviz net
def file_menu_save_dotNetwork_action(self):
# validity check: an empty network does not need to be saved
global GLOBAL_NETWORK
if (not GLOBAL_NETWORK.nodes()) and (not GLOBAL_NETWORK.edges()):
QtGui.QMessageBox.warning(self, "Warning", "There is no Networks to Save !")
return
# save files
filename = QtGui.QFileDialog.getSaveFileName(self, "Save as...", "./Nets", "Graphviz Files (*.dot)")
filename = str(filename.toUtf8())
if filename:
nx.drawing.nx_pydot.write_dot(GLOBAL_NETWORK, filename)  # write a Graphviz .dot file (requires pydot)
QtGui.QMessageBox.information(self, "Saved", "Network file saved successfully!")
else:
pass
def file_menu_quit_action(self):
QtGui.qApp.exit()
##################################################################
#
# Network Models Actions
#
##################################################################
def network_menu_complete_graph_action(self):
text, ok = QtGui.QInputDialog.getText(self, 'Input The Parameter', 'Node Num N:')
if ok:
n = int(text)
if n <= 0 or n >= 1000:
QtGui.QMessageBox.critical(self, "ERROR", "N must be an integer in (0, 1000)")
return
global GLOBAL_NETWORK
GLOBAL_NETWORK.clear()
GLOBAL_NETWORK = nx.complete_graph(n)
pos = nx.layout.circular_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def network_menu_ERNetwork_action(self):
dialog = Dialog_ER(self)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
n = dialog.NumberofNodes()
p = dialog.ConnectProbability()
if n <= 0 or n >= 1000:
QtGui.QMessageBox.critical(self, "ERROR", "N must be an integer in (0, 1000)")
return
if p <= 0.0 or p >= 1.0:
QtGui.QMessageBox.critical(self, "ERROR", "p must be a float number in (0.0, 1.0)")
return
global GLOBAL_NETWORK
GLOBAL_NETWORK.clear()
GLOBAL_NETWORK = nx.erdos_renyi_graph(n, p)
pos = nx.spring_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def directed_network_menu_ERNetwork_action(self):
dialog = Dialog_ER(self)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
n = dialog.NumberofNodes()
p = dialog.ConnectProbability()
if n <= 0 or n >= 1000:
QtGui.QMessageBox.critical(self, "ERROR", "N must be an integer in (0, 1000)")
return
if p <= 0.0 or p >= 1.0:
QtGui.QMessageBox.critical(self, "ERROR", "p must be a float number in (0.0, 1.0)")
return
global GLOBAL_NETWORK
GLOBAL_NETWORK.clear()
GLOBAL_NETWORK = nx.erdos_renyi_graph(n, p, directed=True)
pos = nx.spectral_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def network_menu_WSNetwork_action(self):
dialog = Dialog_WS(self)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
n = dialog.NumberofNodes()
k = dialog.NumberofNeighbors()
p = dialog.ConnectProbability()
if n <= 0 or n >= 1000:
QtGui.QMessageBox.critical(self, "ERROR", "N must be an integer in (0, 1000)")
return
if k % 2 == 1 or k >= n:
QtGui.QMessageBox.critical(self, "ERROR", "k must be an even number & k < n")
return
if p <= 0.0 or p >= 1.0:
QtGui.QMessageBox.critical(self, "ERROR", "p must be a float number in (0.0, 1.0)")
return
global GLOBAL_NETWORK
GLOBAL_NETWORK.clear()
GLOBAL_NETWORK = nx.watts_strogatz_graph(n, k, p)
pos = nx.layout.circular_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def network_menu_directed_WSNetwork_action(self):
dialog = Dialog_WS(self)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
n = dialog.NumberofNodes()
k = dialog.NumberofNeighbors()
p = dialog.ConnectProbability()
if n <= 0 or n >= 1000:
QtGui.QMessageBox.critical(self, "ERROR", "N must be an integer in (0, 1000)")
return
if k % 2 == 1 or k >= n:
QtGui.QMessageBox.critical(self, "ERROR", "k must be an even number & k < n")
return
if p <= 0.0 or p >= 1.0:
QtGui.QMessageBox.critical(self, "ERROR", "p must be a float number in (0.0, 1.0)")
return
global GLOBAL_NETWORK
GLOBAL_NETWORK.clear()
GLOBAL_NETWORK = NM.directed_watts_strogatz_graph(n,k,p)
pos = nx.layout.circular_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def network_menu_NWNetwork_action(self):
dialog = Dialog_NW(self)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
n = dialog.NumberofNodes()
k = dialog.NumberofNeighbors()
p = dialog.ConnectProbability()
if n <= 0 or n >= 1000:
QtGui.QMessageBox.critical(self, "ERROR", "N must be an integer in (0, 1000)")
return
if k % 2 == 1 or k >= n:
QtGui.QMessageBox.critical(self, "ERROR", "k must be an even number & k < n")
return
if p <= 0.0 or p >= 1.0:
QtGui.QMessageBox.critical(self, "ERROR", "p must be a float number in (0.0, 1.0)")
return
global GLOBAL_NETWORK
GLOBAL_NETWORK.clear()
GLOBAL_NETWORK = nx.newman_watts_strogatz_graph(n, k, p)
pos = nx.layout.circular_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def network_menu_directed_NWNetwork_action(self):
dialog = Dialog_NW(self)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
n = dialog.NumberofNodes()
k = dialog.NumberofNeighbors()
p = dialog.ConnectProbability()
if n <= 0 or n >= 1000:
QtGui.QMessageBox.critical(self, "ERROR", "N must be an integer in (0, 1000)")
return
if k % 2 == 1 or k >= n:
QtGui.QMessageBox.critical(self, "ERROR", "k must be an even number & k < n")
return
if p <= 0.0 or p >= 1.0:
QtGui.QMessageBox.critical(self, "ERROR", "p must be a float number in (0.0, 1.0)")
return
global GLOBAL_NETWORK
GLOBAL_NETWORK.clear()
GLOBAL_NETWORK = NM.directed_newman_watts_strogatz_graph(n,k,p)
pos = nx.layout.circular_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def network_menu_BANetwork_action(self):
dialog = Dialog_BA(self)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
n = dialog.NumberofNodes()
m = dialog.NumberofAddedNodes()
if n <= 0 or n >= 1000:
QtGui.QMessageBox.critical(self, "ERROR", "N must be an integer in (0, 1000)")
return
if m < 1 or m >= n:
QtGui.QMessageBox.critical(self, "ERROR", "added nodes must has m < n")
return
global GLOBAL_NETWORK
GLOBAL_NETWORK.clear()
GLOBAL_NETWORK = nx.barabasi_albert_graph(n, m)
pos = nx.layout.spring_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def network_menu_directed_BANetwork_action(self):
dialog = Dialog_BA(self)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
n = dialog.NumberofNodes()
m = dialog.NumberofAddedNodes()
if n <= 0 or n >= 1000:
QtGui.QMessageBox.critical(self, "ERROR", "N must be an integer in (0, 1000)")
return
if m < 1 or m >= n:
QtGui.QMessageBox.critical(self, "ERROR", "added nodes must has m < n")
return
global GLOBAL_NETWORK
GLOBAL_NETWORK.clear()
GLOBAL_NETWORK = NM.directed_barabasi_albert_graph(n, m)
pos = nx.layout.spring_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def network_menu_SFNetwork_action(self):
pass
def network_menu_karate_club_network_action(self):
global GLOBAL_NETWORK
GLOBAL_NETWORK.clear()
GLOBAL_NETWORK = nx.karate_club_graph()
pos = nx.layout.spring_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
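# The model actions above all repeat the same three steps: build a graph,
# pick a layout, and push the result to the canvas. A small helper along these
# lines (purely illustrative; it is not defined or used anywhere else in this
# file) could remove the duplication:
#
#     def _show_network(self, G, layout=nx.spring_layout):
#         global GLOBAL_NETWORK
#         GLOBAL_NETWORK = G
#         self.main_widget.update_centralWidget(GLOBAL_NETWORK, layout(GLOBAL_NETWORK))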
##############################################################################
#
# Features (degree distribution, clustering coefficient, diameter)
#
###############################################################################
def feature_menu_degree_action(self):
dialog = Dialog_CentralityDisplayResult(self)
dialog.setWindowTitle('degree distribution')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Ok).setText('Save')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Cancel).setText('Close')
col1 = []
col2 = []
global GLOBAL_NETWORK
for x, y in nx.degree(GLOBAL_NETWORK).iteritems():
col1.append(x)
col2.append(y)
dialog.column1 = col1
dialog.column2 = col2
dialog.add_contents('NodeID', 'Degree', col1, col2)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file to', './results/features/', "Text Files (*.txt)")
if fname:
with open(fname, 'w') as fp:
print >> fp, 'NodeId Degree'
for i in range(len(col1)):
print >> fp, '%s %d'%(col1[i], col2[i])
QtGui.QMessageBox.information(self, 'Message', 'Saved successfully!')
else:
pass
def feature_menu_clusteringcoefficient_action(self):
dialog = Dialog_CentralityDisplayResult(self)
dialog.setWindowTitle('clustering coefficient distribution')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Ok).setText('Save')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Cancel).setText('Close')
col1 = []
col2 = []
global GLOBAL_NETWORK
for x, y in nx.clustering(GLOBAL_NETWORK).iteritems():
col1.append(x)
col2.append(y)
dialog.column1 = col1
dialog.column2 = col2
dialog.add_contents('NodeID', 'Clustering Coefficient', col1, col2)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file to', './results/features/', "Text Files (*.txt)")
if fname:
with open(fname, 'w') as fp:
print >> fp, 'NodeId ClusteringCoefficient'
for i in range(len(col1)):
print >> fp, '%s %f'%(col1[i], col2[i])
QtGui.QMessageBox.information(self, 'Message', 'Saved successfully!')
else:
pass
def feature_menu_diameter_action(self):
global GLOBAL_NETWORK
d = nx.diameter(GLOBAL_NETWORK)
QtGui.QMessageBox.about(self, "Prompt", "The Network Diameter is %d" %(d))
##################################################################
#
# Centrality (degree, betweenness, closeness, eigenvector) Actions
#
##################################################################
def centrality_menu_NodeDegree_action(self):
dialog = Dialog_CentralityDisplayResult(self)
dialog.setWindowTitle('node degree centrality')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Ok).setText('Save')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Cancel).setText('Close')
col1 = []
col2 = []
global GLOBAL_NETWORK
for x, y in nx.degree_centrality(GLOBAL_NETWORK).iteritems():
col1.append(x)
col2.append(y)
dialog.column1 = col1
dialog.column2 = col2
dialog.add_contents('NodeID', 'Degree Centrality', col1, col2)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file to', './results/centrality/', "Text Files (*.txt)")
if fname:
with open(fname, 'w') as fp:
print >> fp, 'NodeId DegreeCentrality'
for i in range(len(col1)):
print >> fp, '%s %f'%(col1[i], col2[i])
QtGui.QMessageBox.information(self, 'Message', 'Saved successfully!')
else:
pass
def centrality_menu_NodeBetweenness_action(self):
dialog = Dialog_CentralityDisplayResult(self)
dialog.setWindowTitle('node betweenness centrality')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Ok).setText('Save')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Cancel).setText('Close')
col1 = []
col2 = []
global GLOBAL_NETWORK
for x, y in nx.betweenness_centrality(GLOBAL_NETWORK).iteritems():
col1.append(x)
col2.append(y)
dialog.column1 = col1
dialog.column2 = col2
dialog.add_contents('NodeID', 'Betweenness Centrality', col1, col2)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file to', './results/centrality/', "Text Files (*.txt)")
if fname:
with open(fname, 'w') as fp:
print >> fp, 'NodeId BetweennessCentrality'
for i in range(len(col1)):
print >> fp, '%s %f'%(col1[i], col2[i])
QtGui.QMessageBox.information(self, 'Message', 'Saved successfully!')
else:
pass
def centrality_menu_EdgeBetweenness_action(self):
dialog = Dialog_EdgeBetCentralityDisplayResult(self)
dialog.setWindowTitle('edge betweenness centrality')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Ok).setText('Save')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Cancel).setText('Close')
col1 = []
col2 = []
global GLOBAL_NETWORK
for (u, v), y in nx.edge_betweenness_centrality(GLOBAL_NETWORK).iteritems():
col1.append((u, v))
col2.append(y)
dialog.column1 = col1
dialog.column2 = col2
dialog.add_contents('edge', 'edge betweenness centrality', col1, col2)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file to', './results/centrality/', "Text Files (*.txt)")
if fname:
with open(fname, 'w') as fp:
print >> fp, 'Edge EdgeBetweennessCentrality'
for i in range(len(col1)):
print >> fp, '%s-%s %f'%(col1[i][0],col1[i][1], col2[i])
QtGui.QMessageBox.information(self, 'Message', 'Saved successfully!')
else:
pass
def centrality_menu_ClosenessBetweenness_action(self):
dialog = Dialog_CentralityDisplayResult(self)
dialog.setWindowTitle('node closeness centrality')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Ok).setText('Save')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Cancel).setText('Close')
col1 = []
col2 = []
dialog.column1 = col1
dialog.column2 = col2
global GLOBAL_NETWORK
for x, y in nx.closeness_centrality(GLOBAL_NETWORK).iteritems():
col1.append(x)
col2.append(y)
dialog.add_contents('NodeID', 'Closeness Centrality', col1, col2)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file to', './results/centrality/', "Text Files (*.txt)")
if fname:
with open(fname, 'w') as fp:
print >> fp, 'NodeId ClosenessCentrality'
for i in range(len(col1)):
print >> fp, '%s %f'%(col1[i], col2[i])
QtGui.QMessageBox.information(self, 'Message', 'Saved successfully!')
else:
pass
def centrality_menu_EigenvectorBetweenness_action(self):
dialog = Dialog_CentralityDisplayResult(self)
dialog.buttonBox.button(QtGui.QDialogButtonBox.Ok).setText('Save')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Cancel).setText('Close')
dialog.setWindowTitle('node eigenvector centrality')
col1 = []
col2 = []
global GLOBAL_NETWORK
for x, y in nx.eigenvector_centrality(GLOBAL_NETWORK).iteritems():
col1.append(x)
col2.append(y)
dialog.column1 = col1
dialog.column2 = col2
dialog.add_contents('NodeID', 'Eigenvector Centrality', col1, col2)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file to', './results/centrality/', "Text Files (*.txt)")
if fname:
with open(fname, 'w') as fp:
print >> fp, 'NodeId EigenvectorCentrality'
for i in range(len(col1)):
print >> fp, '%s %f'%(col1[i], col2[i])
QtGui.QMessageBox.information(self, 'Message', 'Saved successfully!')
else:
pass
def centrality_menu_CurrentFlowBetweennessCentrality_action(self):
dialog = Dialog_CentralityDisplayResult(self)
dialog.setWindowTitle('node current-flow betweenness centrality')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Ok).setText('Save')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Cancel).setText('Close')
col1 = []
col2 = []
dialog.column1 = col1
dialog.column2 = col2
global GLOBAL_NETWORK
for x, y in nx.current_flow_betweenness_centrality(GLOBAL_NETWORK).iteritems():
col1.append(x)
col2.append(y)
dialog.add_contents('NodeID', 'Current-flow Betweenness Centrality', col1, col2)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file to', './results/centrality/', "Text Files (*.txt)")
if fname:
with open(fname, 'w') as fp:
print >> fp, 'NodeId CurrentFlowBetweennessCentrality'
for i in range(len(col1)):
print >> fp, '%s %f'%(col1[i], col2[i])
QtGui.QMessageBox.information(self, 'Message', 'Saved successfully!')
else:
pass
def centrality_menu_CurrentFlowClosenessCentrality_action(self):
dialog = Dialog_CentralityDisplayResult(self)
dialog.setWindowTitle('node current-flow closeness centrality')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Ok).setText('Save')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Cancel).setText('Close')
col1 = []
col2 = []
dialog.column1 = col1
dialog.column2 = col2
global GLOBAL_NETWORK
for x, y in nx.current_flow_closeness_centrality(GLOBAL_NETWORK).iteritems():
col1.append(x)
col2.append(y)
dialog.add_contents('NodeID', 'Current-flow Closeness Centrality', col1, col2)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file to', './results/centrality/', "Text Files (*.txt)")
if fname:
with open(fname, 'w') as fp:
print >> fp, 'NodeId CurrentFlowClosenessCentrality'
for i in range(len(col1)):
print >> fp, '%s %f'%(col1[i], col2[i])
QtGui.QMessageBox.information(self, 'Message', 'Saved successfully!')
else:
pass
def centrality_menu_KatzCentrality_action(self):
dialog = Dialog_CentralityDisplayResult(self)
dialog.setWindowTitle('katz centrality')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Ok).setText('Save')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Cancel).setText('Close')
col1 = []
col2 = []
dialog.column1 = col1
dialog.column2 = col2
global GLOBAL_NETWORK
for x, y in nx.katz.katz_centrality(GLOBAL_NETWORK).iteritems():
col1.append(x)
col2.append(y)
dialog.add_contents('NodeID', 'Katz Centrality', col1, col2)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file to', './results/centrality/', "Text Files (*.txt)")
if fname:
with open(fname, 'w') as fp:
print >> fp, 'NodeId KatzCentrality'
for i in range(len(col1)):
print >> fp, '%s %f'%(col1[i], col2[i])
QtGui.QMessageBox.information(self, 'Message', 'Saved successfully!')
else:
pass
def centrality_menu_LoadCentrality_action(self):
dialog = Dialog_CentralityDisplayResult(self)
dialog.setWindowTitle('load centrality')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Ok).setText('Save')
dialog.buttonBox.button(QtGui.QDialogButtonBox.Cancel).setText('Close')
col1 = []
col2 = []
dialog.column1 = col1
dialog.column2 = col2
global GLOBAL_NETWORK
for x, y in nx.load_centrality(GLOBAL_NETWORK).iteritems():
col1.append(x)
col2.append(y)
dialog.add_contents('NodeID', 'Load Centrality', col1, col2)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file to', './results/centrality/', "Text Files (*.txt)")
if fname:
with open(fname, 'w') as fp:
print >> fp, 'NodeId LoadCentrality'
for i in range(len(col1)):
print >> fp, '%s %f'%(col1[i], col2[i])
QtGui.QMessageBox.information(self, 'Message', 'Saved successfully!')
else:
pass
def controllability_menu_StructralControllability_action(self):
global GLOBAL_NETWORK
if not GLOBAL_NETWORK:
QtGui.QMessageBox.critical(None, 'Error', 'No network found.\nPlease generate or open a network first.')
return
if not nx.is_directed(GLOBAL_NETWORK):
QtGui.QMessageBox.warning(None, 'Exception', 'This analysis only works on <i><b>directed</b></i> networks.\nPlease load or generate a directed network.')
return
driverNodes = LBSC.control_nodes(GLOBAL_NETWORK)
dialog = Dialog_DriverNodes(self)
dialog.setWindowTitle('driver nodes list')
dialog.add_contents(driverNodes)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file to', './results/driver_nodes/', "Text Files (*.txt)")
if fname:
with open(fname, 'w') as fp:
print >> fp, '%d driver nodes'%dialog.number_of_drivers
for x in driverNodes:
print >> fp, '%s'%(x)
QtGui.QMessageBox.information(self, 'Message', 'Saved successfully!')
else:
pass
def controllability_menu_ExactControllability_action(self):
global GLOBAL_NETWORK
if not GLOBAL_NETWORK:
QtGui.QMessageBox.critical(None, 'Error', 'No network found.\nPlease generate or open a network first.')
return
(nDrivers, DriverNodes) = EC.get_driver_nodes(GLOBAL_NETWORK)
dialog = Dialog_DriverNodes(self)
dialog.setWindowTitle('driver nodes list')
dialog.add_contents(DriverNodes)
result = dialog.exec_()
if result == QtGui.QDialog.Accepted:
fname = QtGui.QFileDialog.getSaveFileName(self, 'Save file to', './results/driver_nodes/', "Text Files (*.txt)")
if fname:
with open(fname, 'w') as fp:
print >> fp, '%d driver nodes'%dialog.number_of_drivers
for x in DriverNodes:
print >> fp, '%s'%(x)
QtGui.QMessageBox.information(self, 'Message', 'Saved successfully!')
else:
pass
def controllability_menu_NumberOfDrivers_action(self):
global GLOBAL_NETWORK
if not GLOBAL_NETWORK:
QtGui.QMessageBox.critical(None, 'Error', 'No network found.\nPlease generate or open a network first.')
return
ND, ND_labmda = EC.get_number_of_driver_nodes(GLOBAL_NETWORK)
QtGui.QMessageBox.about(self, 'Results', 'Number of driver nodes: %d'%ND)
def controllability_menu_FasterExactControllability_action(self):
global GLOBAL_NETWORK
if not GLOBAL_NETWORK:
QtGui.QMessageBox.critical(None, 'Error', 'No network found.\nPlease generate or open a network first.')
return
ND = EC.get_number_of_drivers_fast_rank(GLOBAL_NETWORK)
QtGui.QMessageBox.about(self, 'Results', 'Number of driver nodes: %d'%ND)
def robustness_menu_RondomAttack_action(self):
pass
def robustness_menu_RecaculatedMaxDegree_action(self):
pass
def robustness_menu_RecaculatedMaxBetweenness_action(self):
pass
def robustness_menu_CascadeBasedonNodeCapacity_action(self):
pass
##################################################################
#
# Draw Actions (Layout)
#
##################################################################
def draw_menu_circleLayout_action(self):
global GLOBAL_NETWORK
pos = nx.layout.circular_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def draw_menu_randomLayout_action(self):
global GLOBAL_NETWORK
pos = nx.layout.random_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def draw_menu_shellLayout_action(self):
global GLOBAL_NETWORK
pos = nx.layout.shell_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def draw_menu_springLayout_action(self):
global GLOBAL_NETWORK
pos = nx.layout.spring_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def draw_menu_spectralLayout_action(self):
global GLOBAL_NETWORK
pos = nx.layout.spectral_layout(GLOBAL_NETWORK)
self.main_widget.update_centralWidget(GLOBAL_NETWORK, pos)
def about_menu_About_action(self):
QtGui.QMessageBox.about(self, "About",
"""
Network Controllability Analysis Software
Copyright (C) 2015 Xin-Feng Li (silfer.lee@gmail.com)
This program is distributed under BSD License
""")
def about_menu_aboutNetwork_action(self):
global GLOBAL_NETWORK
s = nx.info(GLOBAL_NETWORK)
QtGui.QMessageBox.about(self, "Basic Network Info", s)
if __name__ == "__main__":
qApp = QtGui.QApplication(sys.argv)
w = MainWindow()
w.show()
sys.exit(qApp.exec_())
|
|
# Copyright (C) 2014 Taylor Turpen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from boto.mturk.connection import MTurkConnection, MTurkRequestError
from boto.mturk.question import ExternalQuestion, QuestionForm, Overview, HTMLQuestion, QuestionContent, FormattedContent, FreeTextAnswer, AnswerSpecification, Question, Flash, SelectionAnswer
from boto.mturk import qualification
from apps.mturk.exceptions import IncorrectTextFieldCount
from django.template import RequestContext, loader, Context
from django.conf import settings
from datetime import timedelta
import os
import logging
import pdb
class AssignmentHandler():
def __init__(self,connection):
self.conn = connection
self.logger = logging.getLogger("transcription_engine.mechanicalturk_handler")
def approve_assignment(self,assignment_id,feedback=None):
self.conn.approve_assignment(assignment_id, feedback)
def reject_assignment(self,assignment_id,feedback=None):
return self.conn.reject_assignment(assignment_id, feedback)
def get_assignment(self,assignment_id,response_groups=None):
return self.conn.get_assignment(assignment_id, response_groups)
def get_submitted_transcriptions(self,hit_id,audio_clip_id):
"""Given the hit_id and the audio clip id, find all transcriptions
in the submitted assignments"""
allassignments = self.conn.get_assignments(hit_id)
response = []
for assignment in allassignments:
for result_set in assignment.answers:
for question_form_answer in result_set:
if question_form_answer.qid == audio_clip_id:
if len(question_form_answer.fields) != 1:
raise IncorrectTextFieldCount
response.append(question_form_answer.fields[0])
self.logger.info("Retrieved transcription(%s) for audio clip(%s)"%(response,audio_clip_id))
return response
def get_all_submitted_transcriptions(self,hit_id):
"""Given the hit_id find all transcriptions
in the submitted assignments."""
allassignments = self.conn.get_assignments(hit_id)
response = []
assignment_ids = []
for assignment in allassignments:
assignment_ids.append(assignment.AssignmentId)
response.extend(self.get_assignment_submitted_transcriptions(assignment))
return response
def get_assignment_submitted_transcriptions(self,assignment):
"""Given the assignment return all the transcriptions."""
response = []
for result_set in assignment.answers:
for question_form_answer in result_set:
if len(question_form_answer.fields) != 1:
raise IncorrectTextFieldCount
response.append({"audio_clip_id": question_form_answer.qid,
"assignment_id": assignment.AssignmentId,
"transcription": question_form_answer.fields[0],
"worker_id" : assignment.WorkerId,
})
self.logger.info("Retrieved transcriptions for assignment(%s)"%(assignment))
return response
def get_assignment_submitted_text_dict(self,assignment,id_tag,text_tag):
"""Given the assignment return all the text_tags."""
response = []
for result_set in assignment.answers:
for question_form_answer in result_set:
if len(question_form_answer.fields) != 1:
raise IncorrectTextFieldCount
response.append({id_tag: question_form_answer.qid,
"assignment_id" : assignment.AssignmentId,
text_tag : question_form_answer.fields[0],
"worker_id" : assignment.WorkerId,
})
self.logger.info("Retrieved %s for assignment(%s)"%(text_tag,assignment))
return response
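# Illustrative usage sketch (commented out; the credentials, host, and HIT id
# below are placeholders, not values used elsewhere in this project):
#
#     conn = MTurkConnection(aws_access_key_id="...",
#                            aws_secret_access_key="...",
#                            host="mechanicalturk.sandbox.amazonaws.com")
#     handler = AssignmentHandler(conn)
#     for row in handler.get_all_submitted_transcriptions("EXAMPLE_HIT_ID"):
#         print row["audio_clip_id"], row["transcription"]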
class TurkerHandler():
def __init__(self,connection):
self.conn = connection
def block_worker(self,worker_id,reason):
self.conn.block_worker(worker_id, reason)
def un_block_worker(self,worker_id,reason):
self.conn.un_block_worker(worker_id, reason)
class HitHandler():
DEFAULT_DURATION = 60*10
DEFAULT_REWARD = 0.02
DEFAULT_MAX_ASSIGNMENTS = 3
def __init__(self,connection):
self.vocaroo_url = "https://vocaroo.com/?minimal"
self.conn = connection
self.templates = {}
self.html_tags = {"audio_url" : "${audiourl}",
"title" : "${title}",
"description" : "${description}",
"audioclip_id" : "${audioclipid}",
"prompt" : "${prompt}",
"underscored_prompt": "${underscored_prompt}",
"prompt_id": "${prompt_id}",
"disable_script" : "${disable_script}",
"audio_id" : "${audio_id}",
"flash_url": "${flash_url}"}
# #Transcription html templates
# self.transcription_head = open(os.path.join(template_dir,"transcriptionhead.html")).read()
# self.transcription_tail = open(os.path.join(template_dir,"transcriptiontail.html")).read()
# self.transcription_question = open(os.path.join(template_dir,"transcriptionquestion.html")).read()
#
# #Elicitation html templates
# self.elicitation_head = open(os.path.join(template_dir,"nonflash_elicitationhead.html")).read()
# self.elicitation_tail = open(os.path.join(template_dir,"elicitationtail.html")).read()
# self.elicitation_question = open(os.path.join(template_dir,"elicitationquestion.html")).read()
# self.flash_xml = open(os.path.join(template_dir,"flashApplication.xml")).read()
# self.templates["transcription"] = open(os.path.join(template_dir,"vanilla_transcription.html")).read()
#
self.mic_selections = ["Laptop","Headset","Cellphone","Other"]
self.disable_input_script = 'document.getElementById("${input_id}").disabled = true;'
def dispose_HIT(self,hit_id):
self.conn.dispose_hit(hit_id)
def get_HITs(self):
return self.conn.get_all_hits()
def get_HIT(self,hit_id,response_groups=None):
return self.conn.get_hit(hit_id, response_groups)
def estimate_html_HIT_cost(self,prompts,reward_per_clip,
max_assignments):
return reward_per_clip * len(prompts) * max_assignments
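# Worked example with illustrative numbers: 10 prompts at the default reward of
# $0.02 per clip and the default 3 assignments comes to 0.02 * 10 * 3 = $0.60;
# note the estimate does not include Amazon's MTurk commission.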
def make_html_elicitation_multiprompt_HIT(self,promptset_indices,hit_title,prompt_title,keywords,
hit_description,
duration=DEFAULT_DURATION,
reward_per_clip=DEFAULT_REWARD,
max_assignments=DEFAULT_MAX_ASSIGNMENTS,
template='elicitation/cmumultipromptelicitationhit.html',
lifetime=timedelta(7)):
overview = Overview()
overview.append_field("Title", "Record yourself speaking the words in the prompt.")
descriptions = ["The following prompts are in English. FLUENT English only.",
"Click the prompt to record your voice (Redirects to recording Page).",
"Follow the directions on that page.",
"You MUST record yourself saying the prompts TWICE.",
"Copy and paste the EACH URL in a SEPARATE box below the prompt.",
"Each prompt must have two DIFFERENT recordings.",
"You must NOT copy and paste the same URL twice for each recording."
]
keywords = "audio, recording, elicitation, English"
template = loader.get_template(template)
context = Context({"descriptions": descriptions,
"promptset_indices": promptset_indices,
})
html = template.render(context)
html_question = HTMLQuestion(html,800)
open(settings.HIT_HTML_FILE,"w").write(html)
quals = qualification.Qualifications()
quals.add(qualification.LocaleRequirement("EqualTo","US"))
#reward calculation
reward = reward_per_clip*len(promptset_indices)
#pdb.set_trace()
try:
return self.conn.create_hit(title=hit_title,
question=html_question,
max_assignments=max_assignments,
description=hit_description,
keywords=keywords,
duration = duration,
qualifications = quals,
reward = reward,
lifetime = lifetime)
except MTurkRequestError as e:
if e.reason != "OK":
raise
else:
print(e)
return False
return False
def make_html_elicitation_HIT(self,prompt_pairs,hit_title,prompt_title,keywords,
hit_description,
duration=DEFAULT_DURATION,
reward_per_clip=DEFAULT_REWARD,
max_assignments=DEFAULT_MAX_ASSIGNMENTS,
template='common/csaesrhit.html'):
overview = Overview()
overview.append_field("Title", "Record yourself speaking the words in the prompt.")
descriptions = ["The following prompts are in English.",
"Click the prompt to record your voice (Redirects to recording Page).",
"Follow the directions on that page.",
"You MUST record yourself saying the prompt TWICE.",
"Copy and paste the EACH URL a SEPARATE box below the prompt.",
"Each prompt must have two DIFFERENT recordings.",
"You must NOT copy and paste the same URL twice for each recording."
]
keywords = "audio, recording, elicitation, English"
template = loader.get_template(template)
prompt_ids = [prompt[0] for prompt in prompt_pairs]
prompt_ids_words = [(prompt[0]," ".join(prompt[1]),"_".join(prompt[1])) for prompt in prompt_pairs]
context = Context({"descriptions": descriptions,
"prompt_ids" : prompt_ids,
"prompt_pairs": prompt_ids_words,
})
html = template.render(context)
html_question = HTMLQuestion(html,800)
open(settings.HIT_HTML_FILE,"w").write(html)
quals = qualification.Qualifications()
quals.add(qualification.LocaleRequirement("EqualTo","US"))
#reward calculation
reward = reward_per_clip*len(prompt_pairs)
#pdb.set_trace()
try:
return self.conn.create_hit(title=hit_title,
question=html_question,
max_assignments=max_assignments,
description=hit_description,
keywords=keywords,
duration = duration,
qualifications = quals,
reward = reward)
except MTurkRequestError as e:
if e.reason != "OK":
raise
else:
print(e)
return False
return False
def make_html_spanish_transcription_HIT(self,
audio_clip_tups,
hit_title,
question_title,
hit_description="Type the words you hear.",
keywords="audio, transcription, English",
duration=DEFAULT_DURATION,
reward_per_clip=DEFAULT_REWARD,
max_assignments=DEFAULT_MAX_ASSIGNMENTS,
template='elicitation/ldchub4transcriptionhit.html',
descriptions=["Listen to the clip and write the words that are said."],
lifetime=timedelta(7)):
overview = Overview()
overview.append_field("Title", hit_title)
count = 0
audioset_url_ids = []
for acurl, acid in audio_clip_tups:
audioset_url_ids.append((acurl,acid,count))
count += 1
template = loader.get_template(template)
context = Context({"descriptions": descriptions,
"audioset_url_ids": audioset_url_ids,
})
html = template.render(context)
html_question = HTMLQuestion(html,800)
open(settings.HIT_HTML_FILE,"w").write(html)
quals = qualification.Qualifications()
quals.add(qualification.LocaleRequirement("EqualTo","US"))
#reward calculation
reward = reward_per_clip*len(audioset_url_ids)
try:
response = self.conn.create_hit(title=hit_title,
question=html_question,
max_assignments=max_assignments,
description=hit_description,
keywords=keywords,
duration = duration,
reward = reward,
lifetime=lifetime)
return response
except MTurkRequestError as e:
if e.reason != "OK":
raise
else:
print(e)
return False
return False
def make_html_transcription_HIT(self,audio_clip_urls,hit_title,question_title,description,keywords,
duration=DEFAULT_DURATION,reward_per_clip=DEFAULT_REWARD,max_assignments=DEFAULT_MAX_ASSIGNMENTS):
overview = Overview()
overview.append_field("Title", "Type the words in the following audio clip in order.")
descriptions = ["The following audio clips are in English.",
"Transcribe the audio clip by typing the words that the person \
says in order.",
"Do not use abbreviations: 'street' and NOT 'st.'",
"Write numbers long-form, as in: 'twenty fifth' NOT '25th'.",
"Write letters (see example).",
"Punctuation does not matter.",
"Hotkeys: press Tab to play the next clip."]
keywords = "audio, transcription, English"
html_head = self.transcription_head.replace(self.html_tags["title"],hit_title)
for description in descriptions:
html_head = html_head.replace(self.html_tags["description"],
"<li>"+description+"</li>\n"+self.html_tags["description"])
count = 0
questions = []
inputs = []
for acurl,acid in audio_clip_urls:
input_id = str(acid)
question = self.transcription_question.replace(self.html_tags["audio_url"],acurl)
question = question.replace(self.html_tags["audioclip_id"],str(acid))
question = question.replace("${count}",input_id)
count += 1
questions.append(question)
inputs.append(input_id)
for input_id in inputs:
script = self.disable_input_script.replace("${input_id}",input_id)
html_head = html_head.replace(self.html_tags["disable_script"],script+\
"\n"+self.html_tags["disable_script"])
if(self.html_tags["audio_id"]) in html_head:
html_head = html_head.replace(self.html_tags["audio_id"],"'"+\
input_id+"'"+","+self.html_tags["audio_id"])
html_head = html_head.replace(self.html_tags["disable_script"],"")
html_head = html_head.replace(","+self.html_tags["audio_id"],"")
html_head = html_head.replace(self.html_tags["description"],"")
html = html_head
for question in questions:
html += question
count += 1
html += self.transcription_tail
html_question = HTMLQuestion(html,800)
#reward calculation
reward = reward_per_clip*len(audio_clip_urls)
try:
return self.conn.create_hit(title=hit_title,
question=html_question,
max_assignments=max_assignments,
description=description,
keywords=keywords,
duration = duration,
reward = reward)
except MTurkRequestError as e:
if e.reason != "OK":
raise
else:
print(e)
return False
return False
def make_question_form_HIT(self,audio_clip_urls,hit_title,question_title,description,keywords,
duration=DEFAULT_DURATION,reward=DEFAULT_REWARD):
overview = Overview()
overview.append_field("Title",hit_title)
#overview.append(FormattedContent('<a target = "_blank" href="url">hyperlink</a>'))
question_form = QuestionForm()
question_form.append(overview)
for ac in audio_clip_urls:
audio_html = self.transcription_question.replace(self.html_tags["audio_url"], ac)
qc = QuestionContent()
qc.append_field("Title",question_title)
qc.append(FormattedContent(audio_html))
fta = FreeTextAnswer()
q = Question(identifier="transcription",
content=qc,
answer_spec=AnswerSpecification(fta))
question_form.append(q)
try:
response = self.conn.create_hit(questions=question_form,
max_assignments=1,
title=hit_title,
description=description,
keywords=keywords,
duration=duration,
reward=reward)
except MTurkRequestError as e:
if e.reason != "OK":
raise
return question_form, response
def fal_elicitations():
aws_id = os.environ['AWS_ACCESS_KEY_ID']
aws_k = os.environ['AWS_ACCESS_KEY']
try:
conn = MTurkConnection(aws_access_key_id=aws_id,\
aws_secret_access_key=aws_k,\
host=settings.MTURK_HOST)
print "Connection HOST: %s" % settings.MTURK_HOST
except Exception as e:
print(e)
hh = HitHandler(conn)
hit_title = "Sequential Audio Elicitation"
question_title = "Speak and Record your Voice"
keywords = "audio, elicitation, speech, recording"
hit_description = "Speak English prompts and record your voice."
max_assignments = 15
reward_per_clip = .62
duration = 60*50
one_month = timedelta(40)
raise Exception("Safety guard: remove this line when ready to submit HITs")
#Make sure to set the sequential_template_number below
sequential_template_number = None
response = hh.make_html_elicitation_multiprompt_HIT([], hit_title, question_title,
keywords,
duration=duration,
hit_description=hit_description,
max_assignments=max_assignments,
reward_per_clip=reward_per_clip,
lifetime=one_month)
if response and len(response) > 0:
r = response[0]
print("HITId: %s"%r.HITId)
def main():
fal_elicitations()
pass
if __name__=="__main__":
main()
|
|
from importlib import import_module
import os
import sys
from django.apps import apps
from django.db.migrations.recorder import MigrationRecorder
from django.db.migrations.graph import MigrationGraph
from django.utils import six
from django.conf import settings
MIGRATIONS_MODULE_NAME = 'migrations'
class MigrationLoader(object):
"""
Loads migration files from disk, and their status from the database.
Migration files are expected to live in the "migrations" directory of
an app. Their names are entirely unimportant from a code perspective,
but will probably follow the 1234_name.py convention.
On initialisation, this class will scan those directories, and open and
read the python files, looking for a class called Migration, which should
inherit from django.db.migrations.Migration. See
django.db.migrations.migration for what that looks like.
Some migrations will be marked as "replacing" another set of migrations.
These are loaded into a separate set of migrations away from the main ones.
If all the migrations they replace are either unapplied or missing from
disk, then they are injected into the main set, replacing the named migrations.
Any dependency pointers to the replaced migrations are re-pointed to the
new migration.
This does mean that this class MUST also talk to the database as well as
to disk, but this is probably fine. We're already not just operating
in memory.
"""
def __init__(self, connection, load=True):
self.connection = connection
self.disk_migrations = None
self.applied_migrations = None
if load:
self.build_graph()
@classmethod
def migrations_module(cls, app_label):
if app_label in settings.MIGRATION_MODULES:
return settings.MIGRATION_MODULES[app_label]
else:
app_package_name = apps.get_app_config(app_label).name
return '%s.%s' % (app_package_name, MIGRATIONS_MODULE_NAME)
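# migrations_module() lets a project relocate an app's migrations with the
# standard MIGRATION_MODULES setting, e.g. (hypothetical app and package names):
#
#     MIGRATION_MODULES = {"blog": "blog.db_migrations"}
#
# With no entry, the default "<app package>.migrations" module is used.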
def load_disk(self):
"""
Loads the migrations from all INSTALLED_APPS from disk.
"""
self.disk_migrations = {}
self.unmigrated_apps = set()
self.migrated_apps = set()
for app_config in apps.get_app_configs():
if app_config.models_module is None:
continue
# Get the migrations module directory
module_name = self.migrations_module(app_config.label)
was_loaded = module_name in sys.modules
try:
module = import_module(module_name)
except ImportError as e:
# I hate doing this, but I don't want to squash other import errors.
# Might be better to try a directory check directly.
if "No module named" in str(e) and MIGRATIONS_MODULE_NAME in str(e):
self.unmigrated_apps.add(app_config.label)
continue
raise
else:
# PY3 will happily import empty dirs as namespaces.
if not hasattr(module, '__file__'):
continue
# Module is not a package (e.g. migrations.py).
if not hasattr(module, '__path__'):
continue
# Force a reload if it's already loaded (tests need this)
if was_loaded:
six.moves.reload_module(module)
self.migrated_apps.add(app_config.label)
directory = os.path.dirname(module.__file__)
# Scan for .py[c|o] files
migration_names = set()
for name in os.listdir(directory):
if name.endswith(".py") or name.endswith(".pyc") or name.endswith(".pyo"):
import_name = name.rsplit(".", 1)[0]
if import_name[0] not in "_.~":
migration_names.add(import_name)
# Load them
south_style_migrations = False
for migration_name in migration_names:
try:
migration_module = import_module("%s.%s" % (module_name, migration_name))
except ImportError as e:
# Ignore South import errors, as we're triggering them
if "south" in str(e).lower():
south_style_migrations = True
break
raise
if not hasattr(migration_module, "Migration"):
raise BadMigrationError("Migration %s in app %s has no Migration class" % (migration_name, app_config.label))
# Ignore South-style migrations
if hasattr(migration_module.Migration, "forwards"):
south_style_migrations = True
break
self.disk_migrations[app_config.label, migration_name] = migration_module.Migration(migration_name, app_config.label)
if south_style_migrations:
self.unmigrated_apps.add(app_config.label)
def get_migration(self, app_label, name_prefix):
"Gets the migration exactly named, or raises KeyError"
return self.graph.nodes[app_label, name_prefix]
def get_migration_by_prefix(self, app_label, name_prefix):
"Returns the migration(s) which match the given app label and name _prefix_"
# Do the search
results = []
for l, n in self.disk_migrations:
if l == app_label and n.startswith(name_prefix):
results.append((l, n))
if len(results) > 1:
raise AmbiguityError("There is more than one migration for '%s' with the prefix '%s'" % (app_label, name_prefix))
elif len(results) == 0:
raise KeyError("There no migrations for '%s' with the prefix '%s'" % (app_label, name_prefix))
else:
return self.disk_migrations[results[0]]
def build_graph(self):
"""
Builds a migration dependency graph using both the disk and database.
You'll need to rebuild the graph if you apply migrations. This isn't
usually a problem as generally migration stuff runs in a one-shot process.
"""
# Load disk data
self.load_disk()
# Load database data
recorder = MigrationRecorder(self.connection)
self.applied_migrations = recorder.applied_migrations()
# Do a first pass to separate out replacing and non-replacing migrations
normal = {}
replacing = {}
for key, migration in self.disk_migrations.items():
if migration.replaces:
replacing[key] = migration
else:
normal[key] = migration
# Calculate reverse dependencies - i.e., for each migration, what depends on it?
# This is just for dependency re-pointing when applying replacements,
# so we ignore run_before here.
reverse_dependencies = {}
for key, migration in normal.items():
for parent in migration.dependencies:
reverse_dependencies.setdefault(parent, set()).add(key)
# Carry out replacements if we can - that is, if all replaced migrations
# are either unapplied or missing.
for key, migration in replacing.items():
# Ensure this replacement migration is not in applied_migrations
self.applied_migrations.discard(key)
# Do the check. We can replace if all our replace targets are
# applied, or if all of them are unapplied.
applied_statuses = [(target in self.applied_migrations) for target in migration.replaces]
can_replace = all(applied_statuses) or (not any(applied_statuses))
if not can_replace:
continue
# Alright, time to replace. Step through the replaced migrations
# and remove, repointing dependencies if needs be.
for replaced in migration.replaces:
if replaced in normal:
# We don't care if the replaced migration doesn't exist;
# the usage pattern here is to delete things after a while.
del normal[replaced]
for child_key in reverse_dependencies.get(replaced, set()):
if child_key in migration.replaces:
continue
normal[child_key].dependencies.remove(replaced)
normal[child_key].dependencies.append(key)
normal[key] = migration
# Mark the replacement as applied if all its replaced ones are
if all(applied_statuses):
self.applied_migrations.add(key)
# Finally, make a graph and load everything into it
self.graph = MigrationGraph()
for key, migration in normal.items():
self.graph.add_node(key, migration)
for key, migration in normal.items():
for parent in migration.dependencies:
self.graph.add_dependency(key, parent)
def detect_conflicts(self):
"""
Looks through the loaded graph and detects any conflicts - apps
with more than one leaf migration. Returns a dict of the app labels
that conflict with the migration names that conflict.
"""
seen_apps = {}
conflicting_apps = set()
for app_label, migration_name in self.graph.leaf_nodes():
if app_label in seen_apps:
conflicting_apps.add(app_label)
seen_apps.setdefault(app_label, set()).add(migration_name)
return dict((app_label, seen_apps[app_label]) for app_label in conflicting_apps)
class BadMigrationError(Exception):
"""
Raised when there's a bad migration (unreadable/bad format/etc.)
"""
pass
class AmbiguityError(Exception):
"""
Raised when more than one migration matches a name prefix
"""
pass
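# A minimal sketch (hypothetical "shop" app, not part of this module) of the
# "replaces" mechanism handled by build_graph() above: a squashed migration file
# that the loader folds into the graph when its replace targets are either all
# applied or all still unapplied.
#
#   # shop/migrations/0003_squashed_0001_0002.py
#   from django.db import migrations, models
#
#   class Migration(migrations.Migration):
#       replaces = [
#           ('shop', '0001_initial'),
#           ('shop', '0002_add_price'),
#       ]
#       dependencies = []
#       operations = [
#           migrations.CreateModel(
#               name='Product',
#               fields=[
#                   ('id', models.AutoField(primary_key=True)),
#                   ('name', models.CharField(max_length=100)),
#                   ('price', models.DecimalField(max_digits=8, decimal_places=2)),
#               ],
#           ),
#       ]
#
# Loading it is then just (given a database connection object):
#
#   loader = MigrationLoader(connection)   # builds the graph immediately
#   loader.graph.leaf_nodes()              # e.g. [('shop', '0003_squashed_0001_0002')]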
|
|
"""
Form classes
"""
from __future__ import absolute_import, unicode_literals
import copy
import warnings
from django.core.exceptions import ValidationError
from django.forms.fields import Field, FileField
from django.forms.util import flatatt, ErrorDict, ErrorList
from django.forms.widgets import Media, media_property, TextInput, Textarea
from django.utils.datastructures import SortedDict
from django.utils.html import conditional_escape, format_html
from django.utils.encoding import smart_text, force_text, python_2_unicode_compatible
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.utils import six
__all__ = ('BaseForm', 'Form')
NON_FIELD_ERRORS = '__all__'
def pretty_name(name):
"""Converts 'first_name' to 'First name'"""
if not name:
return ''
return name.replace('_', ' ').capitalize()
def get_declared_fields(bases, attrs, with_base_fields=True):
"""
Create a list of form field instances from the passed in 'attrs', plus any
similar fields on the base classes (in 'bases'). This is used by both the
Form and ModelForm metaclasses.
If 'with_base_fields' is True, all fields from the bases are used.
Otherwise, only fields in the 'declared_fields' attribute on the bases are
used. The distinction is useful in ModelForm subclassing.
Also integrates any additional media definitions.
"""
fields = [(field_name, attrs.pop(field_name)) for field_name, obj in list(six.iteritems(attrs)) if isinstance(obj, Field)]
fields.sort(key=lambda x: x[1].creation_counter)
# If this class is subclassing another Form, add that Form's fields.
# Note that we loop over the bases in *reverse*. This is necessary in
# order to preserve the correct order of fields.
if with_base_fields:
for base in bases[::-1]:
if hasattr(base, 'base_fields'):
fields = list(six.iteritems(base.base_fields)) + fields
else:
for base in bases[::-1]:
if hasattr(base, 'declared_fields'):
fields = list(six.iteritems(base.declared_fields)) + fields
return SortedDict(fields)
class DeclarativeFieldsMetaclass(type):
"""
Metaclass that converts Field attributes to a dictionary called
'base_fields', taking into account parent class 'base_fields' as well.
"""
def __new__(cls, name, bases, attrs):
attrs['base_fields'] = get_declared_fields(bases, attrs)
new_class = super(DeclarativeFieldsMetaclass,
cls).__new__(cls, name, bases, attrs)
if 'media' not in attrs:
new_class.media = media_property(new_class)
return new_class
@python_2_unicode_compatible
class BaseForm(object):
# This is the main implementation of all the Form logic. Note that this
# class is different than Form. See the comments by the Form class for more
# information. Any improvements to the form API should be made to *this*
# class, not to the Form class.
def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
initial=None, error_class=ErrorList, label_suffix=None,
empty_permitted=False):
self.is_bound = data is not None or files is not None
self.data = data or {}
self.files = files or {}
self.auto_id = auto_id
self.prefix = prefix
self.initial = initial or {}
self.error_class = error_class
# Translators: This is the default suffix added to form field labels
self.label_suffix = label_suffix if label_suffix is not None else _(':')
self.empty_permitted = empty_permitted
self._errors = None # Stores the errors after clean() has been called.
self._changed_data = None
# The base_fields class attribute is the *class-wide* definition of
# fields. Because a particular *instance* of the class might want to
# alter self.fields, we create self.fields here by copying base_fields.
# Instances should always modify self.fields; they should not modify
# self.base_fields.
self.fields = copy.deepcopy(self.base_fields)
def __str__(self):
return self.as_table()
def __iter__(self):
for name in self.fields:
yield self[name]
def __getitem__(self, name):
"Returns a BoundField with the given name."
try:
field = self.fields[name]
except KeyError:
raise KeyError('Key %r not found in Form' % name)
return BoundField(self, field, name)
@property
def errors(self):
"Returns an ErrorDict for the data provided for the form"
if self._errors is None:
self.full_clean()
return self._errors
def is_valid(self):
"""
Returns True if the form has no errors. Otherwise, False. If errors are
being ignored, returns False.
"""
return self.is_bound and not bool(self.errors)
def add_prefix(self, field_name):
"""
Returns the field name with a prefix appended, if this Form has a
prefix set.
Subclasses may wish to override.
"""
return '%s-%s' % (self.prefix, field_name) if self.prefix else field_name
def add_initial_prefix(self, field_name):
"""
        Add an 'initial' prefix for checking dynamic initial values
"""
return 'initial-%s' % self.add_prefix(field_name)
def _html_output(self, normal_row, error_row, row_ender, help_text_html, errors_on_separate_row):
"Helper function for outputting HTML. Used by as_table(), as_ul(), as_p()."
top_errors = self.non_field_errors() # Errors that should be displayed above all fields.
output, hidden_fields = [], []
for name, field in self.fields.items():
html_class_attr = ''
bf = self[name]
# Escape and cache in local variable.
bf_errors = self.error_class([conditional_escape(error) for error in bf.errors])
if bf.is_hidden:
if bf_errors:
top_errors.extend(
[_('(Hidden field %(name)s) %(error)s') % {'name': name, 'error': force_text(e)}
for e in bf_errors])
hidden_fields.append(six.text_type(bf))
else:
                # Create a 'class="..."' attribute if the row should have any
# CSS classes applied.
css_classes = bf.css_classes()
if css_classes:
html_class_attr = ' class="%s"' % css_classes
if errors_on_separate_row and bf_errors:
output.append(error_row % force_text(bf_errors))
if bf.label:
label = conditional_escape(force_text(bf.label))
label = bf.label_tag(label) or ''
else:
label = ''
if field.help_text:
help_text = help_text_html % force_text(field.help_text)
else:
help_text = ''
output.append(normal_row % {
'errors': force_text(bf_errors),
'label': force_text(label),
'field': six.text_type(bf),
'help_text': help_text,
'html_class_attr': html_class_attr
})
if top_errors:
output.insert(0, error_row % force_text(top_errors))
if hidden_fields: # Insert any hidden fields in the last row.
str_hidden = ''.join(hidden_fields)
if output:
last_row = output[-1]
# Chop off the trailing row_ender (e.g. '</td></tr>') and
# insert the hidden fields.
if not last_row.endswith(row_ender):
# This can happen in the as_p() case (and possibly others
# that users write): if there are only top errors, we may
# not be able to conscript the last row for our purposes,
# so insert a new, empty row.
last_row = (normal_row % {'errors': '', 'label': '',
'field': '', 'help_text':'',
'html_class_attr': html_class_attr})
output.append(last_row)
output[-1] = last_row[:-len(row_ender)] + str_hidden + row_ender
else:
# If there aren't any rows in the output, just append the
# hidden fields.
output.append(str_hidden)
return mark_safe('\n'.join(output))
def as_table(self):
"Returns this form rendered as HTML <tr>s -- excluding the <table></table>."
return self._html_output(
normal_row = '<tr%(html_class_attr)s><th>%(label)s</th><td>%(errors)s%(field)s%(help_text)s</td></tr>',
error_row = '<tr><td colspan="2">%s</td></tr>',
row_ender = '</td></tr>',
help_text_html = '<br /><span class="helptext">%s</span>',
errors_on_separate_row = False)
def as_ul(self):
"Returns this form rendered as HTML <li>s -- excluding the <ul></ul>."
return self._html_output(
normal_row = '<li%(html_class_attr)s>%(errors)s%(label)s %(field)s%(help_text)s</li>',
error_row = '<li>%s</li>',
row_ender = '</li>',
help_text_html = ' <span class="helptext">%s</span>',
errors_on_separate_row = False)
def as_p(self):
"Returns this form rendered as HTML <p>s."
return self._html_output(
normal_row = '<p%(html_class_attr)s>%(label)s %(field)s%(help_text)s</p>',
error_row = '%s',
row_ender = '</p>',
help_text_html = ' <span class="helptext">%s</span>',
errors_on_separate_row = True)
def non_field_errors(self):
"""
Returns an ErrorList of errors that aren't associated with a particular
field -- i.e., from Form.clean(). Returns an empty ErrorList if there
are none.
"""
return self.errors.get(NON_FIELD_ERRORS, self.error_class())
def _raw_value(self, fieldname):
"""
Returns the raw_value for a particular field name. This is just a
convenient wrapper around widget.value_from_datadict.
"""
field = self.fields[fieldname]
prefix = self.add_prefix(fieldname)
return field.widget.value_from_datadict(self.data, self.files, prefix)
def full_clean(self):
"""
Cleans all of self.data and populates self._errors and
self.cleaned_data.
"""
self._errors = ErrorDict()
if not self.is_bound: # Stop further processing.
return
self.cleaned_data = {}
# If the form is permitted to be empty, and none of the form data has
# changed from the initial data, short circuit any validation.
if self.empty_permitted and not self.has_changed():
return
self._clean_fields()
self._clean_form()
self._post_clean()
def _clean_fields(self):
for name, field in self.fields.items():
# value_from_datadict() gets the data from the data dictionaries.
# Each widget type knows how to retrieve its own data, because some
# widgets split data over several HTML fields.
value = field.widget.value_from_datadict(self.data, self.files, self.add_prefix(name))
try:
if isinstance(field, FileField):
initial = self.initial.get(name, field.initial)
value = field.clean(value, initial)
else:
value = field.clean(value)
self.cleaned_data[name] = value
if hasattr(self, 'clean_%s' % name):
value = getattr(self, 'clean_%s' % name)()
self.cleaned_data[name] = value
except ValidationError as e:
self._errors[name] = self.error_class(e.messages)
if name in self.cleaned_data:
del self.cleaned_data[name]
def _clean_form(self):
try:
self.cleaned_data = self.clean()
except ValidationError as e:
self._errors[NON_FIELD_ERRORS] = self.error_class(e.messages)
def _post_clean(self):
"""
An internal hook for performing additional cleaning after form cleaning
is complete. Used for model validation in model forms.
"""
pass
def clean(self):
"""
        Hook for doing any extra form-wide cleaning after Field.clean() has been
called on every field. Any ValidationError raised by this method will
not be associated with a particular field; it will have a special-case
association with the field named '__all__'.
"""
return self.cleaned_data
def has_changed(self):
"""
Returns True if data differs from initial.
"""
return bool(self.changed_data)
@property
def changed_data(self):
if self._changed_data is None:
self._changed_data = []
# XXX: For now we're asking the individual widgets whether or not the
# data has changed. It would probably be more efficient to hash the
# initial data, store it in a hidden field, and compare a hash of the
# submitted data, but we'd need a way to easily get the string value
# for a given field. Right now, that logic is embedded in the render
# method of each widget.
for name, field in self.fields.items():
prefixed_name = self.add_prefix(name)
data_value = field.widget.value_from_datadict(self.data, self.files, prefixed_name)
if not field.show_hidden_initial:
initial_value = self.initial.get(name, field.initial)
if callable(initial_value):
initial_value = initial_value()
else:
initial_prefixed_name = self.add_initial_prefix(name)
hidden_widget = field.hidden_widget()
try:
initial_value = field.to_python(hidden_widget.value_from_datadict(
self.data, self.files, initial_prefixed_name))
except ValidationError:
# Always assume data has changed if validation fails.
self._changed_data.append(name)
continue
if hasattr(field.widget, '_has_changed'):
warnings.warn("The _has_changed method on widgets is deprecated,"
" define it at field level instead.",
PendingDeprecationWarning, stacklevel=2)
if field.widget._has_changed(initial_value, data_value):
self._changed_data.append(name)
elif field._has_changed(initial_value, data_value):
self._changed_data.append(name)
return self._changed_data
@property
def media(self):
"""
Provide a description of all media required to render the widgets on this form
"""
media = Media()
for field in self.fields.values():
media = media + field.widget.media
return media
def is_multipart(self):
"""
Returns True if the form needs to be multipart-encoded, i.e. it has
FileInput. Otherwise, False.
"""
for field in self.fields.values():
if field.widget.needs_multipart_form:
return True
return False
def hidden_fields(self):
"""
Returns a list of all the BoundField objects that are hidden fields.
Useful for manual form layout in templates.
"""
return [field for field in self if field.is_hidden]
def visible_fields(self):
"""
Returns a list of BoundField objects that aren't hidden fields.
The opposite of the hidden_fields() method.
"""
return [field for field in self if not field.is_hidden]
class Form(six.with_metaclass(DeclarativeFieldsMetaclass, BaseForm)):
"A collection of Fields, plus their associated data."
# This is a separate class from BaseForm in order to abstract the way
# self.fields is specified. This class (Form) is the one that does the
# fancy metaclass stuff purely for the semantic sugar -- it allows one
# to define a form using declarative syntax.
# BaseForm itself has no way of designating self.fields.
@python_2_unicode_compatible
class BoundField(object):
"A Field plus data"
def __init__(self, form, field, name):
self.form = form
self.field = field
self.name = name
self.html_name = form.add_prefix(name)
self.html_initial_name = form.add_initial_prefix(name)
self.html_initial_id = form.add_initial_prefix(self.auto_id)
if self.field.label is None:
self.label = pretty_name(name)
else:
self.label = self.field.label
self.help_text = field.help_text or ''
def __str__(self):
"""Renders this field as an HTML widget."""
if self.field.show_hidden_initial:
return self.as_widget() + self.as_hidden(only_initial=True)
return self.as_widget()
def __iter__(self):
"""
Yields rendered strings that comprise all widgets in this BoundField.
This really is only useful for RadioSelect widgets, so that you can
iterate over individual radio buttons in a template.
"""
for subwidget in self.field.widget.subwidgets(self.html_name, self.value()):
yield subwidget
def __len__(self):
return len(list(self.__iter__()))
def __getitem__(self, idx):
return list(self.__iter__())[idx]
@property
def errors(self):
"""
Returns an ErrorList for this field. Returns an empty ErrorList
if there are none.
"""
return self.form.errors.get(self.name, self.form.error_class())
def as_widget(self, widget=None, attrs=None, only_initial=False):
"""
Renders the field by rendering the passed widget, adding any HTML
attributes passed as attrs. If no widget is specified, then the
field's default widget will be used.
"""
if not widget:
widget = self.field.widget
if self.field.localize:
widget.is_localized = True
attrs = attrs or {}
auto_id = self.auto_id
if auto_id and 'id' not in attrs and 'id' not in widget.attrs:
if not only_initial:
attrs['id'] = auto_id
else:
attrs['id'] = self.html_initial_id
if not only_initial:
name = self.html_name
else:
name = self.html_initial_name
return widget.render(name, self.value(), attrs=attrs)
def as_text(self, attrs=None, **kwargs):
"""
Returns a string of HTML for representing this as an <input type="text">.
"""
return self.as_widget(TextInput(), attrs, **kwargs)
def as_textarea(self, attrs=None, **kwargs):
"Returns a string of HTML for representing this as a <textarea>."
return self.as_widget(Textarea(), attrs, **kwargs)
def as_hidden(self, attrs=None, **kwargs):
"""
Returns a string of HTML for representing this as an <input type="hidden">.
"""
return self.as_widget(self.field.hidden_widget(), attrs, **kwargs)
@property
def data(self):
"""
Returns the data for this BoundField, or None if it wasn't given.
"""
return self.field.widget.value_from_datadict(self.form.data, self.form.files, self.html_name)
def value(self):
"""
Returns the value for this BoundField, using the initial value if
the form is not bound or the data otherwise.
"""
if not self.form.is_bound:
data = self.form.initial.get(self.name, self.field.initial)
if callable(data):
data = data()
else:
data = self.field.bound_data(
self.data, self.form.initial.get(self.name, self.field.initial)
)
return self.field.prepare_value(data)
def label_tag(self, contents=None, attrs=None, label_suffix=None):
"""
Wraps the given contents in a <label>, if the field has an ID attribute.
contents should be 'mark_safe'd to avoid HTML escaping. If contents
aren't given, uses the field's HTML-escaped label.
If attrs are given, they're used as HTML attributes on the <label> tag.
label_suffix allows overriding the form's label_suffix.
"""
contents = contents or self.label
# Only add the suffix if the label does not end in punctuation.
label_suffix = label_suffix if label_suffix is not None else self.form.label_suffix
# Translators: If found as last label character, these punctuation
# characters will prevent the default label_suffix to be appended to the label
if label_suffix and contents and contents[-1] not in _(':?.!'):
contents = format_html('{0}{1}', contents, label_suffix)
widget = self.field.widget
id_ = widget.attrs.get('id') or self.auto_id
if id_:
id_for_label = widget.id_for_label(id_)
if id_for_label:
attrs = dict(attrs or {}, **{'for': id_for_label})
attrs = flatatt(attrs) if attrs else ''
contents = format_html('<label{0}>{1}</label>', attrs, contents)
else:
contents = conditional_escape(contents)
return mark_safe(contents)
def css_classes(self, extra_classes=None):
"""
Returns a string of space-separated CSS classes for this field.
"""
if hasattr(extra_classes, 'split'):
extra_classes = extra_classes.split()
extra_classes = set(extra_classes or [])
if self.errors and hasattr(self.form, 'error_css_class'):
extra_classes.add(self.form.error_css_class)
if self.field.required and hasattr(self.form, 'required_css_class'):
extra_classes.add(self.form.required_css_class)
return ' '.join(extra_classes)
@property
def is_hidden(self):
"Returns True if this BoundField's widget is hidden."
return self.field.widget.is_hidden
@property
def auto_id(self):
"""
Calculates and returns the ID attribute for this BoundField, if the
associated Form has specified auto_id. Returns an empty string otherwise.
"""
auto_id = self.form.auto_id
if auto_id and '%s' in smart_text(auto_id):
return smart_text(auto_id) % self.html_name
elif auto_id:
return self.html_name
return ''
@property
def id_for_label(self):
"""
Wrapper around the field widget's `id_for_label` method.
Useful, for example, for focusing on this field regardless of whether
it has a single widget or a MultiWidget.
"""
widget = self.field.widget
id_ = widget.attrs.get('id') or self.auto_id
return widget.id_for_label(id_)
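# A small usage sketch (hypothetical ContactForm, not part of this module) of the
# declarative syntax handled by DeclarativeFieldsMetaclass and the bound/unbound
# behaviour implemented in BaseForm:
#
#   from django import forms
#
#   class ContactForm(forms.Form):
#       name = forms.CharField(max_length=50)          # collected into base_fields
#       message = forms.CharField(widget=forms.Textarea, required=False)
#
#   unbound = ContactForm()                            # is_bound is False
#   bound = ContactForm({'name': 'Ada', 'message': ''})
#   bound.is_valid()                                   # True; triggers full_clean()
#   bound.cleaned_data['name']                         # 'Ada'
#   unbound.as_p()                                     # HTML rendered via _html_output()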
|
|
# ftypes.py - Functional dictionary and list types for Python 2.1+
#
# Author: Dave Benjamin <ramen@ramenfest.com>
# Version: 1.1.2
"""
Introduction
------------
The purpose of this module is to provide a dictionary and list type that can
aid in relational algebra, functional programming, list-oriented programming,
and perhaps even code obfuscation.
There is a certain dark side to this library: operator overloading. Almost
all of the useful features of these types are invoked through operators. My
rationale is that certain functional/relational methods have become so well-
known that they deserve to be liberated from the highly syntactic domain of
functions to the free and flowy rivers of infix notation. You may disagree.
Part of my inspiration for these ideas came from the late computer scientist
and mathematician Edsger Wybe Dijkstra (11 May 1930 -- 6 August 2002), who
argued that programs should be based on mathematical logic and methodology.
Throughout the process of learning about functional programming techniques,
I began to see the resemblance between pure mathematics and functional
algorithms, and wanted to follow this train of thought further.
The "map" function, for example, is so common and useful (to me, anyway)
that there ought to be a symbolic notation for it. Instead of always having
to write "map(func, list)", perhaps it should be "list MAP func", where "MAP"
could be substituted with the "M"-looking Greek letter of your choice. =)
In fear of accidentally reinventing APL, I tried to ignore the temptation to
create such an operator in Python, but it seemed so logical and natural
after a while that I really wanted to try it in practice.
As you will see, I have indeed implemented map as an operator (*), as well
as filter (/), reduce (()), sort (%), zip (**), and many other common
FP-related functions. As is usually the case with liberal operator
overloading, the choice in operator symbols is somewhat arbitrary. I am
reasonably happy with my choices so far, but I did not reach this without
sometimes painful self-deliberation.
Another factor that contributed to this code was a need for powerful tools
to deal with result sets from database queries. My "discovery" that result
sets could be mapped to lists of dictionaries was somewhat of an epiphany,
and enabled me to find very concise solutions to common database and web
templating problems. Not only can dictionaries represent database rows, but
they also function as namespaces for template evaluation.
Defining a result set (or, more formally, a "relation") as a list of
dictionaries allowed me to apply these types to the domain of relational
algebra: operators such as select (which is essentially the same operation
as filter), project (also implemented using the / operator), distinct
(implemented using the unary -), and union (+, merely list concatenation)
fit nicely into this list/dictionary model. The list constructor is
extended to allow for concise specification of literal result sets as
follows:
list(['id', 'fname', 'lname' ],
#---------------------------#
[1234, 'Guido', 'van Rossum'],
[1235, 'Alex', 'Martelli' ],
[1236, 'Tim', 'Peters' ])
This constructor will return a list of three dictionaries, each containing
the keys "id", "fname", and "lname", pointing to the respective values for
each of the rows above. Since most database APIs can provide results in this
form, literal and actual result sets can be swapped freely. As a result,
you can test database code without the database, even through the interpreter
if you desire. This has been very useful.
The examples below should demonstrate the usage of the aforementioned
features, plus many others. You may wish to consult the code itself for
more ideas about how to use these types.
How-To
------
Import these types:
from ftypes import *
Import these types without overwriting the original ones:
from ftypes import list as flist, dict as fdict
Instantiate these types:
dict() -> {}
dict({'a': 5, 'b': 6}) -> {'b': 6, 'a': 5}
dict(((1, 2), (3, 4))) -> {3: 4, 1: 2}
dict(('a', 'b', 'c'), (1, 2, 3)) -> {'b': 2, 'c': 3, 'a': 1}
list() -> []
list([1, 2, 3]) -> [1, 2, 3]
++ list(['st', 'state' ],
['AZ', 'Arizona' ],
['CA', 'California' ],
['PZ', 'Planet Zektar']) -> [{'st': 'AZ', 'state': 'Arizona'},
{'st': 'CA', 'state': 'California'},
{'st': 'PZ', 'state': 'Planet Zektar'}]
Do functional things:
list([1, 3, 5, 7]) * (lambda x: x + 1) -> [2, 4, 6, 8] (map)
list([2, 3, 4, 5]) / (lambda x: x % 2) -> [3, 5] (filter)
list(range(5)).reduce(operator.add) -> 10 (reduce)
list('abcde') % (lambda x, y: cmp(y, x)) -> ['e','d','c','b','a'] (sort)
list([0, '0', [], '[]']) / operator.truth -> ['0', '[]'] (any)
list([1, 2, 3]) ** [4, 5, 6] -> [[1, 4], [2, 5], [3, 6]] (zip)
The map (*) and filter (/) operators are also available for the dict type.
The given function will be applied to the dictionary's values.
Do relational things:
states.st -> ['AZ', 'CA', 'PZ'] (column)
(states / (lambda x: x.st != 'CA')).st -> ['AZ', 'PZ'] (select)
(states / ['st']) -> [{'st': 'AZ'},
{'st': 'CA'},
{'st': 'PZ'}] (project)
-list([1, 2, 2, 3, 6, 3, 2]) -> [1, 2, 3, 6] (distinct)
Note: The definition of states can be found above as indicated (++).
Other (maybe) useful tricks:
list([1, 2, 3]) / {1: 1, 3: 1}.has_key -> [1, 3] (dict set filter)
dict({'a': 5, 'b': 6}).a -> 5 (object-style dict lookup)
dict({'a': 5, 'b': 6}.items()) -> {'b': 6, 'a': 5} (identity)
dict({'a': 5, 'b': 6}).items() * list -> [['b', 6], ['a', 5]] (cast)
~list([(1, 2), (3, 4)]) -> [[1, 3], [2, 4]] (transpose)
~dict({1: 2, 3: 4}) -> {2: 1, 4: 3} (dict transpose)
dict().set('a', 5).set('b', 6).unset('a') -> {'b': 6} (mutator chaining)
d = dict(); (5 + d.put('a', 6 + 7)) * d.a -> 234 (memoization)
list(range(5)) * list(range(4)).get -> [0, 1, 2, 3, None] (list get)
list(['hello', 'world']).join(' ') -> 'hello world' (string join)
dict({'a': 5, 'b': 6}).eval('a + b') -> 11 (eval within a namespace)
Callables:
Dictionaries and lists can be made callable, ie. they can be invoked
like functions. This behavior can be activated by supplying the named
parameter "__call__" to the constructors. For example:
list([1,2,3], __call__=list.reduce)(operator.add) -> 6
I believe that this fits the definition of a "closure".
The ability to add methods via keyword arguments is not restricted to
__call__, by the way. You can in fact supply any method you would like
to override as a keyword argument to the dict and list constructors.
Sets and Histograms:
As a matter of convenience, the set and histogram constructor functions have been
provided, both returning dictionaries.
Use set(1, 2, 3) as an alternative to dict({1: 1, 2: 1, 3: 1}).
To convert a list "liszt" to a set, write set(*liszt). The binary
operators &, |, and - have been overridden to function as set
intersection, union, and difference, respectively. The "in"
operator is an alias for has_key, as with more recent versions of
Python's built-in dictionary, so it can be used to test for set
containment.
The histogram function counts the number of occurrences (frequency) of
each element in a list. It takes a single list as its argument (unlike
set(), which accepts a variable number of arguments) and returns a
dictionary where the list elements are the keys and their values are
their respective frequency counts.
Both of these constructors deal only with hashable types. They will
pass on any named parameters to dict().
Afterword
---------
Thanks to Guido for such a powerful and flexible language! I welcome any
ideas, contributions, and criticism from the community. Thanks also to
Alex Martelli for the fantastic "curry" implementation on ActiveState's
Python Cookbook, and to Tim Peters for starting the helpful discussion on
extracting unique elements from a list.
Peace!
Dave Benjamin <ramen@ramenfest.com>
"""
from __future__ import nested_scopes
from UserDict import UserDict
from UserList import UserList
from pprint import pformat
__all__ = ['dict', 'list', 'odict', 'oset', 'set']
# List Class
# ----------
class list(UserList):
def __init__(self, *args, **kwds):
# Import keyword arguments into the object dictionary.
# Callables are automatically curried so that they take
# "self" as the first argument".
for key, val in kwds.items():
if callable(val):
self.__dict__[key] = curry(val, self)
else:
self.__dict__[key] = val
if len(args) == 0:
# No arguments: empty list.
UserList.__init__(self)
elif len(args) == 1:
# One argument: list.
UserList.__init__(self, args[0])
else:
# Two arguments: list of dictionaries.
UserList.__init__(self, [dict(args[0], row) for row in args[1:]])
def copy(self):
"""Copy constructor."""
return self[:]
def column(self, key):
"""Get column."""
return list([item[key] for item in self])
def flip(self):
"""Convert list of dictionaries to dictionary of lists."""
result = dict()
if not self: return result
for key in self[0].keys():
result[key] = self.column(key)
return result
def get(self, idx, default=None):
"""Get item."""
try:
return self.data[idx]
except IndexError:
return default
def join(self, sep=''):
"""String join with reversed semantics."""
return sep.join(self)
def reduce(self, func, *initial):
"""Reduce to a single value by iteratively applying a function."""
if initial: return reduce(func, self, initial[0])
return reduce(func, self)
def __mul__(self, func_or_n):
"""Map/repeat (*)."""
if callable(func_or_n):
# Function: map operation.
return list([func_or_n(x) for x in self])
else:
# Number: repeat operation.
return list(self.data * func_or_n)
def __div__(self, func_or_keys):
"""Filter/select/project (/)."""
if callable(func_or_keys):
# Function: select (filter) operation.
return list([x for x in self if func_or_keys(x)])
else:
# Key list: project operation.
return list([dict(x) / func_or_keys for x in self])
def __mod__(self, func):
"""Sort (%)."""
result = self[:]
result.sort(func)
return result
def __pow__(self, other):
"""Zip (**)."""
return list(zip(self, other)) * list
def __invert__(self):
"""Transpose (~)."""
if not self: return list()
return list(zip(*self)) * list
def __neg__(self):
"""Distinct (unary -)."""
result = list()
try:
# Hash method (faster).
seen = dict()
for item in self:
if item not in seen:
seen[item] = 1
result.append(item)
except TypeError:
# Linear method (more compatible).
for item in self:
if item not in result:
result.append(item)
return result
def __getattr__(self, key):
"""Get column or attribute (.)."""
if key == '__methods__':
return UserList.__dict__.keys()
if key == '__members__':
if self.data and hasattr(self.data[0], 'keys'):
return self.data[0].keys()
else:
return []
if self.__dict__.has_key(key):
return self.__dict__[key]
if self.data:
head = self.data[0]
if hasattr(head, 'has_key') and head.has_key(key):
return self.column(key)
#if hasattr(head, key):
if hasattr(head, '__dict__') and head.__dict__.has_key(key):
return list([getattr(x, key) for x in self])
raise AttributeError, key
def __str__(self):
"""Built-in pretty-printer."""
return pformat(self.data)
# Dictionary Class
# ----------------
class dict(UserDict):
def __init__(self, *args, **kwds):
# Import keyword arguments into the object dictionary.
# Callables are automatically curried so that they take
# "self" as the first argument".
for key, val in kwds.items():
if callable(val):
self.__dict__[key] = curry(val, self)
else:
self.__dict__[key] = val
if len(args) == 0:
# No arguments: empty dictionary.
UserDict.__init__(self)
elif len(args) == 1:
# One argument: dictionary or item list.
if hasattr(args[0], 'items'):
# Dictionary.
UserDict.__init__(self, args[0])
else:
# Item list.
UserDict.__init__(self)
for key, val in args[0]:
self[key] = val
else:
# Two arguments: key and value lists.
UserDict.__init__(self)
for key, val in zip(args[0], args[1]):
self[key] = val
def copy(self):
"""Copy constructor."""
return dict(self.data)
def keys(self):
"""Returns keys as overloaded list."""
return list(self.data.keys())
def values(self):
"""Returns values as overloaded list."""
return list(self.data.values())
def items(self):
"""Returns items as overloaded lists of tuples."""
return list(self.data.items())
def eval(self, expr, vars={}):
"""Evaluate an expression using self as the namespace (())."""
return eval(expr, self.data, vars)
def flip(self):
"""Convert dictionary of lists to list of dictionaries."""
return list(self.keys(), *~self.values())
def set(self, key, val=1):
"""Assignment as method. Returns self."""
self[key] = val
return self
def unset(self, key):
"""Deletion as method. Returns self."""
del self[key]
return self
def put(self, key, val):
"""Assignment as method. Returns the assigned value."""
self[key] = val
return val
def __and__(self, other):
"""Intersection (&)."""
result = dict()
for key in self.keys():
if other.has_key(key):
result[key] = self[key]
return result
def __or__(self, other):
"""Union (|)."""
result = dict(self.data)
result.update(other)
return result
def __add__(self, other):
"""
Merge (+).
The merge operation is similar to a union except that data is
never overwritten. If three dictionaries with the same set of
keys are merged, the resulting dictionary's values will be
three-element lists.
If you want destructive behavior, use the union (|) operator
instead, since it pays no consideration to duplicate keys.
"""
result = dict(self.data)
for key in other.keys():
if result.has_key(key):
if hasattr(result[key], 'append'):
result[key].append(other[key])
else:
result[key] = list([result[key], other[key]])
else:
result[key] = other[key]
return result
def __sub__(self, other):
"""Difference (-)."""
result = dict()
for key in self.keys():
if not other.has_key(key):
result[key] = self[key]
return result
def __mul__(self, func_or_n):
"""Map/repeat (*)."""
result = dict()
if callable(func_or_n):
for key in self.keys():
result[key] = func_or_n(key, self[key])
else:
for key in self.keys():
result[key] = list([self[key]]) * func_or_n
return result
def __div__(self, func_or_keys):
"""Filter/extract (/)."""
result = dict()
if callable(func_or_keys):
for key in self.keys():
if func_or_keys(key, self[key]):
result[key] = self[key]
else:
for key in func_or_keys:
result[key] = self[key]
return result
def __pow__(self, other):
"""Compose (**)."""
result = dict()
for key in self.keys():
result[key] = other[self[key]]
return result
def __invert__(self):
"""Transpose (~)."""
result = dict()
for key in self.keys():
result[self[key]] = key
return result
def __contains__(self, other):
"""Contains key (in)."""
return self.has_key(other)
def __getattr__(self, key):
"""Get field or attribute (.)."""
if key == '__methods__':
return UserDict.__dict__.keys()
if key == '__members__':
return self.keys()
if self.__dict__.has_key(key) or self.data.has_key(key):
return self[key]
raise AttributeError, key
def __str__(self):
"""Built-in pretty-printer."""
return pformat(self.data)
# Ordered Dictionary Class
# ------------------------
class odict(dict):
def __init__(self, *args, **kwds):
self._keys = {}
dict.__init__(self, *args, **kwds)
def __delitem__(self, key):
dict.__delitem__(self, key)
del self._keys[key]
def __setitem__(self, key, item):
dict.__setitem__(self, key, item)
if not self._keys.has_key(key):
self._keys[key] = max([0] + self._keys.values()) + 1
def clear(self):
dict.clear(self)
self._keys = {}
def copy(self):
result = odict(self)
result._keys = self._keys.copy()
return result
def keys(self):
result = [(y, x) for x, y in self._keys.items()]
result.sort()
return list([x[1] for x in result])
def values(self):
return list(map(self.get, self.keys()))
def items(self):
return list(zip(self.keys(), self.values()))
def popitem(self):
try:
keys = [(y, x) for x, y in self._keys.items()]
keys.sort()
keys.reverse()
key = keys[0][1]
except IndexError:
raise KeyError('dictionary is empty')
val = self[key]
del self[key]
return (key, val)
def setdefault(self, key, failobj=None):
dict.setdefault(self, key, failobj)
if not self._keys.has_key(key):
self._keys[key] = max([0] + self._keys.values()) + 1
def update(self, other):
dict.update(self, other)
for key in other.keys():
if not self._keys.has_key(key):
self._keys[key] = max([0] + self._keys.values()) + 1
# Custom Dictionary Constructors
# ------------------------------
# Dictionary set constructor. Elements must be hashable.
# Example: set('a', 'b', 'c') -> {'a': 1, 'b': 1, 'c': 1}
set = lambda *x, **y: dict(x, [1] * len(x), **y)
# Ordered dictionary set constructor. Elements must be hashable.
oset = lambda *x, **y: odict(x, [1] * len(x), **y)
# Dictionary histogram constructor. Elements must be hashable.
# Example: histo(['a', 'b', 'b', 'a', 'b', 'c']) -> {'a': 2, 'b': 3, 'c': 1}
histo = lambda x, **y: list(x).reduce(_histo, dict(**y))
_histo = lambda x, y: x.set(y, x.get(y, 0) + 1)
# Comparators
# -----------
# Case-insensitive, reversed, and reversed case-insensitive comparators.
cmpi = lambda x, y: cmp(x.lower(), y.lower())
revcmp = lambda x, y: cmp(y, x)
revcmpi = lambda x, y: cmp(y.lower(), x.lower())
def reorder(key, order):
"""
Returns a comparator that reorders a row set (list of dictionaries).
The order is specified as a key (column) and a list of ordered values.
"""
return lambda x, y, k=key, o=dict(order, range(len(order))): \
cmp(o.get(x[k]), o.get(y[k]))
# Helper Functions
# ----------------
def curry(*args, **create_time_kwds):
"""
Bind arguments to a function.
Author: Alex Martelli
Source: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52549
"""
func = args[0]
create_time_args = args[1:]
def curried_function(*call_time_args, **call_time_kwds):
args = create_time_args + call_time_args
kwds = create_time_kwds.copy()
kwds.update(call_time_kwds)
return func(*args, **kwds)
return curried_function
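if __name__ == '__main__':
    # A short, runnable demo mirroring the docstring examples above: relational
    # operators applied to a literal result set built with the overloaded types.
    states = list(['st', 'state'],
                  ['AZ', 'Arizona'],
                  ['CA', 'California'],
                  ['PZ', 'Planet Zektar'])
    print(states.st)                            # column:   ['AZ', 'CA', 'PZ']
    print(states / (lambda x: x.st != 'CA'))    # select:   rows where st != 'CA'
    print(states / ['st'])                      # project:  only the 'st' column
    print(-list([1, 2, 2, 3, 6, 3, 2]))         # distinct: [1, 2, 3, 6]
    print(histo(['a', 'b', 'b', 'a', 'b']))     # histogram: {'a': 2, 'b': 3}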
|
|
# Copyright 2013-present Barefoot Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import p4
from collections import defaultdict
def _get_extracted_headers(parse_state):
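    # NOTE: left as a stub in this snapshot; judging from its use in
    # _find_compatible_headers below, it is meant to return the set of header
    # instances extracted by `parse_state` (cf. the extraction loop in _find_paths).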
extracted = set()
return extracted
# def _get_hdr_name(hdr):
# if hdr.virtual:
# return hdr.base_name
# elif hdr.index is not None:
# return hdr.base_name
# else:
# return hdr.name
def _find_parser_paths(hlir):
# Helps reduce the running time of this function by caching visited
# states. I claim that new header sets cannot be discovered if we visit the
# same parse state, with the same set of previously visited parser states
# and the same tag stacks indices.
class State:
def __init__(self, parse_state, current_path, tag_stacks_index):
self.current_state = parse_state
self.visited_states = frozenset(current_path)
self.stacks = frozenset(tag_stacks_index.items())
def __eq__(self, other):
return (self.current_state == other.current_state)\
and (self.visited_states == other.visited_states)\
and (self.stacks == other.stacks)
def __hash__(self):
return hash((self.current_state, self.visited_states, self.stacks))
def _find_paths(state, paths, current_path, path_hdrs, tag_stacks_index,
recursion_states):
rec_state = State(state, current_path, tag_stacks_index)
if rec_state in recursion_states:
return
recursion_states.add(rec_state)
try:
next_states = set(state.branch_to.values())
except:
paths.add(frozenset(path_hdrs))
return
extracted_headers = set()
for call in state.call_sequence:
if call[0] == p4.parse_call.extract:
hdr = call[1]
if hdr.virtual:
base_name = hdr.base_name
current_index = tag_stacks_index[base_name]
if current_index > hdr.max_index:
paths.add(frozenset(path_hdrs))
return
tag_stacks_index[base_name] += 1
name = base_name + "[%d]" % current_index
hdr = hlir.p4_header_instances[name]
extracted_headers.add(hdr)
if len(extracted_headers & path_hdrs) != 0:
paths.add(frozenset(extracted_headers | path_hdrs))
return
for next_state in next_states:
_find_paths(next_state, paths, current_path + [state],
extracted_headers | path_hdrs, tag_stacks_index.copy(),
recursion_states)
paths = set()
start_state = hlir.p4_parse_states["start"]
_find_paths(start_state, paths, [], set(), defaultdict(int), set())
return paths
def _find_compatible_headers(hlir):
def _find_rec(state, current_path, path_hdrs, compatibles):
if state in current_path: return
try:
next_states = set(state.branch_to.values())
except:
return
extracted_headers = _get_extracted_headers(state)
for hdr1, hdr2 in itertools.product(path_hdrs, extracted_headers):
compatibles.add( (hdr1, hdr2) )
compatibles.add( (hdr2, hdr1) )
for next_state in next_states:
_find_rec(next_state, current_path + [state],
path_hdrs | extracted_headers, compatibles)
compatibles = set()
start_state = hlir.p4_parse_states["start"]
_find_rec(start_state, [], set(), compatibles)
return compatibles
def _get_headers_in_condition(p4_expression, hdrs):
try:
if p4_expression.op == "valid" and not p4_expression.right.metadata:
hdrs.add(p4_expression.right)
_get_headers_in_condition(p4_expression.left, hdrs)
_get_headers_in_condition(p4_expression.right, hdrs)
except AttributeError:
return
class Solver():
TRUE = 0
FALSE = 1
DONT_KNOW = 2
def __init__(self, hlir):
self.hlir = hlir
# self.compatible_headers = _find_compatible_headers(hlir)
self.paths = _find_parser_paths(hlir)
self.compatible_headers = {}
self.implied_headers = {}
all_headers = set()
for _, hdr in hlir.p4_header_instances.items():
if hdr.metadata or hdr.virtual: continue
all_headers.add(hdr)
for _, hdr in hlir.p4_header_instances.items():
if hdr.metadata or hdr.virtual: continue
self.compatible_headers[hdr] = set()
self.implied_headers[hdr] = all_headers.copy()
for path in self.paths:
for hdr in path:
self.compatible_headers[hdr] |= path
self.implied_headers[hdr] &= path
# print "COMPATIBLE_HEADERS"
# for hdr, s in self.compatible_headers.items():
# print hdr, ":", [str(h) for h in s]
# print "IMPLIED_HEADERS"
# for hdr, s in self.implied_headers.items():
# print hdr, ":", [str(h) for h in s]
def _check_header_values_coherent(self, hdrs_valid):
for hdr1, hdr2 in itertools.product(hdrs_valid, repeat = 2):
if hdr2 not in self.compatible_headers[hdr1] and\
hdrs_valid[hdr1] and hdrs_valid[hdr2]:
return False
if hdr1 in self.implied_headers[hdr2] and\
hdrs_valid[hdr2] and not hdrs_valid[hdr1]:
return False
if hdr2 in self.implied_headers[hdr1] and\
hdrs_valid[hdr1] and not hdrs_valid[hdr2]:
return False
return True
def _check_condition(self, c, hdrs_valid):
if not c: return Solver.TRUE
if type(c) is bool:
if c:
return Solver.TRUE
else:
return Solver.FALSE
assert(type(c) is p4.p4_expression)
if c.op == "valid":
if c.right.metadata: # a metadata header is always valid in P4
return Solver.TRUE
if hdrs_valid[c.right]:
return Solver.TRUE
else:
return Solver.FALSE
elif c.op == "and":
left = self._check_condition(c.left, hdrs_valid)
right = self._check_condition(c.right, hdrs_valid)
if left == Solver.TRUE and right == Solver.TRUE: return Solver.TRUE
if left == Solver.FALSE or right == Solver.FALSE: return Solver.FALSE
return Solver.DONT_KNOW
elif c.op == "or":
left = self._check_condition(c.left, hdrs_valid)
right = self._check_condition(c.right, hdrs_valid)
if left == Solver.TRUE or right == Solver.TRUE: return Solver.TRUE
if left == Solver.FALSE and right == Solver.FALSE: return Solver.FALSE
return Solver.DONT_KNOW
elif c.op == "not":
right = self._check_condition(c.right, hdrs_valid)
if right == Solver.TRUE: return Solver.FALSE
if right == Solver.FALSE: return Solver.TRUE
return Solver.DONT_KNOW
return Solver.DONT_KNOW
    # unknown_cond is a condition (p4_expression) we want to evaluate
# known_conds is a list of 2-tuples (condition, value), where condition is a
# p4_expression and value the boolean value of condition
def evaluate_condition(self, dangerous_hdrs,
unknown_cond, known_conds):
used_hdrs = set()
_get_headers_in_condition(unknown_cond, used_hdrs)
if known_conds:
for c in zip(*known_conds)[0]:
_get_headers_in_condition(c, used_hdrs)
if (used_hdrs & dangerous_hdrs): return None
used_hdrs_ordered = list(used_hdrs)
used_hdrs_valid = {}
num_used_hdrs = len(used_hdrs)
result = None
for values in itertools.product([True, False], repeat = num_used_hdrs):
for idx, hdr in enumerate(used_hdrs_ordered):
used_hdrs_valid[hdr] = values[idx]
if not self._check_header_values_coherent(used_hdrs_valid): continue
violated = False
for known_c, value in known_conds:
check_c = self._check_condition(known_c, used_hdrs_valid)
if check_c == Solver.FALSE and value:
violated = True
break
elif check_c == Solver.TRUE and not value:
violated = True
break
elif check_c == Solver.DONT_KNOW:
pass
if violated:
continue
unknown_value = self._check_condition(unknown_cond, used_hdrs_valid)
if unknown_value == Solver.DONT_KNOW: return None
if result is None:
result = unknown_value
elif result != unknown_value:
return None
if result == Solver.TRUE:
return True
elif result == Solver.FALSE:
return False
return result
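# A toy, self-contained sketch (it does not use the real p4.p4_expression objects;
# headers are named by plain strings here) of the three-valued logic implemented by
# Solver._check_condition above: a condition is a nested (op, left, right) node and
# header validity is a dict of booleans.
#
#   Expr = collections.namedtuple('Expr', 'op left right')
#   # valid(ipv4) and not valid(ipv6)
#   cond = Expr('and', Expr('valid', None, 'ipv4'),
#                      Expr('not', None, Expr('valid', None, 'ipv6')))
#   # {'ipv4': True,  'ipv6': False} -> TRUE
#   # {'ipv4': True,  'ipv6': True } -> FALSE
#
# evaluate_condition() enumerates every validity assignment that is coherent with
# the parser paths and returns True/False only when all coherent assignments agree;
# otherwise it returns None.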
|
|
from sqlite3 import connect
from hashlib import sha1
from os import urandom
from random import randrange
import datetime
import calendar
from calendar import monthrange
f = "data/roomres.db"
'''
---------------------------------------
Find list of values functions
----------------------------------------
'''
def cursorToList(c):
items = c.fetchall()
books = []
for item in items:
book = []
for info in item:
book.append(str(info))
books.append(book)
return books
def find():
checkCreateTable()
db = connect(f)
c = db.cursor()
c.execute("SELECT * from rooms")
a = c
l = cursorToList(a)
db.close()
return l
def findP(field,value):
checkCreateTable()
db = connect(f)
c = db.cursor()
if type(value) is int:
c.execute("SELECT * from rooms WHERE %s = %d" % (field,value))
elif type(value) is str or type(value) is unicode:
c.execute("SELECT * from rooms WHERE %s = \"%s\"" % (field,value))
else:
print "error: "
print type(value)
c.execute("SELECT * from rooms")
a = c
l = cursorToList(a)
db.close()
return l
def findBooked():
checkCreateTable()
db = connect(f)
c = db.cursor()
c.execute("SELECT * from rooms WHERE club!=\"\"")
a = c
l = cursorToList(a)
db.close()
return l
def findUnbooked(field,value):
checkCreateTable()
db = connect(f)
c = db.cursor()
c.execute("SELECT * from rooms WHERE date = \"%s\" AND club=\"\"" % (value))
a = c
l = cursorToList(a)
db.close()
return l
'''
------------------------------
UPDATE DB FUNCTIONS
------------------------------
'''
def checkCreateTable():
db = connect(f)
c = db.cursor()
try:
c.execute("SELECT * FROM rooms")
except:
c.execute("CREATE TABLE rooms (club TEXT, email TEXT, room INT, date TEXT, weekday TEXT, time TEXT)");
db.commit()
db.close()
def booked(date, room):
db = connect(f)
c = db.cursor()
query = "SELECT date, room FROM rooms WHERE date=? AND room=? AND club!=\"\""
info = c.execute(query, (date, room))
value = False
for record in info:
value = True
db.commit()
db.close()
return value
def addBook(email, date, room):
print "HEYYYY"
print email,date,room
db = connect(f)
c = db.cursor()
checkCreateTable()
msg = "Sorry, " + str(room) + " is booked on " + date
if not booked(date, room):
try:
query = ("SELECT * from users WHERE email=?")
c.execute(query,(email,))
club = c.fetchall()[0][5]
except:
club = "N/A"
now = datetime.datetime.now()
time = now.strftime("%H:%M")
#weekday = datetime.datetime.strptime(date, '%Y-%m-%d').strftime('%A')
#print date
#print time
'''
print "club:"
print club
print "email:"
print email
'''
query = ("UPDATE rooms SET club=? , email=?, time=? WHERE room=? and date=?")
c.execute(query, (club, email, time,room,date))
msg = (club + " has now booked " + str(room) + " on " + date)
db.commit()
db.close()
return msg
def getFirstWeekdayDate(month, weekday):
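    # Returns the day-of-month of the first occurrence of `weekday` (e.g. "Monday")
    # in `month` of the current year, using a Monday-first calendar.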
dayDict = {}
dayDict["Monday"] = 0
dayDict["Tuesday"] = 1
dayDict["Wednesday"] = 2
dayDict["Thursday"] = 3
dayDict["Friday"] = 4
dayDict["Saturday"] = 5
dayDict["Sunday"] = 6
cal = calendar.Calendar(0)
mon = cal.monthdatescalendar(datetime.datetime.now().year,int(month))
firstweek = mon[0]
day = firstweek[dayDict[weekday]]
if day.month != month:
day = mon[1][dayDict[weekday]]
return day.day
def adminAddRoom(room, month, day):
if room == None or room == "":
return "One or more fields was not filled"
if int(room) < 101 or int(room) > 1030:
return "Room does not exist!!!"
month = int(month)
db = connect(f)
c = db.cursor()
checkCreateTable()
now = datetime.datetime.now()
time = now.strftime("%H:%M")
club = ""
mDays = calendar.monthrange(2017,int(month))[1]
fDay = getFirstWeekdayDate(month, day)
monthStr = str(month)
if month < 10:
monthStr = "0" + monthStr
print fDay, mDays
while fDay <= mDays:
fDayStr = str(fDay)
if (fDay < 10):
fDayStr = "0" + fDayStr
date = str(now.year) + "-" + str(monthStr) + "-" + str(fDayStr)
query = ("INSERT INTO rooms VALUES (?, ?, ?, ?, ?, ?)")
c.execute(query, (club, "admin", room, date, day, time))
fDay+=7
db.commit()
db.close()
return "Put room for booking up"
def removeBook(room, date):
db = connect(f)
c = db.cursor()
checkCreateTable()
    msg = str(room) + " is actually not booked on " + date
if booked(date, room):
query = "UPDATE rooms SET club=\"\", email=\"admin\" WHERE date=? and room=?"
c.execute(query, (date, room))
msg = str(room) + " is now available on " + date
db.commit()
db.close()
return msg
def changeBook(date,room,newr,club):
if newr == "" or newr == None:
return "new room empty"
db = connect(f)
c = db.cursor()
checkCreateTable()
query = "UPDATE rooms SET room=? WHERE date=? and room=? and club=?"
c.execute(query, (newr,date,room,club))
db.commit()
db.close()
return "change success"
#change date to day and month
def adminRemoveRoom(room, month, day):
if room == None or room == "":
return "One or more fields was not filled"
if int(room) < 101 or int(room) > 1030:
return "Room does not exist!!!"
month = int(month)
db = connect(f)
c = db.cursor()
checkCreateTable()
now = datetime.datetime.now()
time = now.strftime("%H:%M")
mDays = calendar.monthrange(2017,int(month))[1]
fDay = getFirstWeekdayDate(month, day)
monthStr = str(month)
if month < 10:
monthStr = "0" + monthStr
while fDay <= mDays:
fDayStr = str(fDay)
if (fDay < 10):
fDayStr = "0" + fDayStr
date = str(now.year) + "-" + str(monthStr) + "-" + str(fDayStr)
query = "DELETE from rooms where date=? and room=?"
c.execute(query,(date,room))
fDay+=7
db.commit()
db.close()
return "Remove room for booking"
if __name__=="__main__":
print "check"
#addBook("test@example.com", "2016-01-29", 235)
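    # A hedged usage sketch (room number and dates below are made up, assuming a
    # 2017 calendar): open room 235 every Monday in March, book one date, list
    # what is still free the following week, then cancel the booking again.
    #adminAddRoom(235, 3, "Monday")
    #print addBook("test@example.com", "2017-03-06", 235)
    #print findUnbooked("date", "2017-03-13")
    #print removeBook(235, "2017-03-06")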
|
|
# Copyright 2014 Yajie Miao Carnegie Mellon University
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
# WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
# MERCHANTABLITY OR NON-INFRINGEMENT.
# See the Apache 2 License for the specific language governing permissions and
# limitations under the License.
import cPickle
from io_func import smart_open
class LearningRate(object):
def __init__(self):
'''constructor'''
def get_rate(self):
pass
def get_next_rate(self, current_error):
pass
class LearningRateConstant(LearningRate):
def __init__(self, learning_rate = 0.08, epoch_num = 20):
self.learning_rate = learning_rate
self.epoch = 1
self.epoch_num = epoch_num
self.rate = learning_rate
def get_rate(self):
return self.rate
def get_next_rate(self, current_error):
if ( self.epoch >= self.epoch_num):
self.rate = 0.0
else:
self.rate = self.learning_rate
self.epoch += 1
return self.rate
class LearningRateExpDecay(LearningRate):
def __init__(self, start_rate = 0.08, scale_by = 0.5,
min_derror_decay_start = 0.05, min_derror_stop = 0.05, init_error = 100,
decay=False, min_epoch_decay_start=15, zero_rate = 0.0):
self.start_rate = start_rate
self.init_error = init_error
self.rate = start_rate
self.scale_by = scale_by
self.min_derror_decay_start = min_derror_decay_start
self.min_derror_stop = min_derror_stop
self.lowest_error = init_error
self.epoch = 1
self.decay = decay
self.zero_rate = zero_rate
self.min_epoch_decay_start = min_epoch_decay_start
def get_rate(self):
return self.rate
def get_next_rate(self, current_error):
diff_error = 0.0
diff_error = self.lowest_error - current_error
if (current_error < self.lowest_error):
self.lowest_error = current_error
if (self.decay):
if (diff_error < self.min_derror_stop):
self.rate = 0.0
else:
self.rate *= self.scale_by
else:
if ((diff_error < self.min_derror_decay_start) and (self.epoch > self.min_epoch_decay_start)):
self.decay = True
self.rate *= self.scale_by
self.epoch += 1
return self.rate
class LearningMinLrate(LearningRate):
def __init__(self, start_rate = 0.08, scale_by = 0.5,
min_derror_decay_start = 0.05,
min_lrate_stop = 0.0002, init_error = 100,
decay=False, min_epoch_decay_start=15):
self.start_rate = start_rate
self.init_error = init_error
self.rate = start_rate
self.scale_by = scale_by
self.min_lrate_stop = min_lrate_stop
self.lowest_error = init_error
self.min_derror_decay_start = min_derror_decay_start
self.epoch = 1
self.decay = decay
self.min_epoch_decay_start = min_epoch_decay_start
def get_rate(self):
return self.rate
def get_next_rate(self, current_error):
diff_error = 0.0
diff_error = self.lowest_error - current_error
if (current_error < self.lowest_error):
self.lowest_error = current_error
if (self.decay):
if (self.rate < self.min_lrate_stop):
self.rate = 0.0
else:
self.rate *= self.scale_by
else:
if (diff_error < self.min_derror_decay_start) and (self.epoch >= self.min_epoch_decay_start):
self.decay = True
self.rate *= self.scale_by
self.epoch += 1
return self.rate
class LearningFixedLrate(LearningRate):
def __init__(self, start_rate = 0.08, scale_by = 0.5,
decay_start_epoch = 10, init_error = 100,
decay=False, stop_after_deday_epoch=6):
self.start_rate = start_rate
self.init_error = init_error
self.rate = start_rate
self.scale_by = scale_by
self.decay_start_epoch = decay_start_epoch
self.stop_after_deday_epoch = stop_after_deday_epoch
self.lowest_error = init_error
self.epoch = 1
self.decay = decay
def get_rate(self):
return self.rate
def get_next_rate(self, current_error):
diff_error = 0.0
diff_error = self.lowest_error - current_error
if (current_error < self.lowest_error):
self.lowest_error = current_error
if (self.decay):
if (self.epoch >= self.decay_start_epoch + self.stop_after_deday_epoch):
self.rate = 0.0
else:
self.rate *= self.scale_by
else:
if (self.epoch >= self.decay_start_epoch):
self.decay = True
self.rate *= self.scale_by
self.epoch += 1
return self.rate
# save and load the learning rate class
def _lrate2file(lrate, filename='file.out'):
with smart_open(filename, "wb") as output:
cPickle.dump(lrate, output, cPickle.HIGHEST_PROTOCOL)
def _file2lrate(filename='file.in'):
return cPickle.load(smart_open(filename,'rb'))
# functions to save and resume the learning rate
# the following 4 fields are written into <lrate_file>, each field per line
# lrate.epoch: the current epoch
# lrate.rate: the current learning rate
# lrate.lowest_error: the lowest error seen so far
# lrate.decay: whether decay has started
def save_lrate(lrate, lrate_file):
file_open = smart_open(lrate_file, 'w') # always overwrite
file_open.write(str(lrate.epoch) + '\n')
file_open.write(str(lrate.rate) + '\n')
file_open.write(str(lrate.lowest_error) + '\n')
file_open.write(str(int(lrate.decay)) + '\n')
file_open.close()
def resume_lrate(lrate, lrate_file):
file_open = smart_open(lrate_file, 'r')
line = file_open.readline().replace('\n','')
lrate.epoch = int(line)
line = file_open.readline().replace('\n','')
lrate.rate = float(line)
line = file_open.readline().replace('\n','')
lrate.lowest_error = float(line)
line = file_open.readline().replace('\n','')
lrate.decay = bool(int(line))
file_open.close()
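# --- Hedged usage sketch (illustrative only, not part of the original module) ---
# Drives LearningRateExpDecay with a made-up sequence of validation errors:
# once the improvement per epoch drops below min_derror_decay_start (after
# min_epoch_decay_start epochs) the rate is halved every epoch. The resulting
# epoch/rate/lowest_error/decay fields are exactly what save_lrate() writes
# and resume_lrate() restores.
if __name__ == '__main__':
    lrate = LearningRateExpDecay(start_rate=0.08, scale_by=0.5,
                                 min_derror_decay_start=0.05,
                                 min_derror_stop=0.001,
                                 min_epoch_decay_start=2)
    for error in [60.0, 55.0, 54.98, 54.97, 54.965]:
        print('epoch %d: rate %.5f (validation error %.3f)'
              % (lrate.epoch, lrate.get_rate(), error))
        lrate.get_next_rate(current_error=error)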
|
|
# Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import debtcollector
from eventlet import greenthread
from neutron_lib.api import converters
from neutron_lib import constants
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_serialization import jsonutils
from oslo_utils import importutils
from oslo_utils import timeutils
import six
from sqlalchemy.orm import exc
from sqlalchemy import sql
from neutron._i18n import _, _LE, _LI, _LW
from neutron.agent.common import utils
from neutron.api.rpc.callbacks import version_manager
from neutron.callbacks import events
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import _deprecate
from neutron.common import constants as n_const
from neutron import context
from neutron.db import api as db_api
from neutron.db.models import agent as agent_model
from neutron.extensions import agent as ext_agent
from neutron.extensions import availability_zone as az_ext
from neutron import manager
LOG = logging.getLogger(__name__)
AGENT_OPTS = [
cfg.StrOpt('dhcp_load_type', default='networks',
choices=['networks', 'subnets', 'ports'],
help=_('Representing the resource type whose load is being '
'reported by the agent. This can be "networks", '
'"subnets" or "ports". '
'When specified (Default is networks), the server will '
'extract particular load sent as part of its agent '
'configuration object from the agent report state, '
'which is the number of resources being consumed, at '
'every report_interval. '
'dhcp_load_type can be used in combination with '
'network_scheduler_driver = '
'neutron.scheduler.dhcp_agent_scheduler.WeightScheduler '
'When the network_scheduler_driver is WeightScheduler, '
'dhcp_load_type can be configured to represent the '
'choice for the resource being balanced. '
'Example: dhcp_load_type=networks')),
cfg.BoolOpt('enable_new_agents', default=True,
help=_("Agent starts with admin_state_up=False when "
"enable_new_agents=False. In the case, user's "
"resources will not be scheduled automatically to the "
"agent until admin changes admin_state_up to True.")),
]
cfg.CONF.register_opts(AGENT_OPTS)
# this is the ratio from agent_down_time to the time we use to consider
# the agents down for considering their resource versions in the
# version_manager callback
DOWNTIME_VERSIONS_RATIO = 2
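# (For example, with a typical agent_down_time of 75 seconds, agents heard
#  from within the last 150 seconds still contribute their resource versions.
#  The 75-second figure is only an illustrative assumption; the actual value
#  comes from the agent_down_time option registered elsewhere in neutron.)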
_deprecate._moved_global('Agent', new_module=agent_model)
class AgentAvailabilityZoneMixin(az_ext.AvailabilityZonePluginBase):
"""Mixin class to add availability_zone extension to AgentDbMixin."""
def _list_availability_zones(self, context, filters=None):
result = {}
query = self._get_collection_query(context, agent_model.Agent,
filters=filters)
columns = (agent_model.Agent.admin_state_up,
agent_model.Agent.availability_zone,
agent_model.Agent.agent_type)
for agent in query.with_entities(*columns).group_by(*columns):
if not agent.availability_zone:
continue
if agent.agent_type == constants.AGENT_TYPE_DHCP:
resource = 'network'
elif agent.agent_type == constants.AGENT_TYPE_L3:
resource = 'router'
else:
continue
key = (agent.availability_zone, resource)
result[key] = agent.admin_state_up or result.get(key, False)
return result
@db_api.retry_if_session_inactive()
def get_availability_zones(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
"""Return a list of availability zones."""
# NOTE(hichihara): 'tenant_id' is dummy for policy check.
# it is not visible via API.
return [{'state': 'available' if v else 'unavailable',
'name': k[0], 'resource': k[1],
'tenant_id': context.tenant_id}
for k, v in six.iteritems(self._list_availability_zones(
context, filters))]
@db_api.retry_if_session_inactive()
def validate_availability_zones(self, context, resource_type,
availability_zones):
"""Verify that the availability zones exist."""
if not availability_zones:
return
if resource_type == 'network':
agent_type = constants.AGENT_TYPE_DHCP
elif resource_type == 'router':
agent_type = constants.AGENT_TYPE_L3
else:
return
query = context.session.query(
agent_model.Agent.availability_zone).filter_by(
agent_type=agent_type).group_by(
agent_model.Agent.availability_zone)
query = query.filter(
agent_model.Agent.availability_zone.in_(availability_zones))
azs = [item[0] for item in query]
diff = set(availability_zones) - set(azs)
if diff:
raise az_ext.AvailabilityZoneNotFound(availability_zone=diff.pop())
class AgentDbMixin(ext_agent.AgentPluginBase, AgentAvailabilityZoneMixin):
"""Mixin class to add agent extension to db_base_plugin_v2."""
def _get_agent(self, context, id):
try:
agent = self._get_by_id(context, agent_model.Agent, id)
except exc.NoResultFound:
raise ext_agent.AgentNotFound(id=id)
return agent
@db_api.retry_if_session_inactive()
def get_enabled_agent_on_host(self, context, agent_type, host):
"""Return agent of agent_type for the specified host."""
query = context.session.query(agent_model.Agent)
query = query.filter(agent_model.Agent.agent_type == agent_type,
agent_model.Agent.host == host,
agent_model.Agent.admin_state_up == sql.true())
try:
agent = query.one()
except exc.NoResultFound:
LOG.debug('No enabled %(agent_type)s agent on host '
'%(host)s', {'agent_type': agent_type, 'host': host})
return
if utils.is_agent_down(agent.heartbeat_timestamp):
LOG.warning(_LW('%(agent_type)s agent %(agent_id)s is not active'),
{'agent_type': agent_type, 'agent_id': agent.id})
return agent
@debtcollector.removals.remove(
message="This will be removed in the future. "
"Please use 'neutron.agent.common.utils.is_agent_down' "
"instead.",
version='ocata'
)
@staticmethod
def is_agent_down(heart_beat_time):
return utils.is_agent_down(heart_beat_time)
@staticmethod
def is_agent_considered_for_versions(agent_dict):
return not timeutils.is_older_than(agent_dict['heartbeat_timestamp'],
cfg.CONF.agent_down_time *
DOWNTIME_VERSIONS_RATIO)
def get_configuration_dict(self, agent_db):
return self._get_dict(agent_db, 'configurations')
def _get_dict(self, agent_db, dict_name, ignore_missing=False):
json_value = None
try:
json_value = getattr(agent_db, dict_name)
conf = jsonutils.loads(json_value)
except Exception:
if json_value or not ignore_missing:
msg = _LW('Dictionary %(dict_name)s for agent %(agent_type)s '
'on host %(host)s is invalid.')
LOG.warning(msg, {'dict_name': dict_name,
'agent_type': agent_db.agent_type,
'host': agent_db.host})
conf = {}
return conf
def _get_agent_load(self, agent):
configs = agent.get('configurations', {})
load_type = None
load = 0
if agent['agent_type'] == constants.AGENT_TYPE_DHCP:
load_type = cfg.CONF.dhcp_load_type
if load_type:
load = int(configs.get(load_type, 0))
return load
def _make_agent_dict(self, agent, fields=None):
attr = ext_agent.RESOURCE_ATTRIBUTE_MAP.get(
ext_agent.RESOURCE_NAME + 's')
res = dict((k, agent[k]) for k in attr
if k not in ['alive', 'configurations'])
res['alive'] = not utils.is_agent_down(
res['heartbeat_timestamp']
)
res['configurations'] = self._get_dict(agent, 'configurations')
res['resource_versions'] = self._get_dict(agent, 'resource_versions',
ignore_missing=True)
res['availability_zone'] = agent['availability_zone']
return self._fields(res, fields)
@db_api.retry_if_session_inactive()
def delete_agent(self, context, id):
agent = self._get_agent(context, id)
registry.notify(resources.AGENT, events.BEFORE_DELETE, self,
context=context, agent=agent)
with context.session.begin(subtransactions=True):
context.session.delete(agent)
@db_api.retry_if_session_inactive()
def update_agent(self, context, id, agent):
agent_data = agent['agent']
with context.session.begin(subtransactions=True):
agent = self._get_agent(context, id)
agent.update(agent_data)
return self._make_agent_dict(agent)
@db_api.retry_if_session_inactive()
def get_agents_db(self, context, filters=None):
query = self._get_collection_query(context,
agent_model.Agent,
filters=filters)
return query.all()
@db_api.retry_if_session_inactive()
def get_agents(self, context, filters=None, fields=None):
agents = self._get_collection(context, agent_model.Agent,
self._make_agent_dict,
filters=filters, fields=fields)
alive = filters and filters.get('alive', None)
if alive:
alive = converters.convert_to_boolean(alive[0])
agents = [agent for agent in agents if agent['alive'] == alive]
return agents
@db_api.retry_db_errors
def agent_health_check(self):
"""Scan agents and log if some are considered dead."""
agents = self.get_agents(context.get_admin_context(),
filters={'admin_state_up': [True]})
dead_agents = [agent for agent in agents if not agent['alive']]
if dead_agents:
data = '%20s %20s %s\n' % ('Type', 'Last heartbeat', "host")
data += '\n'.join(['%20s %20s %s' %
(agent['agent_type'],
agent['heartbeat_timestamp'],
agent['host']) for agent in dead_agents])
LOG.warning(_LW("Agent healthcheck: found %(count)s dead agents "
"out of %(total)s:\n%(data)s"),
{'count': len(dead_agents),
'total': len(agents),
'data': data})
else:
LOG.debug("Agent healthcheck: found %s active agents",
len(agents))
def _get_agent_by_type_and_host(self, context, agent_type, host):
query = self._model_query(context, agent_model.Agent)
try:
agent_db = query.filter(agent_model.Agent.agent_type == agent_type,
agent_model.Agent.host == host).one()
return agent_db
except exc.NoResultFound:
raise ext_agent.AgentNotFoundByTypeHost(agent_type=agent_type,
host=host)
except exc.MultipleResultsFound:
raise ext_agent.MultipleAgentFoundByTypeHost(agent_type=agent_type,
host=host)
@db_api.retry_if_session_inactive()
def get_agent(self, context, id, fields=None):
agent = self._get_agent(context, id)
return self._make_agent_dict(agent, fields)
@db_api.retry_if_session_inactive()
def filter_hosts_with_network_access(
self, context, network_id, candidate_hosts):
"""Filter hosts with access to network_id.
This method returns the subset of candidate_hosts that have network
access to network_id.
A plugin can override this method to define its own filtering of hosts
based on network_id.
"""
return candidate_hosts
def _log_heartbeat(self, state, agent_db, agent_conf):
if agent_conf.get('log_agent_heartbeats'):
delta = timeutils.utcnow() - agent_db.heartbeat_timestamp
LOG.info(_LI("Heartbeat received from %(type)s agent on "
"host %(host)s, uuid %(uuid)s after %(delta)s"),
{'type': agent_db.agent_type,
'host': agent_db.host,
'uuid': state.get('uuid'),
'delta': delta})
@db_api.retry_if_session_inactive()
def create_or_update_agent(self, context, agent_state):
"""Registers new agent in the database or updates existing.
Returns tuple of agent status and state.
Status is from server point of view: alive, new or revived.
It could be used by agent to do some sync with the server if needed.
"""
status = n_const.AGENT_ALIVE
with context.session.begin(subtransactions=True):
res_keys = ['agent_type', 'binary', 'host', 'topic']
res = dict((k, agent_state[k]) for k in res_keys)
if 'availability_zone' in agent_state:
res['availability_zone'] = agent_state['availability_zone']
configurations_dict = agent_state.get('configurations', {})
res['configurations'] = jsonutils.dumps(configurations_dict)
resource_versions_dict = agent_state.get('resource_versions')
if resource_versions_dict:
res['resource_versions'] = jsonutils.dumps(
resource_versions_dict)
res['load'] = self._get_agent_load(agent_state)
current_time = timeutils.utcnow()
try:
agent_db = self._get_agent_by_type_and_host(
context, agent_state['agent_type'], agent_state['host'])
if not agent_db.is_active:
status = n_const.AGENT_REVIVED
if 'resource_versions' not in agent_state:
# updating agent_state with resource_versions taken
# from db so that
# _update_local_agent_resource_versions() will call
# version_manager and bring it up to date
agent_state['resource_versions'] = self._get_dict(
agent_db, 'resource_versions', ignore_missing=True)
res['heartbeat_timestamp'] = current_time
if agent_state.get('start_flag'):
res['started_at'] = current_time
greenthread.sleep(0)
self._log_heartbeat(agent_state, agent_db, configurations_dict)
agent_db.update(res)
event_type = events.AFTER_UPDATE
except ext_agent.AgentNotFoundByTypeHost:
greenthread.sleep(0)
res['created_at'] = current_time
res['started_at'] = current_time
res['heartbeat_timestamp'] = current_time
res['admin_state_up'] = cfg.CONF.enable_new_agents
agent_db = agent_model.Agent(**res)
greenthread.sleep(0)
context.session.add(agent_db)
event_type = events.AFTER_CREATE
self._log_heartbeat(agent_state, agent_db, configurations_dict)
status = n_const.AGENT_NEW
greenthread.sleep(0)
registry.notify(resources.AGENT, event_type, self, context=context,
host=agent_state['host'], plugin=self,
agent=agent_state)
return status, agent_state
def _get_agents_considered_for_versions(self):
up_agents = self.get_agents(context.get_admin_context(),
filters={'admin_state_up': [True]})
return filter(self.is_agent_considered_for_versions, up_agents)
def get_agents_resource_versions(self, tracker):
"""Get the known agent resource versions and update the tracker.
This function looks up into the database and updates every agent
resource versions.
This method is called from version_manager when the cached information
has passed TTL.
:param tracker: receives a version_manager.ResourceConsumerTracker
"""
for agent in self._get_agents_considered_for_versions():
resource_versions = agent.get('resource_versions', {})
consumer = version_manager.AgentConsumer(
agent_type=agent['agent_type'], host=agent['host'])
LOG.debug("Update consumer %(consumer)s versions to: "
"%(versions)s", {'consumer': consumer,
'versions': resource_versions})
tracker.set_versions(consumer, resource_versions)
class AgentExtRpcCallback(object):
"""Processes the rpc report in plugin implementations.
This class implements the server side of an rpc interface. The client side
can be found in neutron.agent.rpc.PluginReportStateAPI. For more
information on changing rpc interfaces, see doc/source/devref/rpc_api.rst.
API version history:
1.0 - Initial version.
1.1 - report_state now returns agent state.
"""
target = oslo_messaging.Target(version='1.1',
namespace=n_const.RPC_NAMESPACE_STATE)
START_TIME = timeutils.utcnow()
def __init__(self, plugin=None):
super(AgentExtRpcCallback, self).__init__()
self.plugin = plugin
#TODO(ajo): fix the resources circular dependency issue by dynamically
# registering object types in the RPC callbacks api
resources_rpc = importutils.import_module(
'neutron.api.rpc.handlers.resources_rpc')
# Initialize RPC api directed to other neutron-servers
self.server_versions_rpc = resources_rpc.ResourcesPushToServersRpcApi()
@db_api.retry_if_session_inactive()
def report_state(self, context, **kwargs):
"""Report state from agent to server.
Returns - agent's status: AGENT_NEW, AGENT_REVIVED, AGENT_ALIVE
"""
time = kwargs['time']
time = timeutils.parse_strtime(time)
agent_state = kwargs['agent_state']['agent_state']
self._check_clock_sync_on_agent_start(agent_state, time)
if self.START_TIME > time:
time_agent = datetime.datetime.isoformat(time)
time_server = datetime.datetime.isoformat(self.START_TIME)
log_dict = {'agent_time': time_agent, 'server_time': time_server}
LOG.debug("Stale message received with timestamp: %(agent_time)s. "
"Skipping processing because it's older than the "
"server start timestamp: %(server_time)s", log_dict)
return
if not self.plugin:
self.plugin = manager.NeutronManager.get_plugin()
agent_status, agent_state = self.plugin.create_or_update_agent(
context, agent_state)
self._update_local_agent_resource_versions(context, agent_state)
return agent_status
def _update_local_agent_resource_versions(self, context, agent_state):
resource_versions_dict = agent_state.get('resource_versions')
if not resource_versions_dict:
return
version_manager.update_versions(
version_manager.AgentConsumer(agent_type=agent_state['agent_type'],
host=agent_state['host']),
resource_versions_dict)
# promptly inform other neutron-servers about this
self.server_versions_rpc.report_agent_resource_versions(
context, agent_state['agent_type'], agent_state['host'],
resource_versions_dict)
def _check_clock_sync_on_agent_start(self, agent_state, agent_time):
"""Checks if the server and the agent times are in sync.
Method checks if the agent time is in sync with the server time
on start up. Ignores it, on subsequent re-connects.
"""
if agent_state.get('start_flag'):
time_server_now = timeutils.utcnow()
diff = abs(timeutils.delta_seconds(time_server_now, agent_time))
if diff > cfg.CONF.agent_down_time:
agent_name = agent_state['agent_type']
time_agent = datetime.datetime.isoformat(agent_time)
host = agent_state['host']
log_dict = {'host': host,
'agent_name': agent_name,
'agent_time': time_agent,
'threshold': cfg.CONF.agent_down_time,
'serv_time': (datetime.datetime.isoformat
(time_server_now)),
'diff': diff}
LOG.error(_LE("Message received from the host: %(host)s "
"during the registration of %(agent_name)s has "
"a timestamp: %(agent_time)s. This differs from "
"the current server timestamp: %(serv_time)s by "
"%(diff)s seconds, which is more than the "
"threshold agent down"
"time: %(threshold)s."), log_dict)
_deprecate._MovedGlobals()
|
|
#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
from datetime import datetime
import logging
import os
import shutil
import subprocess
import sys
# Generates the sky_sdk from the template at sky/sdk.
# This script has a split personality of both making our deployment sdk
# as well as being a required part of developing locally, since all
# of our framework assumes it's working from the SDK.
SKY_TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
SKY_DIR = os.path.dirname(SKY_TOOLS_DIR)
SRC_ROOT = os.path.dirname(SKY_DIR)
DEFAULT_REL_BUILD_DIR = os.path.join('out', 'android_Release')
def git_revision():
return subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
def gen_filter(path):
if os.path.isdir(path):
return True
_, ext = os.path.splitext(path)
# Don't include all .dart, just .mojom.dart.
return path.endswith('.mojom.dart')
def dart_filter(path):
if os.path.isdir(path):
return True
_, ext = os.path.splitext(path)
# .dart includes '.mojom.dart'
return ext == '.dart'
def ensure_dir_exists(path):
if not os.path.exists(path):
os.makedirs(path)
def copy(from_root, to_root, filter_func=None):
assert os.path.exists(from_root), "%s does not exist!" % from_root
if os.path.isfile(from_root):
ensure_dir_exists(os.path.dirname(to_root))
shutil.copy(from_root, to_root)
return
ensure_dir_exists(to_root)
for root, dirs, files in os.walk(from_root):
# filter_func expects paths not names, so wrap it to make them absolute.
wrapped_filter = None
if filter_func:
wrapped_filter = lambda name: filter_func(os.path.join(root, name))
for name in filter(wrapped_filter, files):
from_path = os.path.join(root, name)
root_rel_path = os.path.relpath(from_path, from_root)
to_path = os.path.join(to_root, root_rel_path)
to_dir = os.path.dirname(to_path)
if not os.path.exists(to_dir):
os.makedirs(to_dir)
shutil.copy(from_path, to_path)
dirs[:] = filter(wrapped_filter, dirs)
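# Hedged usage sketch (paths are illustrative, not taken from this script):
# mirror only the generated .mojom.dart files from a build output directory
# into an SDK package directory, e.g.
#   copy('out/android_Release/gen/dart-gen/mojom',
#        'sky_sdk/packages/mojo/lib/mojom',
#        gen_filter)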
def link(from_root, to_root, filter_func=None):
ensure_dir_exists(os.path.dirname(to_root))
os.symlink(from_root, to_root)
def make_relative_symlink(source, link_name):
rel_source = os.path.relpath(source, os.path.dirname(link_name))
os.symlink(rel_source, link_name)
def confirm(prompt):
response = raw_input('%s [N]|y: ' % prompt)
return response and response.lower() == 'y'
def delete_all_non_hidden_files_in_directory(root, non_interactive=False):
to_delete = [os.path.join(root, p)
for p in os.listdir(root) if not p.startswith('.')]
if not to_delete:
return
if not non_interactive:
prompt = 'This will delete everything in %s:\n%s\nAre you sure?' % (
root, '\n'.join(to_delete))
if not confirm(prompt):
print 'User aborted.'
sys.exit(2)
for path in to_delete:
if os.path.isdir(path) and not os.path.islink(path):
shutil.rmtree(path)
else:
os.remove(path)
def main():
logging.basicConfig(level=logging.WARN)
parser = argparse.ArgumentParser(description='Deploy a new sky_sdk.')
parser.add_argument('sdk_root', type=str)
parser.add_argument('--build-dir', action='store', type=str,
default=os.path.join(SRC_ROOT, DEFAULT_REL_BUILD_DIR))
parser.add_argument('--extra-mojom-dir', action='append',
type=str,
dest='extra_mojom_dirs',
metavar='EXTRA_MOJOM_DIR',
help='Extra root directory for mojom packages. '
'Can be specified multiple times.',
default=[])
parser.add_argument('--non-interactive', action='store_true')
parser.add_argument('--dev-environment', action='store_true')
parser.add_argument('--commit', action='store_true')
parser.add_argument('--fake-pub-get-into', action='store', type=str)
args = parser.parse_args()
build_dir = os.path.abspath(args.build_dir)
sdk_root = os.path.abspath(args.sdk_root)
print 'Building SDK from %s into %s' % (build_dir, sdk_root)
start_time = datetime.now()
# These are separate ideas but don't need a separate flag yet.
use_links = args.dev_environment
skip_apks = args.dev_environment
should_commit = args.commit
generate_licenses = not args.dev_environment
# We save a bunch of time in --dev-environment mode by symlinking whole
# directories when possible. Any names which conflict with generated
# directories can't be symlinked and must be copied.
copy_or_link = link if use_links else copy
def sdk_path(rel_path):
return os.path.join(sdk_root, rel_path)
def src_path(rel_path):
return os.path.join(SRC_ROOT, rel_path)
ensure_dir_exists(sdk_root)
delete_all_non_hidden_files_in_directory(sdk_root, args.non_interactive)
# Manually clear sdk_root above to avoid deleting dot-files.
copy(src_path('sky/sdk'), sdk_root)
copy_or_link(src_path('sky/examples'), sdk_path('examples'))
# Sky package
copy_or_link(src_path('sky/framework'), sdk_path('packages/sky/lib/framework'))
copy_or_link(src_path('sky/assets'), sdk_path('packages/sky/lib/assets'))
# Sky SDK additions:
copy_or_link(src_path('sky/engine/bindings/builtin.dart'),
sdk_path('packages/sky/sdk_additions/dart_sky_builtins.dart'))
bindings_path = os.path.join(build_dir, 'gen/sky/bindings')
# dart_sky.dart has many supporting files:
copy(bindings_path, sdk_path('packages/sky/sdk_additions'),
dart_filter)
# Mojo package, lots of overlap with gen, must be copied:
copy(src_path('mojo/public'), sdk_path('packages/mojo/lib/public'),
dart_filter)
# By convention the generated .mojom.dart files in a pub package
# go under $PACKAGE/lib/mojom.
# The mojo package owns all the .mojom.dart files that are not in the 'sky'
# mojom module.
def non_sky_gen_filter(path):
if os.path.isdir(path) and path.endswith('sky'):
return False
return gen_filter(path)
mojo_package_mojom_dir = sdk_path('packages/mojo/lib/mojom')
copy(os.path.join(build_dir, 'gen/dart-gen/mojom'), mojo_package_mojom_dir,
non_sky_gen_filter)
# The Sky package owns the .mojom.dart files in the 'sky' mojom module.
def sky_gen_filter(path):
if os.path.isfile(path) and not os.path.dirname(path).endswith('sky'):
return False
return gen_filter(path)
sky_package_mojom_dir = sdk_path('packages/sky/lib/mojom')
copy(os.path.join(build_dir, 'gen/dart-gen/mojom'), sky_package_mojom_dir,
sky_gen_filter)
# Mojo SDK additions:
copy_or_link(src_path('mojo/public/dart/bindings.dart'),
sdk_path('packages/mojo/sdk_additions/dart_mojo_bindings.dart'))
copy_or_link(src_path('mojo/public/dart/core.dart'),
sdk_path('packages/mojo/sdk_additions/dart_mojo_core.dart'))
if not skip_apks:
ensure_dir_exists(sdk_path('packages/sky/apks'))
shutil.copy(os.path.join(build_dir, 'apks', 'SkyDemo.apk'),
sdk_path('packages/sky/apks'))
if generate_licenses:
with open(sdk_path('LICENSES.sky'), 'w') as license_file:
subprocess.check_call([src_path('tools/licenses.py'), 'credits'],
stdout=license_file)
copy_or_link(src_path('AUTHORS'), sdk_path('packages/mojo/AUTHORS'))
copy_or_link(src_path('LICENSE'), sdk_path('packages/mojo/LICENSE'))
copy_or_link(src_path('AUTHORS'), sdk_path('packages/sky/AUTHORS'))
copy_or_link(src_path('LICENSE'), sdk_path('packages/sky/LICENSE'))
if args.fake_pub_get_into:
packages_dir = os.path.abspath(args.fake_pub_get_into)
ensure_dir_exists(packages_dir)
make_relative_symlink(sdk_path('packages/mojo/lib'),
os.path.join(packages_dir, 'mojo'))
make_relative_symlink(sdk_path('packages/sky/lib'),
os.path.join(packages_dir, 'sky'))
mojom_dirs = [ mojo_package_mojom_dir, sky_package_mojom_dir ]
mojom_dirs += args.extra_mojom_dirs
for mojom_dir in mojom_dirs:
copy(mojom_dir, os.path.join(packages_dir, 'mojom'), gen_filter)
if should_commit:
# Kind of a hack to make a prettier build dir path for the commit message:
script_path = os.path.relpath(os.path.abspath(__file__), SRC_ROOT)
rel_build_dir = os.path.relpath(build_dir, SRC_ROOT)
revision = git_revision()
commit_url = "https://github.com/domokit/mojo/commit/%s" % revision
pattern = """Autogenerated from %s
Using %s and build output from %s.
"""
commit_message = pattern % (commit_url, script_path, rel_build_dir)
subprocess.check_call(['git', 'add', '.'], cwd=sdk_root)
subprocess.check_call([
'git', 'commit',
'-m', commit_message
], cwd=sdk_root)
time_delta = datetime.now() - start_time
print 'SDK built at %s in %ss' % (sdk_root, time_delta.total_seconds())
if __name__ == '__main__':
main()
|
|
# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utilities for standard operations on URIs of different kinds."""
from __future__ import print_function
import re
import sys
import urllib
import urllib2
from chromite.lib.paygen import filelib
from chromite.lib.paygen import gslib
# This module allows files from different storage types to be handled
# in a common way, for supported operations.
PROTOCOL_GS = gslib.PROTOCOL
PROTOCOL_HTTP = 'http'
PROTOCOL_HTTPS = 'https'
PROTOCOLS = (PROTOCOL_GS,
PROTOCOL_HTTP,
PROTOCOL_HTTPS)
PROTOCOL_SEP = '://'
EXTRACT_PROTOCOL_RE = re.compile(r'^(\w+)%s' % PROTOCOL_SEP)
SPLIT_URI_RE = re.compile(r'^(\w+)%s(.*)$' % PROTOCOL_SEP)
TYPE_GS = PROTOCOL_GS
TYPE_HTTP = PROTOCOL_HTTP
TYPE_HTTPS = PROTOCOL_HTTPS
TYPE_LOCAL = 'file'
class NotSupportedForType(RuntimeError):
"""Raised when operation is not supported for a particular file type"""
def __init__(self, uri_type, extra_msg=None):
# pylint: disable=protected-access
function = sys._getframe(1).f_code.co_name
msg = 'Function %s not supported for %s URIs' % (function, uri_type)
if extra_msg:
msg += ', ' + extra_msg
RuntimeError.__init__(self, msg)
class NotSupportedForTypes(RuntimeError):
"""Raised when operation is not supported for all particular file type"""
def __init__(self, extra_msg=None, *uri_types):
# pylint: disable=protected-access
function = sys._getframe(1).f_code.co_name
msg = ('Function %s not supported for set of URIs with types: %s' %
(function, ', '.join(uri_types)))
if extra_msg:
msg += ', ' + extra_msg
RuntimeError.__init__(self, msg)
class NotSupportedBetweenTypes(RuntimeError):
"""Raised when operation is not supported between particular file types"""
def __init__(self, uri_type1, uri_type2, extra_msg=None):
# pylint: disable=protected-access
function = sys._getframe(1).f_code.co_name
msg = ('Function %s not supported between %s and %s URIs' %
(function, uri_type1, uri_type2))
if extra_msg:
msg += ', ' + extra_msg
RuntimeError.__init__(self, msg)
class MissingURLError(RuntimeError):
"""Raised when nothing exists at URL."""
def ExtractProtocol(uri):
"""Take a URI and return the protocol it is using, if any.
Examples:
'gs://some/path' ==> 'gs'
'file:///some/path' ==> 'file'
'/some/path' ==> None
'/cns/some/colossus/path' ==> None
Args:
uri: The URI to get protocol from.
Returns:
Protocol string that is found, or None.
"""
match = EXTRACT_PROTOCOL_RE.search(uri)
if match:
return match.group(1)
return None
def GetUriType(uri):
"""Get the type of a URI.
See the TYPE_* constants for examples. This is mostly based
on URI protocols, with Colossus and local files as exceptions.
Args:
uri: The URI to consider
Returns:
The URI type.
"""
protocol = ExtractProtocol(uri)
if protocol:
return protocol
return TYPE_LOCAL
def SplitURI(uri):
"""Get the protocol and path from a URI
Examples:
'gs://some/path' ==> ('gs', 'some/path')
'file:///some/path' ==> ('file', '/some/path')
'/some/path' ==> (None, '/some/path')
'/cns/some/colossus/path' ==> (None, '/cns/some/colossus/path')
Args:
uri: The uri to get protocol and path from.
Returns:
Tuple (protocol, path)
"""
match = SPLIT_URI_RE.search(uri)
if match:
return (match.group(1), match.group(2))
return (None, uri)
def IsGsURI(uri):
"""Returns True if given uri uses Google Storage protocol."""
return PROTOCOL_GS == ExtractProtocol(uri)
def IsFileURI(uri):
"""Return True if given uri is a file URI (or path).
If uri uses the file protocol or it is a plain non-Colossus path
then return True.
Args:
uri: Any URI or path.
Returns:
True or False as described above.
"""
return TYPE_LOCAL == GetUriType(uri)
def IsHttpURI(uri, https_ok=False):
"""Returns True if given uri uses http, or optionally https, protocol.
Args:
uri: The URI to check.
https_ok: If True, then accept https protocol as well.
Returns:
Boolean
"""
uri_type = GetUriType(uri)
return TYPE_HTTP == uri_type or (https_ok and TYPE_HTTPS == uri_type)
def IsHttpsURI(uri):
"""Returns True if given uri uses https protocol."""
return TYPE_HTTPS == GetUriType(uri)
def MD5Sum(uri):
"""Compute or retrieve MD5 sum of uri.
Supported for: local files, GS files.
Args:
uri: The /unix/path or gs:// uri to compute the md5sum on.
Returns:
A string representing the md5sum of the file/uri passed in.
None if we do not understand the uri passed in or cannot compute
the md5sum.
"""
uri_type = GetUriType(uri)
if uri_type == TYPE_LOCAL:
return filelib.MD5Sum(uri)
elif uri_type == TYPE_GS:
try:
return gslib.MD5Sum(uri)
except gslib.GSLibError:
return None
# Colossus does not have a command for getting MD5 sum. We could
# copy the file to local disk and calculate it, but it seems better
# to explicitly say it is not supported.
raise NotSupportedForType(uri_type)
def Cmp(uri1, uri2):
"""Return True if paths hold identical files.
If either file is missing then always return False.
Args:
uri1: URI to a file.
uri2: URI to a file.
Returns:
True if files are the same, False otherwise.
Raises:
NotSupportedBetweenTypes if Cmp cannot be done between the two
URIs provided.
"""
uri_type1 = GetUriType(uri1)
uri_type2 = GetUriType(uri2)
uri_types = set([uri_type1, uri_type2])
if TYPE_GS in uri_types:
# GS only supported between other GS files or local files.
if len(uri_types) == 1 or TYPE_LOCAL in uri_types:
return gslib.Cmp(uri1, uri2)
if TYPE_LOCAL in uri_types and len(uri_types) == 1:
return filelib.Cmp(uri1, uri2)
raise NotSupportedBetweenTypes(uri_type1, uri_type2)
class URLopener(urllib.FancyURLopener):
"""URLopener that will actually complain when download fails."""
# The urllib.urlretrieve function, which seems like a good fit for this,
# does not give access to error code.
def http_error_default(self, *args, **kwargs):
urllib.URLopener.http_error_default(self, *args, **kwargs)
def URLRetrieve(src_url, dest_path):
"""Download file from given URL to given local file path.
Args:
src_url: URL to download from.
dest_path: Path to download to.
Raises:
MissingURLError if URL cannot be downloaded.
"""
opener = URLopener()
try:
opener.retrieve(src_url, dest_path)
except IOError as e:
# If the domain is valid but download failed errno shows up as None.
if e.errno is None:
raise MissingURLError('Unable to download %s' % src_url)
# If the domain is invalid the errno shows up as 'socket error', weirdly.
try:
int(e.errno)
# This means there was some normal error writing to the dest_path.
raise
except ValueError:
raise MissingURLError('Unable to download %s (bad domain?)' % src_url)
def Copy(src_uri, dest_uri):
"""Copy one uri to another.
Args:
src_uri: URI to copy from.
dest_uri: Path to copy to.
Raises:
NotSupportedBetweenTypes if Cmp cannot be done between the two
URIs provided.
"""
uri_type1 = GetUriType(src_uri)
uri_type2 = GetUriType(dest_uri)
uri_types = set([uri_type1, uri_type2])
if TYPE_GS in uri_types:
# GS only supported between other GS files or local files.
if len(uri_types) == 1 or TYPE_LOCAL in uri_types:
return gslib.Copy(src_uri, dest_uri)
if TYPE_LOCAL in uri_types and len(uri_types) == 1:
return filelib.Copy(src_uri, dest_uri)
if uri_type1 in (TYPE_HTTP, TYPE_HTTPS) and uri_type2 == TYPE_LOCAL:
# Download file from URL.
return URLRetrieve(src_uri, dest_uri)
raise NotSupportedBetweenTypes(uri_type1, uri_type2)
def Remove(*args, **kwargs):
"""Delete the file(s) at uris, or directory(s) with recurse set.
Args:
args: One or more URIs.
ignore_no_match: If True, then do not complain if anything was not
removed because no URI match was found. Like rm -f. Defaults to False.
recurse: Remove recursively starting at path. Same as rm -R. Defaults
to False.
"""
uri_types = set([GetUriType(u) for u in args])
if TYPE_GS in uri_types:
# GS support only allows local files among list.
if len(uri_types) == 1 or (TYPE_LOCAL in uri_types and len(uri_types) == 2):
return gslib.Remove(*args, **kwargs)
if TYPE_LOCAL in uri_types and len(uri_types) == 1:
return filelib.Remove(*args, **kwargs)
raise NotSupportedForTypes(*list(uri_types))
def Size(uri):
"""Return size of file at URI in bytes.
Args:
uri: URI to consider
Returns:
Size of file at given URI in bytes.
Raises:
MissingURLError if uri is a URL and cannot be found.
"""
uri_type = GetUriType(uri)
if TYPE_GS == uri_type:
return gslib.FileSize(uri)
if TYPE_LOCAL == uri_type:
return filelib.Size(uri)
if TYPE_HTTP == uri_type or TYPE_HTTPS == uri_type:
try:
response = urllib2.urlopen(uri)
if response.getcode() == 200:
return int(response.headers.getheader('Content-Length'))
except urllib2.HTTPError as e:
# Interpret 4** errors as our own MissingURLError.
if e.code < 400 or e.code >= 500:
raise
raise MissingURLError('No such file at URL %s' % uri)
raise NotSupportedForType(uri_type)
def Exists(uri, as_dir=False):
"""Return True if file exists at given URI.
If URI is a directory and as_dir is False then this will return False.
Args:
uri: URI to consider
as_dir: If True then check URI as a directory, otherwise check as a file.
Returns:
True if file (or directory) exists at URI, False otherwise.
"""
uri_type = GetUriType(uri)
if TYPE_GS == uri_type:
if as_dir:
# GS does not contain directories.
return False
return gslib.Exists(uri)
if TYPE_LOCAL == uri_type:
return filelib.Exists(uri, as_dir=as_dir)
if TYPE_HTTP == uri_type or TYPE_HTTPS == uri_type:
if as_dir:
raise NotSupportedForType(uri_type, extra_msg='with as_dir=True')
try:
response = urllib2.urlopen(uri)
return response.getcode() == 200
except urllib2.HTTPError:
return False
raise NotSupportedForType(uri_type)
def ListFiles(root_path, recurse=False, filepattern=None, sort=False):
"""Return list of file paths under given root path.
Directories are intentionally excluded from results. The root_path
argument can be a local directory path, a Google storage directory URI,
or a Colossus (/cns) directory path.
Args:
root_path: A local path, CNS path, or GS path to directory.
recurse: Look for files in subdirectories, as well
filepattern: glob pattern to match against basename of file
sort: If True then do a default sort on paths
Returns:
List of paths to files that matched
"""
uri_type = GetUriType(root_path)
if TYPE_GS == uri_type:
return gslib.ListFiles(root_path, recurse=recurse,
filepattern=filepattern, sort=sort)
if TYPE_LOCAL == uri_type:
return filelib.ListFiles(root_path, recurse=recurse,
filepattern=filepattern, sort=sort)
raise NotSupportedForType(uri_type)
def CopyFiles(src_dir, dst_dir):
"""Recursively copy all files from src_dir into dst_dir
This leverages the Copy method, so the restrictions there for what
copies are supported apply here.
Args:
src_dir: A local, CNS, or GS directory to copy from.
dst_dir: A local, CNS, or GS directory to copy into.
Returns:
A list of absolute path files for all copied files.
"""
dst_paths = []
src_paths = ListFiles(src_dir, recurse=True)
for src_path in src_paths:
dst_path = src_path.replace(src_dir, dst_dir)
Copy(src_path, dst_path)
dst_paths.append(dst_path)
return dst_paths
def RemoveDirContents(base_dir):
"""Remove all contents of a directory.
Args:
base_dir: directory to delete contents of.
"""
uri_type = GetUriType(base_dir)
if TYPE_GS == uri_type:
return gslib.RemoveDirContents(base_dir)
if TYPE_LOCAL == uri_type:
return filelib.RemoveDirContents(base_dir)
raise NotSupportedForType(uri_type)
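# --- Hedged usage sketch (illustrative only, not part of the original module) ---
# The URI helpers above are pure string/regex manipulation, so they can be
# exercised without touching gslib or filelib. The URIs below are made up.
if __name__ == '__main__':
    for example in ('gs://bucket/some/path',
                    'https://host/file.bin',
                    '/local/some/path'):
        print(example, '->', GetUriType(example), SplitURI(example))
    print(IsHttpURI('https://host/file.bin', https_ok=True))  # True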
|
|
#!/usr/bin/env python2
# Tokenizer code adapted by David Humphrey from Neal Norwitz's C++ tokenizer:
#
# Copyright 2007 Neal Norwitz
# Portions Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from cgi import escape
class Token(object):
"""Data container to represent an IDL token.
Tokens can be identifiers, syntax char(s), constants, or
pre-processor directives.
start contains the index of the first char of the token in the source
end contains the index of the char just beyond the token in the source
"""
def __init__(self, token_type, name, start, end, line):
self.token_type = token_type
self.name = name
self.start = start
self.end = end
self.line = line
def __str__(self):
# NOTE: 'utils' is not imported in this adaptation; a module-level DEBUG flag can be set instead.
if not globals().get('DEBUG'):
return 'Token(%r)' % self.name
return 'Token(%r, %s, %s)' % (self.name, self.start, self.end)
__repr__ = __str__
class BaseTokenizer:
# Token Types
UNKNOWN = 'unknown'
NEWLINE = 'newline'
TEXTLINE = 'textline'
def __init__(self, source):
self.source = source
def getTokens(self):
line = 1
i = 0
end = len(self.source)
while i < end:
token_type = self.UNKNOWN
start = i
c = self.source[i]
if c == '\n':
token_type = self.NEWLINE
i += 1
line += 1
else:
token_type = self.TEXTLINE
while i < end and not self.source[i] == '\n':
i += 1
if i >= end: return
yield Token(token_type, self.source[start:i], start, i, line)
class CppTokenizer(BaseTokenizer):
# Add $ as a valid identifier char since so much code uses it.
_letters = 'abcdefghijklmnopqrstuvwxyz'
VALID_IDENTIFIER_CHARS = frozenset(_letters + _letters.upper() + '_0123456789$')
HEX_DIGITS = frozenset('0123456789abcdefABCDEF')
INT_OR_FLOAT_DIGITS = frozenset('01234567890eE-+')
# C++0x string prefixes.
_STR_PREFIXES = frozenset(('R', 'u8', 'u8R', 'u', 'uR', 'U', 'UR', 'L', 'LR'))
# Token types.
UNKNOWN = 'UNKNOWN'
SYNTAX = 'SYNTAX'
CONSTANT = 'CONSTANT'
NAME = 'NAME'
PREPROCESSOR = 'PREPROCESSOR'
NEWLINE = 'NEWLINE'
COMMENT = 'COMMENT'
STRING = 'STRING'
KEYWORD = 'KEYWORD'
# C++ keywords
_keywords = frozenset(['auto', 'const', 'double', 'float', 'int', 'short', 'struct',
'unsigned', 'break', 'continue', 'else', 'for', 'long', 'signed',
'switch', 'void', 'case', 'default', 'enum', 'goto', 'register',
'sizeof', 'typedef', 'volatile', 'char', 'do', 'extern', 'if',
'return', 'static', 'union', 'while', 'asm', 'dynamic_cast',
'namespace', 'reinterpret_cast', 'try', 'bool', 'explicit',
'new', 'static_cast', 'typeid', 'catch', 'false', 'operator',
'template', 'typename', 'class', 'friend', 'private', 'this',
'using', 'const_cast', 'inline', 'public', 'throw', 'virtual',
'delete', 'mutable', 'protected', 'true', 'wchar_t', 'and',
'bitand', 'compl', 'not_eq', 'or_eq', 'xor_eq', 'and_eq', 'bitor',
'not', 'or', 'xor'])
def __init__(self, source):
BaseTokenizer.__init__(self, source)
def _getString(self, source, start, i):
i = source.find('"', i+1)
while source[i-1] == '\\':
# Count the trailing backslashes.
backslash_count = 1
j = i - 2
while source[j] == '\\':
backslash_count += 1
j -= 1
# When trailing backslashes are even, they escape each other.
if (backslash_count % 2) == 0:
break
i = source.find('"', i+1)
return i + 1
def _getChar(self, source, start, i):
i = source.find("'", i+1)
while source[i-1] == '\\':
# Need to special case '\\'.
if (i - 2) > start and source[i-2] == '\\':
break
i = source.find("'", i+1)
# Try to handle unterminated single quotes (in a #if 0 block).
if i < 0:
i = start
return i + 1
def getTokens(self):
# Cache various valid character sets for speed.
valid_identifier_chars = self.VALID_IDENTIFIER_CHARS
hex_digits = self.HEX_DIGITS
int_or_float_digits = self.INT_OR_FLOAT_DIGITS
int_or_float_digits2 = int_or_float_digits | set('.')
# Are we in a multi-line comment? This is position of comment's start
in_comment = -1
# Are we in a multi-line macro? This is the position of the macro's start
in_macro = -1
# Only ignore errors while in a #if 0 block.
ignore_errors = False
# keep track of which line we're on
line = 1
source = self.source
i = 0
end = len(source)
while i < end:
# Skip whitespace if not in macro/comment.
if in_comment == -1 and in_macro == -1:
while i < end and source[i] in [' ', '\t', '\r', '\x0c']:
i += 1
if i >= end:
return
token_type = self.UNKNOWN
start = i
c = source[i]
if c == '\n':
token_type = self.NEWLINE
i += 1
elif in_comment > -1: # Deal with being in multi-line comments (report each comment line)
token_type = self.COMMENT
while i < end and source[i] != '\n' and not (source[i] == '*' and source[i+1] == '/'):
i += 1
if i >= end:
return
if source[i] == '*' and source[i+1] == '/':
in_comment = -1
i += 2
if i >= end:
return
elif in_macro > -1 and c not in '/"\'': # Deal with being in macros (report each macro line)
token_type = self.PREPROCESSOR
while i < end and source[i] != '\n':
# Handle comments within preprocessor lines
if i < end - 1 and source[i] == '/' and (source[i+1] == '/' or source[i+1] == '*'):
break
if i < end and not source.startswith('#include', in_macro) and source[i] in '"\'':
break
if not source[i].isspace():
last_char = source[i]
i += 1
if i >= end:
return
if last_char is None or last_char != '\\':
in_macro = -1
elif c.isalpha() or c == '_': # Find a string token.
token_type = self.NAME
while source[i] in valid_identifier_chars:
i += 1
# String and character constants can look like a name if
# they are something like L"".
if (source[i] == "'" and (i - start) == 1 and
source[start:i] in 'uUL'):
# u, U, and L are valid C++0x character prefixes.
token_type = self.CONSTANT
i = self._getChar(source, start, i)
elif source[i] == "'" and source[start:i] in self._STR_PREFIXES:
token_type = self.CONSTANT
i = self._getString(source, start, i)
elif c == '/' and source[i+1] == '/': # Find // comments.
token_type = self.COMMENT
i = source.find('\n', i)
if i == -1: # Handle EOF.
i = end
elif c == '/' and source[i+1] == '*': # Find /* comments. */
in_comment = i
continue
elif c in ':+-<>&|*=': # : or :: (plus other chars).
token_type = self.SYNTAX
i += 1
new_ch = source[i]
if new_ch == c:
i += 1
elif c == '-' and new_ch == '>':
i += 1
elif new_ch == '=':
i += 1
elif c in '$()[]{}~!?^%;/.,@': # Handle single char tokens (adding @ for obj-c/c++).
token_type = self.SYNTAX
i += 1
if c == '.' and source[i].isdigit():
token_type = self.CONSTANT
i += 1
while source[i] in int_or_float_digits:
i += 1
# Handle float suffixes.
for suffix in ('l', 'f'):
if suffix == source[i:i+1].lower():
i += 1
break
elif c.isdigit(): # Find integer.
token_type = self.CONSTANT
if c == '0' and source[i+1] in 'xX':
# Handle hex digits.
i += 2
while source[i] in hex_digits:
i += 1
else:
while source[i] in int_or_float_digits2:
i += 1
# Handle integer (and float) suffixes.
for suffix in ('ull', 'll', 'ul', 'l', 'f', 'u'):
size = len(suffix)
if suffix == source[i:i+size].lower():
i += size
break
elif c == '"': # Find string.
token_type = self.STRING
i = self._getString(source, start, i)
elif c == "'": # Find char.
token_type = self.STRING
i = self._getChar(source, start, i)
elif c == '#': # Find pre-processor command.
in_macro = i
continue
elif c == '\\': # Handle \ in code.
# This is different from the pre-processor \ handling.
i += 1
continue
elif ignore_errors:
# The tokenizer seems to be in pretty good shape. This
# raise is conditionally disabled so that bogus code
# in an #if 0 block can be handled. Since we will ignore
# it anyways, this is probably fine. So disable the
# exception and return the bogus char.
i += 1
else:
sys.stderr.write('Got invalid token in %s @ %d token:%s: %r\n' %
('?', i, c, source[i-10:i+10]))
raise RuntimeError('unexpected token')
if i <= 0:
print('Invalid index, exiting now. Started on %d: "...%s..."' % (start, source[start:start+100]))
return
# if we get a NEWLINE, bump line number, but don't report
if token_type == self.NEWLINE:
line += 1
# if this is a keyword, change token type
if token_type == self.NAME and source[start:i] in self._keywords:
token_type = self.KEYWORD
yield Token(token_type, source[start:i], start, i, line)
class IdlTokenizer(CppTokenizer):
# IDL is close enough to C++ that we just need a new keyword set.
_keywords = frozenset(['interface', 'attribute', 'readonly', 'uuid', 'scriptable',
'const', 'native', 'ptr', 'ref', 'nsid', 'retval', 'shared',
'iid_is', 'notxpcom', 'noscript', 'in', 'out', 'inout'])
def __init__(self, source):
CppTokenizer.__init__(self, source)
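# --- Hedged usage sketch (illustrative only, not part of the original module) ---
# Tokenizes a tiny C++ snippet and prints everything except NEWLINE tokens;
# names such as 'int' and 'return' come back with token_type KEYWORD, and the
# trailing '// done' comment is reported as a single COMMENT token.
if __name__ == '__main__':
    snippet = 'int main() {\n  return 0;  // done\n}\n'
    for tok in CppTokenizer(snippet).getTokens():
        if tok.token_type != CppTokenizer.NEWLINE:
            print('%-10s %r (line %d)' % (tok.token_type, tok.name, tok.line))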
|
|
from functools import wraps
from ..utils.isaac import is_live_site
from ..utils.log import log, TEST, INFO, ERROR
from collections import OrderedDict
__all__ = ['TestWithDependency']
class TestWithDependency(object):
"""Declares a Regression Test item with dependencies and tracks results.
Use as a decorator for each regression test function to ensure the test is
run only if the tests it depends upon have run successfully. Automatically
keeps track of the results of tests using the class variable 'Results'. To
access the results of tests, 'TestWithDependency.Results' provides the internal
OrderedDict used to track results; 'True' is a pass, 'False' is a fail,
'None' denotes that the test was not run.
- Tests must return a boolean 'True' for pass, 'False' for failure. Any other
return value will be considered a failure!
- Throws 'KeyError' if dependency in 'deps' has not been run/defined!
- 'Name' should be an uppercase string of max length 25 characters to describe
the test and is the name to be used in 'deps' if any other test depends
on this test.
- 'deps' is an optional list of test names that must have completed successfully
for this test to be run. If a test name is listed in 'deps' and does not
appear in 'Results' keys; a 'KeyError' exception will be raised.
- 'teardowns' is an optional list of tests that should be run after a test is
run using 'run_test_with_deps(...)' and before overall testing finishes.
Specifying a complex web of teardowns may cause infinite recursion and
overflow or other errors. Use very sparingly!
"""
Results = OrderedDict()
_Tests = OrderedDict()
_Dependencies = dict()
_Teardowns = dict()
_teardowns_to_run = set()
def __init__(self, Name, deps=[], teardowns=[]):
self.Name = Name
self.deps = deps
self.teardowns = teardowns
self.Results[Name] = None
self._Dependencies[Name] = self.deps
self._Teardowns[Name] = self.teardowns
def __call__(self, test_func):
def _decorator(*args, **kwargs):
if type(self.Results[self.Name]) == bool: # Don't re-run previously run tests
log(TEST, "Test '%s'." % self.Name)
log(INFO, "Test has already run.")
return
if self.dependencies_met(self.Name):
log(TEST, "Test '%s'." % self.Name)
self.Results[self.Name] = False # If it dies; ensure this test marked as a fail!
result = test_func(*args, **kwargs)
if type(result) != bool:
log(INFO, "Test returned unexpected value. Assuming failure!")
result = False
del self.Results[self.Name] # This moves the entry to the end,
self.Results[self.Name] = result # So it is clearer which were not run.
return result
else:
not_met = ", ".join([d for d in self.deps if not self.Results[d]])
log(TEST, "Test '%s' not run, dependencies '%s' not met!" % (self.Name, not_met))
log(ERROR, "Test could not run!")
del self.Results[self.Name] # This moves the entry to the end,
self.Results[self.Name] = None # So it is clearer which were not run.
return None
test = wraps(test_func)(_decorator)
self._Tests[self.Name] = test
return test
@classmethod
def dependencies_met(cls, name):
return all([cls.Results[d] for d in cls._Dependencies[name]])
@classmethod
def run_test_with_deps(cls, name, driver, inbox, Users, ISAAC_WEB, GUERRILLAMAIL, WAIT_DUR, run_teardowns=True):
"""Run a single test from the test suite, first running all dependencies.
This ideally should not be used as a stand in to run a short list of tests
(though it ought to work if made to). It is designed for running single
tests when a feature of the website has changed and only one area needs
testing. It avoids having to work out the dependencies by hand.
- 'cls' is automatically passed in because this function is decorated
as a classmethod. IGNORE THIS ARGUMENT.
- 'name' is the uppercase name of the test to run.
- 'driver' should be a Selenium WebDriver.
- 'inbox' must be a GuerrillaInbox object.
- 'Users' should be the TestUsers object.
- 'ISAAC_WEB' is the string URL of the Isaac website to be tested.
- 'GUERRILLAMAIL' is the string URL of GuerrillaMail.
- 'WAIT_DUR' is the time in seconds to wait for JavaScript to run/load.
- 'run_teardowns' notes whether this is the last call of the recursion
and runs any necessary tests to cleanup. Do not pass this parameter
by hand without good reason.
"""
assert not is_live_site(ISAAC_WEB), "Cannot perform testing on live Isaac website!"
# This is the superset of all arguments required by tests. Their "**kwargs"
# argument allows us to pass in all arguments and any extra ones will simply
# be ignored.
kwargs = {"driver": driver, "inbox": inbox, "Users": Users, "ISAAC_WEB": ISAAC_WEB,
"GUERRILLAMAIL": GUERRILLAMAIL, "WAIT_DUR": WAIT_DUR}
# Run each of the dependency tests if necessary, and recursively run their
# dependencies. DOES NOT CHECK FOR SUCCESS of dependencies, that is left to
# the main decorator above. Add any teardowns to overall list.
for t in cls._Dependencies[name]:
if type(cls.Results[t]) != bool:
cls._teardowns_to_run.update(cls._Teardowns[t])
cls.run_test_with_deps(t, run_teardowns=False, **kwargs)
# Run the actual test that needed to run, noting any teardowns:
cls._teardowns_to_run.update(cls._Teardowns[name])
cls._Tests[name](**kwargs)
# If supposed to run teardowns, do that:
if run_teardowns:
for t in cls._teardowns_to_run:
cls.run_test_with_deps(t, run_teardowns=False, **kwargs)
cls._teardowns_to_run = set()
@classmethod
def run_all_tests(cls, driver, inbox, Users, ISAAC_WEB, GUERRILLAMAIL, WAIT_DUR):
"""Run all tests from the test suite.
This will run all defined tests, in the order set by their imports. This
ordering is important if all are to run successfully.
- 'cls' is automatically passed in because this function is decorated
as a classmethod. IGNORE THIS ARGUMENT.
- 'driver' should be a Selenium WebDriver.
- 'inbox' must be a GuerrillaInbox object.
- 'Users' should be the TestUsers object.
- 'ISAAC_WEB' is the string URL of the Isaac website to be tested.
- 'GUERRILLAMAIL' is the string URL of GuerrillaMail.
- 'WAIT_DUR' is the time in seconds to wait for JavaScript to run/load.
"""
assert not is_live_site(ISAAC_WEB), "Cannot perform testing on live Isaac website!"
# This is the superset of all arguments required by tests. Their "**kwargs"
# argument allows us to pass in all arguments and any extra ones will simply
# be ignored.
kwargs = {"driver": driver, "inbox": inbox, "Users": Users, "ISAAC_WEB": ISAAC_WEB,
"GUERRILLAMAIL": GUERRILLAMAIL, "WAIT_DUR": WAIT_DUR}
for test in cls._Tests:
cls._Tests[test](**kwargs)
@classmethod
def dependency_graph(cls):
"""Return the dependency graph of the tests in string form.
Produce a string form of the dependency graph suitable for using with
Graphviz or similar (try: http://www.webgraphviz.com/). May help to
visualise how the tests interdepend.
"""
graph_str = "digraph selenium_tester {\n"
for n in cls._Dependencies:
graph_str += "%s;\n" % n
for d in cls._Dependencies[n]:
graph_str += "%s -> %s;\n" % (n, d)
graph_str += "}"
return graph_str
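# Hedged usage sketch (hypothetical test, shown as a comment so it is not
# registered alongside the real tests imported below). A test declares its
# name and dependencies via the decorator and returns a boolean result:
#
#   @TestWithDependency('EXAMPLE_FEATURE', deps=['LOGIN'])
#   def example_feature(driver, ISAAC_WEB, WAIT_DUR, **kwargs):
#       driver.get(ISAAC_WEB + "/example")
#       return True
#
# ('LOGIN' is assumed to be the name used by the login test below.)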
# Import all known tests into the namespace of this file, also avoids extra imports
# in any file using the module. It can just import TestWithDependency from this
# file and these imports will declare all the requisite tests.
from .login import login
from .questionnaire import questionnaire
from .accept_cookies import accept_cookies
from .global_nav import global_nav
from .global_nav_mobile import global_nav_mobile
from .logout import logout
from .user_type_specific_menu_links import user_type_specific_menu_links
from .my_assignments_page_access import my_assignments_page_access
from .set_assignments_page_access import set_assignments_page_access
from .groups_page_access import groups_page_access
from .groups_creation import groups_creation
from .login_throttle import login_throttle
from .login_timeout import login_timeout
from .login_mobile import login_mobile
from .logout_mobile import logout_mobile
from .signup import signup
from .welcome_email import welcome_email
from .req_verify_emails import req_verify_emails
from .recieve_verify_emails import recieve_verify_emails
from .verify_link import verify_link
from .verify_banner_gone import verify_banner_gone
from .pwd_reset_throttle import pwd_reset_throttle
from .recieve_pwd_reset_emails import recieve_pwd_reset_emails
from .pwd_reset_link import pwd_reset_link
from .reset_pwd_login import reset_pwd_login
from .login_uppercase import login_uppercase
from .signup_uppercase import signup_uppercase
from .account_settings import account_settings
from .user_consistency import user_consistency
from .user_consistency_popup import user_consistency_popup
from .email_change import email_change
from .email_change_emails import email_change_emails
from .email_change_login_status import email_change_login_status
from .admin_page_access import admin_page_access
from .accordion_behaviour import accordion_behavior
from .quick_questions import quick_questions
from .multiple_choice_questions import multiple_choice_questions
from .numeric_q_units_select import numeric_q_units_select
from .numeric_q_all_correct import numeric_q_all_correct
from .numeric_q_answer_reloaded import numeric_q_answer_reloaded
from .numeric_q_answer_change import numeric_q_answer_change
from .numeric_q_incorrect_unit import numeric_q_incorrect_unit
from .numeric_q_incorrect_value import numeric_q_incorrect_value
from .numeric_q_all_incorrect import numeric_q_all_incorrect
from .numeric_q_incorrect_sf import numeric_q_incorrect_sf
from .numeric_q_incorrect_sf_u import numeric_q_incorrect_sf_u
from .numeric_q_known_wrong_ans import numeric_q_known_wrong_ans
from .numeric_q_known_wrong_sf import numeric_q_known_wrong_sf
from .numeric_q_known_wrong_sf_tagged import numeric_q_known_wrong_sf_tagged
from .numeric_q_units_none import numeric_q_units_none
from .numeric_q_help_popup import numeric_q_help_popup
from .answer_saved_login import answer_saved_login
from .symbolic_q_text_entry_correct import symbolic_q_text_entry_correct
from .string_match_q_correct import string_match_q_correct
from .concept_index_page import concept_index_page
from .concept_pages import concept_pages
from .tab_behaviour import tab_behavior
from .figure_behaviour import figure_behaviour
from .back_to_board import back_to_board
from .filter_behaviour import filter_behaviour
#from .filter_by_concept import filter_by_concept # This feature has been removed!
from .save_board_add import save_board_add
from .admin_stats_summary import admin_stats_summary
from .admin_stats_analytics import admin_stats_analytics
from .admin_stats_gameboards import admin_stats_gameboards
from .admin_stats_schools import admin_stats_schools
from .admin_user_search import admin_user_search
from .user_role_change import user_role_change
from .non_admin_user_search import non_admin_user_search
from .delete_user import delete_user
from .user_progress_access import user_progress_access
from .manually_entered_links import manually_entered_links
from .hsts_header import hsts_header
from .board_builder import board_builder
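# A minimal usage sketch (not part of the suite): "Tester" stands for whichever class the
# classmethods above are defined on, and "driver", "inbox" and "users" are the Selenium
# WebDriver, GuerrillaInbox and TestUsers objects built by the harness. The URLs, wait
# duration and the "LOGIN" test name below are illustrative placeholders only.
def _example_full_run(Tester, driver, inbox, users):
    """Run every registered test in import order, then print the Graphviz dependency graph."""
    Tester.run_all_tests(driver, inbox, users,
                         ISAAC_WEB="http://localhost:8080",
                         GUERRILLAMAIL="https://www.guerrillamail.com",
                         WAIT_DUR=2)
    print(Tester.dependency_graph())
def _example_single_test(Tester, driver, inbox, users):
    """Run one test plus whatever it depends on, letting the recursion handle teardowns."""
    Tester.run_test_with_deps("LOGIN", driver=driver, inbox=inbox, Users=users,
                              ISAAC_WEB="http://localhost:8080",
                              GUERRILLAMAIL="https://www.guerrillamail.com",
                              WAIT_DUR=2)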
|
|
# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Finds components for a given manifest."""
from typing import Any, List, Optional, Tuple
import pathlib
import sys
import xml.etree.ElementTree
def _gn_str_out(name: str, val: Any):
"""Outputs scoped string in GN format."""
print(f'{name} = "{val}"')
def _gn_list_str_out(name: str, val: List[Any]):
"""Outputs list of strings in GN format with correct escaping."""
list_str = ','.join('"' + str(x).replace('"', r'\"').replace('$', r'\$') +
'"' for x in val)
print(f'{name} = [{list_str}]')
def _gn_list_path_out(name: str,
val: List[pathlib.Path],
path_prefix: Optional[str] = None):
"""Outputs list of paths in GN format with common prefix."""
if path_prefix is not None:
str_val = list(f'{path_prefix}/{str(d)}' for d in val)
else:
str_val = list(str(d) for d in val)
_gn_list_str_out(name, str_val)
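# A quick illustrative sketch of the GN syntax these helpers emit (expected stdout is shown
# in the trailing comments; the names and values are made up for the example):
def _example_gn_output():
    _gn_str_out('sdk_version', '2.10.0')                      # sdk_version = "2.10.0"
    _gn_list_str_out('defines', ['EXAMPLE=1', 'OTHER'])       # defines = ["EXAMPLE=1","OTHER"]
    _gn_list_path_out('sources', [pathlib.Path('example.cc')],
                      path_prefix='sdk')                      # sources = ["sdk/example.cc"]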
def get_component(
root: xml.etree.ElementTree.Element, component_id: str
) -> Tuple[Optional[xml.etree.ElementTree.Element], Optional[pathlib.Path]]:
"""Parse <component> manifest stanza.
Schema:
<component id="{component_id}" package_base_path="component">
</component>
Args:
root: root of element tree.
component_id: id of component to return.
Returns:
(element, base_path) for the component, or (None, None).
"""
xpath = f'./components/component[@id="{component_id}"]'
component = root.find(xpath)
if component is None:
return (None, None)
try:
base_path = pathlib.Path(component.attrib['package_base_path'])
return (component, base_path)
except KeyError:
return (component, None)
def parse_defines(root: xml.etree.ElementTree.Element,
component_id: str) -> List[str]:
"""Parse pre-processor definitions for a component.
Schema:
<defines>
<define name="EXAMPLE" value="1"/>
<define name="OTHER"/>
</defines>
Args:
root: root of element tree.
component_id: id of component to return.
Returns:
list of str NAME=VALUE or NAME for the component.
"""
xpath = f'./components/component[@id="{component_id}"]/defines/define'
return list(_parse_define(define) for define in root.findall(xpath))
def _parse_define(define: xml.etree.ElementTree.Element) -> str:
"""Parse <define> manifest stanza.
Schema:
<define name="EXAMPLE" value="1"/>
<define name="OTHER"/>
Args:
define: XML Element for <define>.
Returns:
str with a value NAME=VALUE or NAME.
"""
name = define.attrib['name']
value = define.attrib.get('value', None)
if value is None:
return name
return f'{name}={value}'
def parse_include_paths(root: xml.etree.ElementTree.Element,
component_id: str) -> List[pathlib.Path]:
"""Parse include directories for a component.
Schema:
<component id="{component_id}" package_base_path="component">
<include_paths>
<include_path relative_path="./" type="c_include"/>
</include_paths>
</component>
Args:
root: root of element tree.
component_id: id of component to return.
Returns:
list of include directories for the component.
"""
(component, base_path) = get_component(root, component_id)
if component is None:
return []
include_paths: List[pathlib.Path] = []
for include_type in ('c_include', 'asm_include'):
include_xpath = f'./include_paths/include_path[@type="{include_type}"]'
include_paths.extend(
_parse_include_path(include_path, base_path)
for include_path in component.findall(include_xpath))
return include_paths
def _parse_include_path(include_path: xml.etree.ElementTree.Element,
base_path: Optional[pathlib.Path]) -> pathlib.Path:
"""Parse <include_path> manifest stanza.
Schema:
<include_path relative_path="./" type="c_include"/>
Args:
include_path: XML Element for <include_path>.
base_path: prefix for paths.
Returns:
Path, prefixed with `base_path`.
"""
path = pathlib.Path(include_path.attrib['relative_path'])
if base_path is None:
return path
return base_path / path
def parse_headers(root: xml.etree.ElementTree.Element,
component_id: str) -> List[pathlib.Path]:
"""Parse header files for a component.
Schema:
<component id="{component_id}" package_base_path="component">
<source relative_path="./" type="c_include">
<files mask="example.h"/>
</source>
</component>
Args:
root: root of element tree.
component_id: id of component to return.
Returns:
list of header files for the component.
"""
return _parse_sources(root, component_id, 'c_include')
def parse_sources(root: xml.etree.ElementTree.Element,
component_id: str) -> List[pathlib.Path]:
"""Parse source files for a component.
Schema:
<component id="{component_id}" package_base_path="component">
<source relative_path="./" type="src">
<files mask="example.cc"/>
</source>
</component>
Args:
root: root of element tree.
component_id: id of component to return.
Returns:
list of source files for the component.
"""
source_files = []
for source_type in ('src', 'src_c', 'src_cpp', 'asm_include'):
source_files.extend(_parse_sources(root, component_id, source_type))
return source_files
def parse_libs(root: xml.etree.ElementTree.Element,
component_id: str) -> List[pathlib.Path]:
"""Parse pre-compiled libraries for a component.
Schema:
<component id="{component_id}" package_base_path="component">
<source relative_path="./" type="lib">
<files mask="example.a"/>
</source>
</component>
Args:
root: root of element tree.
component_id: id of component to return.
Returns:
list of pre-compiled libraries for the component.
"""
return _parse_sources(root, component_id, 'lib')
def _parse_sources(root: xml.etree.ElementTree.Element, component_id: str,
source_type: str) -> List[pathlib.Path]:
"""Parse <source> manifest stanza.
Schema:
<component id="{component_id}" package_base_path="component">
<source relative_path="./" type="{source_type}">
<files mask="example.h"/>
</source>
</component>
Args:
root: root of element tree.
component_id: id of component to return.
source_type: type of source to search for.
Returns:
list of source files for the component.
"""
(component, base_path) = get_component(root, component_id)
if component is None:
return []
sources: List[pathlib.Path] = []
source_xpath = f'./source[@type="{source_type}"]'
for source in component.findall(source_xpath):
relative_path = pathlib.Path(source.attrib['relative_path'])
if base_path is not None:
relative_path = base_path / relative_path
sources.extend(relative_path / files.attrib['mask']
for files in source.findall('./files'))
return sources
def parse_dependencies(root: xml.etree.ElementTree.Element,
component_id: str) -> List[str]:
"""Parse the list of dependencies for a component.
Optional dependencies are ignored for parsing since they have to be
included explicitly.
Schema:
<dependencies>
<all>
<component_dependency value="component"/>
<component_dependency value="component"/>
<any_of>
<component_dependency value="component"/>
<component_dependency value="component"/>
</any_of>
</all>
</dependencies>
Args:
root: root of element tree.
component_id: id of component to return.
Returns:
list of component id dependencies of the component.
"""
dependencies = []
xpath = f'./components/component[@id="{component_id}"]/dependencies/*'
for dependency in root.findall(xpath):
dependencies.extend(_parse_dependency(dependency))
return dependencies
def _parse_dependency(dependency: xml.etree.ElementTree.Element) -> List[str]:
"""Parse <all>, <any_of>, and <component_dependency> manifest stanzas.
Schema:
<all>
<component_dependency value="component"/>
<component_dependency value="component"/>
<any_of>
<component_dependency value="component"/>
<component_dependency value="component"/>
</any_of>
</all>
Args:
dependency: XML Element of dependency.
Returns:
list of component id dependencies.
"""
if dependency.tag == 'component_dependency':
return [dependency.attrib['value']]
if dependency.tag == 'all':
dependencies = []
for subdependency in dependency:
dependencies.extend(_parse_dependency(subdependency))
return dependencies
if dependency.tag == 'any_of':
# Explicitly ignore.
return []
# Unknown dependency tag type.
return []
def check_dependencies(root: xml.etree.ElementTree.Element,
component_id: str,
include: List[str],
exclude: Optional[List[str]] = None) -> bool:
"""Check the list of optional dependencies for a component.
Verifies that the optional dependencies for a component are satisfied by
components listed in `include` or `exclude`.
Args:
root: root of element tree.
component_id: id of component to check.
include: list of component ids included in the project.
exclude: list of component ids explicitly excluded from the project.
Returns:
True if dependencies are satisfied, False if not.
"""
xpath = f'./components/component[@id="{component_id}"]/dependencies/*'
for dependency in root.findall(xpath):
if not _check_dependency(dependency, include, exclude=exclude):
return False
return True
def _check_dependency(dependency: xml.etree.ElementTree.Element,
include: List[str],
exclude: Optional[List[str]] = None) -> bool:
"""Check a dependency for a component.
Verifies that the given {dependency} is satisfied by components listed in
`include` or `exclude`.
Args:
dependency: XML Element of dependency.
include: list of component ids included in the project.
exclude: list of component ids explicitly excluded from the project.
Returns:
True if dependencies are satisfied, False if not.
"""
if dependency.tag == 'component_dependency':
component_id = dependency.attrib['value']
return component_id in include or (exclude is not None
and component_id in exclude)
if dependency.tag == 'all':
for subdependency in dependency:
if not _check_dependency(subdependency, include, exclude=exclude):
return False
return True
if dependency.tag == 'any_of':
for subdependency in dependency:
if _check_dependency(subdependency, include, exclude=exclude):
return True
tree = xml.etree.ElementTree.tostring(dependency).decode('utf-8')
print(f'Unsatisfied dependency from: {tree}', file=sys.stderr)
return False
# Unknown dependency tag type.
return True
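# A small self-contained sketch of the dependency check above; the manifest snippet and the
# component ids are invented for illustration and simply follow the schema documented in the
# docstrings.
def _example_check_dependencies():
    manifest = (
        '<manifest><components>'
        '<component id="example.driver"><dependencies><all>'
        '<component_dependency value="example.common"/>'
        '<any_of>'
        '<component_dependency value="example.osa"/>'
        '<component_dependency value="example.osa_baremetal"/>'
        '</any_of>'
        '</all></dependencies></component>'
        '</components></manifest>')
    root = xml.etree.ElementTree.fromstring(manifest)
    # Satisfied: the required component and one of the <any_of> alternatives are included.
    assert check_dependencies(root, 'example.driver',
                              include=['example.common', 'example.osa'])
    # Not satisfied: neither <any_of> alternative is included (a diagnostic is printed).
    assert not check_dependencies(root, 'example.driver', include=['example.common'])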
def create_project(
root: xml.etree.ElementTree.Element,
include: List[str],
exclude: Optional[List[str]] = None
) -> Tuple[List[str], List[str], List[pathlib.Path], List[pathlib.Path],
List[pathlib.Path], List[pathlib.Path]]:
"""Create a project from a list of specified components.
Args:
root: root of element tree.
include: list of component ids included in the project.
exclude: list of component ids excluded from the project.
Returns:
(component_ids, defines, include_paths, headers, sources, libs) for the
project.
"""
# Build the project list from the list of included components by expanding
# dependencies.
project_list = []
pending_list = include
while len(pending_list) > 0:
component_id = pending_list.pop(0)
if component_id in project_list:
continue
if exclude is not None and component_id in exclude:
continue
project_list.append(component_id)
pending_list.extend(parse_dependencies(root, component_id))
return (
project_list,
sum((parse_defines(root, component_id)
for component_id in project_list), []),
sum((parse_include_paths(root, component_id)
for component_id in project_list), []),
sum((parse_headers(root, component_id)
for component_id in project_list), []),
sum((parse_sources(root, component_id)
for component_id in project_list), []),
sum((parse_libs(root, component_id) for component_id in project_list),
[]),
)
def project(manifest_path: pathlib.Path,
include: Optional[List[str]] = None,
exclude: Optional[List[str]] = None,
path_prefix: Optional[str] = None):
"""Output GN scope for a project with the specified components.
Args:
manifest_path: path to SDK manifest XML.
include: list of component ids included in the project.
exclude: list of component ids excluded from the project.
path_prefix: string prefix to prepend to all paths.
"""
assert include is not None, "Project must include at least one component."
tree = xml.etree.ElementTree.parse(manifest_path)
root = tree.getroot()
(component_ids, defines, include_dirs, headers, sources, libs) = \
create_project(root, include, exclude=exclude)
for component_id in component_ids:
if not check_dependencies(
root, component_id, component_ids, exclude=exclude):
return
_gn_list_str_out('defines', defines)
_gn_list_path_out('include_dirs', include_dirs, path_prefix=path_prefix)
_gn_list_path_out('public', headers, path_prefix=path_prefix)
_gn_list_path_out('sources', sources, path_prefix=path_prefix)
_gn_list_path_out('libs', libs, path_prefix=path_prefix)
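# A minimal end-to-end sketch using an in-memory manifest. The component below is made up and
# only follows the schema shown in the docstrings; a real SDK manifest would be far larger.
def _example_parse_component():
    manifest = """
    <manifest>
      <components>
        <component id="example.driver" package_base_path="drivers/example">
          <defines>
            <define name="EXAMPLE" value="1"/>
          </defines>
          <include_paths>
            <include_path relative_path="./" type="c_include"/>
          </include_paths>
          <source relative_path="./" type="src">
            <files mask="example.cc"/>
          </source>
        </component>
      </components>
    </manifest>
    """
    root = xml.etree.ElementTree.fromstring(manifest)
    assert parse_defines(root, 'example.driver') == ['EXAMPLE=1']
    assert parse_include_paths(root, 'example.driver') == [
        pathlib.Path('drivers/example')]
    assert parse_sources(root, 'example.driver') == [
        pathlib.Path('drivers/example/example.cc')]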
|
|
#!/usr/bin/env python
# Copyright (c) 2016 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates various info tables from SPIR-V JSON grammar."""
import errno
import json
import os.path
import re
# Prefix for all C variables generated by this script.
PYGEN_VARIABLE_PREFIX = 'pygen_variable'
# Extensions to recognize, but which don't necessarily come from the SPIR-V
# core or KHR grammar files. Get this list from the SPIR-V registry web page.
# NOTE: Only put things on this list if they are not in those grammar files.
EXTENSIONS_FROM_SPIRV_REGISTRY_AND_NOT_FROM_GRAMMARS = """
SPV_AMD_gcn_shader
SPV_AMD_gpu_shader_half_float
SPV_AMD_gpu_shader_int16
SPV_AMD_shader_trinary_minmax
SPV_KHR_non_semantic_info
"""
def make_path_to_file(f):
"""Makes all ancestor directories to the given file, if they don't yet
exist.
Arguments:
f: The file whose ancestor directories are to be created.
"""
dir = os.path.dirname(os.path.abspath(f))
try:
os.makedirs(dir)
except OSError as e:
if e.errno == errno.EEXIST and os.path.isdir(dir):
pass
else:
raise
def convert_min_required_version(version):
"""Converts the minimal required SPIR-V version encoded in the grammar to
the symbol in SPIRV-Tools."""
if version is None:
return 'SPV_SPIRV_VERSION_WORD(1, 0)'
if version == 'None':
return '0xffffffffu'
return 'SPV_SPIRV_VERSION_WORD({})'.format(version.replace('.', ','))
def convert_max_required_version(version):
"""Converts the maximum required SPIR-V version encoded in the grammar to
the symbol in SPIRV-Tools."""
if version is None:
return '0xffffffffu'
return 'SPV_SPIRV_VERSION_WORD({})'.format(version.replace('.', ','))
def compose_capability_list(caps):
"""Returns a string containing a braced list of capabilities as enums.
Arguments:
- caps: a sequence of capability names
Returns:
a string containing the braced list of SpvCapability* enums named by caps.
"""
return '{' + ', '.join(['SpvCapability{}'.format(c) for c in caps]) + '}'
def get_capability_array_name(caps):
"""Returns the name of the array containing all the given capabilities.
Args:
- caps: a sequence of capability names
"""
if not caps:
return 'nullptr'
return '{}_caps_{}'.format(PYGEN_VARIABLE_PREFIX, ''.join(caps))
def generate_capability_arrays(caps):
"""Returns the arrays of capabilities.
Arguments:
- caps: a sequence of sequences of capability names
"""
caps = sorted(set([tuple(c) for c in caps if c]))
arrays = [
'static const SpvCapability {}[] = {};'.format(
get_capability_array_name(c), compose_capability_list(c))
for c in caps]
return '\n'.join(arrays)
def compose_extension_list(exts):
"""Returns a string containing a braced list of extensions as enums.
Arguments:
- exts: a sequence of extension names
Returns:
a string containing the braced list of extensions named by exts.
"""
return '{' + ', '.join(
['spvtools::Extension::k{}'.format(e) for e in exts]) + '}'
def get_extension_array_name(extensions):
"""Returns the name of the array containing all the given extensions.
Args:
- extensions: a sequence of extension names
"""
if not extensions:
return 'nullptr'
else:
return '{}_exts_{}'.format(
PYGEN_VARIABLE_PREFIX, ''.join(extensions))
def generate_extension_arrays(extensions):
"""Returns the arrays of extensions.
Arguments:
- extensions: a sequence of sequences of extension names
"""
extensions = sorted(set([tuple(e) for e in extensions if e]))
arrays = [
'static const spvtools::Extension {}[] = {};'.format(
get_extension_array_name(e), compose_extension_list(e))
for e in extensions]
return '\n'.join(arrays)
def convert_operand_kind(operand_tuple):
"""Returns the corresponding operand type used in spirv-tools for the given
operand kind and quantifier used in the JSON grammar.
Arguments:
- operand_tuple: a tuple of two elements:
- operand kind: used in the JSON grammar
- quantifier: '', '?', or '*'
Returns:
a string of the enumerant name in spv_operand_type_t
"""
kind, quantifier = operand_tuple
# The following cases are where we differ between the JSON grammar and
# spirv-tools.
if kind == 'IdResultType':
kind = 'TypeId'
elif kind == 'IdResult':
kind = 'ResultId'
elif kind == 'IdMemorySemantics' or kind == 'MemorySemantics':
kind = 'MemorySemanticsId'
elif kind == 'IdScope' or kind == 'Scope':
kind = 'ScopeId'
elif kind == 'IdRef':
kind = 'Id'
elif kind == 'ImageOperands':
kind = 'Image'
elif kind == 'Dim':
kind = 'Dimensionality'
elif kind == 'ImageFormat':
kind = 'SamplerImageFormat'
elif kind == 'KernelEnqueueFlags':
kind = 'KernelEnqFlags'
elif kind == 'LiteralExtInstInteger':
kind = 'ExtensionInstructionNumber'
elif kind == 'LiteralSpecConstantOpInteger':
kind = 'SpecConstantOpNumber'
elif kind == 'LiteralContextDependentNumber':
kind = 'TypedLiteralNumber'
elif kind == 'PairLiteralIntegerIdRef':
kind = 'LiteralIntegerId'
elif kind == 'PairIdRefLiteralInteger':
kind = 'IdLiteralInteger'
elif kind == 'PairIdRefIdRef': # Used by OpPhi in the grammar
kind = 'Id'
if kind == 'FPRoundingMode':
kind = 'FpRoundingMode'
elif kind == 'FPFastMathMode':
kind = 'FpFastMathMode'
if quantifier == '?':
kind = 'Optional{}'.format(kind)
elif quantifier == '*':
kind = 'Variable{}'.format(kind)
return 'SPV_OPERAND_TYPE_{}'.format(
re.sub(r'([a-z])([A-Z])', r'\1_\2', kind).upper())
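# A few hedged sanity checks of the conversion above; the expected strings follow directly
# from the renames and the CamelCase -> UPPER_SNAKE rewrite performed by the function.
def _convert_operand_kind_examples():
    assert convert_operand_kind(('IdResultType', '')) == 'SPV_OPERAND_TYPE_TYPE_ID'
    assert convert_operand_kind(('IdRef', '?')) == 'SPV_OPERAND_TYPE_OPTIONAL_ID'
    assert convert_operand_kind(('Dim', '')) == 'SPV_OPERAND_TYPE_DIMENSIONALITY'
    assert convert_operand_kind(('LiteralString', '*')) == \
        'SPV_OPERAND_TYPE_VARIABLE_LITERAL_STRING'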
class InstInitializer(object):
"""Instances holds a SPIR-V instruction suitable for printing as the
initializer for spv_opcode_desc_t."""
def __init__(self, opname, caps, exts, operands, version, lastVersion):
"""Initialization.
Arguments:
- opname: opcode name (with the 'Op' prefix)
- caps: a sequence of capability names required by this opcode
- exts: a sequence of names of extensions enabling this enumerant
- operands: a sequence of (operand-kind, operand-quantifier) tuples
- version: minimal SPIR-V version required for this opcode
- lastVersion: last version of SPIR-V that includes this opcode
"""
assert opname.startswith('Op')
self.opname = opname[2:] # Remove the "Op" prefix.
self.num_caps = len(caps)
self.caps_mask = get_capability_array_name(caps)
self.num_exts = len(exts)
self.exts = get_extension_array_name(exts)
self.operands = [convert_operand_kind(o) for o in operands]
self.fix_syntax()
operands = [o[0] for o in operands]
self.ref_type_id = 'IdResultType' in operands
self.def_result_id = 'IdResult' in operands
self.version = convert_min_required_version(version)
self.lastVersion = convert_max_required_version(lastVersion)
def fix_syntax(self):
"""Fix an instruction's syntax, adjusting for differences between the
officially released grammar and how SPIRV-Tools uses the grammar.
Fixes:
- ExtInst should not end with SPV_OPERAND_VARIABLE_ID.
https://github.com/KhronosGroup/SPIRV-Tools/issues/233
"""
if (self.opname == 'ExtInst'
and self.operands[-1] == 'SPV_OPERAND_TYPE_VARIABLE_ID'):
self.operands.pop()
def __str__(self):
template = ['{{"{opname}"', 'SpvOp{opname}',
'{num_caps}', '{caps_mask}',
'{num_operands}', '{{{operands}}}',
'{def_result_id}', '{ref_type_id}',
'{num_exts}', '{exts}',
'{min_version}', '{max_version}}}']
return ', '.join(template).format(
opname=self.opname,
num_caps=self.num_caps,
caps_mask=self.caps_mask,
num_operands=len(self.operands),
operands=', '.join(self.operands),
def_result_id=(1 if self.def_result_id else 0),
ref_type_id=(1 if self.ref_type_id else 0),
num_exts=self.num_exts,
exts=self.exts,
min_version=self.version,
max_version=self.lastVersion)
class ExtInstInitializer(object):
"""Instances holds a SPIR-V extended instruction suitable for printing as
the initializer for spv_ext_inst_desc_t."""
def __init__(self, opname, opcode, caps, operands):
"""Initialization.
Arguments:
- opname: opcode name
- opcode: enumerant value for this opcode
- caps: a sequence of capability names required by this opcode
- operands: a sequence of (operand-kind, operand-quantifier) tuples
"""
self.opname = opname
self.opcode = opcode
self.num_caps = len(caps)
self.caps_mask = get_capability_array_name(caps)
self.operands = [convert_operand_kind(o) for o in operands]
self.operands.append('SPV_OPERAND_TYPE_NONE')
def __str__(self):
template = ['{{"{opname}"', '{opcode}', '{num_caps}', '{caps_mask}',
'{{{operands}}}}}']
return ', '.join(template).format(
opname=self.opname,
opcode=self.opcode,
num_caps=self.num_caps,
caps_mask=self.caps_mask,
operands=', '.join(self.operands))
def generate_instruction(inst, is_ext_inst):
"""Returns the C initializer for the given SPIR-V instruction.
Arguments:
- inst: a dict containing information about a SPIR-V instruction
- is_ext_inst: a bool indicating whether |inst| is an extended
instruction.
Returns:
a string containing the C initializer for spv_opcode_desc_t or
spv_ext_inst_desc_t
"""
opname = inst.get('opname')
opcode = inst.get('opcode')
caps = inst.get('capabilities', [])
exts = inst.get('extensions', [])
operands = inst.get('operands', {})
operands = [(o['kind'], o.get('quantifier', '')) for o in operands]
min_version = inst.get('version', None)
max_version = inst.get('lastVersion', None)
assert opname is not None
if is_ext_inst:
return str(ExtInstInitializer(opname, opcode, caps, operands))
else:
return str(InstInitializer(opname, caps, exts, operands, min_version, max_version))
def generate_instruction_table(inst_table):
"""Returns the info table containing all SPIR-V instructions, sorted by
opcode, and prefixed by capability arrays.
Note:
- the built-in sorted() function is guaranteed to be stable.
https://docs.python.org/3/library/functions.html#sorted
Arguments:
- inst_table: a list containing all SPIR-V instructions.
"""
inst_table = sorted(inst_table, key=lambda k: (k['opcode'], k['opname']))
caps_arrays = generate_capability_arrays(
[inst.get('capabilities', []) for inst in inst_table])
exts_arrays = generate_extension_arrays(
[inst.get('extensions', []) for inst in inst_table])
insts = [generate_instruction(inst, False) for inst in inst_table]
insts = ['static const spv_opcode_desc_t kOpcodeTableEntries[] = {{\n'
' {}\n}};'.format(',\n '.join(insts))]
return '{}\n\n{}\n\n{}'.format(caps_arrays, exts_arrays, '\n'.join(insts))
def generate_extended_instruction_table(json_grammar, set_name, operand_kind_prefix=""):
"""Returns the info table containing all SPIR-V extended instructions,
sorted by opcode, and prefixed by capability arrays.
Arguments:
- json_grammar: parsed JSON grammar containing the extended instruction set.
- set_name: the name of the extended instruction set.
- operand_kind_prefix: the prefix, if any, to add to the front
of operand kind names.
"""
if operand_kind_prefix:
prefix_operand_kind_names(operand_kind_prefix, json_grammar)
inst_table = json_grammar["instructions"]
set_name = set_name.replace(".", "_")
inst_table = sorted(inst_table, key=lambda k: k['opcode'])
caps = [inst.get('capabilities', []) for inst in inst_table]
caps_arrays = generate_capability_arrays(caps)
insts = [generate_instruction(inst, True) for inst in inst_table]
insts = ['static const spv_ext_inst_desc_t {}_entries[] = {{\n'
' {}\n}};'.format(set_name, ',\n '.join(insts))]
return '{}\n\n{}'.format(caps_arrays, '\n'.join(insts))
class EnumerantInitializer(object):
"""Prints an enumerant as the initializer for spv_operand_desc_t."""
def __init__(self, enumerant, value, caps, exts, parameters, version, lastVersion):
"""Initialization.
Arguments:
- enumerant: enumerant name
- value: enumerant value
- caps: a sequence of capability names required by this enumerant
- exts: a sequence of names of extensions enabling this enumerant
- parameters: a sequence of (operand-kind, operand-quantifier) tuples
- version: minimal SPIR-V version required for this opcode
- lastVersion: last SPIR-V version in which this enumerant appears
"""
self.enumerant = enumerant
self.value = value
self.num_caps = len(caps)
self.caps = get_capability_array_name(caps)
self.num_exts = len(exts)
self.exts = get_extension_array_name(exts)
self.parameters = [convert_operand_kind(p) for p in parameters]
self.version = convert_min_required_version(version)
self.lastVersion = convert_max_required_version(lastVersion)
def __str__(self):
template = ['{{"{enumerant}"', '{value}', '{num_caps}',
'{caps}', '{num_exts}', '{exts}',
'{{{parameters}}}', '{min_version}',
'{max_version}}}']
return ', '.join(template).format(
enumerant=self.enumerant,
value=self.value,
num_caps=self.num_caps,
caps=self.caps,
num_exts=self.num_exts,
exts=self.exts,
parameters=', '.join(self.parameters),
min_version=self.version,
max_version=self.lastVersion)
def generate_enum_operand_kind_entry(entry, extension_map):
"""Returns the C initializer for the given operand enum entry.
Arguments:
- entry: a dict containing information about an enum entry
- extension_map: a dict mapping enum value to list of extensions
Returns:
a string containing the C initializer for spv_operand_desc_t
"""
enumerant = entry.get('enumerant')
value = entry.get('value')
caps = entry.get('capabilities', [])
if value in extension_map:
exts = extension_map[value]
else:
exts = []
params = entry.get('parameters', [])
params = [p.get('kind') for p in params]
params = zip(params, [''] * len(params))
version = entry.get('version', None)
max_version = entry.get('lastVersion', None)
assert enumerant is not None
assert value is not None
return str(EnumerantInitializer(
enumerant, value, caps, exts, params, version, max_version))
def generate_enum_operand_kind(enum, synthetic_exts_list):
"""Returns the C definition for the given operand kind.
It's a static const named array of spv_operand_desc_t.
Also appends to |synthetic_exts_list| a list of extension lists
used.
"""
kind = enum.get('kind')
assert kind is not None
# Sort all enumerants according to their values, but otherwise
# preserve their order so the first name listed in the grammar
# is used as the preferred name for disassembly.
if enum.get('category') == 'ValueEnum':
def functor(k): return (k['value'])
else:
def functor(k): return (int(k['value'], 16))
entries = sorted(enum.get('enumerants', []), key=functor)
# SubgroupEqMask and SubgroupEqMaskKHR are the same number with
# same semantics, but one has no extension list while the other
# does. Both should have the extension list.
# So create a mapping from enum value to the union of the extensions
# across all those grammar entries. Preserve order.
extension_map = {}
for e in entries:
value = e.get('value')
extension_map[value] = []
for e in entries:
value = e.get('value')
exts = e.get('extensions', [])
for ext in exts:
if ext not in extension_map[value]:
extension_map[value].append(ext)
synthetic_exts_list.extend(extension_map.values())
name = '{}_{}Entries'.format(PYGEN_VARIABLE_PREFIX, kind)
entries = [' {}'.format(generate_enum_operand_kind_entry(e, extension_map))
for e in entries]
template = ['static const spv_operand_desc_t {name}[] = {{',
'{entries}', '}};']
entries = '\n'.join(template).format(
name=name,
entries=',\n'.join(entries))
return kind, name, entries
def generate_operand_kind_table(enums):
"""Returns the info table containing all SPIR-V operand kinds."""
# We only need to output info tables for those operand kinds that are enums.
enums = [e for e in enums if e.get('category') in ['ValueEnum', 'BitEnum']]
caps = [entry.get('capabilities', [])
for enum in enums
for entry in enum.get('enumerants', [])]
caps_arrays = generate_capability_arrays(caps)
exts = [entry.get('extensions', [])
for enum in enums
for entry in enum.get('enumerants', [])]
enums = [generate_enum_operand_kind(e, exts) for e in enums]
exts_arrays = generate_extension_arrays(exts)
# We have a few operand kinds that require their optional counterpart to
# exist in the operand info table.
optional_enums = ['ImageOperands', 'AccessQualifier', 'MemoryAccess', 'PackedVectorFormat']
optional_enums = [e for e in enums if e[0] in optional_enums]
enums.extend(optional_enums)
enum_kinds, enum_names, enum_entries = zip(*enums)
# Mark the last few as optional ones.
enum_quantifiers = [''] * (len(enums) - len(optional_enums)) + ['?'] * len(optional_enums)
# And we don't want redefinition of them.
enum_entries = enum_entries[:-len(optional_enums)]
enum_kinds = [convert_operand_kind(e)
for e in zip(enum_kinds, enum_quantifiers)]
table_entries = zip(enum_kinds, enum_names, enum_names)
table_entries = [' {{{}, ARRAY_SIZE({}), {}}}'.format(*e)
for e in table_entries]
template = [
'static const spv_operand_desc_group_t {p}_OperandInfoTable[] = {{',
'{enums}', '}};']
table = '\n'.join(template).format(
p=PYGEN_VARIABLE_PREFIX, enums=',\n'.join(table_entries))
return '\n\n'.join((caps_arrays,) + (exts_arrays,) + enum_entries + (table,))
def get_extension_list(instructions, operand_kinds):
"""Returns extensions as an alphabetically sorted list of strings."""
things_with_an_extensions_field = [item for item in instructions]
enumerants = sum([item.get('enumerants', [])
for item in operand_kinds], [])
things_with_an_extensions_field.extend(enumerants)
extensions = sum([item.get('extensions', [])
for item in things_with_an_extensions_field
if item.get('extensions')], [])
for item in EXTENSIONS_FROM_SPIRV_REGISTRY_AND_NOT_FROM_GRAMMARS.split():
# If it's already listed in a grammar, then don't put it in the
# special exceptions list.
assert item not in extensions, 'Extension %s is already in a grammar file' % item
extensions.extend(
EXTENSIONS_FROM_SPIRV_REGISTRY_AND_NOT_FROM_GRAMMARS.split())
# Validator would ignore type declaration unique check. Should only be used
# for legacy autogenerated test files containing multiple instances of the
# same type declaration, if fixing the test by other methods is too
# difficult. Shouldn't be used for any other reasons.
extensions.append('SPV_VALIDATOR_ignore_type_decl_unique')
return sorted(set(extensions))
def get_capabilities(operand_kinds):
"""Returns capabilities as a list of JSON objects, in order of
appearance."""
enumerants = sum([item.get('enumerants', []) for item in operand_kinds
if item.get('kind') in ['Capability']], [])
return enumerants
def generate_extension_enum(extensions):
"""Returns enumeration containing extensions declared in the grammar."""
return ',\n'.join(['k' + extension for extension in extensions])
def generate_extension_to_string_mapping(extensions):
"""Returns mapping function from extensions to corresponding strings."""
function = 'const char* ExtensionToString(Extension extension) {\n'
function += ' switch (extension) {\n'
template = ' case Extension::k{extension}:\n' \
' return "{extension}";\n'
function += ''.join([template.format(extension=extension)
for extension in extensions])
function += ' }\n\n return "";\n}'
return function
def generate_string_to_extension_mapping(extensions):
"""Returns mapping function from strings to corresponding extensions."""
function = '''
bool GetExtensionFromString(const char* str, Extension* extension) {{
static const char* known_ext_strs[] = {{ {strs} }};
static const Extension known_ext_ids[] = {{ {ids} }};
const auto b = std::begin(known_ext_strs);
const auto e = std::end(known_ext_strs);
const auto found = std::equal_range(
b, e, str, [](const char* str1, const char* str2) {{
return std::strcmp(str1, str2) < 0;
}});
if (found.first == e || found.first == found.second) return false;
*extension = known_ext_ids[found.first - b];
return true;
}}
'''.format(strs=', '.join(['"{}"'.format(e) for e in extensions]),
ids=', '.join(['Extension::k{}'.format(e) for e in extensions]))
return function
def generate_capability_to_string_mapping(operand_kinds):
"""Returns mapping function from capabilities to corresponding strings.
We take care to avoid emitting duplicate values.
"""
function = 'const char* CapabilityToString(SpvCapability capability) {\n'
function += ' switch (capability) {\n'
template = ' case SpvCapability{capability}:\n' \
' return "{capability}";\n'
emitted = set() # The values of capabilities we already have emitted
for capability in get_capabilities(operand_kinds):
value = capability.get('value')
if value not in emitted:
emitted.add(value)
function += template.format(capability=capability.get('enumerant'))
function += ' case SpvCapabilityMax:\n' \
' assert(0 && "Attempting to convert SpvCapabilityMax to string");\n' \
' return "";\n'
function += ' }\n\n return "";\n}'
return function
def generate_all_string_enum_mappings(extensions, operand_kinds):
"""Returns all string-to-enum / enum-to-string mapping tables."""
tables = []
tables.append(generate_extension_to_string_mapping(extensions))
tables.append(generate_string_to_extension_mapping(extensions))
tables.append(generate_capability_to_string_mapping(operand_kinds))
return '\n\n'.join(tables)
def precondition_operand_kinds(operand_kinds):
"""For operand kinds that have the same number, make sure they all have the
same extension list."""
# Map operand kind and value to list of the union of extensions
# for same-valued enumerants.
exts = {}
for kind_entry in operand_kinds:
kind = kind_entry.get('kind')
for enum_entry in kind_entry.get('enumerants', []):
value = enum_entry.get('value')
key = kind + '.' + str(value)
if key in exts:
exts[key].extend(enum_entry.get('extensions', []))
else:
exts[key] = enum_entry.get('extensions', [])
exts[key] = sorted(set(exts[key]))
# Now make each entry the same list.
for kind_entry in operand_kinds:
kind = kind_entry.get('kind')
for enum_entry in kind_entry.get('enumerants', []):
value = enum_entry.get('value')
key = kind + '.' + str(value)
if len(exts[key]) > 0:
enum_entry['extensions'] = exts[key]
return operand_kinds
def prefix_operand_kind_names(prefix, json_dict):
"""Modifies json_dict, by prefixing all the operand kind names
with the given prefix. Also modifies their uses in the instructions
to match.
"""
old_to_new = {}
for operand_kind in json_dict["operand_kinds"]:
old_name = operand_kind["kind"]
new_name = prefix + old_name
operand_kind["kind"] = new_name
old_to_new[old_name] = new_name
for instruction in json_dict["instructions"]:
for operand in instruction.get("operands", []):
replacement = old_to_new.get(operand["kind"])
if replacement is not None:
operand["kind"] = replacement
def main():
import argparse
parser = argparse.ArgumentParser(description='Generate SPIR-V info tables')
parser.add_argument('--spirv-core-grammar', metavar='<path>',
type=str, required=False,
help='input JSON grammar file for core SPIR-V '
'instructions')
parser.add_argument('--extinst-debuginfo-grammar', metavar='<path>',
type=str, required=False, default=None,
help='input JSON grammar file for DebugInfo extended '
'instruction set')
parser.add_argument('--extinst-cldebuginfo100-grammar', metavar='<path>',
type=str, required=False, default=None,
help='input JSON grammar file for OpenCL.DebugInfo.100 '
'extended instruction set')
parser.add_argument('--extinst-glsl-grammar', metavar='<path>',
type=str, required=False, default=None,
help='input JSON grammar file for GLSL extended '
'instruction set')
parser.add_argument('--extinst-opencl-grammar', metavar='<path>',
type=str, required=False, default=None,
help='input JSON grammar file for OpenCL extended '
'instruction set')
parser.add_argument('--core-insts-output', metavar='<path>',
type=str, required=False, default=None,
help='output file for core SPIR-V instructions')
parser.add_argument('--glsl-insts-output', metavar='<path>',
type=str, required=False, default=None,
help='output file for GLSL extended instruction set')
parser.add_argument('--opencl-insts-output', metavar='<path>',
type=str, required=False, default=None,
help='output file for OpenCL extended instruction set')
parser.add_argument('--operand-kinds-output', metavar='<path>',
type=str, required=False, default=None,
help='output file for operand kinds')
parser.add_argument('--extension-enum-output', metavar='<path>',
type=str, required=False, default=None,
help='output file for extension enumeration')
parser.add_argument('--enum-string-mapping-output', metavar='<path>',
type=str, required=False, default=None,
help='output file for enum-string mappings')
parser.add_argument('--extinst-vendor-grammar', metavar='<path>',
type=str, required=False, default=None,
help='input JSON grammar file for vendor extended '
'instruction set'),
parser.add_argument('--vendor-insts-output', metavar='<path>',
type=str, required=False, default=None,
help='output file for vendor extended instruction set')
parser.add_argument('--vendor-operand-kind-prefix', metavar='<string>',
type=str, required=False, default=None,
help='prefix for operand kinds (to disambiguate operand type enums)')
args = parser.parse_args()
# The GN build system needs this because it doesn't handle quoting
# empty string arguments well.
if args.vendor_operand_kind_prefix == "...nil...":
args.vendor_operand_kind_prefix = ""
if (args.core_insts_output is None) != \
(args.operand_kinds_output is None):
print('error: --core-insts-output and --operand-kinds-output '
'should be specified together.')
exit(1)
if args.operand_kinds_output and not (args.spirv_core_grammar and
args.extinst_debuginfo_grammar and
args.extinst_cldebuginfo100_grammar):
print('error: --operand-kinds-output requires --spirv-core-grammar '
'and --extinst-debuginfo-grammar '
'and --extinst-cldebuginfo100-grammar')
exit(1)
if (args.glsl_insts_output is None) != \
(args.extinst_glsl_grammar is None):
print('error: --glsl-insts-output and --extinst-glsl-grammar '
'should be specified together.')
exit(1)
if (args.opencl_insts_output is None) != \
(args.extinst_opencl_grammar is None):
print('error: --opencl-insts-output and --extinst-opencl-grammar '
'should be specified together.')
exit(1)
if (args.vendor_insts_output is None) != \
(args.extinst_vendor_grammar is None):
print('error: --vendor-insts-output and '
'--extinst-vendor-grammar should be specified together.')
exit(1)
if all([args.core_insts_output is None,
args.glsl_insts_output is None,
args.opencl_insts_output is None,
args.vendor_insts_output is None,
args.extension_enum_output is None,
args.enum_string_mapping_output is None]):
print('error: at least one output should be specified.')
exit(1)
if args.spirv_core_grammar is not None:
with open(args.spirv_core_grammar) as json_file:
core_grammar = json.loads(json_file.read())
with open(args.extinst_debuginfo_grammar) as debuginfo_json_file:
debuginfo_grammar = json.loads(debuginfo_json_file.read())
with open(args.extinst_cldebuginfo100_grammar) as cldebuginfo100_json_file:
cldebuginfo100_grammar = json.loads(cldebuginfo100_json_file.read())
prefix_operand_kind_names("CLDEBUG100_", cldebuginfo100_grammar)
instructions = []
instructions.extend(core_grammar['instructions'])
instructions.extend(debuginfo_grammar['instructions'])
instructions.extend(cldebuginfo100_grammar['instructions'])
operand_kinds = []
operand_kinds.extend(core_grammar['operand_kinds'])
operand_kinds.extend(debuginfo_grammar['operand_kinds'])
operand_kinds.extend(cldebuginfo100_grammar['operand_kinds'])
extensions = get_extension_list(instructions, operand_kinds)
operand_kinds = precondition_operand_kinds(operand_kinds)
if args.core_insts_output is not None:
make_path_to_file(args.core_insts_output)
make_path_to_file(args.operand_kinds_output)
with open(args.core_insts_output, 'w') as f:
f.write(generate_instruction_table(
core_grammar['instructions']))
with open(args.operand_kinds_output, 'w') as f:
f.write(generate_operand_kind_table(operand_kinds))
if args.extension_enum_output is not None:
make_path_to_file(args.extension_enum_output)
with open(args.extension_enum_output, 'w') as f:
f.write(generate_extension_enum(extensions))
if args.enum_string_mapping_output is not None:
make_path_to_file(args.enum_string_mapping_output)
with open(args.enum_string_mapping_output, 'w') as f:
f.write(generate_all_string_enum_mappings(
extensions, operand_kinds))
if args.extinst_glsl_grammar is not None:
with open(args.extinst_glsl_grammar) as json_file:
grammar = json.loads(json_file.read())
make_path_to_file(args.glsl_insts_output)
with open(args.glsl_insts_output, 'w') as f:
f.write(generate_extended_instruction_table(
grammar, 'glsl'))
if args.extinst_opencl_grammar is not None:
with open(args.extinst_opencl_grammar) as json_file:
grammar = json.loads(json_file.read())
make_path_to_file(args.opencl_insts_output)
with open(args.opencl_insts_output, 'w') as f:
f.write(generate_extended_instruction_table(
grammar, 'opencl'))
if args.extinst_vendor_grammar is not None:
with open(args.extinst_vendor_grammar) as json_file:
grammar = json.loads(json_file.read())
make_path_to_file(args.vendor_insts_output)
name = args.extinst_vendor_grammar
start = name.find('extinst.') + len('extinst.')
name = name[start:-len('.grammar.json')].replace('-', '_')
with open(args.vendor_insts_output, 'w') as f:
f.write(generate_extended_instruction_table(
grammar, name, args.vendor_operand_kind_prefix))
if __name__ == '__main__':
main()
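# A sketch of a typical invocation (the file names are placeholders for wherever the JSON
# grammars and output directories live in a given build; the flag pairings mirror the
# consistency checks in main() above):
#
#   python <this script> \
#     --spirv-core-grammar=spirv.core.grammar.json \
#     --extinst-debuginfo-grammar=extinst.debuginfo.grammar.json \
#     --extinst-cldebuginfo100-grammar=extinst.opencl.debuginfo.100.grammar.json \
#     --core-insts-output=core.insts.inc \
#     --operand-kinds-output=operand.kinds.inc \
#     --extension-enum-output=extension_enum.inc \
#     --enum-string-mapping-output=enum_string_mapping.inc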
|
|
from __future__ import print_function
import distutils
import logging
import os
import subprocess
import sys
import warnings
import numpy
from theano import config
from theano.compat import decode, decode_iter
from theano.configdefaults import local_bitwidth
from theano.gof.utils import hash_from_file
from theano.gof.cmodule import (std_libs, std_lib_dirs,
std_include_dirs, dlimport,
Compiler,
get_lib_extension)
from theano.misc.windows import output_subprocess_Popen
_logger = logging.getLogger("theano.sandbox.cuda.nvcc_compiler")
nvcc_path = 'nvcc'
nvcc_version = None
def is_nvcc_available():
"""
Return True iff the nvcc compiler is found.
"""
def set_version():
p_out = output_subprocess_Popen([nvcc_path, '--version'])
ver_line = decode(p_out[0]).strip().split('\n')[-1]
build, version = ver_line.split(',')[1].strip().split()
assert build == 'release'
global nvcc_version
nvcc_version = version
try:
set_version()
return True
except Exception:
# try to find nvcc in cuda.root
p = os.path.join(config.cuda.root, 'bin', 'nvcc')
if os.path.exists(p):
global nvcc_path
nvcc_path = p
try:
set_version()
except Exception:
return False
return True
else:
return False
rpath_defaults = []
def add_standard_rpath(rpath):
rpath_defaults.append(rpath)
class NVCC_compiler(Compiler):
supports_amdlibm = False
@staticmethod
def try_compile_tmp(src_code, tmp_prefix='', flags=(),
try_run=False, output=False):
return Compiler._try_compile_tmp(src_code, tmp_prefix, flags,
try_run, output,
nvcc_path)
@staticmethod
def try_flags(flag_list, preambule="", body="",
try_run=False, output=False):
return Compiler._try_flags(flag_list, preambule, body, try_run, output,
nvcc_path)
@staticmethod
def version_str():
return "nvcc " + nvcc_version
@staticmethod
def compile_args():
"""
These args will be received by compile_str() in the preargs parameter.
They will also be included in the "hard" part of the module key.
"""
flags = [flag for flag in config.nvcc.flags.split(' ') if flag]
if config.nvcc.fastmath:
flags.append('-use_fast_math')
cuda_ndarray_cuh_hash = hash_from_file(
os.path.join(os.path.split(__file__)[0], 'cuda_ndarray.cuh'))
flags.append('-DCUDA_NDARRAY_CUH=' + cuda_ndarray_cuh_hash)
# NumPy 1.7 deprecated the old API. Most places have been updated to
# use the new API, but not everywhere. When that is finished, enable
# the following macro to assert that no new code uses the old API.
flags.append("-DNPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION")
# NumPy 1.7 deprecated the old unprefixed macros in favour of the
# NPY_ARRAY_* names below, but those names did not exist in earlier
# releases, so map them to the old ones when building against NumPy < 1.7.
numpy_ver = [int(n) for n in numpy.__version__.split('.')[:2]]
if bool(numpy_ver < [1, 7]):
flags.append("-DNPY_ARRAY_ENSURECOPY=NPY_ENSURECOPY")
flags.append("-DNPY_ARRAY_ALIGNED=NPY_ALIGNED")
flags.append("-DNPY_ARRAY_WRITEABLE=NPY_WRITEABLE")
flags.append("-DNPY_ARRAY_UPDATE_ALL=NPY_UPDATE_ALL")
flags.append("-DNPY_ARRAY_C_CONTIGUOUS=NPY_C_CONTIGUOUS")
flags.append("-DNPY_ARRAY_F_CONTIGUOUS=NPY_F_CONTIGUOUS")
# If the user didn't specify architecture flags, add them.
if not any(['-arch=sm_' in f for f in flags]):
# We compile cuda_ndarray.cu during import, and we should not add
# device properties at that time, as the device is not selected yet!
# TODO: re-compile cuda_ndarray when we bind to a GPU?
import theano.sandbox.cuda
if hasattr(theano.sandbox, 'cuda'):
n = theano.sandbox.cuda.use.device_number
if n is None:
_logger.warn(
"We try to get compilation arguments for CUDA"
" code, but the GPU device is not initialized."
" This is probably caused by an Op that work on"
" the GPU that don't inherit from GpuOp."
" We Initialize the GPU now.")
theano.sandbox.cuda.use(
"gpu",
force=True,
default_to_move_computation_to_gpu=False,
move_shared_float32_to_gpu=False,
enable_cuda=False)
n = theano.sandbox.cuda.use.device_number
p = theano.sandbox.cuda.device_properties(n)
flags.append('-arch=sm_' + str(p['major']) +
str(p['minor']))
return flags
@staticmethod
def compile_str(
module_name, src_code,
location=None, include_dirs=[], lib_dirs=[], libs=[], preargs=[],
rpaths=rpath_defaults, py_module=True, hide_symbols=True):
"""
Parameters
----------
module_name: str
This has been embedded in the src_code.
src_code
A complete c or c++ source listing for the module.
location
A pre-existing filesystem directory where the
cpp file and .so will be written.
include_dirs
A list of include directory names (each gets prefixed with -I).
lib_dirs
A list of library search path directory names (each gets
prefixed with -L).
libs
A list of libraries to link with (each gets prefixed with -l).
preargs
A list of extra compiler arguments.
rpaths
List of rpaths to use with Xlinker. Defaults to `rpath_defaults`.
py_module
If False, compile to a shared library, but
do not import as a Python module.
hide_symbols
If True (the default), hide all symbols from the library symbol
table unless explicitly exported.
Returns
-------
module
Dynamically-imported python module of the compiled code.
(unless py_module is False, in that case returns None.)
Notes
-----
On Windows 7 with nvcc 3.1 we need to compile in the real directory.
Otherwise nvcc never finishes.
"""
# Remove empty string directory
include_dirs = [d for d in include_dirs if d]
lib_dirs = [d for d in lib_dirs if d]
rpaths = list(rpaths)
if sys.platform == "win32":
# Remove some compilation args that cl.exe does not understand.
# cl.exe is the compiler used by nvcc on Windows.
for a in ["-Wno-write-strings", "-Wno-unused-label",
"-Wno-unused-variable", "-fno-math-errno"]:
if a in preargs:
preargs.remove(a)
if preargs is None:
preargs = []
else:
preargs = list(preargs)
if sys.platform != 'win32':
preargs.append('-fPIC')
if config.cmodule.remove_gxx_opt:
preargs = [p for p in preargs if not p.startswith('-O')]
cuda_root = config.cuda.root
# The include dirs given by the user should take precedence over
# the standard ones.
include_dirs = include_dirs + std_include_dirs()
if os.path.abspath(os.path.split(__file__)[0]) not in include_dirs:
include_dirs.append(os.path.abspath(os.path.split(__file__)[0]))
libs = libs + std_libs()
if 'cudart' not in libs:
libs.append('cudart')
lib_dirs = lib_dirs + std_lib_dirs()
if sys.platform != 'darwin':
# config.dnn.include_path adds these by default for cuDNN in the
# new back-end. They should not be used in this back-end, so
# just remove them.
lib_dirs = [ld for ld in lib_dirs if
not(ld == os.path.join(cuda_root, 'lib') or
ld == os.path.join(cuda_root, 'lib64'))]
if sys.platform != 'darwin':
# sometimes, the linker cannot find -lpython so we need to tell it
# explicitly where it is located
# this returns somepath/lib/python2.x
python_lib = distutils.sysconfig.get_python_lib(plat_specific=1,
standard_lib=1)
python_lib = os.path.dirname(python_lib)
if python_lib not in lib_dirs:
lib_dirs.append(python_lib)
cppfilename = os.path.join(location, 'mod.cu')
with open(cppfilename, 'w') as cppfile:
_logger.debug('Writing module C++ code to %s', cppfilename)
cppfile.write(src_code)
lib_filename = os.path.join(location, '%s.%s' %
(module_name, get_lib_extension()))
_logger.debug('Generating shared lib %s', lib_filename)
# TODO: Why do these args cause failure on gtx285 that has 1.3
# compute capability? '--gpu-architecture=compute_13',
# '--gpu-code=compute_13',
# nvcc argument
preargs1 = []
preargs2 = []
for pa in preargs:
if pa.startswith('-Wl,'):
# the -rpath option is not understood by the Microsoft linker
if sys.platform != 'win32' or not pa.startswith('-Wl,-rpath'):
preargs1.append('-Xlinker')
preargs1.append(pa[4:])
continue
for pattern in ['-O', '-arch=', '-ccbin=', '-G', '-g', '-I',
'-L', '--fmad', '--ftz', '--maxrregcount',
'--prec-div', '--prec-sqrt', '--use_fast_math',
'-fmad', '-ftz', '-maxrregcount',
'-prec-div', '-prec-sqrt', '-use_fast_math',
'--use-local-env', '--cl-version=']:
if pa.startswith(pattern):
preargs1.append(pa)
break
else:
preargs2.append(pa)
# Don't put -G by default, as it slows things down.
# We aren't sure if -g slows things down, so we don't put it by default.
cmd = [nvcc_path, '-shared'] + preargs1
if config.nvcc.compiler_bindir:
cmd.extend(['--compiler-bindir', config.nvcc.compiler_bindir])
if sys.platform == 'win32':
# add flags for Microsoft compiler to create .pdb files
preargs2.extend(['/Zi', '/MD'])
cmd.extend(['-Xlinker', '/DEBUG'])
# remove the complaints for the duplication of `double round(double)`
# in both math_functions.h and pymath.h,
# by not including the one in pymath.h
cmd.extend(['-D HAVE_ROUND'])
else:
if hide_symbols:
preargs2.append('-fvisibility=hidden')
if local_bitwidth() == 64:
cmd.append('-m64')
else:
cmd.append('-m32')
if len(preargs2) > 0:
cmd.extend(['-Xcompiler', ','.join(preargs2)])
# We should not use rpath if possible. If the user provided
# a cuda.root flag, we need to add one, but
# otherwise, we don't add it. See gh-1540 and
# https://wiki.debian.org/RpathIssue for details.
if (not type(config.cuda).root.is_default and
os.path.exists(os.path.join(config.cuda.root, 'lib'))):
rpaths.append(os.path.join(config.cuda.root, 'lib'))
if sys.platform != 'darwin':
# the CUDA libs are universal (contain both 32-bit and 64-bit)
rpaths.append(os.path.join(config.cuda.root, 'lib64'))
if sys.platform != 'win32':
# the -rpath option is not understood by the Microsoft linker
for rpath in rpaths:
cmd.extend(['-Xlinker', ','.join(['-rpath', rpath])])
cmd.extend('-I%s' % idir for idir in include_dirs)
cmd.extend(['-o', lib_filename])
cmd.append(os.path.split(cppfilename)[-1])
cmd.extend(['-L%s' % ldir for ldir in lib_dirs])
cmd.extend(['-l%s' % l for l in libs])
if sys.platform == 'darwin':
# This tells the compiler to use the already-loaded python
# symbols (which should always be the right ones).
cmd.extend(['-Xcompiler', '-undefined,dynamic_lookup'])
# Remove "-u Symbol" arguments, since they are usually not
# relevant for the new compilation, even if they were used for
# compiling python. If they are necessary, the nvcc syntax is
# "-U Symbol" with a capital U.
done = False
while not done:
try:
indexof = cmd.index('-u')
cmd.pop(indexof) # Remove -u
cmd.pop(indexof) # Remove argument to -u
except ValueError as e:
done = True
# CUDA Toolkit v4.1 Known Issues:
# Host linker on Mac OS 10.7 (and 10.6 for me) passes the -no_pie option
# to nvcc; this option is not recognized and generates an error.
# http://stackoverflow.com/questions/9327265/nvcc-unknown-option-no-pie
# Passing -Xlinker -pie stops -no_pie from getting passed
if sys.platform == 'darwin' and nvcc_version >= '4.1':
cmd.extend(['-Xlinker', '-pie'])
# cmd.append("--ptxas-options=-v") #uncomment this to see
# register and shared-mem requirements
_logger.debug('Running cmd %s', ' '.join(cmd))
orig_dir = os.getcwd()
try:
os.chdir(location)
p = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
nvcc_stdout, nvcc_stderr = decode_iter(p.communicate()[:2])
finally:
os.chdir(orig_dir)
for eline in nvcc_stderr.split('\n'):
if not eline:
continue
if 'skipping incompatible' in eline:
# ld is skipping an incompatible library
continue
if 'declared but never referenced' in eline:
continue
if 'statement is unreachable' in eline:
continue
_logger.info("NVCC: %s", eline)
if p.returncode:
for i, l in enumerate(src_code.split('\n')):
print(i + 1, l, file=sys.stderr)
print('===============================', file=sys.stderr)
# filter the output from the compiler
for l in nvcc_stderr.split('\n'):
if not l:
continue
# filter out the annoying declaration warnings
try:
if l[l.index(':'):].startswith(': warning: variable'):
continue
if l[l.index(':'):].startswith(': warning: label'):
continue
except Exception:
pass
print(l, file=sys.stderr)
print(nvcc_stdout)
print(cmd)
raise Exception('nvcc return status', p.returncode,
'for cmd', ' '.join(cmd))
elif config.cmodule.compilation_warning and nvcc_stdout:
print(nvcc_stdout)
if nvcc_stdout:
# this doesn't happen to my knowledge
print("DEBUG: nvcc STDOUT", nvcc_stdout, file=sys.stderr)
if py_module:
# touch the __init__ file
open(os.path.join(location, "__init__.py"), 'w').close()
return dlimport(lib_filename)
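# A minimal sketch of the kind of command line the steps above assemble on a
# 64-bit Linux box (every path and flag value below is hypothetical; only the
# ordering mirrors the code in this function):
#
#   nvcc -shared -O3 --compiler-bindir /usr/bin -m64 \
#        -Xcompiler -fvisibility=hidden,-fPIC \
#        -Xlinker -rpath,/usr/local/cuda/lib64 \
#        -I/usr/include/python2.7 -o mod.cu.so mod.cu \
#        -L/usr/lib -lpython2.7 -lcudart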
|
|
from sympy.core.basic import Basic, S, C, sympify
from sympy.core import oo, Rational, Pow
from sympy.core.cache import cacheit
class Order(Basic):
"""
Represents O(f(x)) at the point x = 0.
Definition
==========
g(x) = O(f(x)) as x->0 if and only if
|g(x)|<=M|f(x)| near x=0 (1)
for some positive but finite M. An equivalent way of saying (1) is:
lim_{x->0} |g(x)/f(x)| < oo
Let's illustrate it on the following example:
sin x = x - x**3/3! + O(x**5)
where in this case O(x**5) = x**5/5! - x**7/7! + .... and the definition
of O means:
|x**5/5! - x**7/7! + ....| <= M|x**5| near x=0
or equivalently:
lim_{x->0} | (x**5/5! - x**7/7! + ....) / x**5| < oo
which surely is true, because
lim_{x->0} | (x**5/5! - x**7/7! + ....) / x**5| = 1/5!
So intuitively O(x**3) means: all terms x**3, x**4 and
higher. But not x**2, x or 1.
Examples:
=========
>>> from sympy import O
>>> from sympy.abc import x
>>> O(x)
O(x)
>>> O(x)*x
O(x**2)
>>> O(x)-O(x)
O(x)
External links
--------------
U{Big O notation<http://en.wikipedia.org/wiki/Big_O_notation>}
Properties:
===========
g(x) = O(f(x)) as x->0 <-> |g(x)|<=M|f(x)| near x=0 <-> lim_{x->0} |g(x)/f(x)| < oo
g(x,y) = O(f(x,y)) <-> lim_{x,y->0} |g(x,y)/f(x,y)| < oo, we'll assume that limits commute.
Notes:
======
In O(f(x),x) the expression f(x) is assumed to have a leading term.
O(f(x),x) is automatically transformed to O(f(x).as_leading_term(x),x).
O(expr*f(x),x) is O(f(x),x)
O(expr,x) is O(1)
O(0, x) is 0.
Multivariate O is also supported:
O(f(x,y),x,y) is transformed to O(f(x,y).as_leading_term(x,y).as_leading_term(y), x, y)
If O is used with only expression argument then the symbols are
all symbols in the expression.
"""
is_Order = True
__slots__ = []
@cacheit
def __new__(cls, expr, *symbols, **assumptions):
expr = sympify(expr).expand()
if expr is S.NaN:
return S.NaN
if symbols:
symbols = map(sympify, symbols)
else:
symbols = list(expr.atoms(C.Symbol))
symbols.sort(Basic.compare)
if expr.is_Order:
new_symbols = list(expr.symbols)
for s in symbols:
if s not in new_symbols:
new_symbols.append(s)
if len(new_symbols)==len(expr.symbols):
return expr
symbols = new_symbols
elif symbols:
symbol_map = {}
new_symbols = []
for s in symbols:
if isinstance(s, C.Symbol):
new_symbols.append(s)
continue
z = C.Symbol('z',dummy=True)
x1,s1 = s.solve4linearsymbol(z)
expr = expr.subs(x1,s1)
symbol_map[z] = s
new_symbols.append(z)
if symbol_map:
r = Order(expr, *new_symbols, **assumptions)
expr = r.expr.subs(symbol_map)
symbols = []
for s in r.symbols:
if s in symbol_map:
symbols.append(symbol_map[s])
else:
symbols.append(s)
else:
if expr.is_Add:
lst = expr.extract_leading_order(*symbols)
expr = C.Add(*[f.expr for (e,f) in lst])
else:
expr = expr.as_leading_term(*symbols)
coeff, terms = expr.as_coeff_terms()
if coeff is S.Zero:
return coeff
expr = C.Mul(*[t for t in terms if t.has(*symbols)])
elif expr is not S.Zero:
expr = S.One
if expr is S.Zero:
return expr
# create Order instance:
obj = Basic.__new__(cls, expr, *symbols, **assumptions)
return obj
def _hashable_content(self):
if self.args[0].is_number:
return (self.args[0],)
return self.args
def oseries(self, order):
return self
def _eval_nseries(self, x, x0, n):
return self
@classmethod
def find_limit(cls, f, x):
"""Basically identical to:
return limit(f, x, 0, dir="+")
but first trying some easy cases (like x**2) using heuristics, to avoid
infinite recursion. This is only needed in the Order class and series
expansion (that shouldn't rely on the Gruntz algorithm too much),
that's why find_limit() is defined here.
"""
from sympy import limit, Wild, log
if f.is_Pow:
if f.args[0] == x:
if f.args[1].is_Rational:
if f.args[1] > 0:
return S.Zero
else:
return oo
if f.args[1].is_number:
if f.args[1].evalf() > 0:
return S.Zero
else:
return oo
if f == x:
return S.Zero
p, q = Wild("p"), Wild("q")
r = f.match(x**p * log(x)**q)
if r:
p, q = r[p], r[q]
if q.is_number and p.is_number:
if q > 0:
if p > 0:
return S.Zero
else:
return -oo
elif q < 0:
if p >= 0:
return S.Zero
else:
return -oo
return limit(f, x, 0, dir="+")
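# A few illustrative values for the heuristics above (assuming x is a Symbol;
# these follow directly from the branches rather than from running limit()):
#   Order.find_limit(x**2, x)  -> 0   (positive rational exponent)
#   Order.find_limit(1/x, x)   -> oo  (negative exponent)
#   Order.find_limit(x, x)     -> 0
# Anything that matches none of the easy patterns falls through to limit().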
@property
def expr(self):
return self._args[0]
@property
def symbols(self):
return self._args[1:]
def _eval_power(b, e):
if e.is_Number:
return Order(b.expr ** e, *b.symbols)
return
def as_expr_symbols(self, order_symbols):
if order_symbols is None:
order_symbols = self.symbols
else:
for s in self.symbols:
if s not in order_symbols:
order_symbols = order_symbols + (s,)
return self.expr, order_symbols
@cacheit
def contains(self, expr):
"""
Return True if expr belongs to Order(self.expr, *self.symbols).
Return False if self belongs to expr.
Return None if the inclusion relation cannot be determined (e.g. when self and
expr have different symbols).
"""
from sympy import powsimp
if expr is S.Zero:
return True
if expr is S.NaN:
return False
if expr.is_Order:
if self.symbols and expr.symbols:
common_symbols = tuple([s for s in self.symbols if s in expr.symbols])
elif self.symbols:
common_symbols = self.symbols
else:
common_symbols = expr.symbols
if not common_symbols:
if not (self.symbols or expr.symbols): # O(1),O(1)
return True
return None
r = None
for s in common_symbols:
l = Order.find_limit(powsimp(self.expr/expr.expr, deep=True,\
combine='exp'), s) != 0
if r is None:
r = l
else:
if r != l:
return
return r
obj = Order(expr, *self.symbols)
return self.contains(obj)
def _eval_subs(self, old, new):
if self==old:
return new
if isinstance(old, C.Symbol) and old in self.symbols:
i = list(self.symbols).index(old)
if isinstance(new, C.Symbol):
return Order(self.expr._eval_subs(old, new), *(self.symbols[:i]+(new,)+self.symbols[i+1:]))
return Order(self.expr._eval_subs(old, new), *(self.symbols[:i]+self.symbols[i+1:]))
return Order(self.expr._eval_subs(old, new), *self.symbols)
def _sage_(self):
#XXX: SAGE doesn't have Order yet. Let's return 0 instead.
return Rational(0)._sage_()
Basic.singleton['O'] = lambda : Order
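# A minimal doctest-style sketch of the inclusion test implemented by
# Order.contains() (assuming the usual sympy.abc symbols):
#
#     >>> from sympy import O
#     >>> from sympy.abc import x
#     >>> O(x**2).contains(x**3)   # x**3 is in O(x**2) as x -> 0
#     True
#     >>> O(x**2).contains(x)      # but x is not
#     False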
|
|
"""
Functions for executing ES searches
"""
import json
import logging
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
from django.db import transaction
from django.db.models import Q as Query
from elasticsearch.exceptions import NotFoundError
from elasticsearch_dsl import Search, Q
from jsonpatch import make_patch
from courses.models import Program
from dashboard.models import ProgramEnrollment
from profiles.models import Profile
from roles.api import get_advance_searchable_program_ids
from search.connection import (
get_default_alias,
get_conn,
GLOBAL_DOC_TYPE,
PRIVATE_ENROLLMENT_INDEX_TYPE,
PUBLIC_ENROLLMENT_INDEX_TYPE,
PERCOLATE_INDEX_TYPE,
)
from search.models import (
PercolateQuery,
PercolateQueryMembership,
)
from search.exceptions import (
NoProgramAccessException,
PercolateException,
)
from search.indexing_api import serialize_program_enrolled_user
DEFAULT_ES_LOOP_PAGE_SIZE = 100
log = logging.getLogger(__name__)
def execute_search(search_obj):
"""
Executes a search against ES after checking the connection
Args:
search_obj (Search): elasticsearch_dsl Search object
Returns:
elasticsearch_dsl.result.Response: ES response
"""
# make sure there is a live connection
if search_obj._index is None: # pylint: disable=protected-access
# If you're seeing this it means you're creating Search() without using
# create_search_obj which sets important fields like the index.
raise ImproperlyConfigured("search object is missing an index")
get_conn()
return search_obj.execute()
def scan_search(search_obj):
"""
Executes a scan search after checking the connection and return a
generator that will iterate over all the documents matching the query.
Args:
search_obj (Search): elasticsearch_dsl Search object
Returns:
generator of dict:
A generator that will iterate over all the documents matching the query
"""
# make sure there is a live connection
if search_obj._index is None: # pylint: disable=protected-access
# If you're seeing this it means you're creating Search() without using
# create_search_obj which sets important fields like the index.
raise ImproperlyConfigured("search object is missing an index")
get_conn()
return search_obj.scan()
def get_searchable_programs(user, staff_program_ids):
"""
Determines the programs a user is eligible to search
Args:
user (django.contrib.auth.models.User): the user that is searching
staff_program_ids (list of int): the list of program ids the user is staff for if any
Returns:
set of courses.models.Program: set of programs the user can search in
"""
# filter only to the staff programs or enrolled programs
# NOTE: this has an accepted limitation that if you are staff on any program,
# you can't use search on non-staff programs
return set(Program.objects.filter(
Query(id__in=staff_program_ids) if staff_program_ids else Query(programenrollment__user=user)
).distinct())
def create_program_limit_query(user, staff_program_ids, filter_on_email_optin=False):
"""
Constructs and returns a query that limits a user to data for their allowed programs
Args:
user (django.contrib.auth.models.User): A user
staff_program_ids (list of int): the list of program ids the user is staff for if any
filter_on_email_optin (bool): If true, filter out profiles where email_optin != true
Returns:
elasticsearch_dsl.query.Q: An elasticsearch query
"""
users_allowed_programs = get_searchable_programs(user, staff_program_ids)
# if the user cannot search any program, raise an exception.
# in theory this should never happen because `UserCanAdvanceSearchPermission`
# takes care of doing the same check, but better to keep it to avoid
# that a theoretical bug exposes all the data in the index
if not users_allowed_programs:
raise NoProgramAccessException()
must = [
Q('term', **{'program.is_learner': True})
]
if filter_on_email_optin:
must.append(Q('term', **{'profile.email_optin': True}))
# no matter what the query is, limit the programs to the allowed ones
# if this is a superset of what searchkit sends, this will not impact the result
return Q(
'bool',
should=[
Q('term', **{'program.id': program.id}) for program in users_allowed_programs
],
# require that at least one program id matches the user's allowed programs
minimum_should_match=1,
must=must,
)
def create_search_obj(user, search_param_dict=None, filter_on_email_optin=False):
"""
Creates a search object and prepares it with metadata and query parameters that
we want to apply for all ES requests
Args:
user (User): User object
search_param_dict (dict): A dict representing the body of an ES query
filter_on_email_optin (bool): If true, filter out profiles where email_optin != True
Returns:
Search: elasticsearch_dsl Search object
"""
staff_program_ids = get_advance_searchable_program_ids(user)
is_advance_search_capable = bool(staff_program_ids)
index_type = PRIVATE_ENROLLMENT_INDEX_TYPE if is_advance_search_capable else PUBLIC_ENROLLMENT_INDEX_TYPE
index = get_default_alias(index_type)
search_obj = Search(index=index)
# Update from search params first so our server-side filtering will overwrite it if necessary
if search_param_dict is not None:
search_obj.update_from_dict(search_param_dict)
if not is_advance_search_capable:
# Learners can't search for other learners with privacy set to private
search_obj = search_obj.filter(
~Q('term', **{'profile.account_privacy': Profile.PRIVATE}) # pylint: disable=invalid-unary-operand-type
)
# Limit results to one of the programs the user is staff on
search_obj = search_obj.filter(create_program_limit_query(
user,
staff_program_ids,
filter_on_email_optin=filter_on_email_optin
))
# Filter so that only filled_out profiles are seen
search_obj = search_obj.filter(
Q('term', **{'profile.filled_out': True})
)
# Force size to be the one we set on the server
update_dict = {'size': settings.ELASTICSEARCH_DEFAULT_PAGE_SIZE}
if search_param_dict is not None and search_param_dict.get('from') is not None:
update_dict['from'] = search_param_dict['from']
search_obj.update_from_dict(update_dict)
return search_obj
def prepare_and_execute_search(user, search_param_dict=None, search_func=execute_search,
filter_on_email_optin=False):
"""
Prepares a Search object and executes the search against ES
Args:
user (User): User object
search_param_dict (dict): A dict representing the body of an ES query
search_func (callable): The function that executes the search
filter_on_email_optin (bool): If true, filter out profiles where email_optin != True
Returns:
elasticsearch_dsl.result.Response: ES response
"""
search_obj = create_search_obj(
user,
search_param_dict=search_param_dict,
filter_on_email_optin=filter_on_email_optin,
)
return search_func(search_obj)
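# A minimal usage sketch (hypothetical caller code, not part of this module):
# the raw ES body sent by the client is wrapped with the server-side program,
# privacy and filled_out filters before being executed.
#
#     response = prepare_and_execute_search(
#         request.user,
#         search_param_dict={"query": {"match": {"profile.first_name": "Ada"}}},
#     )
#     for hit in response.hits:
#         log.debug("matched enrollment document %s", hit.meta.id)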
def search_for_field(search_obj, field_name):
"""
Retrieves all unique instances of a field for documents that match an ES query
Args:
search_obj (Search): Search object
field_name (str): The name of the field for the value to get
Returns:
set: Set of unique values
"""
results = set()
# Maintaining a consistent sort on '_doc' will help prevent bugs where the
# index is altered during the loop.
# This also limits the query to only return the field value.
search_obj = search_obj.sort('_doc').source(include=[field_name])
search_results = scan_search(search_obj)
# add the field value for every search result hit to the set
for hit in search_results:
results.add(getattr(hit, field_name))
return results
def get_all_query_matching_emails(search_obj):
"""
Retrieves all unique emails for documents that match an ES query
Args:
search_obj (Search): Search object
Returns:
set: Set of unique emails
"""
return search_for_field(search_obj, "email")
def search_percolate_queries(program_enrollment_id, source_type):
"""
Find all PercolateQuery objects whose queries match a user document
Args:
program_enrollment_id (int): A ProgramEnrollment id
source_type (str): The type of the percolate query to filter on
Returns:
django.db.models.query.QuerySet: A QuerySet of PercolateQuery matching the percolate results
"""
enrollment = ProgramEnrollment.objects.get(id=program_enrollment_id)
result_ids = _search_percolate_queries(enrollment)
return PercolateQuery.objects.filter(id__in=result_ids, source_type=source_type).exclude(is_deleted=True)
def _search_percolate_queries(program_enrollment):
"""
Find all PercolateQuery ids whose queries match a user document
Args:
program_enrollment (ProgramEnrollment): A ProgramEnrollment
Returns:
list of int: A list of PercolateQuery ids
"""
conn = get_conn()
percolate_index = get_default_alias(PERCOLATE_INDEX_TYPE)
doc = serialize_program_enrolled_user(program_enrollment)
if not doc:
return []
# We don't need this to search for percolator queries and
# it causes a dynamic mapping failure so we need to remove it
del doc['_id']
body = {
"query": {
"percolate": {
"field": "query",
"document": doc
}
}
}
result = conn.search(percolate_index, GLOBAL_DOC_TYPE, body=body)
failures = result.get('_shards', {}).get('failures', [])
if len(failures) > 0:
raise PercolateException("Failed to percolate: {}".format(failures))
return [int(row['_id']) for row in result['hits']['hits']]
def adjust_search_for_percolator(search):
"""
Returns an updated Search which can be used with percolator.
Percolated queries can only store the query portion of the search object
(see https://github.com/elastic/elasticsearch/issues/19680). This will modify the original search query
to add post_filter arguments to the query part of the search. Then all parts of the Search other than
query will be removed.
Args:
search (Search): A search object
Returns:
Search: updated search object
"""
search_dict = search.to_dict()
if 'post_filter' in search_dict:
search = search.filter(search_dict['post_filter'])
# Remove all other keys besides query
updated_search_dict = {}
search_dict = search.to_dict()
if 'query' in search_dict:
updated_search_dict['query'] = search_dict['query']
updated_search = Search(index=search._index) # pylint: disable=protected-access
updated_search.update_from_dict(updated_search_dict)
return updated_search
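# For example (hypothetical shapes, only illustrating the transformation):
#   {"query": {"match_all": {}}, "post_filter": {"term": {"program.id": 1}},
#    "size": 50, "sort": ["_doc"]}
# would roughly become
#   {"query": {"bool": {"must": [{"match_all": {}}],
#                       "filter": [{"term": {"program.id": 1}}]}}}
# i.e. the post_filter is folded into the query and everything else is dropped.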
def document_needs_updating(enrollment):
"""
Get the document from elasticsearch and see if it matches what's in the database
Args:
enrollment (ProgramEnrollment): A program enrollment
Returns:
bool: True if the document needs to be updated via reindex
"""
index = get_default_alias(PRIVATE_ENROLLMENT_INDEX_TYPE)
conn = get_conn()
try:
document = conn.get(index=index, doc_type=GLOBAL_DOC_TYPE, id=enrollment.id)
except NotFoundError:
return True
serialized_enrollment = serialize_program_enrolled_user(enrollment)
del serialized_enrollment['_id']
source = document['_source']
if serialized_enrollment != source:
# Convert OrderedDict to dict
reserialized_enrollment = json.loads(json.dumps(serialized_enrollment))
diff = make_patch(source, reserialized_enrollment).patch
serialized_diff = json.dumps(diff, indent=" ")
log.info("Difference found for enrollment %s: %s", enrollment, serialized_diff)
return True
return False
def update_percolate_memberships(user, source_type):
"""
Updates membership in a PercolateQuery
Args:
user (User): A User to check for membership changes
source_type (str): The type of the percolate query to filter on
"""
# ensure we have a membership for each of the queries so we can acquire a lock on them
percolate_queries = list(PercolateQuery.objects.filter(source_type=source_type).exclude(is_deleted=True))
membership_ids = _ensure_memberships_for_queries(
percolate_queries,
user
)
# if there are no percolate queries or memberships then there's nothing to do
if membership_ids:
_update_memberships([query.id for query in percolate_queries], membership_ids, user)
def _ensure_memberships_for_queries(percolate_queries, user):
"""
Ensures PercolateQueryMemberships exist for the user on the designated PercolateQueries
Args:
percolate_queries (list of PercolateQuery): A list of PercolateQuerys to add PercolateQueryMemberships for
user (User): The user to ensure memberships for
Returns:
list of int: ids of the user's PercolateQueryMemberships for the given queries
"""
membership_ids = []
for query in percolate_queries:
membership, _ = PercolateQueryMembership.objects.get_or_create(query=query, user=user)
membership_ids.append(membership.id)
return membership_ids
def _update_memberships(percolate_query_ids, membership_ids, user, force_save=False):
"""
Atomically determine and update memberships
Args:
percolate_query_ids (set of int): a set of PercolateQuery.id
membership_ids (list of int): A list of ids for PercolateQueryMemberships to update
user (User): A User to check for membership changes
force_save (bool): True if membership saves should be forced even if there is no change
"""
with transaction.atomic():
memberships = PercolateQueryMembership.objects.filter(id__in=membership_ids).select_for_update()
# limit the query_ids to the queries we are trying to update
query_ids = set()
for enrollment in user.programenrollment_set.all():
query_ids.update(set(_search_percolate_queries(enrollment)))
query_ids.intersection_update(percolate_query_ids)
for membership in memberships:
# only update if there's a delta in membership status
is_member = membership.query_id in query_ids
if force_save or (membership.is_member is not is_member):
membership.is_member = is_member
membership.needs_update = True
membership.save()
def populate_query_memberships(percolate_query_id):
"""
Populates PercolateQueryMemberships for the given query and enrollments
Args:
percolate_query_id (int): Database id for the PercolateQuery to populate
"""
# practically this is a list of 1 query, but _ensure_memberships_for_queries requires a list
query = PercolateQuery.objects.get(id=percolate_query_id)
users = User.objects.filter(is_active=True).iterator()
for user in users:
membership_ids = _ensure_memberships_for_queries([query], user)
_update_memberships(set([query.id]), membership_ids, user, force_save=True)
|
|
# -*- coding: utf-8 -*-
""" Sahana Eden Assets Model
@copyright: 2009-2014 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3AssetModel",
"S3AssetHRModel",
"S3AssetTeamModel",
#"asset_rheader",
"asset_types",
"asset_log_status",
"asset_controller",
"asset_AssetRepresent",
)
try:
import json # try stdlib (Python 2.6)
except ImportError:
try:
import simplejson as json # try external module
except:
import gluon.contrib.simplejson as json # fallback to pure-Python module
from gluon import *
from gluon.storage import Storage
from ..s3 import *
from s3layouts import S3AddResourceLink
ASSET_TYPE_VEHICLE = 1 # => Extra Tab(s) for Registration Documents, Fuel Efficiency
ASSET_TYPE_RADIO = 2 # => Extra Tab(s) for Radio Channels/Frequencies
ASSET_TYPE_TELEPHONE = 3 # => Extra Tab(s) for Contact Details & Airtime Billing
ASSET_TYPE_OTHER = 4 # => No extra Tabs
# To pass to global scope
asset_types = {"VEHICLE" : ASSET_TYPE_VEHICLE,
"RADIO" : ASSET_TYPE_RADIO,
"TELEPHONE" : ASSET_TYPE_TELEPHONE,
"OTHER" : ASSET_TYPE_OTHER,
}
ASSET_LOG_SET_BASE = 1
ASSET_LOG_ASSIGN = 2
ASSET_LOG_RETURN = 3
ASSET_LOG_CHECK = 4
ASSET_LOG_REPAIR = 5
ASSET_LOG_DONATED = 32
ASSET_LOG_LOST = 33
ASSET_LOG_STOLEN = 34
ASSET_LOG_DESTROY = 35
# To pass to global scope
asset_log_status = {"SET_BASE" : ASSET_LOG_SET_BASE,
"ASSIGN" : ASSET_LOG_ASSIGN,
"RETURN" : ASSET_LOG_RETURN,
"CHECK" : ASSET_LOG_CHECK,
"REPAIR" : ASSET_LOG_REPAIR,
"DONATED" : ASSET_LOG_DONATED,
"LOST" : ASSET_LOG_LOST,
"STOLEN" : ASSET_LOG_STOLEN,
"DESTROY" : ASSET_LOG_DESTROY,
}
# =============================================================================
class S3AssetModel(S3Model):
"""
Asset Management
"""
names = ("asset_asset",
"asset_item",
"asset_log",
"asset_asset_id",
)
def model(self):
T = current.T
db = current.db
auth = current.auth
s3 = current.response.s3
item_id = self.supply_item_id
item_entity_id = self.supply_item_entity_id
location_id = self.gis_location_id
organisation_id = self.org_organisation_id
person_id = self.pr_person_id
messages = current.messages
NONE = messages["NONE"]
UNKNOWN_OPT = messages.UNKNOWN_OPT
settings = current.deployment_settings
org_site_label = settings.get_org_site_label()
vehicle = settings.has_module("vehicle")
# Shortcuts
add_components = self.add_components
configure = self.configure
crud_strings = s3.crud_strings
define_table = self.define_table
super_link = self.super_link
#--------------------------------------------------------------------------
# Assets
#
asset_type_opts = {ASSET_TYPE_VEHICLE : T("Vehicle"),
#ASSET_TYPE_RADIO : T("Radio"),
#ASSET_TYPE_TELEPHONE : T("Telephone"),
ASSET_TYPE_OTHER : T("Other"),
}
asset_condition_opts = {1: T("Good Condition"),
2: T("Minor Damage"),
3: T("Major Damage"),
4: T("Un-Repairable"),
5: T("Needs Maintenance"),
}
ctable = self.supply_item_category
itable = self.supply_item
supply_item_represent = self.supply_item_represent
asset_items_set = db((ctable.can_be_asset == True) & \
(itable.item_category_id == ctable.id))
tablename = "asset_asset"
define_table(tablename,
# Instances
super_link("track_id", "sit_trackable"),
super_link("doc_id", "doc_entity"),
item_entity_id,
Field("number",
label = T("Asset Number"),
),
# @ToDo: We could set this automatically based on Item Category
Field("type", "integer",
default = ASSET_TYPE_OTHER,
label = T("Type"),
represent = lambda opt: \
asset_type_opts.get(opt, UNKNOWN_OPT),
requires = IS_IN_SET(asset_type_opts),
readable = vehicle,
writable = vehicle,
),
item_id(represent = supply_item_represent,
requires = IS_ONE_OF(asset_items_set,
"supply_item.id",
supply_item_represent,
sort = True,
),
script = None, # No Item Pack Filter
widget = None,
),
Field("kit", "boolean",
default = False,
label = T("Kit?"),
represent = lambda opt: \
(opt and [T("Yes")] or [NONE])[0],
# Enable in template if required
readable = False,
writable = False,
),
organisation_id(requires=self.org_organisation_requires(
updateable=True,
#required=True
),
required = True,
script = '''
S3OptionsFilter({
'triggerName':'organisation_id',
'targetName':'site_id',
'lookupResource':'site',
'lookupPrefix':'org',
'lookupField':'site_id',
'lookupURL':S3.Ap.concat('/org/sites_for_org/'),
})''',
),
# This is a component, so needs to be a super_link
# - can't override field name, ondelete or requires
super_link("site_id", "org_site",
default = auth.user.site_id if auth.is_logged_in() else None,
empty = False,
label = org_site_label,
ondelete = "RESTRICT",
readable = True,
writable = True,
represent = self.org_site_represent,
# Comment these to use a Dropdown & not an Autocomplete
#widget = S3SiteAutocompleteWidget(),
#comment = DIV(_class="tooltip",
# _title="%s|%s" % (T("Warehouse"),
# messages.AUTOCOMPLETE_HELP)),
),
Field("sn",
label = T("Serial Number"),
),
organisation_id("supply_org_id",
label = T("Supplier/Donor"),
ondelete = "SET NULL",
),
s3_date("purchase_date",
label = T("Purchase Date"),
),
Field("purchase_price", "double",
#default = 0.00,
represent = lambda v, row=None: \
IS_FLOAT_AMOUNT.represent(v, precision=2),
),
s3_currency("purchase_currency"),
# Base Location, which should always be a Site & set via Log
location_id(readable = False,
writable = False,
),
# Populated onaccept of the log to make a component tab
person_id("assigned_to_id",
readable = False,
writable = False,
comment = self.pr_person_comment(child="assigned_to_id"),
),
# Populated onaccept of the log for reporting/filtering
Field("cond", "integer",
label = T("Condition"),
represent = lambda opt: \
asset_condition_opts.get(opt, UNKNOWN_OPT),
#readable = False,
writable = False,
),
s3_comments(),
*s3_meta_fields())
# CRUD strings
crud_strings[tablename] = Storage(
label_create = T("Create Asset"),
title_display = T("Asset Details"),
title_list = T("Assets"),
title_update = T("Edit Asset"),
title_upload = T("Import Assets"),
label_list_button = T("List Assets"),
label_delete_button = T("Delete Asset"),
msg_record_created = T("Asset added"),
msg_record_modified = T("Asset updated"),
msg_record_deleted = T("Asset deleted"),
msg_list_empty = T("No Assets currently registered"))
asset_represent = asset_AssetRepresent(show_link=True)
# Reusable Field
asset_id = S3ReusableField("asset_id", "reference %s" % tablename,
label = T("Asset"),
ondelete = "CASCADE",
represent = asset_represent,
requires = IS_EMPTY_OR(
IS_ONE_OF(db, "asset_asset.id",
asset_represent,
sort=True)),
sortby = "number",
)
# Which levels of Hierarchy are we using?
levels = current.gis.get_relevant_hierarchy_levels()
list_fields = ["id",
"item_id$item_category_id",
"item_id",
"number",
#"type",
#"purchase_date",
(T("Assigned To"), "assigned_to_id"),
"organisation_id",
"site_id",
]
report_fields = ["number",
(T("Category"), "item_id$item_category_id"),
(T("Item"), "item_id"),
"organisation_id",
"site_id",
"cond",
]
text_fields = ["number",
"item_id$name",
#"item_id$category_id$name",
"comments",
]
for level in levels:
lfield = "location_id$%s" % level
report_fields.append(lfield)
text_fields.append(lfield)
list_fields.append(lfield)
list_fields.extend(("cond",
"comments"))
filter_widgets = [
S3TextFilter(text_fields,
label = T("Search"),
comment = T("You can search by asset number, item description or comments. You may use % as wildcard. Press 'Search' without input to list all assets."),
#_class = "filter-search",
),
S3OptionsFilter("item_id$item_category_id",
),
S3OptionsFilter("organisation_id",
represent = "%(name)s",
hidden = True,
),
S3LocationFilter("location_id",
levels = levels,
hidden = True,
),
S3OptionsFilter("cond",
hidden = True,
),
]
report_options = Storage(
rows = report_fields,
cols = report_fields,
fact = [(T("Number of items"), "count(number)")],
defaults=Storage(cols = "location_id$%s" % levels[0], # Highest-level of hierarchy
fact = "count(number)",
rows = "item_id$item_category_id",
totals = True,
)
)
# Default summary
summary = [{"name": "addform",
"common": True,
"widgets": [{"method": "create"}],
},
{"name": "table",
"label": "Table",
"widgets": [{"method": "datatable"}]
},
{"name": "report",
"label": "Report",
"widgets": [{"method": "report",
"ajax_init": True}]
},
{"name": "map",
"label": "Map",
"widgets": [{"method": "map",
"ajax_init": True}],
},
]
# Resource Configuration
configure(tablename,
# Open Tabs after creation
create_next = URL(c="asset", f="asset",
args=["[id]"]),
deduplicate = self.asset_duplicate,
filter_widgets = filter_widgets,
list_fields = list_fields,
mark_required = ["organisation_id"],
onaccept = self.asset_onaccept,
realm_components = ["log", "presence"],
report_options = report_options,
summary = summary,
super_entity = ("supply_item_entity", "sit_trackable"),
update_realm = True,
)
# Components
add_components(tablename,
asset_group = "asset_id",
asset_item = "asset_id",
asset_log = "asset_id",
asset_human_resource = "asset_id",
hrm_human_resource = {"link": "asset_human_resource",
"joinby": "asset_id",
"key": "human_resource_id",
"actuate": "hide",
},
vehicle_gps = "asset_id",
vehicle_vehicle = {"joinby": "asset_id",
"multiple": False,
},
)
# =====================================================================
# Asset Items
# - to allow building ad-hoc Kits
#
tablename = "asset_item"
define_table(tablename,
item_entity_id,
asset_id(ondelete="CASCADE"),
item_id(represent = supply_item_represent,
requires = IS_ONE_OF(asset_items_set,
"supply_item.id",
supply_item_represent,
sort = True,
),
script = None, # No Item Pack Filter
widget = None,
),
Field("quantity", "integer", notnull=True,
default = 1,
label = T("Quantity"),
requires = IS_INT_IN_RANGE(1, 1000),
),
Field("sn",
label = T("Serial Number")),
organisation_id("supply_org_id",
label = T("Supplier/Donor"),
ondelete = "SET NULL"),
s3_date("purchase_date",
label = T("Purchase Date")),
Field("purchase_price", "double",
#default=0.00,
represent=lambda v, row=None: \
IS_FLOAT_AMOUNT.represent(v, precision=2)),
s3_currency("purchase_currency"),
# Base Location, which should always be a Site & set via Log
location_id(readable=False,
writable=False),
s3_comments(comment=None),
*s3_meta_fields())
# =====================================================================
# Asset Log
#
asset_log_status_opts = {ASSET_LOG_SET_BASE : T("Base %(facility)s Set") % dict(facility = org_site_label),
ASSET_LOG_ASSIGN : T("Assigned"),
ASSET_LOG_RETURN : T("Returned"),
ASSET_LOG_CHECK : T("Checked"),
ASSET_LOG_REPAIR : T("Repaired"),
ASSET_LOG_DONATED : T("Donated"),
ASSET_LOG_LOST : T("Lost"),
ASSET_LOG_STOLEN : T("Stolen"),
ASSET_LOG_DESTROY : T("Destroyed"),
}
if auth.permission.format == "html":
# T isn't JSON serializable
site_types = auth.org_site_types
for key in site_types.keys():
site_types[key] = str(site_types[key])
site_types = json.dumps(site_types)
script = '''
S3OptionsFilter({
'triggerName':'organisation_id',
'targetName':'site_id',
'lookupPrefix':'org',
'lookupResource':'site',
'lookupField':'site_id',
'fncRepresent': function(record,PrepResult){
var InstanceTypeNice=%(instance_type_nice)s
return record.name+" ("+InstanceTypeNice[record.instance_type]+")"
}})''' % dict(instance_type_nice = site_types)
else:
script = None
tablename = "asset_log"
define_table(tablename,
asset_id(),
Field("status", "integer",
label = T("Status"),
represent = lambda opt: \
asset_log_status_opts.get(opt, UNKNOWN_OPT),
requires = IS_IN_SET(asset_log_status_opts),
),
s3_datetime("datetime",
default = "now",
empty = False,
represent = "date",
),
s3_datetime("datetime_until",
label = T("Date Until"),
represent = "date",
),
person_id(label = T("Assigned To")),
Field("check_in_to_person", "boolean",
#label = T("Mobile"), # Relabel?
label = T("Track with this Person?"),
comment = DIV(_class="tooltip",
#_title="%s|%s" % (T("Mobile"),
_title="%s|%s" % (T("Track with this Person?"),
T("If selected, then this Asset's Location will be updated whenever the Person's Location is updated."))),
readable = False,
writable = False,
),
# The Organisation to whom the loan is made
organisation_id(readable = False,
widget = None,
writable = False,
),
# This is a component, so needs to be a super_link
# - can't override field name, ondelete or requires
super_link("site_id", "org_site",
label = org_site_label,
#filterby = "site_id",
#filter_opts = auth.permitted_facilities(redirect_on_error=False),
instance_types = auth.org_site_types,
updateable = True,
not_filterby = "obsolete",
not_filter_opts = (True,),
#default = user.site_id if is_logged_in() else None,
readable = True,
writable = True,
empty = False,
represent = self.org_site_represent,
#widget = S3SiteAutocompleteWidget(),
script = script,
),
self.org_room_id(),
#location_id(),
Field("cancel", "boolean",
default = False,
label = T("Cancel Log Entry"),
represent = s3_yes_no_represent,
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Cancel Log Entry"),
T("'Cancel' will indicate an asset log entry did not occur")))
),
Field("cond", "integer", # condition is a MySQL reserved word
label = T("Condition"),
represent = lambda opt: \
asset_condition_opts.get(opt, UNKNOWN_OPT),
requires = IS_IN_SET(asset_condition_opts,
zero = "%s..." % T("Please select")),
),
person_id("by_person_id",
default = auth.s3_logged_in_person(), # This can either be the Asset controller if signed-out from the store
label = T("Assigned By"), # or the previous owner if passed on directly (e.g. to successor in their post)
comment = self.pr_person_comment(child="by_person_id"),
),
s3_comments(),
*s3_meta_fields())
# CRUD strings
ADD_ASSIGN = T("New Entry in Asset Log")
crud_strings[tablename] = Storage(
label_create = ADD_ASSIGN,
title_display = T("Asset Log Details"),
title_list = T("Asset Log"),
title_update = T("Edit Asset Log Entry"),
label_list_button = T("Asset Log"),
label_delete_button = T("Delete Asset Log Entry"),
msg_record_created = T("Entry added to Asset Log"),
msg_record_modified = T("Asset Log Entry updated"),
msg_record_deleted = T("Asset Log Entry deleted"),
msg_list_empty = T("Asset Log Empty"))
# Resource configuration
configure(tablename,
listadd = False,
list_fields = ["id",
"datetime",
"status",
"datetime_until",
"organisation_id",
"site_id",
"room_id",
"person_id",
#"location_id",
"cancel",
"cond",
"comments",
],
onaccept = self.asset_log_onaccept,
orderby = "asset_log.datetime desc",
)
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return dict(asset_asset_id = asset_id,
asset_represent = asset_represent,
)
# -------------------------------------------------------------------------
@staticmethod
def defaults():
""" Return safe defaults for names in case the model is disabled """
dummy = S3ReusableField("dummy_id", "integer",
readable = False,
writable = False)
return dict(asset_asset_id = lambda **attr: dummy("asset_id"),
)
# -------------------------------------------------------------------------
@staticmethod
def asset_duplicate(item):
"""
Deduplication of Assets
"""
if item.tablename != "asset_asset":
return
table = item.table
data = item.data
number = data.get("number", None)
query = (table.number == number)
organisation_id = data.get("organisation_id", None)
if organisation_id:
query &= (table.organisation_id == organisation_id)
site_id = data.get("site_id", None)
if site_id:
query &= (table.site_id == site_id)
_duplicate = current.db(query).select(table.id,
limitby=(0, 1)).first()
if _duplicate:
item.id = _duplicate.id
item.data.id = _duplicate.id
item.method = item.METHOD.UPDATE
# -------------------------------------------------------------------------
@staticmethod
def asset_onaccept(form):
"""
After DB I/O
"""
if current.response.s3.bulk:
# Import or Sync
return
db = current.db
atable = db.asset_asset
form_vars = form.vars
kit = form_vars.get("kit", None)
site_id = form_vars.get("site_id", None)
if site_id:
stable = db.org_site
asset_id = form_vars.id
# Set the Base Location
location_id = db(stable.site_id == site_id).select(stable.location_id,
limitby=(0, 1)
).first().location_id
tracker = S3Tracker()
asset_tracker = tracker(atable, asset_id)
asset_tracker.set_base_location(location_id)
if kit:
# Also populate location_id field in component items
aitable = db.asset_item
db(aitable.asset_id == asset_id).update(location_id = location_id)
# Add a log entry for this
ltable = db.asset_log
ltable.insert(asset_id = asset_id,
status = ASSET_LOG_SET_BASE,
organisation_id = form_vars.get("organisation_id", None),
site_id = site_id,
cond = 1,
)
if kit:
# Empty any inappropriate fields
db(atable.id == asset_id).update(supply_org_id = None,
purchase_date = None,
purchase_price = None,
purchase_currency = None,
)
else:
# Delete any component items
aitable = db.asset_item
ids = db(aitable.asset_id == asset_id).select(aitable.id).as_list()
if ids:
resource = current.s3db.resource("asset_item", id=ids)
resource.delete()
return
# -------------------------------------------------------------------------
@staticmethod
def asset_log_onaccept(form):
"""
After DB I/O
"""
request = current.request
get_vars = request.get_vars
status = get_vars.get("status", None)
if not status:
if not current.response.s3.asset_import:
# e.g. Record merger or Sync
return
# Import
db = current.db
form_vars = form.vars
asset_id = form_vars.asset_id
status = int(form_vars.status)
if status == ASSET_LOG_ASSIGN:
# Only type supported right now
# @ToDo: Support more types
type = "person"
new = True
else:
# Interactive
form_vars = form.vars
status = int(form_vars.status or status)
db = current.db
ltable = db.asset_log
row = db(ltable.id == form_vars.id).select(ltable.asset_id,
limitby=(0, 1)
).first()
try:
asset_id = row.asset_id
except:
return
current_log = asset_get_current_log(asset_id)
type = get_vars.get("type", None)
log_time = current_log.datetime
current_time = form_vars.get("datetime", None).replace(tzinfo=None)
new = log_time <= current_time
if new:
# This is a current assignment
atable = db.asset_asset
aitable = db.asset_item
tracker = S3Tracker()
asset_tracker = tracker(atable, asset_id)
if status == ASSET_LOG_SET_BASE:
# Set Base Location
site_id = form_vars.get("site_id", None)
stable = db.org_site
location_id = db(stable.site_id == site_id).select(stable.location_id,
limitby=(0, 1)
).first().location_id
asset_tracker.set_base_location(location_id)
# Also do component items
db(aitable.asset_id == asset_id).update(location_id = location_id)
elif status == ASSET_LOG_ASSIGN:
if type == "person":
if form_vars.check_in_to_person:
asset_tracker.check_in(db.pr_person, form_vars.person_id,
timestmp = request.utcnow)
# Also do component items
# @ToDo: Have these move when the person moves
locations = asset_tracker.get_location(_fields=[db.gis_location.id])
try:
db(aitable.asset_id == asset_id).update(location_id = locations[0].id)
except:
pass
else:
location_id = asset_tracker.set_location(form_vars.person_id,
timestmp = request.utcnow)
# Also do component items
db(aitable.asset_id == asset_id).update(location_id = location_id)
# Update main record for component
db(atable.id == asset_id).update(assigned_to_id=form_vars.person_id)
elif type == "site":
asset_tracker.check_in(db.org_site, form_vars.site_id,
timestmp = request.utcnow)
# Also do component items
locations = asset_tracker.get_location(_fields=[db.gis_location.id])
try:
db(aitable.asset_id == asset_id).update(location_id = locations[0].id)
except:
pass
elif type == "organisation":
site_id = form_vars.get("site_id", None)
if site_id:
asset_tracker.check_in(db.org_site, site_id,
timestmp = request.utcnow)
# Also do component items
locations = asset_tracker.get_location(_fields=[db.gis_location.id])
try:
db(aitable.asset_id == asset_id).update(location_id = locations[0].id)
except:
pass
else:
# We can no longer track location
asset_tracker.check_out()
elif status == ASSET_LOG_RETURN:
# Set location to base location
location_id = asset_tracker.set_location(asset_tracker,
timestmp = request.utcnow)
# Also do component items
db(aitable.asset_id == asset_id).update(location_id = location_id)
# Update condition in main record
db(atable.id == asset_id).update(cond=form_vars.cond)
return
# =============================================================================
class S3AssetHRModel(S3Model):
"""
Optionally link Assets to Human Resources
- useful for staffing a vehicle
"""
names = ("asset_human_resource",)
def model(self):
#T = current.T
#--------------------------------------------------------------------------
# Assets <> Human Resources
#
tablename = "asset_human_resource"
self.define_table(tablename,
self.asset_asset_id(empty = False),
self.hrm_human_resource_id(empty = False,
ondelete = "CASCADE",
),
#s3_comments(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return dict()
# =============================================================================
class S3AssetTeamModel(S3Model):
"""
Optionally link Assets to Teams
"""
names = ("asset_group",)
def model(self):
#T = current.T
#--------------------------------------------------------------------------
# Assets <> Groups
#
tablename = "asset_group"
self.define_table(tablename,
self.asset_asset_id(empty = False),
self.pr_group_id(comment = None,
empty = False,
),
#s3_comments(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return dict()
# =============================================================================
def asset_get_current_log(asset_id):
"""
Get the current log entry for this asset
"""
table = current.s3db.asset_log
query = (table.asset_id == asset_id) & \
(table.cancel == False) & \
(table.deleted == False)
# Get the log with the maximum time
asset_log = current.db(query).select(table.id,
table.status,
table.datetime,
table.cond,
table.person_id,
table.organisation_id,
table.site_id,
#table.location_id,
orderby = ~table.datetime,
limitby=(0, 1)).first()
if asset_log:
return Storage(datetime = asset_log.datetime,
person_id = asset_log.person_id,
cond = int(asset_log.cond or 0),
status = int(asset_log.status or 0),
organisation_id = asset_log.organisation_id,
site_id = asset_log.site_id,
#location_id = asset_log.location_id
)
else:
return Storage()
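# For example, for an asset that was last assigned to a person, the returned
# Storage might look like (all values below are hypothetical):
#   Storage(datetime=..., person_id=3, cond=1, status=ASSET_LOG_ASSIGN,
#           organisation_id=None, site_id=5)
# An asset with no (non-cancelled) log entries yields an empty Storage().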
# =============================================================================
def asset_log_prep(r):
"""
Called by Controller
"""
T = current.T
db = current.db
request = current.request
table = db.asset_log
if r.record:
asset = Storage(r.record)
else:
# This is a new record
asset = Storage()
table.cancel.readable = False
table.cancel.writable = False
# This causes an error with the dataTables paginate
# if used only in r.interactive & not also r.representation=="aadata"
if r.method != "read" and r.method != "update":
table.cancel.readable = False
table.cancel.writable = False
current_log = asset_get_current_log(asset.id)
if request.vars.status:
status = int(request.vars.status)
else:
status = 0
if status and status != "None":
field = table.status
field.default = status
field.readable = False
field.writable = False
elif current_log:
table.status.default = current_log.status
if current_log.organisation_id:
table.organisation_id.default = current_log.organisation_id
table.site_id.requires = IS_ONE_OF(db, "org_site.site_id",
table.site_id.represent,
filterby = "organisation_id",
filter_opts = (current_log.organisation_id,))
crud_strings = current.response.s3.crud_strings.asset_log
if status == ASSET_LOG_SET_BASE:
crud_strings.msg_record_created = T("Base Facility/Site Set")
table.by_person_id.label = T("Set By")
table.site_id.writable = True
table.datetime_until.readable = False
table.datetime_until.writable = False
table.person_id.readable = False
table.person_id.writable = False
table.organisation_id.readable = True
table.organisation_id.writable = True
table.site_id.requires = IS_ONE_OF(db, "org_site.site_id",
table.site_id.represent)
elif status == ASSET_LOG_RETURN:
crud_strings.msg_record_created = T("Returned")
table.person_id.label = T("Returned From")
table.person_id.default = current_log.person_id
table.site_id.readable = False
table.site_id.writable = False
elif status == ASSET_LOG_ASSIGN:
type = request.vars.type
# table["%s_id" % type].required = True
if type == "person":
crud_strings.msg_record_created = T("Assigned to Person")
table["person_id"].requires = IS_ONE_OF(db, "pr_person.id",
table.person_id.represent,
orderby="pr_person.first_name",
sort=True,
error_message="Person must be specified!")
table.check_in_to_person.readable = True
table.check_in_to_person.writable = True
table.site_id.requires = IS_EMPTY_OR(
IS_ONE_OF(db, "org_site.site_id",
table.site_id.represent))
elif type == "site":
crud_strings.msg_record_created = T("Assigned to Facility/Site")
elif type == "organisation":
crud_strings.msg_record_created = T("Assigned to Organization")
table.organisation_id.readable = True
table.organisation_id.writable = True
table.organisation_id.requires = IS_ONE_OF(db, "org_organisation.id",
table.organisation_id.represent,
orderby="org_organisation.name",
sort=True)
table.site_id.requires = IS_EMPTY_OR(
IS_ONE_OF(db, "org_site.site_id",
table.site_id.represent))
elif "status" in request.get_vars:
crud_strings.msg_record_created = T("Status Updated")
table.person_id.label = T("Updated By")
field = table.status
field.readable = True
field.writable = True
field.requires = IS_IN_SET({ASSET_LOG_CHECK : T("Check"),
ASSET_LOG_REPAIR : T("Repair"),
ASSET_LOG_DONATED : T("Donated"),
ASSET_LOG_LOST : T("Lost"),
ASSET_LOG_STOLEN : T("Stolen"),
ASSET_LOG_DESTROY : T("Destroyed"),
})
# =============================================================================
def asset_rheader(r):
""" Resource Header for Assets """
if r.representation == "html":
record = r.record
if record:
T = current.T
s3db = current.s3db
s3 = current.response.s3
NONE = current.messages["NONE"]
if record.type == ASSET_TYPE_VEHICLE:
STAFF = current.deployment_settings.get_hrm_staff_label()
tabs = [(T("Asset Details"), None, {"native": True}),
(T("Vehicle Details"), "vehicle"),
(STAFF, "human_resource"),
(T("Assign %(staff)s") % dict(staff=STAFF), "assign"),
(T("Check-In"), "check-in"),
(T("Check-Out"), "check-out"),
(T("GPS Data"), "gps"),
]
else:
tabs = [(T("Edit Details"), None)]
#elif record.type == s3.asset.ASSET_TYPE_RADIO:
# tabs.append((T("Radio Details"), "radio"))
#elif record.type == s3.asset.ASSET_TYPE_TELEPHONE:
# tabs.append((T("Telephone Details"), "phone"))
tabs.append((T("Log"), "log"))
tabs.append((T("Documents"), "document"))
rheader_tabs = s3_rheader_tabs(r, tabs)
if current.request.controller == "vehicle":
func = "vehicle"
else:
func = "asset"
# @ToDo: Check permissions before displaying buttons
asset_action_btns = [
A(T("Set Base Facility/Site"),
_href = URL(f=func,
args = [record.id, "log", "create"],
vars = dict(status = ASSET_LOG_SET_BASE)
),
_class = "action-btn",
)
]
current_log = asset_get_current_log(record.id)
status = current_log.status
#if record.location_id:
# A Base Site has been set
# Return functionality removed - as it doesn't set site_id & organisation_id in the logs
#if status == ASSET_LOG_ASSIGN:
# asset_action_btns += [ A( T("Return"),
# _href = URL(f=func,
# args = [record.id, "log", "create"],
# vars = dict(status = ASSET_LOG_RETURN)
# ),
# _class = "action-btn"
# )
# ]
if status < ASSET_LOG_DONATED:
# @ToDo: deployment setting to prevent assigning assets before returning them
# The Asset is available for assignment (not disposed)
asset_action_btns += [
A(T("Assign to Person"),
_href = URL(f=func,
args = [record.id, "log", "create"],
vars = dict(status = ASSET_LOG_ASSIGN,
type = "person")
),
_class = "action-btn",
),
A(T("Assign to Facility/Site"),
_href = URL(f=func,
args = [record.id, "log", "create"],
vars = dict(status = ASSET_LOG_ASSIGN,
type = "site")
),
_class = "action-btn",
),
A(T("Assign to Organization"),
_href = URL(f=func,
args = [record.id, "log", "create"],
vars = dict(status = ASSET_LOG_ASSIGN,
type = "organisation")
),
_class = "action-btn",
),
]
asset_action_btns += [
A(T("Update Status"),
_href = URL(f=func,
args = [record.id, "log", "create"],
vars = None
),
_class = "action-btn",
),
]
table = r.table
ltable = s3db.asset_log
rheader = DIV(TABLE(TR(TH("%s: " % table.number.label),
record.number,
TH("%s: " % table.item_id.label),
table.item_id.represent(record.item_id)
),
TR(TH("%s: " % ltable.cond.label),
ltable.cond.represent(current_log.cond),
TH("%s: " % ltable.status.label),
ltable.status.represent(status),
),
TR(TH("%s: " % ltable.person_id.label),
ltable.person_id.represent(current_log.person_id),
TH("%s: " % ltable.site_id.label),
ltable.site_id.represent(current_log.site_id),
),
),
DIV(_style = "margin-top:5px", # @ToDo: Move to CSS
*asset_action_btns
),
rheader_tabs)
return rheader
return None
# =============================================================================
def asset_controller():
""" RESTful CRUD controller """
s3db = current.s3db
s3 = current.response.s3
# Pre-process
def prep(r):
# Location Filter
current.s3db.gis_location_filter(r)
if r.component_name == "log":
asset_log_prep(r)
return True
s3.prep = prep
# Import pre-process
def import_prep(data):
"""
Flag that this is an Import (to distinguish from Sync)
@ToDo: Find Person records from their email addresses
"""
current.response.s3.asset_import = True
return
# @ToDo: get this working
ctable = s3db.pr_contact
ptable = s3db.pr_person
resource, tree = data
elements = tree.getroot().xpath("/s3xml//resource[@name='pr_person']/data[@field='first_name']")
persons = {}
for element in elements:
email = element.text
if email in persons:
# Replace email with uuid
element.text = persons[email]["uuid"]
# Don't check again
continue
query = (ctable.value == email) & \
(ctable.pe_id == ptable.pe_id)
person = current.db(query).select(ptable.uuid,
limitby=(0, 1)
).first()
if person:
# Replace email with uuid
uuid = person.uuid
else:
# Blank it
uuid = ""
element.text = uuid
# Store in case we get called again with same value
persons[email] = dict(uuid=uuid)
s3.import_prep = import_prep
# Post-processor
def postp(r, output):
if r.interactive and r.method != "import":
script = "/%s/static/scripts/S3/s3.asset.js" % r.application
s3.scripts.append(script)
S3CRUD.action_buttons(r, deletable=False)
#if not r.component:
#s3.actions.append({"url" : URL(c="asset", f="asset",
# args = ["[id]", "log", "create"],
# vars = {"status" : eden.asset.asset_log_status["ASSIGN"],
# "type" : "person"}),
# "_class" : "action-btn",
# "label" : str(T("Assign"))})
return output
s3.postp = postp
output = current.rest_controller("asset", "asset",
rheader = asset_rheader,
)
return output
# =============================================================================
class asset_AssetRepresent(S3Represent):
""" Representation of Assets """
def __init__(self,
fields = ("number",), # unused
show_link = False,
translate = False,
multiple = False,
):
# Need a custom lookup
self.lookup_rows = self.custom_lookup_rows
super(asset_AssetRepresent,
self).__init__(lookup="asset_asset",
fields=fields,
show_link=show_link,
translate=translate,
multiple=multiple)
# -------------------------------------------------------------------------
def custom_lookup_rows(self, key, values, fields=[]):
"""
Custom lookup method for asset rows, does a
left join with the supply item and brand. Parameters
key and fields are not used, but are kept for API
compatibility reasons.
@param values: the asset IDs
"""
db = current.db
s3db = current.s3db
table = s3db.asset_asset
itable = db.supply_item
btable = db.supply_brand
qty = len(values)
if qty == 1:
query = (table.id == values[0])
limitby = (0, 1)
else:
query = (table.id.belongs(values))
limitby = (0, qty)
query &= (itable.id == table.item_id)
rows = db(query).select(table.id,
table.number,
table.type,
itable.name,
btable.name,
left=btable.on(itable.brand_id == btable.id),
limitby=limitby)
self.queries += 1
return rows
# -------------------------------------------------------------------------
def represent_row(self, row):
"""
Represent a single Row
@param row: the asset_asset Row
"""
# Custom Row (with the item & brand left-joined)
number = row["asset_asset.number"]
item = row["supply_item.name"]
brand = row.get("supply_brand.name", None)
if not number:
return self.default
represent = "%s (%s" % (number, item)
if brand:
represent = "%s, %s)" % (represent, brand)
else:
represent = "%s)" % represent
return s3_unicode(represent)
# -------------------------------------------------------------------------
def link(self, k, v, row=None):
"""
Represent a (key, value) as hypertext link.
@param k: the key (site_id)
@param v: the representation of the key
@param row: the row with this key
"""
if row:
type = row.get("asset_asset.type", None)
if type == 1:
return A(v, _href=URL(c="vehicle", f="vehicle", args=[k],
# remove the .aaData extension in paginated views
extension=""
))
k = s3_unicode(k)
return A(v, _href=self.linkto.replace("[id]", k) \
.replace("%5Bid%5D", k))
# END =========================================================================
|
|
#!/usr/bin/env python
##~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
##~ Copyright (C) 2002-2004 TechGame Networks, LLC.
##~
##~ This library is free software; you can redistribute it and/or
##~ modify it under the terms of the BSD style License as found in the
##~ LICENSE file included with this distribution.
##
## Modified by Dirk Holtwick <holtwick@web.de>, 2007-2008
##~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""CSS-2.1 parser.
The CSS 2.1 Specification this parser was derived from can be found at http://www.w3.org/TR/CSS21/
Primary Classes:
* CSSParser
Parses CSS source forms into results using a Builder Pattern. Must
provide a concrete implementation of CSSBuilderAbstract.
* CSSBuilderAbstract
Outlines the interface between CSSParser and its rule-builder.
Compose CSSParser with a concrete implementation of the builder to get
usable results from the CSS parser.
Dependencies:
python 2.3 (or greater)
re
"""
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ Imports
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
import re
try:
from . import cssSpecial #python 3
except Exception:
import cssSpecial #python 2
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ Definitions
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def isAtRuleIdent(src, ident):
return re.match(r'^@' + ident + r'\s*', src)
def stripAtRuleIdent(src):
return re.sub(r'^@[a-z\-]+\s*', '', src)
class CSSSelectorAbstract(object):
"""Outlines the interface between CSSParser and it's rule-builder for selectors.
CSSBuilderAbstract.selector and CSSBuilderAbstract.combineSelectors must
return concrete implementations of this abstract.
See css.CSSMutableSelector for an example implementation.
"""
def addHashId(self, hashId):
raise NotImplementedError('Subclass responsibility')
def addClass(self, class_):
raise NotImplementedError('Subclass responsibility')
def addAttribute(self, attrName):
raise NotImplementedError('Subclass responsibility')
def addAttributeOperation(self, attrName, op, attrValue):
raise NotImplementedError('Subclass responsibility')
def addPseudo(self, name):
raise NotImplementedError('Subclass responsibility')
def addPseudoFunction(self, name, value):
raise NotImplementedError('Subclass responsibility')
class CSSBuilderAbstract(object):
"""Outlines the interface between CSSParser and it's rule-builder. Compose
CSSParser with a concrete implementation of the builder to get usable
results from the CSS parser.
See css.CSSBuilder for an example implementation
"""
def setCharset(self, charset):
raise NotImplementedError('Subclass responsibility')
#~ css results ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def beginStylesheet(self):
raise NotImplementedError('Subclass responsibility')
def stylesheet(self, stylesheetElements, stylesheetImports):
raise NotImplementedError('Subclass responsibility')
def endStylesheet(self):
raise NotImplementedError('Subclass responsibility')
def beginInline(self):
raise NotImplementedError('Subclass responsibility')
def inline(self, declarations):
raise NotImplementedError('Subclass responsibility')
def endInline(self):
raise NotImplementedError('Subclass responsibility')
def ruleset(self, selectors, declarations):
raise NotImplementedError('Subclass responsibility')
#~ css namespaces ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def resolveNamespacePrefix(self, nsPrefix, name):
raise NotImplementedError('Subclass responsibility')
#~ css @ directives ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def atCharset(self, charset):
raise NotImplementedError('Subclass responsibility')
def atImport(self, import_, mediums, cssParser):
raise NotImplementedError('Subclass responsibility')
def atNamespace(self, nsPrefix, uri):
raise NotImplementedError('Subclass responsibility')
def atMedia(self, mediums, ruleset):
raise NotImplementedError('Subclass responsibility')
def atPage(self, page, pseudopage, declarations):
raise NotImplementedError('Subclass responsibility')
def atFontFace(self, declarations):
raise NotImplementedError('Subclass responsibility')
def atIdent(self, atIdent, cssParser, src):
return src, NotImplemented
#~ css selectors ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def combineSelectors(self, selectorA, combiner, selectorB):
"""Return value must implement CSSSelectorAbstract"""
raise NotImplementedError('Subclass responsibility')
def selector(self, name):
"""Return value must implement CSSSelectorAbstract"""
raise NotImplementedError('Subclass responsibility')
#~ css declarations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def property(self, name, value, important=False):
raise NotImplementedError('Subclass responsibility')
def combineTerms(self, termA, combiner, termB):
raise NotImplementedError('Subclass responsibility')
def termIdent(self, value):
raise NotImplementedError('Subclass responsibility')
def termNumber(self, value, units=None):
raise NotImplementedError('Subclass responsibility')
def termRGB(self, value):
raise NotImplementedError('Subclass responsibility')
def termURI(self, value):
raise NotImplementedError('Subclass responsibility')
def termString(self, value):
raise NotImplementedError('Subclass responsibility')
def termUnicodeRange(self, value):
raise NotImplementedError('Subclass responsibility')
def termFunction(self, name, value):
raise NotImplementedError('Subclass responsibility')
def termUnknown(self, src):
raise NotImplementedError('Subclass responsibility')
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ CSS Parser
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class CSSParseError(Exception):
src = None
ctxsrc = None
fullsrc = None
inline = False
srcCtxIdx = None
srcFullIdx = None
ctxsrcFullIdx = None
def __init__(self, msg, src, ctxsrc=None):
Exception.__init__(self, msg)
self.src = src
self.ctxsrc = ctxsrc or src
if self.ctxsrc:
self.srcCtxIdx = self.ctxsrc.find(self.src)
if self.srcCtxIdx < 0:
del self.srcCtxIdx
def __str__(self):
if self.ctxsrc:
return Exception.__str__(self) + ':: (' + repr(self.ctxsrc[:self.srcCtxIdx]) + ', ' + repr(
self.ctxsrc[self.srcCtxIdx:self.srcCtxIdx + 20]) + ')'
else:
return Exception.__str__(self) + ':: ' + repr(self.src[:40])
def setFullCSSSource(self, fullsrc, inline=False):
self.fullsrc = fullsrc
if inline:
self.inline = inline
if self.fullsrc:
self.srcFullIdx = self.fullsrc.find(self.src)
if self.srcFullIdx < 0:
del self.srcFullIdx
self.ctxsrcFullIdx = self.fullsrc.find(self.ctxsrc)
if self.ctxsrcFullIdx < 0:
del self.ctxsrcFullIdx
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class CSSParser(object):
"""CSS-2.1 parser dependent only upon the re module.
Implemented directly from http://www.w3.org/TR/CSS21/grammar.html
Tested with some existing CSS stylesheets for portability.
CSS Parsing API:
* setCSSBuilder()
To set your concrete implementation of CSSBuilderAbstract
* parseFile()
Use to parse external stylesheets using a file-like object
>>> cssFile = open('test.css', 'r')
>>> stylesheets = myCSSParser.parseFile(cssFile)
* parse()
Use to parse embedded stylesheets using source string
>>> cssSrc = '''
body,body.body {
font: 110%, "Times New Roman", Arial, Verdana, Helvetica, serif;
background: White;
color: Black;
}
a {text-decoration: underline;}
'''
>>> stylesheets = myCSSParser.parse(cssSrc)
* parseInline()
Use to parse inline stylesheets using attribute source string
>>> style = 'font: 110%, "Times New Roman", Arial, Verdana, Helvetica, serif; background: White; color: Black'
>>> stylesheets = myCSSParser.parseInline(style)
* parseAttributes()
Use to parse attribute string values into inline stylesheets
>>> stylesheets = myCSSParser.parseAttributes(
font='110%, "Times New Roman", Arial, Verdana, Helvetica, serif',
background='White',
color='Black')
* parseSingleAttr()
Use to parse a single string value into a CSS expression
>>> fontValue = myCSSParser.parseSingleAttr('110%, "Times New Roman", Arial, Verdana, Helvetica, serif')
"""
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ Constants / Variables / Etc.
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ParseError = CSSParseError
AttributeOperators = ['=', '~=', '|=', '&=', '^=', '!=', '<>']
SelectorQualifiers = ('#', '.', '[', ':')
SelectorCombiners = ['+', '>']
ExpressionOperators = ('/', '+', ',')
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ Regular expressions
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if True: # makes the following code foldable
_orRule = lambda *args: '|'.join(args)
_reflags = re.I | re.M | re.U
i_hex = '[0-9a-fA-F]'
i_nonascii = u'[\200-\377]'
i_unicode = '\\\\(?:%s){1,6}\s?' % i_hex
i_escape = _orRule(i_unicode, u'\\\\[ -~\200-\377]')
# i_nmstart = _orRule('[A-Za-z_]', i_nonascii, i_escape)
i_nmstart = _orRule('\-[^0-9]|[A-Za-z_]', i_nonascii,
i_escape) # XXX Added hyphen, http://www.w3.org/TR/CSS21/syndata.html#value-def-identifier
i_nmchar = _orRule('[-0-9A-Za-z_]', i_nonascii, i_escape)
i_ident = '((?:%s)(?:%s)*)' % (i_nmstart, i_nmchar)
re_ident = re.compile(i_ident, _reflags)
# Caution: treats all characters above 0x7f as legal for an identifier.
i_unicodeid = r'([^\u0000-\u007f]+)'
re_unicodeid = re.compile(i_unicodeid, _reflags)
i_element_name = '((?:%s)|\*)' % (i_ident[1:-1],)
re_element_name = re.compile(i_element_name, _reflags)
i_namespace_selector = '((?:%s)|\*|)\|(?!=)' % (i_ident[1:-1],)
re_namespace_selector = re.compile(i_namespace_selector, _reflags)
i_class = '\\.' + i_ident
re_class = re.compile(i_class, _reflags)
i_hash = '#((?:%s)+)' % i_nmchar
re_hash = re.compile(i_hash, _reflags)
i_rgbcolor = '(#%s{6}|#%s{3})' % (i_hex, i_hex)
re_rgbcolor = re.compile(i_rgbcolor, _reflags)
i_nl = u'\n|\r\n|\r|\f'
i_escape_nl = u'\\\\(?:%s)' % i_nl
i_string_content = _orRule(u'[\t !#$%&(-~]', i_escape_nl, i_nonascii, i_escape)
i_string1 = u'\"((?:%s|\')*)\"' % i_string_content
i_string2 = u'\'((?:%s|\")*)\'' % i_string_content
i_string = _orRule(i_string1, i_string2)
re_string = re.compile(i_string, _reflags)
i_uri = (u'url\\(\s*(?:(?:%s)|((?:%s)+))\s*\\)'
% (i_string, _orRule('[!#$%&*-~]', i_nonascii, i_escape)))
# XXX For now
# i_uri = u'(url\\(.*?\\))'
re_uri = re.compile(i_uri, _reflags)
i_num = u'(([-+]?[0-9]+(?:\\.[0-9]+)?)|([-+]?\\.[0-9]+))' # XXX Added outer parentheses, because e.g. .5em was not parsed correctly
re_num = re.compile(i_num, _reflags)
i_unit = '(%%|%s)?' % i_ident
re_unit = re.compile(i_unit, _reflags)
i_function = i_ident + '\\('
re_function = re.compile(i_function, _reflags)
i_functionterm = u'[-+]?' + i_function
re_functionterm = re.compile(i_functionterm, _reflags)
i_unicoderange1 = "(?:U\\+%s{1,6}-%s{1,6})" % (i_hex, i_hex)
i_unicoderange2 = "(?:U\\+\?{1,6}|{h}(\?{0,5}|{h}(\?{0,4}|{h}(\?{0,3}|{h}(\?{0,2}|{h}(\??|{h}))))))"
i_unicoderange = i_unicoderange1 # u'(%s|%s)' % (i_unicoderange1, i_unicoderange2)
re_unicoderange = re.compile(i_unicoderange, _reflags)
# i_comment = u'(?:\/\*[^*]*\*+([^/*][^*]*\*+)*\/)|(?://.*)'
# gabriel: only C convention for comments is allowed in CSS
i_comment = u'(?:\/\*[^*]*\*+([^/*][^*]*\*+)*\/)'
re_comment = re.compile(i_comment, _reflags)
i_important = u'!\s*(important)'
re_important = re.compile(i_important, _reflags)
del _orRule
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ Public
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def __init__(self, cssBuilder=None):
self.setCSSBuilder(cssBuilder)
#~ CSS Builder to delegate to ~~~~~~~~~~~~~~~~~~~~~~~~
def getCSSBuilder(self):
"""A concrete instance implementing CSSBuilderAbstract"""
return self._cssBuilder
def setCSSBuilder(self, cssBuilder):
"""A concrete instance implementing CSSBuilderAbstract"""
self._cssBuilder = cssBuilder
cssBuilder = property(getCSSBuilder, setCSSBuilder)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ Public CSS Parsing API
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def parseFile(self, srcFile, closeFile=False):
"""Parses CSS file-like objects using the current cssBuilder.
Use for external stylesheets."""
try:
result = self.parse(srcFile.read())
finally:
if closeFile:
srcFile.close()
return result
def parse(self, src):
"""Parses CSS string source using the current cssBuilder.
Use for embedded stylesheets."""
self.cssBuilder.beginStylesheet()
try:
# XXX Some simple preprocessing
src = cssSpecial.cleanupCSS(src)
try:
src, stylesheet = self._parseStylesheet(src)
except self.ParseError as err:
err.setFullCSSSource(src)
raise
finally:
self.cssBuilder.endStylesheet()
return stylesheet
def parseInline(self, src):
"""Parses CSS inline source string using the current cssBuilder.
Use to parse a tag's 'style'-like attribute."""
self.cssBuilder.beginInline()
try:
try:
src, properties = self._parseDeclarationGroup(src.strip(), braces=False)
except self.ParseError as err:
err.setFullCSSSource(src, inline=True)
raise
result = self.cssBuilder.inline(properties)
finally:
self.cssBuilder.endInline()
return result
def parseAttributes(self, attributes={}, **kwAttributes):
"""Parses CSS attribute source strings, and return as an inline stylesheet.
Use to parse a tag's highly CSS-based attributes like 'font'.
See also: parseSingleAttr
"""
if attributes:
kwAttributes.update(attributes)
self.cssBuilder.beginInline()
try:
properties = []
try:
for propertyName, src in kwAttributes.iteritems():
src, property = self._parseDeclarationProperty(src.strip(), propertyName)
properties.append(property)
except self.ParseError as err:
err.setFullCSSSource(src, inline=True)
raise
result = self.cssBuilder.inline(properties)
finally:
self.cssBuilder.endInline()
return result
def parseSingleAttr(self, attrValue):
"""Parse a single CSS attribute source string, and returns the built CSS expression.
Use to parse a tag's highly CSS-based attributes like 'font'.
See also: parseAttributes
"""
results = self.parseAttributes(temp=attrValue)
if 'temp' in results[1]:
return results[1]['temp']
else:
return results[0]['temp']
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ Internal _parse methods
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _parseStylesheet(self, src):
"""stylesheet
: [ CHARSET_SYM S* STRING S* ';' ]?
[S|CDO|CDC]* [ import [S|CDO|CDC]* ]*
[ [ ruleset | media | page | font_face ] [S|CDO|CDC]* ]*
;
"""
# Get rid of the comments
src = self.re_comment.sub(u'', src)
# [ CHARSET_SYM S* STRING S* ';' ]?
src = self._parseAtCharset(src)
# [S|CDO|CDC]*
src = self._parseSCDOCDC(src)
# [ import [S|CDO|CDC]* ]*
src, stylesheetImports = self._parseAtImports(src)
# [ namespace [S|CDO|CDC]* ]*
src = self._parseAtNamespace(src)
stylesheetElements = []
# [ [ ruleset | atkeywords ] [S|CDO|CDC]* ]*
while src: # due to ending with ]*
if src.startswith('@'):
# @media, @page, @font-face
src, atResults = self._parseAtKeyword(src)
if atResults is not None and atResults != NotImplemented:
stylesheetElements.extend(atResults)
else:
# ruleset
src, ruleset = self._parseRuleset(src)
stylesheetElements.append(ruleset)
# [S|CDO|CDC]*
src = self._parseSCDOCDC(src)
stylesheet = self.cssBuilder.stylesheet(stylesheetElements, stylesheetImports)
return src, stylesheet
def _parseSCDOCDC(self, src):
"""[S|CDO|CDC]*"""
while 1:
src = src.lstrip()
if src.startswith('<!--'):
src = src[4:]
elif src.startswith('-->'):
src = src[3:]
else:
break
return src
#~ CSS @ directives ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _parseAtCharset(self, src):
"""[ CHARSET_SYM S* STRING S* ';' ]?"""
if isAtRuleIdent(src, 'charset'):
src = stripAtRuleIdent(src)
charset, src = self._getString(src)
src = src.lstrip()
if src[:1] != ';':
raise self.ParseError('@charset expected a terminating \';\'', src)
src = src[1:].lstrip()
self.cssBuilder.atCharset(charset)
return src
def _parseAtImports(self, src):
"""[ import [S|CDO|CDC]* ]*"""
result = []
while isAtRuleIdent(src, 'import'):
ctxsrc = src
src = stripAtRuleIdent(src)
import_, src = self._getStringOrURI(src)
if import_ is None:
raise self.ParseError('Import expecting string or url', src, ctxsrc)
mediums = []
medium, src = self._getIdent(src.lstrip())
while medium is not None:
mediums.append(medium)
if src[:1] == ',':
src = src[1:].lstrip()
medium, src = self._getIdent(src)
else:
break
# XXX No medium inherits and then "all" is appropriate
if not mediums:
mediums = ["all"]
if src[:1] != ';':
raise self.ParseError('@import expected a terminating \';\'', src, ctxsrc)
src = src[1:].lstrip()
stylesheet = self.cssBuilder.atImport(import_, mediums, self)
if stylesheet is not None:
result.append(stylesheet)
src = self._parseSCDOCDC(src)
return src, result
def _parseAtNamespace(self, src):
"""namespace :
@namespace S* [IDENT S*]? [STRING|URI] S* ';' S*
"""
src = self._parseSCDOCDC(src)
while isAtRuleIdent(src, 'namespace'):
ctxsrc = src
src = stripAtRuleIdent(src)
namespace, src = self._getStringOrURI(src)
if namespace is None:
nsPrefix, src = self._getIdent(src)
if nsPrefix is None:
raise self.ParseError('@namespace expected an identifier or a URI', src, ctxsrc)
namespace, src = self._getStringOrURI(src.lstrip())
if namespace is None:
raise self.ParseError('@namespace expected a URI', src, ctxsrc)
else:
nsPrefix = None
src = src.lstrip()
if src[:1] != ';':
raise self.ParseError('@namespace expected a terminating \';\'', src, ctxsrc)
src = src[1:].lstrip()
self.cssBuilder.atNamespace(nsPrefix, namespace)
src = self._parseSCDOCDC(src)
return src
def _parseAtKeyword(self, src):
"""[media | page | font_face | unknown_keyword]"""
ctxsrc = src
if isAtRuleIdent(src, 'media'):
src, result = self._parseAtMedia(src)
elif isAtRuleIdent(src, 'page'):
src, result = self._parseAtPage(src)
elif isAtRuleIdent(src, 'font-face'):
src, result = self._parseAtFontFace(src)
# XXX added @import, was missing!
elif isAtRuleIdent(src, 'import'):
src, result = self._parseAtImports(src)
elif isAtRuleIdent(src, 'frame'):
src, result = self._parseAtFrame(src)
elif src.startswith('@'):
src, result = self._parseAtIdent(src)
else:
raise self.ParseError('Unknown state in atKeyword', src, ctxsrc)
return src, result
def _parseAtMedia(self, src):
"""media
: MEDIA_SYM S* medium [ ',' S* medium ]* '{' S* ruleset* '}' S*
;
"""
ctxsrc = src
src = src[len('@media '):].lstrip()
mediums = []
while src and src[0] != '{':
medium, src = self._getIdent(src)
if medium is None:
raise self.ParseError('@media rule expected media identifier', src, ctxsrc)
# make "and ... {" work
if medium == u'and':
# strip up to curly bracket
pattern = re.compile('.*({.*)')
match = re.match(pattern, src)
src = src[match.end()-1:]
break
mediums.append(medium)
if src[0] == ',':
src = src[1:].lstrip()
else:
src = src.lstrip()
if not src.startswith('{'):
raise self.ParseError('Ruleset opening \'{\' not found', src, ctxsrc)
src = src[1:].lstrip()
stylesheetElements = []
#while src and not src.startswith('}'):
# src, ruleset = self._parseRuleset(src)
# stylesheetElements.append(ruleset)
# src = src.lstrip()
# Containing @ where not found and parsed
while src and not src.startswith('}'):
if src.startswith('@'):
# @media, @page, @font-face
src, atResults = self._parseAtKeyword(src)
if atResults is not None:
stylesheetElements.extend(atResults)
else:
# ruleset
src, ruleset = self._parseRuleset(src)
stylesheetElements.append(ruleset)
src = src.lstrip()
if not src.startswith('}'):
raise self.ParseError('Ruleset closing \'}\' not found', src, ctxsrc)
else:
src = src[1:].lstrip()
result = self.cssBuilder.atMedia(mediums, stylesheetElements)
return src, result
def _parseAtPage(self, src):
"""page
: PAGE_SYM S* IDENT? pseudo_page? S*
'{' S* declaration [ ';' S* declaration ]* '}' S*
;
"""
ctxsrc = src
src = src[len('@page '):].lstrip()
page, src = self._getIdent(src)
if src[:1] == ':':
pseudopage, src = self._getIdent(src[1:])
page = page + '_' + pseudopage
else:
pseudopage = None
#src, properties = self._parseDeclarationGroup(src.lstrip())
# Containing @ where not found and parsed
stylesheetElements = []
src = src.lstrip()
properties = []
# XXX Extended for PDF use
if not src.startswith('{'):
raise self.ParseError('Ruleset opening \'{\' not found', src, ctxsrc)
else:
src = src[1:].lstrip()
while src and not src.startswith('}'):
if src.startswith('@'):
# @media, @page, @font-face
src, atResults = self._parseAtKeyword(src)
if atResults is not None:
stylesheetElements.extend(atResults)
else:
src, nproperties = self._parseDeclarationGroup(src.lstrip(), braces=False)
properties += nproperties
src = src.lstrip()
result = [self.cssBuilder.atPage(page, pseudopage, properties)]
return src[1:].lstrip(), result
def _parseAtFrame(self, src):
"""
XXX Proprietary for PDF
"""
ctxsrc = src
src = src[len('@frame '):].lstrip()
box, src = self._getIdent(src)
src, properties = self._parseDeclarationGroup(src.lstrip())
result = [self.cssBuilder.atFrame(box, properties)]
return src.lstrip(), result
def _parseAtFontFace(self, src):
ctxsrc = src
src = src[len('@font-face '):].lstrip()
src, properties = self._parseDeclarationGroup(src)
result = [self.cssBuilder.atFontFace(properties)]
return src, result
def _parseAtIdent(self, src):
ctxsrc = src
atIdent, src = self._getIdent(src[1:])
if atIdent is None:
raise self.ParseError('At-rule expected an identifier for the rule', src, ctxsrc)
src, result = self.cssBuilder.atIdent(atIdent, self, src)
if result is NotImplemented:
# An at-rule consists of everything up to and including the next semicolon (;) or the next block, whichever comes first
semiIdx = src.find(';')
if semiIdx < 0:
semiIdx = None
blockIdx = src[:semiIdx].find('{')
if blockIdx < 0:
blockIdx = None
if semiIdx is not None and semiIdx < blockIdx:
src = src[semiIdx + 1:].lstrip()
elif blockIdx is None:
# consume the rest of the content since we didn't find a block or a semicolon
src = ''
elif blockIdx is not None:
# expecting a block...
src = src[blockIdx:]
try:
# try to parse it as a declarations block
src, declarations = self._parseDeclarationGroup(src)
except self.ParseError:
# try to parse it as a stylesheet block
src, stylesheet = self._parseStylesheet(src)
else:
raise self.ParseError('Unable to ignore @-rule block', src, ctxsrc)
return src.lstrip(), result
#~ ruleset - see selector and declaration groups ~~~~
def _parseRuleset(self, src):
"""ruleset
: selector [ ',' S* selector ]*
'{' S* declaration [ ';' S* declaration ]* '}' S*
;
"""
src, selectors = self._parseSelectorGroup(src)
src, properties = self._parseDeclarationGroup(src.lstrip())
result = self.cssBuilder.ruleset(selectors, properties)
return src, result
#~ selector parsing ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _parseSelectorGroup(self, src):
selectors = []
while src[:1] not in ('{', '}', ']', '(', ')', ';', ''):
src, selector = self._parseSelector(src)
if selector is None:
break
selectors.append(selector)
if src.startswith(','):
src = src[1:].lstrip()
return src, selectors
def _parseSelector(self, src):
"""selector
: simple_selector [ combinator simple_selector ]*
;
"""
src, selector = self._parseSimpleSelector(src)
srcLen = len(src) # XXX
while src[:1] not in ('', ',', ';', '{', '}', '[', ']', '(', ')'):
for combiner in self.SelectorCombiners:
if src.startswith(combiner):
src = src[len(combiner):].lstrip()
break
else:
combiner = ' '
src, selectorB = self._parseSimpleSelector(src)
# XXX Fix a bug that occurred here e.g. : .1 {...}
if len(src) >= srcLen:
src = src[1:]
while src and (src[:1] not in ('', ',', ';', '{', '}', '[', ']', '(', ')')):
src = src[1:]
return src.lstrip(), None
selector = self.cssBuilder.combineSelectors(selector, combiner, selectorB)
return src.lstrip(), selector
def _parseSimpleSelector(self, src):
"""simple_selector
: [ namespace_selector ]? element_name? [ HASH | class | attrib | pseudo ]* S*
;
"""
ctxsrc = src.lstrip()
nsPrefix, src = self._getMatchResult(self.re_namespace_selector, src)
name, src = self._getMatchResult(self.re_element_name, src)
if name:
pass # already *successfully* assigned
elif src[:1] in self.SelectorQualifiers:
name = '*'
else:
raise self.ParseError('Selector name or qualifier expected', src, ctxsrc)
name = self.cssBuilder.resolveNamespacePrefix(nsPrefix, name)
selector = self.cssBuilder.selector(name)
while src and src[:1] in self.SelectorQualifiers:
hash_, src = self._getMatchResult(self.re_hash, src)
if hash_ is not None:
selector.addHashId(hash_)
continue
class_, src = self._getMatchResult(self.re_class, src)
if class_ is not None:
selector.addClass(class_)
continue
if src.startswith('['):
src, selector = self._parseSelectorAttribute(src, selector)
elif src.startswith(':'):
src, selector = self._parseSelectorPseudo(src, selector)
else:
break
return src.lstrip(), selector
def _parseSelectorAttribute(self, src, selector):
"""attrib
: '[' S* [ namespace_selector ]? IDENT S* [ [ '=' | INCLUDES | DASHMATCH ] S*
[ IDENT | STRING ] S* ]? ']'
;
"""
ctxsrc = src
if not src.startswith('['):
raise self.ParseError('Selector Attribute opening \'[\' not found', src, ctxsrc)
src = src[1:].lstrip()
nsPrefix, src = self._getMatchResult(self.re_namespace_selector, src)
attrName, src = self._getIdent(src)
src = src.lstrip()
if attrName is None:
raise self.ParseError('Expected a selector attribute name', src, ctxsrc)
if nsPrefix is not None:
attrName = self.cssBuilder.resolveNamespacePrefix(nsPrefix, attrName)
for op in self.AttributeOperators:
if src.startswith(op):
break
else:
op = ''
src = src[len(op):].lstrip()
if op:
attrValue, src = self._getIdent(src)
if attrValue is None:
attrValue, src = self._getString(src)
if attrValue is None:
raise self.ParseError('Expected a selector attribute value', src, ctxsrc)
else:
attrValue = None
if not src.startswith(']'):
raise self.ParseError('Selector Attribute closing \']\' not found', src, ctxsrc)
else:
src = src[1:]
if op:
selector.addAttributeOperation(attrName, op, attrValue)
else:
selector.addAttribute(attrName)
return src, selector
def _parseSelectorPseudo(self, src, selector):
"""pseudo
: ':' [ IDENT | function ]
;
"""
ctxsrc = src
if not src.startswith(':'):
raise self.ParseError('Selector Pseudo \':\' not found', src, ctxsrc)
src = re.search('^:{1,2}(.*)', src, re.M | re.S).group(1)
name, src = self._getIdent(src)
if not name:
raise self.ParseError('Selector Pseudo identifier not found', src, ctxsrc)
if src.startswith('('):
# function
src = src[1:].lstrip()
src, term = self._parseExpression(src, True)
if not src.startswith(')'):
raise self.ParseError('Selector Pseudo Function closing \')\' not found', src, ctxsrc)
src = src[1:]
selector.addPseudoFunction(name, term)
else:
selector.addPseudo(name)
return src, selector
#~ declaration and expression parsing ~~~~~~~~~~~~~~~
def _parseDeclarationGroup(self, src, braces=True):
ctxsrc = src
if src.startswith('{'):
src, braces = src[1:], True
elif braces:
raise self.ParseError('Declaration group opening \'{\' not found', src, ctxsrc)
properties = []
src = src.lstrip()
while src[:1] not in ('', ',', '{', '}', '[', ']', '(', ')', '@'): # XXX @?
src, property = self._parseDeclaration(src)
# XXX Workaround for styles like "*font: smaller"
if src.startswith("*"):
src = "-nothing-" + src[1:]
continue
if property is None:
break
properties.append(property)
if src.startswith(';'):
src = src[1:].lstrip()
else:
break
if braces:
if not src.startswith('}'):
raise self.ParseError('Declaration group closing \'}\' not found', src, ctxsrc)
src = src[1:]
return src.lstrip(), properties
def _parseDeclaration(self, src):
"""declaration
: ident S* ':' S* expr prio?
| /* empty */
;
"""
# property
propertyName, src = self._getIdent(src)
if propertyName is not None:
src = src.lstrip()
# S* : S*
if src[:1] in (':', '='):
# Note: we are being fairly flexible here... technically, the
# ":" is *required*, but in the name of flexibility we
# support a null transition, as well as an "=" transition
src = src[1:].lstrip()
src, property = self._parseDeclarationProperty(src, propertyName)
else:
property = None
return src, property
def _parseDeclarationProperty(self, src, propertyName):
# expr
src, expr = self._parseExpression(src)
# prio?
important, src = self._getMatchResult(self.re_important, src)
src = src.lstrip()
property = self.cssBuilder.property(propertyName, expr, important)
return src, property
def _parseExpression(self, src, returnList=False):
"""
expr
: term [ operator term ]*
;
"""
src, term = self._parseExpressionTerm(src)
operator = None
while src[:1] not in ('', ';', '{', '}', '[', ']', ')'):
for operator in self.ExpressionOperators:
if src.startswith(operator):
src = src[len(operator):]
break
else:
operator = ' '
src, term2 = self._parseExpressionTerm(src.lstrip())
if term2 is NotImplemented:
break
else:
term = self.cssBuilder.combineTerms(term, operator, term2)
if operator is None and returnList:
term = self.cssBuilder.combineTerms(term, None, None)
return src, term
else:
return src, term
def _parseExpressionTerm(self, src):
"""term
: unary_operator?
[ NUMBER S* | PERCENTAGE S* | LENGTH S* | EMS S* | EXS S* | ANGLE S* |
TIME S* | FREQ S* | function ]
| STRING S* | IDENT S* | URI S* | RGB S* | UNICODERANGE S* | hexcolor
;
"""
ctxsrc = src
result, src = self._getMatchResult(self.re_num, src)
if result is not None:
units, src = self._getMatchResult(self.re_unit, src)
term = self.cssBuilder.termNumber(result, units)
return src.lstrip(), term
result, src = self._getString(src, self.re_uri)
if result is not None:
# XXX URL!!!!
term = self.cssBuilder.termURI(result)
return src.lstrip(), term
result, src = self._getString(src)
if result is not None:
term = self.cssBuilder.termString(result)
return src.lstrip(), term
result, src = self._getMatchResult(self.re_functionterm, src)
if result is not None:
src, params = self._parseExpression(src, True)
if src[0] != ')':
raise self.ParseError('Terminal function expression expected closing \')\'', src, ctxsrc)
src = src[1:].lstrip()
term = self.cssBuilder.termFunction(result, params)
return src, term
result, src = self._getMatchResult(self.re_rgbcolor, src)
if result is not None:
term = self.cssBuilder.termRGB(result)
return src.lstrip(), term
result, src = self._getMatchResult(self.re_unicoderange, src)
if result is not None:
term = self.cssBuilder.termUnicodeRange(result)
return src.lstrip(), term
nsPrefix, src = self._getMatchResult(self.re_namespace_selector, src)
result, src = self._getIdent(src)
if result is not None:
if nsPrefix is not None:
result = self.cssBuilder.resolveNamespacePrefix(nsPrefix, result)
term = self.cssBuilder.termIdent(result)
return src.lstrip(), term
result, src = self._getMatchResult(self.re_unicodeid, src)
if result is not None:
term = self.cssBuilder.termIdent(result)
return src.lstrip(), term
return self.cssBuilder.termUnknown(src)
#~ utility methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _getIdent(self, src, default=None):
return self._getMatchResult(self.re_ident, src, default)
def _getString(self, src, rexpression=None, default=None):
if rexpression is None:
rexpression = self.re_string
result = rexpression.match(src)
if result:
strres = filter(None, result.groups())
if strres:
try:
strres = strres[0]
except Exception:
strres = result.groups()[0]
else:
strres = ''
return strres, src[result.end():]
else:
return default, src
def _getStringOrURI(self, src):
result, src = self._getString(src, self.re_uri)
if result is None:
result, src = self._getString(src)
return result, src
def _getMatchResult(self, rexpression, src, default=None, group=1):
result = rexpression.match(src)
if result:
return result.group(group), src[result.end():]
else:
return default, src
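# -----------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the smallest builder
# that lets CSSParser.parseInline() produce a usable result. Only the callbacks
# that a plain inline declaration actually exercises are overridden; every
# other method keeps the NotImplementedError default from CSSBuilderAbstract.
# The class name and its return values are assumptions made for demonstration,
# not part of the parser's shipped API.
if __name__ == '__main__':
    class _InlineEchoBuilder(CSSBuilderAbstract):
        def beginInline(self):
            pass
        def endInline(self):
            pass
        def inline(self, declarations):
            # declarations arrive as whatever property() returned, in order
            return dict(declarations)
        def property(self, name, value, important=False):
            return (name, value)
        def termIdent(self, value):
            return value
        def termNumber(self, value, units=None):
            return value + (units or '')
        def combineTerms(self, termA, combiner, termB):
            return '%s%s%s' % (termA, combiner or ' ', termB)
    parser = CSSParser(_InlineEchoBuilder())
    # expected output (roughly): {'color': 'black', 'margin': '0'}
    print(parser.parseInline('color: black; margin: 0'))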
|
|
"""
downgrade_11.py from ioc_writer
Created: 12/17/15
Purpose: Provide a single reference class for converting an OpenIOC 1.1 document to OpenIOC 1.0.
This downgrade process is lossy as there are conditions, parameters and link metadata which may be present in the 1.1 indicator that cannot be expressed in the 1.0 indicator.
The data that is lost is detailed below:
Data that will be removed in the downgrade:
#. For items directly underneath the top-level Indicator node (OpenIOC/criteria/Indicator/@operator='OR' for a valid MIR IOC):
#. Any IndicatorItems under the top which use the preserve-case attribute will be removed.
#. Any IndicatorItems which use the conditions 'starts-with', 'ends-with', 'greater-than', 'less-than', or 'matches' will be removed.
#. Any Indicator nodes which contain an IndicatorItem node underneath them matching the conditions described above in 1) & 2) will be removed.
#. Metadata:
#. Any links which contain link/@href will lose the @href attribute.
#. Parameters:
#. Any parameters which point to an Indicator node will be removed.
#. Any parameters which point to an IndicatorItem node which do not have param/@name='comment' set will be removed.
#. General:
#. The published date, OpenIOC/@published-date, will be removed.
Usage example:
::
iocm = DowngradeManager()
iocm.insert(iocs_dir)
errors = iocm.convert_to_10()
output_dir = './iocs'
iocm.write_iocs(output_dir)
iocm.write_pruned_iocs(output_dir, iocm.pruned_11_iocs)
iocm.write_pruned_iocs(output_dir, iocm.null_pruned_iocs)
"""
# Stdlib
from __future__ import print_function
import logging
import os
# Third Party code
from lxml import etree as et
# Custom Code
import ioc_writer.ioc_api as ioc_api
import ioc_writer.utils as utils
from ioc_writer.managers import IOCManager
log = logging.getLogger(__name__)
__author__ = 'will.gibb'
__version__ = '0.0.1'
METADATA_ORDER_10 = ['short_description',
'description',
'keywords',
'authored_by',
'authored_date',
'links']
METADATA_REQUIRED_10 = ['authored_date']
class DowngradeError(ioc_api.IOCParseError):
"""
Exception raised when there is an error in the conversion
"""
pass
class DowngradeManager(IOCManager):
"""
Convert the OpenIOC 1.1 documents into a 1.0 format. The converted IOCs are stored in self.iocs_10.
IOCs which would have all nodes removed from under their top-level OR are added to self.null_pruned_iocs.
IOCs which have at least one node, but not all nodes, removed are added to self.pruned_11_iocs.
"""
def __init__(self):
IOCManager.__init__(self)
self.iocs_10 = {} # elementTree representing the IOC, used by ioc_manager.convert_to_10
self.pruned_11_iocs = set() # set representing pruned IOCs, used by ioc_manager.convert_to_10
self.null_pruned_iocs = set() # set representing null IOCs, used by ioc_manager.convert_to_10
self.openioc_11_only_conditions = ['starts-with', 'ends-with', 'greater-than', 'less-than', 'matches']
self.default_encoding = 'utf-8'
def convert_to_10(self):
"""
converts the iocs in self.iocs from openioc 1.1 to openioc 1.0 format.
the converted iocs are stored in the dictionary self.iocs_10
:return: A list of iocid values which had errors downgrading.
"""
if len(self) < 1:
log.error('no iocs available to modify')
return False
log.info('Converting IOCs from 1.1 to 1.0.')
errors = []
for iocid in self.iocs:
pruned = False
ioc_obj_11 = self.iocs[iocid]
metadata = ioc_obj_11.metadata
# record metadata
name_11 = metadata.findtext('.//short_description')
keywords_11 = metadata.findtext('.//keywords')
description_11 = metadata.findtext('.//description')
author_11 = metadata.findtext('.//authored_by')
created_date_11 = metadata.findtext('.//authored_date')
last_modified_date_11 = ioc_obj_11.root.get('last-modified')
links_11 = []
for link in metadata.xpath('//link'):
link_rel = link.get('rel')
link_text = link.text
links_11.append((link_rel, None, link_text))
# get ioc_logic
try:
ioc_logic = ioc_obj_11.root.xpath('.//criteria')[0]
except IndexError:
log.exception(
'Could not find criteria nodes for IOC [{}]. Did you attempt to convert OpenIOC 1.0 iocs?'.format(
iocid))
errors.append(iocid)
continue
try:
tlo_11 = ioc_logic.getchildren()[0]
except IndexError:
log.exception(
'Could not find children for the top level criteria/children nodes for IOC [{}]'.format(iocid))
errors.append(iocid)
continue
tlo_id = tlo_11.get('id')
# record comment parameters
comment_dict = {}
for param in ioc_obj_11.parameters.xpath('//param[@name="comment"]'):
param_id = param.get('ref-id')
param_text = param.findtext('value')
comment_dict[param_id] = param_text
# create a new 1.1 indicator and populate it with the metadata from the existing 1.1 IOC
# we will then modify this new IOC to conform to the 1.0 schema
ioc_obj_10 = ioc_api.IOC(name=name_11, description=description_11, author=author_11, links=links_11,
keywords=keywords_11, iocid=iocid)
ioc_obj_10.root.attrib['last-modified'] = last_modified_date_11
authored_date_node = ioc_obj_10.metadata.find('authored_date')
authored_date_node.text = created_date_11
# convert 1.1 ioc object to 1.0
# change xmlns
ioc_obj_10.root.attrib['xmlns'] = 'http://schemas.mandiant.com/2010/ioc'
# remove published data
del ioc_obj_10.root.attrib['published-date']
# remove parameters node
ioc_obj_10.root.remove(ioc_obj_10.parameters)
# change root tag
ioc_obj_10.root.tag = 'ioc'
# metadata underneath the root node
metadata_node = ioc_obj_10.metadata
criteria_node = ioc_obj_10.top_level_indicator.getparent()
metadata_dictionary = {}
for child in metadata_node:
metadata_dictionary[child.tag] = child
for tag in METADATA_REQUIRED_10:
if tag not in metadata_dictionary:
msg = 'IOC {} is missing required metadata: [{}]'.format(iocid, tag)
raise DowngradeError(msg)
for tag in METADATA_ORDER_10:
if tag in metadata_dictionary:
ioc_obj_10.root.append(metadata_dictionary.get(tag))
ioc_obj_10.root.remove(metadata_node)
ioc_obj_10.root.remove(criteria_node)
criteria_node.tag = 'definition'
ioc_obj_10.root.append(criteria_node)
ioc_obj_10.top_level_indicator.attrib['id'] = tlo_id
# identify indicator items with 1.1 specific operators
# we will skip them when converting IOC from 1.1 to 1.0.
ids_to_skip = set()
indicatoritems_to_remove = set()
for condition_type in self.openioc_11_only_conditions:
for elem in ioc_logic.xpath('//IndicatorItem[@condition="%s"]' % condition_type):
pruned = True
indicatoritems_to_remove.add(elem)
for elem in ioc_logic.xpath('//IndicatorItem[@preserve-case="true"]'):
pruned = True
indicatoritems_to_remove.add(elem)
# walk up from each indicatoritem
# to build set of ids to skip when downconverting
for elem in indicatoritems_to_remove:
nid = None
current = elem
while nid != tlo_id:
parent = current.getparent()
nid = parent.get('id')
if nid == tlo_id:
current_id = current.get('id')
ids_to_skip.add(current_id)
else:
current = parent
# walk the 1.1 IOC to convert it into a 1.0 IOC
# noinspection PyBroadException
try:
self.convert_branch(tlo_11, ioc_obj_10.top_level_indicator, ids_to_skip, comment_dict)
except DowngradeError:
log.exception('Problem converting IOC [{}]'.format(iocid))
errors.append(iocid)
continue
except Exception:
log.exception('Unknown error occurred while converting [{}]'.format(iocid))
errors.append(iocid)
continue
# bucket pruned iocs / null iocs
if not ioc_obj_10.top_level_indicator.getchildren():
self.null_pruned_iocs.add(iocid)
elif pruned is True:
self.pruned_11_iocs.add(iocid)
# Check the original to see if there was a comment prior to the root node, and if so, copy its content
comment_node = ioc_obj_11.root.getprevious()
while comment_node is not None:
log.debug('found a comment node')
c = et.Comment(comment_node.text)
ioc_obj_10.root.addprevious(c)
comment_node = comment_node.getprevious()
# Record the IOC
# ioc_10 = et.ElementTree(root_10)
self.iocs_10[iocid] = ioc_obj_10
return errors
def convert_branch(self, old_node, new_node, ids_to_skip, comment_dict=None):
"""
Recursively walk an indicator logic tree, starting from an Indicator node.
Converts OpenIOC 1.1 Indicator/IndicatorItems to OpenIOC 1.0 and preserves order.
:param old_node: An Indicator node, which we walk down to convert
:param new_node: An Indicator node, to which we add new IndicatorItem and Indicator nodes
:param ids_to_skip: set of node @id values not to convert
:param comment_dict: maps ids to comment values. only applied to IndicatorItem nodes
:return: returns True upon completion.
:raises: DowngradeError if there is a problem during the conversion.
"""
expected_tag = 'Indicator'
if old_node.tag != expected_tag:
raise DowngradeError('old_node expected tag is [%s]' % expected_tag)
if not comment_dict:
comment_dict = {}
for node in old_node.getchildren():
node_id = node.get('id')
if node_id in ids_to_skip:
continue
if node.tag == 'IndicatorItem':
negation = node.get('negate')
condition = node.get('condition')
if 'true' in negation.lower():
new_condition = condition + 'not'
else:
new_condition = condition
document = node.xpath('Context/@document')[0]
search = node.xpath('Context/@search')[0]
content_type = node.xpath('Content/@type')[0]
content = node.findtext('Content')
context_type = node.xpath('Context/@type')[0]
new_ii_node = ioc_api.make_indicatoritem_node(condition=condition,
document=document,
search=search,
content_type=content_type,
content=content,
context_type=context_type,
nid=node_id)
# set condition
new_ii_node.attrib['condition'] = new_condition
# set comment
if node_id in comment_dict:
comment = comment_dict[node_id]
comment_node = et.Element('Comment')
comment_node.text = comment
new_ii_node.append(comment_node)
# remove preserve-case and negate
del new_ii_node.attrib['negate']
del new_ii_node.attrib['preserve-case']
new_node.append(new_ii_node)
elif node.tag == 'Indicator':
operator = node.get('operator')
if operator.upper() not in ['OR', 'AND']:
raise DowngradeError('Indicator@operator is not AND/OR. [%s] has [%s]' % (node_id, operator))
new_i_node = ioc_api.make_indicator_node(operator, node_id)
new_node.append(new_i_node)
self.convert_branch(node, new_i_node, ids_to_skip, comment_dict)
else:
# should never get here
raise DowngradeError('node is not an Indicator/IndicatorItem')
return True
def write_iocs(self, directory=None, source=None):
"""
Serializes IOCs to a directory.
:param directory: Directory to write IOCs to. If not provided, the current working directory is used.
:param source: Dictionary containing iocid -> IOC mapping. Defaults to self.iocs_10. This is not normally modified by a user of this class.
:return:
"""
"""
if directory is None, write the iocs to the current working directory
source: allows specifying a different dictionary of elementTree ioc objects
"""
if not source:
source = self.iocs_10
if len(source) < 1:
log.error('no iocs available to write out')
return False
if not directory:
directory = os.getcwd()
if os.path.isfile(directory):
log.error('cannot write iocs to a path that is an existing file')
return False
source_iocs = set(source.keys())
source_iocs = source_iocs.difference(self.pruned_11_iocs)
source_iocs = source_iocs.difference(self.null_pruned_iocs)
if not source_iocs:
log.error('no iocs available to write out after removing pruned/null iocs')
return False
utils.safe_makedirs(directory)
output_dir = os.path.abspath(directory)
log.info('Writing IOCs to %s' % (str(output_dir)))
# serialize the iocs
for iocid in source_iocs:
ioc_obj = source[iocid]
ioc_obj.write_ioc_to_file(output_dir=output_dir, force=True)
return True
def write_pruned_iocs(self, directory=None, pruned_source=None):
"""
Writes IOCs to a directory that have been pruned of some or all IOCs.
:param directory: Directory to write IOCs to. If not provided, the current working directory is used.
:param pruned_source: Iterable containing a set of iocids. Defaults to self.pruned_11_iocs.
:return:
"""
"""
write_pruned_iocs to a directory
if directory is None, write the iocs to the current working directory
"""
if pruned_source is None:
pruned_source = self.pruned_11_iocs
if len(pruned_source) < 1:
log.error('no iocs available to write out')
return False
if not directory:
directory = os.getcwd()
if os.path.isfile(directory):
log.error('cannot write iocs to a path that is an existing file')
return False
utils.safe_makedirs(directory)
output_dir = os.path.abspath(directory)
# serialize the iocs
for iocid in pruned_source:
ioc_obj = self.iocs_10[iocid]
ioc_obj.write_ioc_to_file(output_dir=output_dir, force=True)
return True
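# -----------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the condition mapping
# that convert_branch() applies to each IndicatorItem, shown in isolation.
# OpenIOC 1.1 records negation in IndicatorItem/@negate, while OpenIOC 1.0
# folds it into the condition name. The helper name below is hypothetical.
if __name__ == '__main__':
    def downgrade_condition(condition, negate):
        return condition + 'not' if 'true' in negate.lower() else condition

    assert downgrade_condition('is', 'true') == 'isnot'
    assert downgrade_condition('contains', 'false') == 'contains'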
|
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module contains some utility functions and classes that are used in the chemenv package.
"""
__author__ = "David Waroquiers"
__copyright__ = "Copyright 2012, The Materials Project"
__credits__ = "Geoffroy Hautier"
__version__ = "2.0"
__maintainer__ = "David Waroquiers"
__email__ = "david.waroquiers@gmail.com"
__date__ = "Feb 20, 2016"
from typing import Dict
import numpy as np
from pymatgen.analysis.chemenv.utils.math_utils import (
power2_decreasing_exp,
power2_inverse_decreasing,
power2_inverse_power2_decreasing,
smootherstep,
smoothstep,
)
class AbstractRatioFunction:
"""
Abstract class for all ratio functions
"""
ALLOWED_FUNCTIONS = {} # type: Dict[str, list]
def __init__(self, function, options_dict=None):
"""Constructor for AbstractRatioFunction
:param function: Ratio function name.
:param options_dict: Dictionary containing the parameters for the ratio function.
"""
if function not in self.ALLOWED_FUNCTIONS:
raise ValueError(
'Function "{}" is not allowed in RatioFunction of '
'type "{}"'.format(function, self.__class__.__name__)
)
self.eval = object.__getattribute__(self, function)
self.function = function
self.setup_parameters(options_dict=options_dict)
def setup_parameters(self, options_dict):
"""Set up the parameters for this ratio function.
:param options_dict: Dictionary containing the parameters for the ratio function.
:return: None.
"""
function_options = self.ALLOWED_FUNCTIONS[self.function]
if len(function_options) > 0:
# Check if there are missing options
if options_dict is None:
missing_options = True
else:
missing_options = False
for op in function_options:
if op not in options_dict:
missing_options = True
break
# If there are missing options, raise an error
if missing_options:
if len(function_options) == 1:
opts = 'Option "{}"'.format(function_options[0])
else:
opts1 = ", ".join(['"{}"'.format(op) for op in function_options[:-1]])
opts = "Options {}".format(" and ".join([opts1, '"{}"'.format(function_options[-1])]))
if options_dict is None or len(options_dict) == 0:
missing = "no option was provided."
else:
optgiven = list(options_dict.keys())
if len(options_dict) == 1:
missing = 'only "{}" was provided.'.format(optgiven[0])
else:
missing1 = ", ".join(['"{}"'.format(miss) for miss in optgiven[:-1]])
missing = "only {} were provided.".format(" and ".join([missing1, '"{}"'.format(optgiven[-1])]))
raise ValueError(
'{} should be provided for function "{}" in RatioFunction of '
'type "{}" while {}'.format(opts, self.function, self.__class__.__name__, missing)
)
# Setup the options and raise an error if a wrong option is provided
for key, val in options_dict.items():
if key not in function_options:
raise ValueError(
'Option "{}" not allowed for function "{}" in RatioFunction of '
'type "{}"'.format(key, self.function, self.__class__.__name__)
)
self.__setattr__(key, val)
def evaluate(self, value):
"""Evaluate the ratio function for the given value.
:param value: Value for which ratio function has to be evaluated.
:return: Ratio function corresponding to the value.
"""
return self.eval(value)
@classmethod
def from_dict(cls, dd):
"""Construct ratio function from dict.
:param dd: Dict representation of the ratio function
:return: Ratio function object.
"""
return cls(function=dd["function"], options_dict=dd["options"])
class RatioFunction(AbstractRatioFunction):
"""Concrete implementation of a series of ratio functions."""
ALLOWED_FUNCTIONS = {
"power2_decreasing_exp": ["max", "alpha"],
"smoothstep": ["lower", "upper"],
"smootherstep": ["lower", "upper"],
"inverse_smoothstep": ["lower", "upper"],
"inverse_smootherstep": ["lower", "upper"],
"power2_inverse_decreasing": ["max"],
"power2_inverse_power2_decreasing": ["max"],
}
def power2_decreasing_exp(self, vals):
"""Get the evaluation of the ratio function f(x)=exp(-a*x)*(x-1)^2.
The values (i.e. "x"), are scaled to the "max" parameter. The "a" constant
correspond to the "alpha" parameter.
:param vals: Values for which the ratio function has to be evaluated.
:return: Result of the ratio function applied to the values.
"""
return power2_decreasing_exp(vals, edges=[0.0, self.__dict__["max"]], alpha=self.__dict__["alpha"])
def smootherstep(self, vals):
"""Get the evaluation of the smootherstep ratio function: f(x)=6*x^5-15*x^4+10*x^3.
The values (i.e. "x"), are scaled between the "lower" and "upper" parameters.
:param vals: Values for which the ratio function has to be evaluated.
:return: Result of the ratio function applied to the values.
"""
return smootherstep(vals, edges=[self.__dict__["lower"], self.__dict__["upper"]])
def smoothstep(self, vals):
"""Get the evaluation of the smoothstep ratio function: f(x)=3*x^2-2*x^3.
The values (i.e. "x"), are scaled between the "lower" and "upper" parameters.
:param vals: Values for which the ratio function has to be evaluated.
:return: Result of the ratio function applied to the values.
"""
return smoothstep(vals, edges=[self.__dict__["lower"], self.__dict__["upper"]])
def inverse_smootherstep(self, vals):
"""Get the evaluation of the "inverse" smootherstep ratio function: f(x)=1-(6*x^5-15*x^4+10*x^3).
The values (i.e. "x"), are scaled between the "lower" and "upper" parameters.
:param vals: Values for which the ratio function has to be evaluated.
:return: Result of the ratio function applied to the values.
"""
return smootherstep(vals, edges=[self.__dict__["lower"], self.__dict__["upper"]], inverse=True)
def inverse_smoothstep(self, vals):
"""Get the evaluation of the "inverse" smoothstep ratio function: f(x)=1-(3*x^2-2*x^3).
The values (i.e. "x"), are scaled between the "lower" and "upper" parameters.
:param vals: Values for which the ratio function has to be evaluated.
:return: Result of the ratio function applied to the values.
"""
return smoothstep(vals, edges=[self.__dict__["lower"], self.__dict__["upper"]], inverse=True)
def power2_inverse_decreasing(self, vals):
"""Get the evaluation of the ratio function f(x)=(x-1)^2 / x.
The values (i.e. "x"), are scaled to the "max" parameter.
:param vals: Values for which the ratio function has to be evaluated.
:return: Result of the ratio function applied to the values.
"""
return power2_inverse_decreasing(vals, edges=[0.0, self.__dict__["max"]])
def power2_inverse_power2_decreasing(self, vals):
"""Get the evaluation of the ratio function f(x)=(x-1)^2 / x^2.
The values (i.e. "x"), are scaled to the "max" parameter.
:param vals: Values for which the ratio function has to be evaluated.
:return: Result of the ratio function applied to the values.
"""
return power2_inverse_power2_decreasing(vals, edges=[0.0, self.__dict__["max"]])
class CSMFiniteRatioFunction(AbstractRatioFunction):
"""Concrete implementation of a series of ratio functions applied to the continuous symmetry measure (CSM).
Uses "finite" ratio functions.
See the following reference for details:
ChemEnv: a fast and robust coordination environment identification tool,
D. Waroquiers et al., Acta Cryst. B 76, 683 (2020).
"""
ALLOWED_FUNCTIONS = {
"power2_decreasing_exp": ["max_csm", "alpha"],
"smoothstep": ["lower_csm", "upper_csm"],
"smootherstep": ["lower_csm", "upper_csm"],
}
def power2_decreasing_exp(self, vals):
"""Get the evaluation of the ratio function f(x)=exp(-a*x)*(x-1)^2.
The CSM values (i.e. "x"), are scaled to the "max_csm" parameter. The "a" constant
correspond to the "alpha" parameter.
:param vals: CSM values for which the ratio function has to be evaluated.
:return: Result of the ratio function applied to the CSM values.
"""
return power2_decreasing_exp(vals, edges=[0.0, self.__dict__["max_csm"]], alpha=self.__dict__["alpha"])
def smootherstep(self, vals):
"""Get the evaluation of the smootherstep ratio function: f(x)=6*x^5-15*x^4+10*x^3.
The CSM values (i.e. "x"), are scaled between the "lower_csm" and "upper_csm" parameters.
:param vals: CSM values for which the ratio function has to be evaluated.
:return: Result of the ratio function applied to the CSM values.
"""
return smootherstep(
vals,
edges=[self.__dict__["lower_csm"], self.__dict__["upper_csm"]],
inverse=True,
)
def smoothstep(self, vals):
"""Get the evaluation of the smoothstep ratio function: f(x)=3*x^2-2*x^3.
The CSM values (i.e. "x"), are scaled between the "lower_csm" and "upper_csm" parameters.
:param vals: CSM values for which the ratio function has to be evaluated.
:return: Result of the ratio function applied to the CSM values.
"""
return smoothstep(
vals,
edges=[self.__dict__["lower_csm"], self.__dict__["upper_csm"]],
inverse=True,
)
def fractions(self, data):
"""Get the fractions from the CSM ratio function applied to the data.
:param data: List of CSM values to estimate fractions.
:return: Corresponding fractions for each CSM.
"""
if len(data) == 0:
return None
total = np.sum([self.eval(dd) for dd in data])
if total > 0.0:
return [self.eval(dd) / total for dd in data]
return None
def mean_estimator(self, data):
"""Get the weighted CSM using this CSM ratio function applied to the data.
:param data: List of CSM values to estimate the weighted CSM.
:return: Weighted CSM from this ratio function.
"""
if len(data) == 0:
return None
if len(data) == 1:
return data[0]
fractions = self.fractions(data)
if fractions is None:
return None
return np.sum(np.array(fractions) * np.array(data))
ratios = fractions
class CSMInfiniteRatioFunction(AbstractRatioFunction):
"""Concrete implementation of a series of ratio functions applied to the continuous symmetry measure (CSM).
Uses "infinite" ratio functions.
See the following reference for details:
ChemEnv: a fast and robust coordination environment identification tool,
D. Waroquiers et al., Acta Cryst. B 76, 683 (2020).
"""
ALLOWED_FUNCTIONS = {
"power2_inverse_decreasing": ["max_csm"],
"power2_inverse_power2_decreasing": ["max_csm"],
}
def power2_inverse_decreasing(self, vals):
"""Get the evaluation of the ratio function f(x)=(x-1)^2 / x.
The CSM values (i.e. "x"), are scaled to the "max_csm" parameter. The "a" constant
correspond to the "alpha" parameter.
:param vals: CSM values for which the ratio function has to be evaluated.
:return: Result of the ratio function applied to the CSM values.
"""
return power2_inverse_decreasing(vals, edges=[0.0, self.__dict__["max_csm"]])
def power2_inverse_power2_decreasing(self, vals):
"""Get the evaluation of the ratio function f(x)=(x-1)^2 / x^2.
The CSM values (i.e. "x"), are scaled to the "max_csm" parameter. The "a" constant
correspond to the "alpha" parameter.
:param vals: CSM values for which the ratio function has to be evaluated.
:return: Result of the ratio function applied to the CSM values.
"""
return power2_inverse_power2_decreasing(vals, edges=[0.0, self.__dict__["max_csm"]])
def fractions(self, data):
"""Get the fractions from the CSM ratio function applied to the data.
:param data: List of CSM values to estimate fractions.
:return: Corresponding fractions for each CSM.
"""
if len(data) == 0:
return None
close_to_zero = np.isclose(data, 0.0, atol=1e-10).tolist()
nzeros = close_to_zero.count(True)
if nzeros == 1:
fractions = [0.0] * len(data)
fractions[close_to_zero.index(True)] = 1.0
return fractions
if nzeros > 1:
raise RuntimeError("Should not have more than one continuous symmetry measure with value equal to 0.0")
fractions = self.eval(np.array(data))
total = np.sum(fractions)
if total > 0.0:
return fractions / total
return None
def mean_estimator(self, data):
"""Get the weighted CSM using this CSM ratio function applied to the data.
:param data: List of CSM values to estimate the weighted CSM.
:return: Weighted CSM from this ratio function.
"""
if len(data) == 0:
return None
if len(data) == 1:
return data[0]
fractions = self.fractions(data)
if fractions is None:
return None
return np.sum(np.array(fractions) * np.array(data))
ratios = fractions
class DeltaCSMRatioFunction(AbstractRatioFunction):
"""
Concrete implementation of a series of ratio functions applied to differences of
continuous symmetry measures (DeltaCSM).
Uses "finite" ratio functions.
See the following reference for details:
ChemEnv: a fast and robust coordination environment identification tool,
D. Waroquiers et al., Acta Cryst. B 76, 683 (2020).
"""
ALLOWED_FUNCTIONS = {"smootherstep": ["delta_csm_min", "delta_csm_max"]}
def smootherstep(self, vals):
"""Get the evaluation of the smootherstep ratio function: f(x)=6*x^5-15*x^4+10*x^3.
The DeltaCSM values (i.e. "x"), are scaled between the "delta_csm_min" and "delta_csm_max" parameters.
:param vals: DeltaCSM values for which the ratio function has to be evaluated.
:return: Result of the ratio function applied to the DeltaCSM values.
"""
return smootherstep(vals, edges=[self.__dict__["delta_csm_min"], self.__dict__["delta_csm_max"]])
|
|
# -*- coding: utf-8 -*-
import six
import unittest
from os import path
from webob.multidict import MultiDict
from iktomi.utils.storage import VersionedStorage
from iktomi.templates import Template, BoundTemplate
from iktomi.templates import jinja2 as jnj
from iktomi.templates.jinja2 import TemplateEngine
import jinja2
from lxml import html
from iktomi.forms import fields, convs, widgets, perms, \
Form, Field, FieldList, FieldSet
class TestFormClass(unittest.TestCase):
def setUp(self):
pass
@property
def env(self):
DIR = jnj.__file__
DIR = path.dirname(path.abspath(DIR))
TEMPLATES = [path.join(DIR, 'templates')]
jinja_loader = TemplateEngine(TEMPLATES)
template_loader = Template(engines={'html': jinja_loader},
*TEMPLATES)
env = VersionedStorage()
env.template = BoundTemplate(env, template_loader)
return env
def parse(self, value):
#print value
return html.fragment_fromstring(value, create_parent=True)
class TestWidget(TestFormClass):
def test_init(self):
kwargs = dict(template='textinput', classname='textinput')
widget = widgets.Widget(**kwargs)
for key, value in kwargs.items():
self.assertEqual(value, getattr(widget, key))
widget = widget()
for key, value in kwargs.items():
self.assertEqual(value, getattr(widget, key))
def test_obsolete(self):
with self.assertRaises(TypeError) as exc:
widgets.Widget(template='checkbox', multiple=True)
exc = exc.exception
self.assertIn('Obsolete parameters are used', str(exc))
self.assertIn('multiple', str(exc))
class TestTextInput(TestFormClass):
widget = widgets.TextInput
tag = 'input'
def get_value(self, html):
return html.xpath('.//'+self.tag+'/@value')[0]
def test_render(self):
class F(Form):
fields = [
Field('name',
conv=convs.Char(),
widget=self.widget(classname="cls")),
Field('unreadable',
permissions="w",
conv=convs.Char(),
widget=self.widget(classname="cls"))
]
form = F(self.env)
form.raw_data = MultiDict({'name': '<p>Paragraph</p>'})
render = form.get_field('name').widget.render()
html = self.parse(render)
value = self.get_value(html)
self.assertEqual(value, '<p>Paragraph</p>')
self.assertEqual(html.xpath('.//'+self.tag+'/@readonly'), [])
self.assertEqual(html.xpath('.//'+self.tag+'/@class'), ['cls'])
render = form.get_field('unreadable').widget.render()
self.assertEqual(render, '')
def test_escape(self):
class F(Form):
fields = [
Field('name',
conv=convs.Char(),
widget=self.widget())
]
form = F(self.env)
form.raw_data = MultiDict({'name': jinja2.Markup('<p>Paragraph</p>')})
render = form.get_field('name').widget.render()
html = self.parse(render)
value = self.get_value(html)
self.assertEqual(value, '<p>Paragraph</p>')
self.assert_('<p>Paragraph</p>' in six.text_type(render), render)
def test_render_readonly(self):
class F(Form):
fields = [
Field('name',
conv=convs.Char(),
widget=self.widget(),
permissions="r",
)
]
form = F(self.env)
form.raw_data = MultiDict({'name': '<p>Paragraph</p>'})
render = form.get_field('name').widget.render()
html = self.parse(render)
value = self.get_value(html)
self.assertEqual(value, '<p>Paragraph</p>')
self.assertEqual(html.xpath('.//'+self.tag+'/@readonly'), ['readonly'])
class TestTextarea(TestTextInput):
widget = widgets.Textarea
tag = 'textarea'
def get_value(self, html):
return ''.join(html.xpath('.//'+self.tag+'/text()'))
def test_escape(self):
class F(Form):
fields = [
Field('name',
conv=convs.Char(),
widget=self.widget())
]
form = F(self.env)
form.raw_data = MultiDict({'name': jinja2.Markup('</textarea>')})
render = form.get_field('name').widget.render()
html = self.parse(render)
value = self.get_value(html)
self.assertEqual(value, '</textarea>')
self.assert_('</textarea>' in six.text_type(render), render)
class TestCheckBox(TestFormClass):
widget = widgets.CheckBox
tag = 'input'
def get_value(self, html):
return bool(html.xpath('.//'+self.tag+'/@checked'))
def test_render(self):
class F(Form):
fields = [
Field('name',
conv=convs.Bool(),
widget=self.widget())
]
form = F(self.env)
form.raw_data = MultiDict({'name': ''})
render = form.get_field('name').widget.render()
html = self.parse(render)
value = self.get_value(html)
self.assertEqual(value, False)
form.raw_data = MultiDict({'name': 'checked'})
render = form.get_field('name').widget.render()
html = self.parse(render)
value = self.get_value(html)
self.assertEqual(value, True)
class TestHiddenInput(TestFormClass):
widget = widgets.HiddenInput
tag = 'input'
def get_value(self, html):
return html.xpath('.//'+self.tag+'/@value')[0]
def test_render(self):
class F(Form):
fields = [
Field('name',
conv=convs.Char(),
widget=self.widget())
]
form = F(self.env)
form.raw_data = MultiDict({'name': 'hidden value'})
render = form.get_field('name').widget.render()
html = self.parse(render)
value = self.get_value(html)
self.assertEqual(value, 'hidden value')
class TestCharDisplay(TestFormClass):
widget = widgets.CharDisplay
tag = 'span'
def get_value(self, html):
return ''.join(html.xpath('.//'+self.tag+'/text()'))
def test_render(self):
class F(Form):
fields = [
Field('name',
conv=convs.Char(),
widget=self.widget())
]
form = F(self.env)
form.raw_data = MultiDict({'name': '<p>char display</p>'})
render = form.get_field('name').widget.render()
html = self.parse(render)
value = self.get_value(html)
self.assertEqual(value, '<p>char display</p>')
def test_not_escape(self):
class F(Form):
fields = [
Field('name',
conv=convs.Char(),
widget=self.widget(escape=False))
]
form = F(self.env)
form.raw_data = MultiDict({'name': '<i>char display</i>'})
render = form.get_field('name').widget.render()
html = self.parse(render)
value = ''.join(html.xpath('.//'+self.tag+'/i/text()'))
self.assertEqual(value, 'char display')
def test_transform(self):
class F(Form):
fields = [
Field('name',
conv=convs.Char(),
widget=self.widget(getter=lambda x: x.replace('value', 'display')))
]
form = F(self.env)
form.raw_data = MultiDict({'name': 'char value'})
render = form.get_field('name').widget.render()
html = self.parse(render)
value = self.get_value(html)
self.assertEqual(value, 'char display')
class TestSelect(TestFormClass):
choices = [
('1', 'first'),
('2', 'second'),
]
widget = widgets.Select
def get_options(self, html):
return [(x.attrib['value'],
x.text,
'selected' in x.attrib)
for x in html.xpath('.//option')]
def check_multiple(self, html):
self.assertEqual(html.xpath('.//select/@multiple'),
['multiple'])
def check_not_multiple(self, html):
self.assertEqual(html.xpath('.//select/@multiple'),
[])
def test_render_not_required(self):
class F(Form):
fields = [
Field('name',
conv=convs.EnumChoice(choices=self.choices,
required=False),
widget=self.widget())
]
form = F(self.env)
form.raw_data = MultiDict({'name': '1'})
render = form.get_field('name').widget.render()
html = self.parse(render)
self.check_not_multiple(html)
options = self.get_options(html)
self.assertEqual(options, [('', self.widget.null_label, False),
('1', 'first', True),
('2', 'second', False)])
form.raw_data = MultiDict({'name': ''})
render = form.get_field('name').widget.render()
html = self.parse(render)
options = self.get_options(html)
self.assertEqual(options, [('', self.widget.null_label, True),
('1', 'first', False),
('2', 'second', False)])
def test_render_required(self):
class F(Form):
fields = [
Field('name',
conv=convs.EnumChoice(choices=self.choices,
required=True),
widget=self.widget())
]
form = F(self.env)
form.raw_data = MultiDict({'name': '1'})
render = form.get_field('name').widget.render()
html = self.parse(render)
self.check_not_multiple(html)
options = self.get_options(html)
self.assertEqual(options, [('1', 'first', True),
('2', 'second', False)])
form.raw_data = MultiDict({'name': ''})
render = form.get_field('name').widget.render()
html = self.parse(render)
options = self.get_options(html)
self.assertEqual(options, [('', self.widget.null_label, True),
('1', 'first', False),
('2', 'second', False)])
def test_render_multiple(self):
class F(Form):
fields = [
Field('name',
conv=convs.ListOf(
convs.EnumChoice(choices=self.choices,
required=True)),
widget=self.widget())
]
form = F(self.env)
form.raw_data = MultiDict([('name', '1'), ('name', '2')])
render = form.get_field('name').widget.render()
html = self.parse(render)
self.check_multiple(html)
options = self.get_options(html)
self.assertEqual(options, [('1', 'first', True),
('2', 'second', True)])
form.raw_data = MultiDict()
render = form.get_field('name').widget.render()
html = self.parse(render)
options = self.get_options(html)
self.assertEqual(options, [('1', 'first', False),
('2', 'second', False)])
def test_render_enum_boolean(self):
class F(Form):
fields = [
Field('name',
conv=convs.EnumChoice(conv=convs.Bool(),
required=True,
choices=[(False, u'no'),
(True, u'yes')]),
initial=False,
widget=self.widget())
]
form = F(self.env)
render = form.get_field('name').widget.render()
html = self.parse(render)
options = self.get_options(html)
self.assertEqual(options, [('', 'no', True),
('checked', 'yes', False)])
class TestCheckBoxSelect(TestSelect):
widget = widgets.CheckBoxSelect
def get_options(self, html):
return [(x.attrib['value'],
x.getparent().xpath('./label/text()')[0],
'checked' in x.attrib)
for x in html.xpath('.//input')]
def check_multiple(self, html):
self.assertEqual(html.xpath('.//input/@type')[0],
'checkbox')
def check_not_multiple(self, html):
self.assertEqual(html.xpath('.//input/@type')[0],
'radio')
class TestFieldList(TestFormClass):
def test_render(self):
class F(Form):
fields = [
FieldList('list',
field=FieldSet(None, fields=[
Field('name',
conv=convs.Char(),
widget=widgets.TextInput)]))
]
form = F(self.env)
form.accept(MultiDict((('list-indices','1'),
('list-indices', '2'),
('list.1.name', 'First' ),
('list.2.name', 'Second' )))
)
render = form.get_field('list').widget.render()
self.assertIn('<table class="fieldlist" id="list">', render)
self.assertIn('<input', render)
self.assertIn('value="1"', render)
self.assertIn('value="2"', render)
self.assertIn('First', render)
self.assertIn('Second', render)
template = form.get_field('list').widget.render_template_field()
self.assertIn('id="list.%list-index%.name"', template)
self.assertNotIn('First', template)
self.assertNotIn('Second', template)
|
|
# Copyright 2014 Google Inc. All Rights Reserved.
"""Command for creating instance templates."""
import collections
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.compute.lib import base_classes
from googlecloudsdk.compute.lib import constants
from googlecloudsdk.compute.lib import image_utils
from googlecloudsdk.compute.lib import instance_utils
from googlecloudsdk.compute.lib import metadata_utils
from googlecloudsdk.compute.lib import utils
DISK_METAVAR = (
'name=NAME [mode={ro,rw}] [boot={yes,no}] [device-name=DEVICE_NAME] '
'[auto-delete={yes,no}]')
class Create(base_classes.BaseAsyncCreator, image_utils.ImageExpander):
"""Create Google Compute Engine virtual machine instance templates."""
@staticmethod
def Args(parser):
metadata_utils.AddMetadataArgs(parser)
instance_utils.AddDiskArgs(parser)
instance_utils.AddLocalSsdArgs(parser)
instance_utils.AddImageArgs(parser)
instance_utils.AddCanIpForwardArgs(parser)
instance_utils.AddAddressArgs(parser, instances=False)
instance_utils.AddMachineTypeArgs(parser)
instance_utils.AddMaintenancePolicyArgs(parser)
instance_utils.AddNetworkArgs(parser)
instance_utils.AddNoRestartOnFailureArgs(parser)
instance_utils.AddScopeArgs(parser)
instance_utils.AddTagsArgs(parser)
parser.add_argument(
'--description',
help='Specifies a textual description for the instance template.')
parser.add_argument(
'name',
metavar='NAME',
help='The name of the instance template to create.')
@property
def service(self):
return self.compute.instanceTemplates
@property
def method(self):
return 'Insert'
@property
def resource_type(self):
return 'instanceTemplates'
def ValidateDiskFlags(self, args):
"""Validates the values of all disk-related flags.
Args:
args: the argparse arguments that this command was invoked with.
Raises:
ToolException: if any of the disk flags are invalid.
"""
boot_disk_specified = False
for disk in args.disk or []:
disk_name = disk.get('name')
if not disk_name:
raise exceptions.ToolException(
'[name] is missing in [--disk]. [--disk] value must be of the form '
'[{0}].'.format(DISK_METAVAR))
mode_value = disk.get('mode')
if mode_value and mode_value not in ('rw', 'ro'):
raise exceptions.ToolException(
'Value for [mode] in [--disk] must be [rw] or [ro], not [{0}].'
.format(mode_value))
boot_value = disk.get('boot')
if boot_value and boot_value not in ('yes', 'no'):
raise exceptions.ToolException(
'Value for [boot] in [--disk] must be [yes] or [no], not [{0}].'
.format(boot_value))
auto_delete_value = disk.get('auto-delete')
if auto_delete_value and auto_delete_value not in ('yes', 'no'):
raise exceptions.ToolException(
'Value for [auto-delete] in [--disk] must be [yes] or [no], not '
'[{0}].'.format(auto_delete_value))
# If this is a boot disk and we have already seen a boot disk,
# we need to fail because only one boot disk can be attached.
if boot_value == 'yes':
if boot_disk_specified:
raise exceptions.ToolException(
'Each instance can have exactly one boot disk. At least two '
'boot disks were specified through [--disk].')
else:
boot_disk_specified = True
if args.image and boot_disk_specified:
raise exceptions.ToolException(
'Each instance can have exactly one boot disk. One boot disk '
'was specified through [--disk] and another through [--image].')
if boot_disk_specified:
if args.boot_disk_device_name:
raise exceptions.ToolException(
'[--boot-disk-device-name] can only be used when creating a new '
'boot disk.')
if args.boot_disk_type:
raise exceptions.ToolException(
'[--boot-disk-type] can only be used when creating a new boot '
'disk.')
if args.boot_disk_size:
raise exceptions.ToolException(
'[--boot-disk-size] can only be used when creating a new boot '
'disk.')
if args.no_boot_disk_auto_delete:
raise exceptions.ToolException(
'[--no-boot-disk-auto-delete] can only be used when creating a '
'new boot disk.')
def UseExistingBootDisk(self, args):
"""Returns True if the user has specified an existing boot disk.
Args:
args: the argparse arguments that this command was invoked with.
Returns:
bool: True if an existing boot disk is to be used, False otherwise.
"""
return any(disk.get('boot') == 'yes' for disk in args.disk or [])
def CreateAttachedPersistentDiskMessages(self, args):
"""Returns a list of AttachedDisk messages based on command-line args.
Args:
args: the argparse arguments that this command was invoked with.
Returns:
disks: a list of AttachedDisk message objects
"""
disks = []
for disk in args.disk or []:
name = disk['name']
# Resolves the mode.
mode_value = disk.get('mode', 'rw')
if mode_value == 'rw':
mode = self.messages.AttachedDisk.ModeValueValuesEnum.READ_WRITE
else:
mode = self.messages.AttachedDisk.ModeValueValuesEnum.READ_ONLY
boot = disk.get('boot') == 'yes'
auto_delete = disk.get('auto-delete') == 'yes'
attached_disk = self.messages.AttachedDisk(
autoDelete=auto_delete,
boot=boot,
deviceName=disk.get('device-name'),
mode=mode,
source=name,
type=self.messages.AttachedDisk.TypeValueValuesEnum.PERSISTENT)
# The boot disk must end up at index 0.
if boot:
disks = [attached_disk] + disks
else:
disks.append(attached_disk)
return disks
def CreateDefaultBootAttachedDiskMessage(
self, args, boot_disk_size_gb, image_uri):
"""Returns an AttachedDisk message for creating a new boot disk.
Args:
args: the argparse arguments that this command was invoked with.
boot_disk_size_gb: size of the boot disk in GBs
image_uri: the source image URI
Returns:
disk: an AttachedDisk message object
"""
disk_type = None
if args.boot_disk_type:
disk_type = args.boot_disk_type
return self.messages.AttachedDisk(
autoDelete=not args.no_boot_disk_auto_delete,
boot=True,
deviceName=args.boot_disk_device_name,
initializeParams=self.messages.AttachedDiskInitializeParams(
sourceImage=image_uri,
diskSizeGb=boot_disk_size_gb,
diskType=disk_type),
mode=self.messages.AttachedDisk.ModeValueValuesEnum.READ_WRITE,
type=self.messages.AttachedDisk.TypeValueValuesEnum.PERSISTENT)
def CreateServiceAccountMessages(self, args):
"""Returns a list of ServiceAccount messages corresponding to --scopes.
Args:
args: the argparse arguments that this command was invoked with.
Returns:
res: a list of ServiceAccount message objects
Raises:
ToolException: if the scopes are provided in an invalid format.
"""
if args.no_scopes:
scopes = []
else:
scopes = args.scopes or constants.DEFAULT_SCOPES
accounts_to_scopes = collections.defaultdict(list)
for scope in scopes:
parts = scope.split('=')
if len(parts) == 1:
account = 'default'
scope_uri = scope
elif len(parts) == 2:
account, scope_uri = parts
else:
raise exceptions.ToolException(
'[{0}] is an illegal value for [--scopes]. Values must be of the '
'form [SCOPE] or [ACCOUNT=SCOPE].'.format(scope))
# Expands the scope if the user provided an alias like
# "compute-rw".
scope_uri = constants.SCOPES.get(scope_uri, scope_uri)
accounts_to_scopes[account].append(scope_uri)
res = []
for account, scopes in sorted(accounts_to_scopes.iteritems()):
res.append(self.messages.ServiceAccount(
email=account,
scopes=sorted(scopes)))
return res
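# Illustrative sketch of the grouping above (hypothetical values, not from the
# original file): an input of
#   scopes = ['compute-rw', 'my-sa@example.com=storage-ro']
# is grouped per account as
#   {'default': [compute-rw expanded via constants.SCOPES],
#    'my-sa@example.com': [storage-ro expanded via constants.SCOPES]}
# where each scope is expanded only if it is a known alias, otherwise passed
# through unchanged, and one ServiceAccount message is returned per account
# with its scopes sorted.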
def CreateNetworkInterfaceMessage(self, args):
"""Creates and returns a new NetworkInterface message.
Args:
args: the argparse arguments that this command was invoked with.
Returns:
network_interface: a NetworkInterface message object
"""
network_ref = self.CreateGlobalReference(
args.network, resource_type='networks')
network_interface = self.messages.NetworkInterface(
network=network_ref.SelfLink())
if not args.no_address:
access_config = self.messages.AccessConfig(
name=constants.DEFAULT_ACCESS_CONFIG_NAME,
type=self.messages.AccessConfig.TypeValueValuesEnum.ONE_TO_ONE_NAT)
# If the user provided an external IP, populate the access
# config with it.
if args.address:
access_config.natIP = args.address
network_interface.accessConfigs = [access_config]
return network_interface
def CreateRequests(self, args):
"""Creates and returns an InstanceTemplates.Insert request.
Args:
args: the argparse arguments that this command was invoked with.
Returns:
request: a ComputeInstanceTemplatesInsertRequest message object
"""
self.ValidateDiskFlags(args)
instance_utils.ValidateLocalSsdFlags(args)
boot_disk_size_gb = utils.BytesToGb(args.boot_disk_size)
utils.WarnIfDiskSizeIsTooSmall(boot_disk_size_gb, args.boot_disk_type)
instance_template_ref = self.CreateGlobalReference(args.name)
metadata = metadata_utils.ConstructMetadataMessage(
self.messages,
metadata=args.metadata,
metadata_from_file=args.metadata_from_file)
network_interface = self.CreateNetworkInterfaceMessage(args)
if args.maintenance_policy:
on_host_maintenance = (
self.messages.Scheduling.OnHostMaintenanceValueValuesEnum(
args.maintenance_policy))
else:
on_host_maintenance = None
scheduling = self.messages.Scheduling(
automaticRestart=not args.no_restart_on_failure,
onHostMaintenance=on_host_maintenance)
service_accounts = self.CreateServiceAccountMessages(args)
create_boot_disk = not self.UseExistingBootDisk(args)
if create_boot_disk:
image_uri, _ = self.ExpandImageFlag(
args,
return_image_resource=True)
else:
image_uri = None
if args.tags:
tags = self.messages.Tags(items=args.tags)
else:
tags = None
persistent_disks = self.CreateAttachedPersistentDiskMessages(args)
if create_boot_disk:
boot_disk_list = [self.CreateDefaultBootAttachedDiskMessage(
args, boot_disk_size_gb, image_uri)]
else:
boot_disk_list = []
local_ssds = [
instance_utils.CreateLocalSsdMessage(
self, x.get('device-name'), x.get('interface'))
for x in args.local_ssd or []]
disks = boot_disk_list + persistent_disks + local_ssds
request = self.messages.ComputeInstanceTemplatesInsertRequest(
instanceTemplate=self.messages.InstanceTemplate(
properties=self.messages.InstanceProperties(
machineType=args.machine_type,
disks=disks,
canIpForward=args.can_ip_forward,
metadata=metadata,
networkInterfaces=[network_interface],
serviceAccounts=service_accounts,
scheduling=scheduling,
tags=tags,
),
description=args.description,
name=instance_template_ref.Name(),
),
project=self.context['project'])
return [request]
Create.detailed_help = {
'brief': 'Create a Compute Engine virtual machine instance template',
'DESCRIPTION': """\
*{command}* facilitates the creation of Google Compute Engine
virtual machine instance templates. For example, running:
$ {command} INSTANCE-TEMPLATE
will create one instance template called 'INSTANCE-TEMPLATE'.
Instance templates are global resources, and can be used to create
instances in any zone.
""",
}
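# Illustrative flag usage (hypothetical values; the key=value layout follows
# DISK_METAVAR above and is validated by ValidateDiskFlags):
#   --disk name=data-disk mode=ro boot=no auto-delete=yes
# attaches the existing disk 'data-disk' read-only as a non-boot disk that is
# not auto-deleted with the instance.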
|
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import collections
import itertools
import re
from ._structures import Infinity
__all__ = [
"parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
]
_Version = collections.namedtuple(
"_Version",
["epoch", "release", "dev", "pre", "post", "local"],
)
def parse(version):
"""
Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on whether the given version is
a valid PEP 440 version or a legacy version.
"""
try:
return Version(version)
except InvalidVersion:
return LegacyVersion(version)
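# Illustrative examples (not part of the original module):
#   parse("1.0.post1")   -> a Version instance (valid PEP 440)
#   parse("1.0-foo-bar") -> a LegacyVersion instance (InvalidVersion fallback)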
class InvalidVersion(ValueError):
"""
An invalid version was found; users should refer to PEP 440.
"""
class _BaseVersion(object):
def __hash__(self):
return hash(self._key)
def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
return self._compare(other, lambda s, o: s != o)
def _compare(self, other, method):
if not isinstance(other, _BaseVersion):
return NotImplemented
return method(self._key, other._key)
class LegacyVersion(_BaseVersion):
def __init__(self, version):
self._version = str(version)
self._key = _legacy_cmpkey(self._version)
def __str__(self):
return self._version
def __repr__(self):
return "<LegacyVersion({0})>".format(repr(str(self)))
@property
def public(self):
return self._version
@property
def base_version(self):
return self._version
@property
def epoch(self):
return -1
@property
def release(self):
return None
@property
def pre(self):
return None
@property
def post(self):
return None
@property
def dev(self):
return None
@property
def local(self):
return None
@property
def is_prerelease(self):
return False
@property
def is_postrelease(self):
return False
@property
def is_devrelease(self):
return False
_legacy_version_component_re = re.compile(
r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)
_legacy_version_replacement_map = {
"pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}
def _parse_version_parts(s):
for part in _legacy_version_component_re.split(s):
part = _legacy_version_replacement_map.get(part, part)
if not part or part == ".":
continue
if part[:1] in "0123456789":
# pad for numeric comparison
yield part.zfill(8)
else:
yield "*" + part
# ensure that alpha/beta/candidate are before final
yield "*final"
def _legacy_cmpkey(version):
# We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
# greater than or equal to 0. This effectively sorts the LegacyVersion,
# which uses the de facto standard originally implemented by setuptools,
# before all PEP 440 versions.
epoch = -1
# This scheme is taken from setuptools' pkg_resources.parse_version, prior to
# its adoption of the packaging library.
parts = []
for part in _parse_version_parts(version.lower()):
if part.startswith("*"):
# remove "-" before a prerelease tag
if part < "*final":
while parts and parts[-1] == "*final-":
parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1] == "00000000":
parts.pop()
parts.append(part)
parts = tuple(parts)
return epoch, parts
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
v?
(?:
(?:(?P<epoch>[0-9]+)!)? # epoch
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
(?P<pre> # pre-release
[-_\.]?
(?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
[-_\.]?
(?P<pre_n>[0-9]+)?
)?
(?P<post> # post release
(?:-(?P<post_n1>[0-9]+))
|
(?:
[-_\.]?
(?P<post_l>post|rev|r)
[-_\.]?
(?P<post_n2>[0-9]+)?
)
)?
(?P<dev> # dev release
[-_\.]?
(?P<dev_l>dev)
[-_\.]?
(?P<dev_n>[0-9]+)?
)?
)
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
"""
class Version(_BaseVersion):
_regex = re.compile(
r"^\s*" + VERSION_PATTERN + r"\s*$",
re.VERBOSE | re.IGNORECASE,
)
def __init__(self, version):
# Validate the version and parse it into pieces
match = self._regex.search(version)
if not match:
raise InvalidVersion("Invalid version: '{0}'".format(version))
# Store the parsed out pieces of the version
self._version = _Version(
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
release=tuple(int(i) for i in match.group("release").split(".")),
pre=_parse_letter_version(
match.group("pre_l"),
match.group("pre_n"),
),
post=_parse_letter_version(
match.group("post_l"),
match.group("post_n1") or match.group("post_n2"),
),
dev=_parse_letter_version(
match.group("dev_l"),
match.group("dev_n"),
),
local=_parse_local_version(match.group("local")),
)
# Generate a key which will be used for sorting
self._key = _cmpkey(
self._version.epoch,
self._version.release,
self._version.pre,
self._version.post,
self._version.dev,
self._version.local,
)
def __repr__(self):
return "<Version({0})>".format(repr(str(self)))
def __str__(self):
parts = []
# Epoch
if self.epoch != 0:
parts.append("{0}!".format(self.epoch))
# Release segment
parts.append(".".join(str(x) for x in self.release))
# Pre-release
if self.pre is not None:
parts.append("".join(str(x) for x in self.pre))
# Post-release
if self.post is not None:
parts.append(".post{0}".format(self.post))
# Development release
if self.dev is not None:
parts.append(".dev{0}".format(self.dev))
# Local version segment
if self.local is not None:
parts.append("+{0}".format(self.local))
return "".join(parts)
@property
def epoch(self):
return self._version.epoch
@property
def release(self):
return self._version.release
@property
def pre(self):
return self._version.pre
@property
def post(self):
return self._version.post[1] if self._version.post else None
@property
def dev(self):
return self._version.dev[1] if self._version.dev else None
@property
def local(self):
if self._version.local:
return ".".join(str(x) for x in self._version.local)
else:
return None
@property
def public(self):
return str(self).split("+", 1)[0]
@property
def base_version(self):
parts = []
# Epoch
if self.epoch != 0:
parts.append("{0}!".format(self.epoch))
# Release segment
parts.append(".".join(str(x) for x in self.release))
return "".join(parts)
@property
def is_prerelease(self):
return self.dev is not None or self.pre is not None
@property
def is_postrelease(self):
return self.post is not None
@property
def is_devrelease(self):
return self.dev is not None
def _parse_letter_version(letter, number):
if letter:
# We consider there to be an implicit 0 in a pre-release if there is
# not a numeral associated with it.
if number is None:
number = 0
# We normalize any letters to their lower case form
letter = letter.lower()
# We consider some words to be alternate spellings of other words and
# in those cases we want to normalize the spellings to our preferred
# spelling.
if letter == "alpha":
letter = "a"
elif letter == "beta":
letter = "b"
elif letter in ["c", "pre", "preview"]:
letter = "rc"
elif letter in ["rev", "r"]:
letter = "post"
return letter, int(number)
if not letter and number:
# We assume if we are given a number, but we are not given a letter
# then this is using the implicit post release syntax (e.g. 1.0-1)
letter = "post"
return letter, int(number)
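# Illustrative examples (not part of the original module):
#   _parse_letter_version("alpha", None) -> ("a", 0)    # implicit 0, normalized spelling
#   _parse_letter_version("rev", "2")    -> ("post", 2)
#   _parse_letter_version(None, "1")     -> ("post", 1)  # implicit post release, e.g. 1.0-1
#   _parse_letter_version(None, None)    -> None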
_local_version_separators = re.compile(r"[\._-]")
def _parse_local_version(local):
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
for part in _local_version_separators.split(local)
)
def _cmpkey(epoch, release, pre, post, dev, local):
# When we compare a release version, we want to compare it with all of the
# trailing zeros removed. So we reverse the list, drop all of the now-leading
# zeros until we come to something non-zero, then re-reverse the rest back
# into the correct order and make it a tuple to use as our sorting key.
release = tuple(
reversed(list(
itertools.dropwhile(
lambda x: x == 0,
reversed(release),
)
))
)
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
# We'll do this by abusing the pre segment, but we _only_ want to do this
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
pre = -Infinity
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
pre = Infinity
# Versions without a post segment should sort before those with one.
if post is None:
post = -Infinity
# Versions without a development segment should sort after those with one.
if dev is None:
dev = Infinity
if local is None:
# Versions without a local segment should sort before those with one.
local = -Infinity
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
# - Alpha numeric segments sort before numeric segments
# - Alpha numeric segments sort lexicographically
# - Numeric segments sort numerically
# - Shorter versions sort before longer versions when the prefixes
# match exactly
local = tuple(
(i, "") if isinstance(i, int) else (-Infinity, i)
for i in local
)
return epoch, release, pre, post, dev, local
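# Illustrative check (not part of the original module) of the ordering implied
# by _cmpkey above: dev releases sort before pre-releases, which sort before
# final releases, post releases and local versions.
if __name__ == "__main__":  # pragma: no cover - example only
    assert Version("1.0.dev0") < Version("1.0a1") < Version("1.0") \
        < Version("1.0.post1") < Version("1.0.post1+abc")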
|
|
# Copyright 2010-2011 OpenStack Foundation
# Copyright 2012-2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for database migrations.
There are "opportunistic" tests which allows testing against all 3 databases
(sqlite in memory, mysql, pg) in a properly configured unit test environment.
For the opportunistic testing you need to set up db's named 'openstack_citest'
with user 'openstack_citest' and password 'openstack_citest' on localhost. The
test will then use that db and u/p combo to run the tests.
For postgres on Ubuntu this can be done with the following commands::
| sudo -u postgres psql
| postgres=# create user openstack_citest with createdb login password
| 'openstack_citest';
| postgres=# create database openstack_citest with owner openstack_citest;
"""
import glob
import os
from migrate import UniqueConstraint
from migrate.versioning import repository
import mock
from oslo_db.sqlalchemy import test_base
from oslo_db.sqlalchemy import test_migrations
from oslo_db.sqlalchemy import utils as oslodbutils
import sqlalchemy
from sqlalchemy.engine import reflection
import sqlalchemy.exc
from sqlalchemy.sql import null
from nova.db import migration
from nova.db.sqlalchemy import migrate_repo
from nova.db.sqlalchemy import migration as sa_migration
from nova.db.sqlalchemy import models
from nova.db.sqlalchemy import utils as db_utils
from nova import exception
from nova import test
from nova.tests import fixtures as nova_fixtures
# TODO(sdague): no tests in the nova/tests tree should inherit from
# base test classes in another library. This causes all kinds of havoc,
# because those base classes do things incorrectly for what we need in
# subunit reporting. This is a long unwind, but should be done in the future
# and any code needed out of oslo_db should be exported / accessed as
# a fixture.
class NovaMigrationsCheckers(test_migrations.ModelsMigrationsSync,
test_migrations.WalkVersionsMixin):
"""Test sqlalchemy-migrate migrations."""
TIMEOUT_SCALING_FACTOR = 4
@property
def INIT_VERSION(self):
return migration.db_initial_version()
@property
def REPOSITORY(self):
return repository.Repository(
os.path.abspath(os.path.dirname(migrate_repo.__file__)))
@property
def migration_api(self):
return sa_migration.versioning_api
@property
def migrate_engine(self):
return self.engine
def setUp(self):
# NOTE(sdague): the oslo_db base test case completely
# invalidates our logging setup, so we actually have to do that
# before it is called to keep it from vomiting all over our
# test output.
self.useFixture(nova_fixtures.StandardLogging())
super(NovaMigrationsCheckers, self).setUp()
# NOTE(rpodolyaka): we need to repeat the functionality of the base
# test case a bit here, as it gets overridden by the oslotest base test
# case, and the nova base test case cleanup must run last (as it
# deletes attributes of test case instances).
self.useFixture(nova_fixtures.Timeout(
os.environ.get('OS_TEST_TIMEOUT', 0),
self.TIMEOUT_SCALING_FACTOR))
def assertColumnExists(self, engine, table_name, column):
self.assertTrue(oslodbutils.column_exists(engine, table_name, column),
'Column %s.%s does not exist' % (table_name, column))
def assertColumnNotExists(self, engine, table_name, column):
self.assertFalse(oslodbutils.column_exists(engine, table_name, column),
'Column %s.%s should not exist' % (table_name, column))
def assertTableNotExists(self, engine, table):
self.assertRaises(sqlalchemy.exc.NoSuchTableError,
oslodbutils.get_table, engine, table)
def assertIndexExists(self, engine, table_name, index):
self.assertTrue(oslodbutils.index_exists(engine, table_name, index),
'Index %s on table %s does not exist' %
(index, table_name))
def assertIndexNotExists(self, engine, table_name, index):
self.assertFalse(oslodbutils.index_exists(engine, table_name, index),
'Index %s on table %s should not exist' %
(index, table_name))
def assertIndexMembers(self, engine, table, index, members):
# NOTE(johannes): Order of columns can matter. Most SQL databases
# can use the leading columns for optimizing queries that don't
# include all of the covered columns.
self.assertIndexExists(engine, table, index)
t = oslodbutils.get_table(engine, table)
index_columns = None
for idx in t.indexes:
if idx.name == index:
index_columns = [c.name for c in idx.columns]
break
self.assertEqual(members, index_columns)
# Implementations for ModelsMigrationsSync
def db_sync(self, engine):
with mock.patch.object(sa_migration, 'get_engine',
return_value=engine):
sa_migration.db_sync()
def get_engine(self, context=None):
return self.migrate_engine
def get_metadata(self):
return models.BASE.metadata
def include_object(self, object_, name, type_, reflected, compare_to):
if type_ == 'table':
# migrate_version is a sqlalchemy-migrate control table and
# isn't included in the model. shadow_* are generated from
# the model and have their own tests to ensure they don't
# drift.
if name == 'migrate_version' or name.startswith('shadow_'):
return False
return True
def _skippable_migrations(self):
special = [
216, # Havana
272, # NOOP migration due to revert
]
havana_placeholders = list(range(217, 227))
icehouse_placeholders = list(range(235, 244))
juno_placeholders = list(range(255, 265))
kilo_placeholders = list(range(281, 291))
liberty_placeholders = list(range(303, 313))
mitaka_placeholders = list(range(320, 330))
newton_placeholders = list(range(335, 345))
ocata_placeholders = list(range(348, 358))
pike_placeholders = list(range(363, 373))
queens_placeholders = list(range(379, 389))
return (special +
havana_placeholders +
icehouse_placeholders +
juno_placeholders +
kilo_placeholders +
liberty_placeholders +
mitaka_placeholders +
newton_placeholders +
ocata_placeholders +
pike_placeholders +
queens_placeholders)
def migrate_up(self, version, with_data=False):
if with_data:
check = getattr(self, "_check_%03d" % version, None)
if version not in self._skippable_migrations():
self.assertIsNotNone(check,
('DB Migration %i does not have a '
'test. Please add one!') % version)
# NOTE(danms): This is a list of migrations where we allow dropping
# things. The rules for adding things here are very very specific.
# Chances are you don't meet the criteria.
# Reviewers: DO NOT ALLOW THINGS TO BE ADDED HERE
exceptions = [
# 267 enforces non-nullable instance.uuid. This was mostly
# a special case because instance.uuid shouldn't be able
# to be nullable
267,
# 278 removes a FK restriction, so it's an alter operation
# that doesn't break existing users
278,
# 280 enforces non-null keypair name. This is really not
# something we should allow, but it's in the past
280,
# 292 drops completely orphaned tables with no users, so
# it can be done without affecting anything.
292,
# 346 Drops column scheduled_at from instances table since it
# is no longer used. The field value is always NULL so
# it does not affect anything.
346,
]
# Reviewers: DO NOT ALLOW THINGS TO BE ADDED HERE
# NOTE(danms): We only started requiring things be additive in
# kilo, so ignore all migrations before that point.
KILO_START = 265
if version >= KILO_START and version not in exceptions:
banned = ['Table', 'Column']
else:
banned = None
with nova_fixtures.BannedDBSchemaOperations(banned):
super(NovaMigrationsCheckers, self).migrate_up(version, with_data)
def test_walk_versions(self):
self.walk_versions(snake_walk=False, downgrade=False)
def _check_227(self, engine, data):
table = oslodbutils.get_table(engine, 'project_user_quotas')
# Insert fake_quotas with the longest resource name.
fake_quotas = {'id': 5,
'project_id': 'fake_project',
'user_id': 'fake_user',
'resource': 'injected_file_content_bytes',
'hard_limit': 10}
table.insert().execute(fake_quotas)
# Check we can get the longest resource name.
quota = table.select(table.c.id == 5).execute().first()
self.assertEqual(quota['resource'], 'injected_file_content_bytes')
def _check_228(self, engine, data):
self.assertColumnExists(engine, 'compute_nodes', 'metrics')
compute_nodes = oslodbutils.get_table(engine, 'compute_nodes')
self.assertIsInstance(compute_nodes.c.metrics.type,
sqlalchemy.types.Text)
def _check_229(self, engine, data):
self.assertColumnExists(engine, 'compute_nodes', 'extra_resources')
compute_nodes = oslodbutils.get_table(engine, 'compute_nodes')
self.assertIsInstance(compute_nodes.c.extra_resources.type,
sqlalchemy.types.Text)
def _check_230(self, engine, data):
for table_name in ['instance_actions_events',
'shadow_instance_actions_events']:
self.assertColumnExists(engine, table_name, 'host')
self.assertColumnExists(engine, table_name, 'details')
action_events = oslodbutils.get_table(engine,
'instance_actions_events')
self.assertIsInstance(action_events.c.host.type,
sqlalchemy.types.String)
self.assertIsInstance(action_events.c.details.type,
sqlalchemy.types.Text)
def _check_231(self, engine, data):
self.assertColumnExists(engine, 'instances', 'ephemeral_key_uuid')
instances = oslodbutils.get_table(engine, 'instances')
self.assertIsInstance(instances.c.ephemeral_key_uuid.type,
sqlalchemy.types.String)
self.assertTrue(db_utils.check_shadow_table(engine, 'instances'))
def _check_232(self, engine, data):
table_names = ['compute_node_stats', 'compute_nodes',
'instance_actions', 'instance_actions_events',
'instance_faults', 'migrations']
for table_name in table_names:
self.assertTableNotExists(engine, 'dump_' + table_name)
def _check_233(self, engine, data):
self.assertColumnExists(engine, 'compute_nodes', 'stats')
compute_nodes = oslodbutils.get_table(engine, 'compute_nodes')
self.assertIsInstance(compute_nodes.c.stats.type,
sqlalchemy.types.Text)
self.assertRaises(sqlalchemy.exc.NoSuchTableError,
oslodbutils.get_table, engine, 'compute_node_stats')
def _check_234(self, engine, data):
self.assertIndexMembers(engine, 'reservations',
'reservations_deleted_expire_idx',
['deleted', 'expire'])
def _check_244(self, engine, data):
volume_usage_cache = oslodbutils.get_table(
engine, 'volume_usage_cache')
self.assertEqual(64, volume_usage_cache.c.user_id.type.length)
def _pre_upgrade_245(self, engine):
# create a fake network
networks = oslodbutils.get_table(engine, 'networks')
fake_network = {'id': 1}
networks.insert().execute(fake_network)
def _check_245(self, engine, data):
networks = oslodbutils.get_table(engine, 'networks')
network = networks.select(networks.c.id == 1).execute().first()
# mtu should default to None
self.assertIsNone(network.mtu)
# dhcp_server should default to None
self.assertIsNone(network.dhcp_server)
# enable dhcp should default to true
self.assertTrue(network.enable_dhcp)
# share address should default to false
self.assertFalse(network.share_address)
def _check_246(self, engine, data):
pci_devices = oslodbutils.get_table(engine, 'pci_devices')
self.assertEqual(1, len([fk for fk in pci_devices.foreign_keys
if fk.parent.name == 'compute_node_id']))
def _check_247(self, engine, data):
quota_usages = oslodbutils.get_table(engine, 'quota_usages')
self.assertFalse(quota_usages.c.resource.nullable)
pci_devices = oslodbutils.get_table(engine, 'pci_devices')
self.assertTrue(pci_devices.c.deleted.nullable)
self.assertFalse(pci_devices.c.product_id.nullable)
self.assertFalse(pci_devices.c.vendor_id.nullable)
self.assertFalse(pci_devices.c.dev_type.nullable)
def _check_248(self, engine, data):
self.assertIndexMembers(engine, 'reservations',
'reservations_deleted_expire_idx',
['deleted', 'expire'])
def _check_249(self, engine, data):
# Assert that only one index exists that covers columns
# instance_uuid and device_name
bdm = oslodbutils.get_table(engine, 'block_device_mapping')
self.assertEqual(1, len([i for i in bdm.indexes
if [c.name for c in i.columns] ==
['instance_uuid', 'device_name']]))
def _check_250(self, engine, data):
self.assertTableNotExists(engine, 'instance_group_metadata')
self.assertTableNotExists(engine, 'shadow_instance_group_metadata')
def _check_251(self, engine, data):
self.assertColumnExists(engine, 'compute_nodes', 'numa_topology')
self.assertColumnExists(engine, 'shadow_compute_nodes',
'numa_topology')
compute_nodes = oslodbutils.get_table(engine, 'compute_nodes')
shadow_compute_nodes = oslodbutils.get_table(engine,
'shadow_compute_nodes')
self.assertIsInstance(compute_nodes.c.numa_topology.type,
sqlalchemy.types.Text)
self.assertIsInstance(shadow_compute_nodes.c.numa_topology.type,
sqlalchemy.types.Text)
def _check_252(self, engine, data):
oslodbutils.get_table(engine, 'instance_extra')
oslodbutils.get_table(engine, 'shadow_instance_extra')
self.assertIndexMembers(engine, 'instance_extra',
'instance_extra_idx',
['instance_uuid'])
def _check_253(self, engine, data):
self.assertColumnExists(engine, 'instance_extra', 'pci_requests')
self.assertColumnExists(
engine, 'shadow_instance_extra', 'pci_requests')
instance_extra = oslodbutils.get_table(engine, 'instance_extra')
shadow_instance_extra = oslodbutils.get_table(engine,
'shadow_instance_extra')
self.assertIsInstance(instance_extra.c.pci_requests.type,
sqlalchemy.types.Text)
self.assertIsInstance(shadow_instance_extra.c.pci_requests.type,
sqlalchemy.types.Text)
def _check_254(self, engine, data):
self.assertColumnExists(engine, 'pci_devices', 'request_id')
self.assertColumnExists(
engine, 'shadow_pci_devices', 'request_id')
pci_devices = oslodbutils.get_table(engine, 'pci_devices')
shadow_pci_devices = oslodbutils.get_table(
engine, 'shadow_pci_devices')
self.assertIsInstance(pci_devices.c.request_id.type,
sqlalchemy.types.String)
self.assertIsInstance(shadow_pci_devices.c.request_id.type,
sqlalchemy.types.String)
def _check_265(self, engine, data):
# Assert that only one index exists that covers columns
# host and deleted
instances = oslodbutils.get_table(engine, 'instances')
self.assertEqual(1, len([i for i in instances.indexes
if [c.name for c in i.columns][:2] ==
['host', 'deleted']]))
# and only one index covers host column
iscsi_targets = oslodbutils.get_table(engine, 'iscsi_targets')
self.assertEqual(1, len([i for i in iscsi_targets.indexes
if [c.name for c in i.columns][:1] ==
['host']]))
def _check_266(self, engine, data):
self.assertColumnExists(engine, 'tags', 'resource_id')
self.assertColumnExists(engine, 'tags', 'tag')
table = oslodbutils.get_table(engine, 'tags')
self.assertIsInstance(table.c.resource_id.type,
sqlalchemy.types.String)
self.assertIsInstance(table.c.tag.type,
sqlalchemy.types.String)
def _pre_upgrade_267(self, engine):
# Create a fixed_ips row with a null instance_uuid (if not already
# there) to make sure that's not deleted.
fixed_ips = oslodbutils.get_table(engine, 'fixed_ips')
fake_fixed_ip = {'id': 1}
fixed_ips.insert().execute(fake_fixed_ip)
# Create an instance record with a valid (non-null) UUID so we make
# sure we don't do something stupid and delete valid records.
instances = oslodbutils.get_table(engine, 'instances')
fake_instance = {'id': 1, 'uuid': 'fake-non-null-uuid'}
instances.insert().execute(fake_instance)
# Add a null instance_uuid entry for the volumes table
# since it doesn't have a foreign key back to the instances table.
volumes = oslodbutils.get_table(engine, 'volumes')
fake_volume = {'id': '9c3c317e-24db-4d57-9a6f-96e6d477c1da'}
volumes.insert().execute(fake_volume)
def _check_267(self, engine, data):
# Make sure instances.uuid is now non-nullable and the unique constraint
# exists; fixed_ips.instance_uuid stays nullable.
fixed_ips = oslodbutils.get_table(engine, 'fixed_ips')
self.assertTrue(fixed_ips.c.instance_uuid.nullable)
fixed_ip = fixed_ips.select(fixed_ips.c.id == 1).execute().first()
self.assertIsNone(fixed_ip.instance_uuid)
instances = oslodbutils.get_table(engine, 'instances')
self.assertFalse(instances.c.uuid.nullable)
inspector = reflection.Inspector.from_engine(engine)
constraints = inspector.get_unique_constraints('instances')
constraint_names = [constraint['name'] for constraint in constraints]
self.assertIn('uniq_instances0uuid', constraint_names)
# Make sure the instances record with the valid uuid is still there.
instance = instances.select(instances.c.id == 1).execute().first()
self.assertIsNotNone(instance)
# Check that the null entry in the volumes table is still there since
# we skipped tables that don't have FK's back to the instances table.
volumes = oslodbutils.get_table(engine, 'volumes')
self.assertTrue(volumes.c.instance_uuid.nullable)
volume = volumes.select(
volumes.c.id == '9c3c317e-24db-4d57-9a6f-96e6d477c1da'
).execute().first()
self.assertIsNone(volume.instance_uuid)
def test_migration_267(self):
# This is separate from test_walk_versions so we can test the case
# where there are non-null instance_uuid entries in the database which
# cause the 267 migration to fail.
engine = self.migrate_engine
self.migration_api.version_control(
engine, self.REPOSITORY, self.INIT_VERSION)
self.migration_api.upgrade(engine, self.REPOSITORY, 266)
# Create a consoles record with a null instance_uuid so
# we can test that the upgrade fails if that entry is found.
# NOTE(mriedem): We use the consoles table since that's the only table
# created in the 216 migration with a ForeignKey created on the
# instance_uuid column for sqlite.
consoles = oslodbutils.get_table(engine, 'consoles')
fake_console = {'id': 1}
consoles.insert().execute(fake_console)
# NOTE(mriedem): We run the 267 migration and expect it to
# hit a ValidationError because the consoles table has
# a null instance_uuid entry.
ex = self.assertRaises(exception.ValidationError,
self.migration_api.upgrade,
engine, self.REPOSITORY, 267)
self.assertIn("There are 1 records in the "
"'consoles' table where the uuid or "
"instance_uuid column is NULL.",
ex.kwargs['detail'])
# Remove the consoles entry with the null instance_uuid column.
rows = consoles.delete().where(
consoles.c['instance_uuid'] == null()).execute().rowcount
self.assertEqual(1, rows)
# Now run the 267 upgrade again.
self.migration_api.upgrade(engine, self.REPOSITORY, 267)
# Make sure the consoles entry with the null instance_uuid
# was deleted.
console = consoles.select(consoles.c.id == 1).execute().first()
self.assertIsNone(console)
def _check_268(self, engine, data):
# We can only assert that the col exists, not the unique constraint
# as the engine is running sqlite
self.assertColumnExists(engine, 'compute_nodes', 'host')
self.assertColumnExists(engine, 'shadow_compute_nodes', 'host')
compute_nodes = oslodbutils.get_table(engine, 'compute_nodes')
shadow_compute_nodes = oslodbutils.get_table(
engine, 'shadow_compute_nodes')
self.assertIsInstance(compute_nodes.c.host.type,
sqlalchemy.types.String)
self.assertIsInstance(shadow_compute_nodes.c.host.type,
sqlalchemy.types.String)
def _check_269(self, engine, data):
self.assertColumnExists(engine, 'pci_devices', 'numa_node')
self.assertColumnExists(engine, 'shadow_pci_devices', 'numa_node')
pci_devices = oslodbutils.get_table(engine, 'pci_devices')
shadow_pci_devices = oslodbutils.get_table(
engine, 'shadow_pci_devices')
self.assertIsInstance(pci_devices.c.numa_node.type,
sqlalchemy.types.Integer)
self.assertTrue(pci_devices.c.numa_node.nullable)
self.assertIsInstance(shadow_pci_devices.c.numa_node.type,
sqlalchemy.types.Integer)
self.assertTrue(shadow_pci_devices.c.numa_node.nullable)
def _check_270(self, engine, data):
self.assertColumnExists(engine, 'instance_extra', 'flavor')
self.assertColumnExists(engine, 'shadow_instance_extra', 'flavor')
instance_extra = oslodbutils.get_table(engine, 'instance_extra')
shadow_instance_extra = oslodbutils.get_table(
engine, 'shadow_instance_extra')
self.assertIsInstance(instance_extra.c.flavor.type,
sqlalchemy.types.Text)
self.assertIsInstance(shadow_instance_extra.c.flavor.type,
sqlalchemy.types.Text)
def _check_271(self, engine, data):
self.assertIndexMembers(engine, 'block_device_mapping',
'snapshot_id', ['snapshot_id'])
self.assertIndexMembers(engine, 'block_device_mapping',
'volume_id', ['volume_id'])
self.assertIndexMembers(engine, 'dns_domains',
'dns_domains_project_id_idx',
['project_id'])
self.assertIndexMembers(engine, 'fixed_ips',
'network_id', ['network_id'])
self.assertIndexMembers(engine, 'fixed_ips',
'fixed_ips_instance_uuid_fkey',
['instance_uuid'])
self.assertIndexMembers(engine, 'fixed_ips',
'fixed_ips_virtual_interface_id_fkey',
['virtual_interface_id'])
self.assertIndexMembers(engine, 'floating_ips',
'fixed_ip_id', ['fixed_ip_id'])
self.assertIndexMembers(engine, 'iscsi_targets',
'iscsi_targets_volume_id_fkey', ['volume_id'])
self.assertIndexMembers(engine, 'virtual_interfaces',
'virtual_interfaces_network_id_idx',
['network_id'])
self.assertIndexMembers(engine, 'virtual_interfaces',
'virtual_interfaces_instance_uuid_fkey',
['instance_uuid'])
# Removed on MySQL, never existed on other databases
self.assertIndexNotExists(engine, 'dns_domains', 'project_id')
self.assertIndexNotExists(engine, 'virtual_interfaces', 'network_id')
def _pre_upgrade_273(self, engine):
if engine.name != 'sqlite':
return
# Drop a variety of unique constraints to ensure that the script
# properly re-adds them
for table_name, constraint_name in [
('compute_nodes', 'uniq_compute_nodes0'
'host0hypervisor_hostname'),
('fixed_ips', 'uniq_fixed_ips0address0deleted'),
('instance_info_caches', 'uniq_instance_info_caches0'
'instance_uuid'),
('instance_type_projects', 'uniq_instance_type_projects0'
'instance_type_id0project_id0'
'deleted'),
('pci_devices', 'uniq_pci_devices0compute_node_id0'
'address0deleted'),
('virtual_interfaces', 'uniq_virtual_interfaces0'
'address0deleted')]:
table = oslodbutils.get_table(engine, table_name)
constraints = [c for c in table.constraints
if c.name == constraint_name]
for cons in constraints:
# Need to use sqlalchemy-migrate UniqueConstraint
cons = UniqueConstraint(*[c.name for c in cons.columns],
name=cons.name,
table=table)
cons.drop()
def _check_273(self, engine, data):
for src_table, src_column, dst_table, dst_column in [
('fixed_ips', 'instance_uuid', 'instances', 'uuid'),
('block_device_mapping', 'instance_uuid', 'instances', 'uuid'),
('instance_info_caches', 'instance_uuid', 'instances', 'uuid'),
('instance_metadata', 'instance_uuid', 'instances', 'uuid'),
('instance_system_metadata', 'instance_uuid',
'instances', 'uuid'),
('instance_type_projects', 'instance_type_id',
'instance_types', 'id'),
('iscsi_targets', 'volume_id', 'volumes', 'id'),
('reservations', 'usage_id', 'quota_usages', 'id'),
('security_group_instance_association', 'instance_uuid',
'instances', 'uuid'),
('security_group_instance_association', 'security_group_id',
'security_groups', 'id'),
('virtual_interfaces', 'instance_uuid', 'instances', 'uuid'),
('compute_nodes', 'service_id', 'services', 'id'),
('instance_actions', 'instance_uuid', 'instances', 'uuid'),
('instance_faults', 'instance_uuid', 'instances', 'uuid'),
('migrations', 'instance_uuid', 'instances', 'uuid')]:
src_table = oslodbutils.get_table(engine, src_table)
fkeys = {fk.parent.name: fk.column
for fk in src_table.foreign_keys}
self.assertIn(src_column, fkeys)
self.assertEqual(fkeys[src_column].table.name, dst_table)
self.assertEqual(fkeys[src_column].name, dst_column)
def _check_274(self, engine, data):
self.assertIndexMembers(engine, 'instances',
'instances_project_id_deleted_idx',
['project_id', 'deleted'])
self.assertIndexNotExists(engine, 'instances', 'project_id')
def _pre_upgrade_275(self, engine):
# Create a keypair record so we can test that the upgrade will set
# 'ssh' as default value in the new column for the previous keypair
# entries.
key_pairs = oslodbutils.get_table(engine, 'key_pairs')
fake_keypair = {'name': 'test-migr'}
key_pairs.insert().execute(fake_keypair)
def _check_275(self, engine, data):
self.assertColumnExists(engine, 'key_pairs', 'type')
self.assertColumnExists(engine, 'shadow_key_pairs', 'type')
key_pairs = oslodbutils.get_table(engine, 'key_pairs')
shadow_key_pairs = oslodbutils.get_table(engine, 'shadow_key_pairs')
self.assertIsInstance(key_pairs.c.type.type,
sqlalchemy.types.String)
self.assertIsInstance(shadow_key_pairs.c.type.type,
sqlalchemy.types.String)
# Make sure the keypair entry will have the type 'ssh'
key_pairs = oslodbutils.get_table(engine, 'key_pairs')
keypair = key_pairs.select(
key_pairs.c.name == 'test-migr').execute().first()
self.assertEqual('ssh', keypair.type)
def _check_276(self, engine, data):
self.assertColumnExists(engine, 'instance_extra', 'vcpu_model')
self.assertColumnExists(engine, 'shadow_instance_extra', 'vcpu_model')
instance_extra = oslodbutils.get_table(engine, 'instance_extra')
shadow_instance_extra = oslodbutils.get_table(
engine, 'shadow_instance_extra')
self.assertIsInstance(instance_extra.c.vcpu_model.type,
sqlalchemy.types.Text)
self.assertIsInstance(shadow_instance_extra.c.vcpu_model.type,
sqlalchemy.types.Text)
def _check_277(self, engine, data):
self.assertIndexMembers(engine, 'fixed_ips',
'fixed_ips_deleted_allocated_updated_at_idx',
['deleted', 'allocated', 'updated_at'])
def _check_278(self, engine, data):
compute_nodes = oslodbutils.get_table(engine, 'compute_nodes')
self.assertEqual(0, len([fk for fk in compute_nodes.foreign_keys
if fk.parent.name == 'service_id']))
self.assertTrue(compute_nodes.c.service_id.nullable)
def _check_279(self, engine, data):
inspector = reflection.Inspector.from_engine(engine)
constraints = inspector.get_unique_constraints('compute_nodes')
constraint_names = [constraint['name'] for constraint in constraints]
self.assertNotIn('uniq_compute_nodes0host0hypervisor_hostname',
constraint_names)
self.assertIn('uniq_compute_nodes0host0hypervisor_hostname0deleted',
constraint_names)
def _check_280(self, engine, data):
key_pairs = oslodbutils.get_table(engine, 'key_pairs')
self.assertFalse(key_pairs.c.name.nullable)
def _check_291(self, engine, data):
# NOTE(danms): This is a dummy migration that just does a consistency
# check
pass
def _check_292(self, engine, data):
self.assertTableNotExists(engine, 'iscsi_targets')
self.assertTableNotExists(engine, 'volumes')
self.assertTableNotExists(engine, 'shadow_iscsi_targets')
self.assertTableNotExists(engine, 'shadow_volumes')
def _pre_upgrade_293(self, engine):
migrations = oslodbutils.get_table(engine, 'migrations')
fake_migration = {}
migrations.insert().execute(fake_migration)
def _check_293(self, engine, data):
self.assertColumnExists(engine, 'migrations', 'migration_type')
self.assertColumnExists(engine, 'shadow_migrations', 'migration_type')
migrations = oslodbutils.get_table(engine, 'migrations')
fake_migration = migrations.select().execute().first()
self.assertIsNone(fake_migration.migration_type)
self.assertFalse(fake_migration.hidden)
def _check_294(self, engine, data):
self.assertColumnExists(engine, 'services', 'last_seen_up')
self.assertColumnExists(engine, 'shadow_services', 'last_seen_up')
services = oslodbutils.get_table(engine, 'services')
shadow_services = oslodbutils.get_table(
engine, 'shadow_services')
self.assertIsInstance(services.c.last_seen_up.type,
sqlalchemy.types.DateTime)
self.assertIsInstance(shadow_services.c.last_seen_up.type,
sqlalchemy.types.DateTime)
def _pre_upgrade_295(self, engine):
self.assertIndexNotExists(engine, 'virtual_interfaces',
'virtual_interfaces_uuid_idx')
def _check_295(self, engine, data):
self.assertIndexMembers(engine, 'virtual_interfaces',
'virtual_interfaces_uuid_idx', ['uuid'])
def _check_296(self, engine, data):
pass
def _check_297(self, engine, data):
self.assertColumnExists(engine, 'services', 'forced_down')
def _check_298(self, engine, data):
# NOTE(nic): This is a MySQL-specific migration, and is a no-op from
# the point-of-view of unit tests, since they use SQLite
pass
def filter_metadata_diff(self, diff):
# Overriding the parent method to decide on certain attributes
# that may be present in the DB but not in the models.py
def removed_column(element):
# Define a whitelist of columns that would be removed from the
# DB at a later release.
column_whitelist = {'instances': ['internal_id']}
if element[0] != 'remove_column':
return False
table_name, column = element[2], element[3]
return (table_name in column_whitelist and
column.name in column_whitelist[table_name])
return [
element
for element in diff
if not removed_column(element)
]
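# Illustrative sketch (hypothetical diff element, assuming the
# ('remove_column', schema, table_name, column) tuples this hook receives):
#   ('remove_column', None, 'instances', Column('internal_id', Integer(), ...))
# is filtered out by removed_column() above, while the removal of any column
# not in the whitelist stays in the diff and fails the models/migrations sync.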
def _check_299(self, engine, data):
self.assertColumnExists(engine, 'services', 'version')
def _check_300(self, engine, data):
self.assertColumnExists(engine, 'instance_extra', 'migration_context')
def _check_301(self, engine, data):
self.assertColumnExists(engine, 'compute_nodes',
'cpu_allocation_ratio')
self.assertColumnExists(engine, 'compute_nodes',
'ram_allocation_ratio')
def _check_302(self, engine, data):
self.assertIndexMembers(engine, 'instance_system_metadata',
'instance_uuid', ['instance_uuid'])
def _check_313(self, engine, data):
self.assertColumnExists(engine, 'pci_devices', 'parent_addr')
self.assertColumnExists(engine, 'shadow_pci_devices', 'parent_addr')
pci_devices = oslodbutils.get_table(engine, 'pci_devices')
shadow_pci_devices = oslodbutils.get_table(
engine, 'shadow_pci_devices')
self.assertIsInstance(pci_devices.c.parent_addr.type,
sqlalchemy.types.String)
self.assertTrue(pci_devices.c.parent_addr.nullable)
self.assertIsInstance(shadow_pci_devices.c.parent_addr.type,
sqlalchemy.types.String)
self.assertTrue(shadow_pci_devices.c.parent_addr.nullable)
self.assertIndexMembers(engine, 'pci_devices',
'ix_pci_devices_compute_node_id_parent_addr_deleted',
['compute_node_id', 'parent_addr', 'deleted'])
def _check_314(self, engine, data):
self.assertColumnExists(engine, 'inventories', 'resource_class_id')
self.assertColumnExists(engine, 'allocations', 'resource_class_id')
self.assertColumnExists(engine, 'resource_providers', 'id')
self.assertColumnExists(engine, 'resource_providers', 'uuid')
self.assertColumnExists(engine, 'compute_nodes', 'uuid')
self.assertColumnExists(engine, 'shadow_compute_nodes', 'uuid')
self.assertIndexMembers(engine, 'allocations',
'allocations_resource_provider_class_id_idx',
['resource_provider_id', 'resource_class_id'])
def _check_315(self, engine, data):
self.assertColumnExists(engine, 'migrations',
'memory_total')
self.assertColumnExists(engine, 'migrations',
'memory_processed')
self.assertColumnExists(engine, 'migrations',
'memory_remaining')
self.assertColumnExists(engine, 'migrations',
'disk_total')
self.assertColumnExists(engine, 'migrations',
'disk_processed')
self.assertColumnExists(engine, 'migrations',
'disk_remaining')
def _check_316(self, engine, data):
self.assertColumnExists(engine, 'compute_nodes',
'disk_allocation_ratio')
def _check_317(self, engine, data):
self.assertColumnExists(engine, 'aggregates', 'uuid')
self.assertColumnExists(engine, 'shadow_aggregates', 'uuid')
def _check_318(self, engine, data):
self.assertColumnExists(engine, 'resource_providers', 'name')
self.assertColumnExists(engine, 'resource_providers', 'generation')
self.assertColumnExists(engine, 'resource_providers', 'can_host')
self.assertIndexMembers(engine, 'resource_providers',
'resource_providers_name_idx',
['name'])
self.assertColumnExists(engine, 'resource_provider_aggregates',
'resource_provider_id')
self.assertColumnExists(engine, 'resource_provider_aggregates',
'aggregate_id')
self.assertIndexMembers(engine, 'resource_provider_aggregates',
'resource_provider_aggregates_aggregate_id_idx',
['aggregate_id'])
self.assertIndexMembers(engine, 'resource_provider_aggregates',
'resource_provider_aggregates_aggregate_id_idx',
['aggregate_id'])
self.assertIndexMembers(engine, 'inventories',
'inventories_resource_provider_resource_class_idx',
['resource_provider_id', 'resource_class_id'])
def _check_319(self, engine, data):
self.assertIndexMembers(engine, 'instances',
'instances_deleted_created_at_idx',
['deleted', 'created_at'])
def _check_330(self, engine, data):
# Just a sanity-check migration
pass
def _check_331(self, engine, data):
self.assertColumnExists(engine, 'virtual_interfaces', 'tag')
self.assertColumnExists(engine, 'block_device_mapping', 'tag')
def _check_332(self, engine, data):
self.assertColumnExists(engine, 'instance_extra', 'keypairs')
def _check_333(self, engine, data):
self.assertColumnExists(engine, 'console_auth_tokens', 'id')
self.assertColumnExists(engine, 'console_auth_tokens', 'token_hash')
self.assertColumnExists(engine, 'console_auth_tokens', 'console_type')
self.assertColumnExists(engine, 'console_auth_tokens', 'host')
self.assertColumnExists(engine, 'console_auth_tokens', 'port')
self.assertColumnExists(engine, 'console_auth_tokens',
'internal_access_path')
self.assertColumnExists(engine, 'console_auth_tokens',
'instance_uuid')
self.assertColumnExists(engine, 'console_auth_tokens', 'expires')
self.assertIndexMembers(engine, 'console_auth_tokens',
'console_auth_tokens_instance_uuid_idx',
['instance_uuid'])
self.assertIndexMembers(engine, 'console_auth_tokens',
'console_auth_tokens_host_expires_idx',
['host', 'expires'])
self.assertIndexMembers(engine, 'console_auth_tokens',
'console_auth_tokens_token_hash_idx',
['token_hash'])
def _check_334(self, engine, data):
self.assertColumnExists(engine, 'instance_extra', 'device_metadata')
self.assertColumnExists(engine, 'shadow_instance_extra',
'device_metadata')
def _check_345(self, engine, data):
# NOTE(danms): Just a sanity-check migration
pass
def _check_346(self, engine, data):
self.assertColumnNotExists(engine, 'instances', 'scheduled_at')
self.assertColumnNotExists(engine, 'shadow_instances', 'scheduled_at')
def _check_347(self, engine, data):
self.assertIndexMembers(engine, 'instances',
'instances_project_id_idx',
['project_id'])
self.assertIndexMembers(engine, 'instances',
'instances_updated_at_project_id_idx',
['updated_at', 'project_id'])
def _check_358(self, engine, data):
self.assertColumnExists(engine, 'block_device_mapping',
'attachment_id')
def _check_359(self, engine, data):
self.assertColumnExists(engine, 'services', 'uuid')
self.assertIndexMembers(engine, 'services', 'services_uuid_idx',
['uuid'])
def _check_360(self, engine, data):
self.assertColumnExists(engine, 'compute_nodes', 'mapped')
self.assertColumnExists(engine, 'shadow_compute_nodes', 'mapped')
def _check_361(self, engine, data):
self.assertIndexMembers(engine, 'compute_nodes',
'compute_nodes_uuid_idx', ['uuid'])
def _check_362(self, engine, data):
self.assertColumnExists(engine, 'pci_devices', 'uuid')
def _check_373(self, engine, data):
self.assertColumnExists(engine, 'migrations', 'uuid')
def _check_374(self, engine, data):
self.assertColumnExists(engine, 'block_device_mapping', 'uuid')
self.assertColumnExists(engine, 'shadow_block_device_mapping', 'uuid')
inspector = reflection.Inspector.from_engine(engine)
constraints = inspector.get_unique_constraints('block_device_mapping')
constraint_names = [constraint['name'] for constraint in constraints]
self.assertIn('uniq_block_device_mapping0uuid', constraint_names)
def _check_375(self, engine, data):
self.assertColumnExists(engine, 'console_auth_tokens',
'access_url_base')
def _check_376(self, engine, data):
self.assertIndexMembers(
engine, 'console_auth_tokens',
'console_auth_tokens_token_hash_instance_uuid_idx',
['token_hash', 'instance_uuid'])
def _check_377(self, engine, data):
self.assertIndexMembers(engine, 'migrations',
'migrations_updated_at_idx', ['updated_at'])
def _check_378(self, engine, data):
self.assertIndexMembers(
engine, 'instance_actions',
'instance_actions_instance_uuid_updated_at_idx',
['instance_uuid', 'updated_at'])
def _check_389(self, engine, data):
self.assertIndexMembers(engine, 'aggregate_metadata',
'aggregate_metadata_value_idx',
['value'])
def _check_390(self, engine, data):
self.assertColumnExists(engine, 'instance_extra', 'trusted_certs')
self.assertColumnExists(engine, 'shadow_instance_extra',
'trusted_certs')
class TestNovaMigrationsSQLite(NovaMigrationsCheckers,
test_base.DbTestCase,
test.NoDBTestCase):
pass
class TestNovaMigrationsMySQL(NovaMigrationsCheckers,
test_base.MySQLOpportunisticTestCase,
test.NoDBTestCase):
def test_innodb_tables(self):
with mock.patch.object(sa_migration, 'get_engine',
return_value=self.migrate_engine):
sa_migration.db_sync()
total = self.migrate_engine.execute(
"SELECT count(*) "
"FROM information_schema.TABLES "
"WHERE TABLE_SCHEMA = '%(database)s'" %
{'database': self.migrate_engine.url.database})
self.assertGreater(total.scalar(), 0, "No tables found. Wrong schema?")
noninnodb = self.migrate_engine.execute(
"SELECT count(*) "
"FROM information_schema.TABLES "
"WHERE TABLE_SCHEMA='%(database)s' "
"AND ENGINE != 'InnoDB' "
"AND TABLE_NAME != 'migrate_version'" %
{'database': self.migrate_engine.url.database})
count = noninnodb.scalar()
self.assertEqual(count, 0, "%d non InnoDB tables created" % count)
class TestNovaMigrationsPostgreSQL(NovaMigrationsCheckers,
test_base.PostgreSQLOpportunisticTestCase,
test.NoDBTestCase):
pass
class ProjectTestCase(test.NoDBTestCase):
def test_no_migrations_have_downgrade(self):
topdir = os.path.normpath(os.path.dirname(__file__) + '/../../../')
# Walk both the nova_api and nova (cell) database migrations.
includes_downgrade = []
for subdir in ('api_migrations', ''):
py_glob = os.path.join(topdir, "db", "sqlalchemy", subdir,
"migrate_repo", "versions", "*.py")
for path in glob.iglob(py_glob):
has_upgrade = False
has_downgrade = False
with open(path, "r") as f:
for line in f:
if 'def upgrade(' in line:
has_upgrade = True
if 'def downgrade(' in line:
has_downgrade = True
if has_upgrade and has_downgrade:
fname = os.path.basename(path)
includes_downgrade.append(fname)
helpful_msg = ("The following migrations have a downgrade "
"which is not supported:"
"\n\t%s" % '\n\t'.join(sorted(includes_downgrade)))
self.assertFalse(includes_downgrade, helpful_msg)
|
|
# -*- coding: utf-8 -*-
"""
Module: build_particle_array.py
Created on Sat Oct 19 15:34:52 2013
@author: gav
Description:
"""
### Imports
from __future__ import print_function
import os, sys
import time
import os.path as osp
import numpy as np
import numpy.ma as ma
import pickle
from numpy import array, newaxis
import scipy.sparse as sp
import matplotlib.pyplot as plt
from pyproj import Proj
from shapely.geometry import Polygon
#
#import bonaparte
#import bonaparte.stoch.stoch_lib as sl
#from bonaparte.utils.grid_cell_areas import grid_cell_areas
### Logging
import logging
logging.basicConfig(level=logging.DEBUG)
debug, info, warn, error = logging.debug, logging.info, logging.warn, logging.error
### Constants
SOUTHERN_HEMISPHERE = True
SURFACE_TYPE = 0
AROMATIC_TYPE = -1
ENTRAINED_TYPE = -2
SURFACTANT_TYPE = -3
TR3_RECORD_SIZE = 40
EMPTY_HEADER_LINES = 7
### Classes
### Functions
### Grid cell area module
def grid_cell_areas(_grid_header=None, grid_fp=None):
"""
Return an array of the areas of a column of grid cells
"""
assert bool(_grid_header) != osp.isfile(grid_fp or "")
_gh = _grid_header or grid_header(grid_fp)
points_array = grid_column_verts(_gh)
utm_points_array = reproject(points_array)
area_array = polygon_areas(utm_points_array)
return area_array
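# Hedged usage sketch (the .DEP path below is hypothetical, not part of this
# module): the helper chain above builds one column of cell polygons, reprojects
# them to UTM and takes their Shapely areas, so the values come back in square
# metres, one per grid row.
def _example_grid_cell_areas(grid_fp=r"J:\grids\example_grid.DEP"):
    areas_m2 = grid_cell_areas(grid_fp=grid_fp)  # shape (n_rows,)
    print("mean cell area: {:.1f} m2".format(areas_m2.mean()))
    return areas_m2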
def grid_column_verts(_grid_header=None, grid_fp=None):
"""
Given a grid, return an array of the cell corner vertices for one column of the grid.
Which column is unimportant, as every column has the same set of cell areas.
The array will have dimensions
n_rows, that is the length of the column
n_points, that is 5, the number of points needed to close a rectangle
n_geo_dims, that is 2 (lon, lat)
eg
[[(ax1, ay1), (ax2, ay2), (ax3, ay3), (ax4, ay4), (ax5, ay5)],
[(bx1, by1),.. (bx5, by5)],]
Fastest way to fill numpy array
http://stackoverflow.com/questions/5891410/numpy-array-initialization-fill-with-identical-values?lq=1
"""
# Ensure grid_header xor grid_fp exist
assert bool(_grid_header) != osp.isfile(grid_fp or "")
_gh = _grid_header or grid_header(grid_fp)
n_rows = _gh['n_rows']
dy = float(_gh['lat_delta'])  # latitude step between successive rows of the column
lon_0 = float(_gh['lon_lower_left'])
lat_0 = float(_gh['lat_lower_left'])
# Make me a function to generate vertices
vertices = verts_factory(_gh)
# Need a sequence of lower left points
verts_array = np.empty(shape=(n_rows, 5, 2))
ll_corners = np.empty(shape=(n_rows, 2)) # lower left corners (lon, lat)
ll_corners[:,0] = lon_0
ll_corners[:,1] = lat_0 + dy * np.arange(n_rows)  # one lower-left corner per row, spaced by dy
verts_array[:] = np.array(map(vertices, ll_corners))
return verts_array
def polygon_areas(arr):
"""
Given a column of points, return a column of polygon areas
"""
ps = map(Polygon, arr)
areas = [p.area for p in ps]
return np.array(areas)
def reproject(arr, zone=None):
"""Given an aray of points, return the utm coordinates"""
new_arr = np.empty_like(arr)
_zone = zone or utm_zone(None)
proj = Proj(proj="utm", zone=_zone, ellps="WGS84")
for i, grid_cell in enumerate(arr):
for j, point in enumerate(grid_cell):
new_arr[i, j, :] = np.array(proj(*point))
return new_arr
def utm_zone(point):
"""
*** Warning stub only - fixed output
Given a geographical point, return the appropriate utm zone
Args:
point - array of shape (1, 2) ie (lon, lat)
Returns:
zone - string of the form "50L"
"""
warn("***Warning stub function - fixed return value***")
return "50L"
def verts_factory(grid_header):
"""
Return a function that will calculate the five verts given the lower left corner
"""
dx = np.array([float(grid_header['lon_delta']), 0])
dy = np.array([0, float(grid_header['lat_delta'])])
def verts(point):
_verts = np.empty((5,2))
_verts[0] = point
_verts[1] = point + dx
_verts[2] = point + dx + dy
_verts[3] = point + dy
_verts[4] = point
return _verts
return verts
###
### Stoch_lib module ###
def gridder_arr_factory(grid_fp=None,
grid_ext=None,
grid_spc=None):
"""Return a function to convert lon, lat to row, col
Args of factory:
grid_fp - String of the full path to a APASA .dep grid file
grid_extent - a dictionary of the upper right and lower left corners
see stoch_lib.grid_extent
grid_spacing_arr - array of lon_delta and lat_delta
see stoch.lib.grid_spacing
"""
err_msg = "Incomplete args. see \n {}".format(gridder_arr_factory.__doc__)
assert grid_fp or (grid_ext is not None and grid_spc is not None), err_msg
if grid_fp is not None:
assert osp.isfile(grid_fp), "{} is not a file".format(grid_fp)
if grid_ext is None:
grid_ext = grid_extent(grid_fp)
if grid_spc is None:
grid_spc = grid_spacing_arr(grid_fp)
origin = np.array(grid_ext['upper_left'])
print("origin is {}".format(origin))
delta = np.array(grid_spc).view('<f4')
print("delta is {}".format(delta))
def _inner(particle_pos_arr):
# A little inconvenient to have to cast back to ndarray for ops
new_shape = (len(particle_pos_arr), 2)
particle_pos_nd = particle_pos_arr.view('<f4').reshape(new_shape)
res_arr = np.floor_divide((particle_pos_nd - origin), delta)
# Cast to int ready for geocoding
return res_arr.astype(np.uint16)
return _inner
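# Hedged usage sketch of gridder_arr_factory (grid path hypothetical). The input
# mimics the particles[['lon', 'lat']] slice used by grid_mass_dense(): a
# two-field float32 structured array, which the inner function views as plain
# floats before flooring against the grid origin and spacing.
def _example_gridder(grid_fp=r"J:\grids\example_grid.DEP"):
    gridder = gridder_arr_factory(grid_fp)
    pos = np.array([(114.20, -13.50), (114.35, -13.65)],
                   dtype=[('lon', '<f4'), ('lat', '<f4')])
    idxs = gridder(pos)  # column 0 is the grid column (lon), column 1 the grid row (lat)
    return idxs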
def grid_extent(fp):
"""Return the upper left and lower right corners as lon, lat pairs
Maybe return a dictionary instead?
"""
h = grid_header(fp)
debug("grid header is")
debug(h)
# The upper left corner is the one closest to (0, 0), the geographical origin
upper_left_lon = h['lon_lower_left']
upper_left_lat = h['lat_lower_left'] + h['n_rows'] * h['lat_delta']
lower_right_lon = h['lon_lower_left'] + h['n_cols'] * h['lon_delta']
lower_right_lat = h['lat_lower_left']
return {'upper_left' : [float(x) for x in [upper_left_lon, upper_left_lat ]],
'lower_right': [float(x) for x in [lower_right_lon, lower_right_lat]]}
def grid_header(fp):
"""Grab the header from the grid file, return as numpy record array"""
dt_names = ['h1', 'h2', 'lon_lower_left', 'lat_lower_left',
'lon_delta', 'lat_delta', 'n_cols', 'n_rows']
dt_formats = ['<i2'] * 2 + ['<f4'] * 4 + ['<i2'] * 2
dt = np.dtype(zip(dt_names, dt_formats))
with open(fp, 'rb') as fh:
header = np.fromfile(fh, dtype=dt, count=1)
return header
def grid_mass_dense(particles, gridder, grid_shape):
"""Grid the mass of the particles and return a dense array
Args:
particles - array from the chunker
gridder - function to return the grid indices
grid_shape - tuple of (n_rows, n_cols)
"""
idxs = gridder(particles[['lon', 'lat']])
_data = particles['mass']
_col = idxs[:,0] # lon
_row = idxs[:,1] # lat
mass_coo = sp.coo_matrix((_data, (_row, _col)), shape=grid_shape)
return mass_coo.todense()
def grid_mass_csr(particles, gridder, grid_shape):
"""Grid the mass of the particles and return a dense array
Args:
particles - array from the chunker
gridder - function to return the grid indices
grid_shape - tuple of (n_rows, n_cols)
"""
idxs = gridder(particles[['lon', 'lat']])
_data = particles['mass']
_col = idxs[:,0] # lon
_row = idxs[:,1] # lat
mass_coo = sp.coo_matrix((_data, (_row, _col)), shape=grid_shape)
return mass_coo.tocsr()
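# Minimal sketch of why a COO matrix is the accumulator in the two helpers above:
# duplicate (row, col) entries are summed when the matrix is converted, so
# particles landing in the same cell aggregate their mass automatically.
def _example_coo_accumulation():
    mass = np.array([1.0, 2.0, 4.0], dtype=np.float32)
    rows = np.array([0, 0, 1])
    cols = np.array([2, 2, 3])
    dense = sp.coo_matrix((mass, (rows, cols)), shape=(2, 4)).todense()
    return dense  # cell (0, 2) holds 3.0 = 1.0 + 2.0, cell (1, 3) holds 4.0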
def grid_spacing_arr(fp):
"""Return the grid spacing as dictionary """
h = grid_header(fp)
res_arr = h[['lon_delta', 'lat_delta']].copy()
# Change sign of lat_delta for decreasing (-ve) lat from origin
res_arr.dtype.names = ['lon', 'lat']
# TODO - Make it work for the northern hemisphere
if SOUTHERN_HEMISPHERE:
res_arr['lat'] = -np.abs(res_arr['lat'])
# res_arr['lat'] = res_arr['lat'] * -1
return res_arr
def lu3_data(lu3_fp):
"""Return binary data from file"""
dtype_ls = [('iver', '<i4'), ('SIMTIME', '<i4'),
('time', '<f4'), ('rec1st', '<i4'), ('rec2st', '<i4'),
('rec2end', '<i4'), ('rec3st', '<i4'), ('rec3end', '<i4'),
('rec5st', '<i4'), ('rec5end', '<i4'), ('sed2st', '<i4'),
('sed2end', '<i4'), ('rar1st', '<i4'), ('rar1end', '<i4'),
('rth1st', '<i4'), ('rth1end', '<i4'), ('rsf1st', '<i4'),
('rsf1end', '<i4'), ('rsp1st', '<i4'), ('rsp1end', '<i4'),
('rss1st', '<i4'), ('rss1end', '<i4'), ('rat1st', '<i4'),
('rat1end', '<i4')]
return np.fromfile(lu3_fp, np.dtype(dtype_ls))
def particles_and_shore_generator(tr3_fp, lu3_fp, grid_fp):
"""Return a generator of particles and shore cells
Note mismatch in dtype lengths between particles and shore
Usage:
> gen = particles_and_shore_generator(tr3_fp, lu3_fp, grid_fp)
> for time, surf, entr, arom , shore in gen:
> ...
Yields (time, surf_p, entr_p, arom_p, shore_c)
"""
def log(log_msg): debug("particle_and_shore_generator: {}".format(log_msg))
if __debug__:
log("TR3 file is {}".format(tr3_fp))
log("LU3 file is {}".format(lu3_fp))
log("Grid file is {}".format(grid_fp))
grid_record = grid_extent(grid_fp)
lower_lon, upper_lat = grid_record['upper_left']
upper_lon, lower_lat = grid_record['lower_right']
lu3_arr = lu3_data(lu3_fp)
particle_names_ls = ['lon', 'lat', 'radius', 'prev_lon', 'prev_lat',
'type', 'mass', 'density', 'viscosity', 'age']
particle_formats_ls = ['<f4'] * 5 + ['<i4'] + ['<f4'] * 4
particle_dtype = np.dtype(zip(particle_names_ls, particle_formats_ls))
shore_names_ls = ['igrid', 'jgrid', '_1', '_2', 'habitat_type', 'area',
'shore_length', 'lon', 'lat', 'mass' ]
shore_formats_ls = ['<i4'] * 5 + ['<f4'] * 5
shore_dtype = np.dtype(zip(shore_names_ls, shore_formats_ls))
def inner():
with open(tr3_fp, 'rb') as fh:
for row in lu3_arr:
empty = np.fromfile(fh, dtype=particle_dtype, count=EMPTY_HEADER_LINES)
particles = np.fromfile(fh, dtype=particle_dtype,
count=row['rec2end']-row['rec2st'])
shore_cells = np.fromfile(fh, dtype=shore_dtype,
count=row['rec3end']-row['rec3st'] or 1)
np_and = np.logical_and
surf_mask = np.array(particles['type'] == SURFACE_TYPE)
entr_mask = np.array(particles['type'] == ENTRAINED_TYPE)
arom_mask = np.array(particles['type'] == AROMATIC_TYPE)
lon_mask = np_and(np.array(particles['lon'] > lower_lon),
np.array(particles['lon'] < upper_lon))
lat_mask = np_and(np.array(particles['lat'] > lower_lat),
np.array(particles['lat'] < upper_lat))
bounds_mask = np_and(lon_mask, lat_mask)
surf_p = particles[np_and(bounds_mask, surf_mask)]
entr_p = particles[np_and(bounds_mask, entr_mask)]
arom_p = particles[np_and(bounds_mask, arom_mask)]
yield (row['time'], surf_p, entr_p, arom_p, shore_cells)
return inner()
def main():
"""
Main func
"""
project_dir = r"J:\data\j0267_nv_remodel"
stem = "J0267_SC3_SBED_LEAK_TRA_001"
grid_fn = "VanGogh_800m.DEP"
h5_fn = "j0267_data.h5"
tr3_fp = osp.join(project_dir, "modelout", stem + ".tr3" )
lu3_fp = osp.join(project_dir, "modelout", stem + ".lu3" )
grid_fp = osp.join(project_dir, "grids", grid_fn)
surf_threshold = 1e-6 # T/m2 or 1 g/m2
header = grid_header(grid_fp)
grid_shape = (header['n_rows'], header['n_cols'])
particles = particles_and_shore_generator(tr3_fp, lu3_fp, grid_fp)
gridder = gridder_arr_factory(grid_fp)
start_time = time.time()
max_surf_mass = np.zeros(grid_shape, dtype=np.float32)
for i, tup in enumerate(particles):
sim_time, surf, entr, arom, shore = tup
surf_dense = grid_mass_dense(surf, gridder, grid_shape)
max_surf_mass = np.maximum(max_surf_mass, surf_dense)
# Now we need a threshold_matrix to find which cells have exceeded the threshold
cell_areas = grid_cell_areas(grid_fp=grid_fp)
# the mass threshold is threshold * area, eg 1e-6 T/m2 * 640,000 m2 = 0.64 T per cell
mass_threshold_T = cell_areas * surf_threshold
exceedance = max_surf_mass >= mass_threshold_T[:, np.newaxis]
max_mass = ma.array(max_surf_mass, mask=(max_surf_mass == 0.0))
elapsed_time = time.time() - start_time
print("Finished {} timesteps in {} seconds".format(i, elapsed_time))
# plt.imshow(exceedance, origin="upper", interpolation="nearest")
# plt.show()
### Tests
if __name__ == "__main__":
main()
print("Done __main__")
|
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub version of the urlfetch API, based on httplib."""
_successfully_imported_fancy_urllib = False
_fancy_urllib_InvalidCertException = None
_fancy_urllib_SSLError = None
try:
import fancy_urllib
_successfully_imported_fancy_urllib = True
_fancy_urllib_InvalidCertException = fancy_urllib.InvalidCertificateException
_fancy_urllib_SSLError = fancy_urllib.SSLError
except ImportError:
pass
import gzip
import httplib
import logging
import os
import socket
import StringIO
import sys
import urllib
import urlparse
from google.appengine.api import apiproxy_stub
from google.appengine.api import urlfetch
from google.appengine.api import urlfetch_errors
from google.appengine.api import urlfetch_service_pb
from google.appengine.runtime import apiproxy_errors
MAX_REQUEST_SIZE = 5 << 20
MAX_RESPONSE_SIZE = 2 ** 25
MAX_REDIRECTS = urlfetch.MAX_REDIRECTS
REDIRECT_STATUSES = frozenset([
httplib.MOVED_PERMANENTLY,
httplib.FOUND,
httplib.SEE_OTHER,
httplib.TEMPORARY_REDIRECT,
])
_API_CALL_DEADLINE = 5.0
_API_CALL_VALIDATE_CERTIFICATE_DEFAULT = True
_CONNECTION_SUPPORTS_TIMEOUT = sys.version_info >= (2, 6)
_UNTRUSTED_REQUEST_HEADERS = frozenset([
'content-length',
'host',
'vary',
'via',
'x-forwarded-for',
])
_MAX_URL_LENGTH = 2048
def _CanValidateCerts():
return (_successfully_imported_fancy_urllib and
fancy_urllib.can_validate_certs())
def _SetupSSL(path):
global CERT_PATH
if os.path.exists(path):
CERT_PATH = path
else:
CERT_PATH = None
logging.warning('%s missing; without this urlfetch will not be able to '
'validate SSL certificates.', path)
if not _CanValidateCerts():
logging.warning('No ssl package found. urlfetch will not be able to '
'validate SSL certificates.')
_SetupSSL(os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..',
'..', 'lib', 'cacerts',
'urlfetch_cacerts.txt')))
def _IsAllowedPort(port):
if port is None:
return True
try:
port = int(port)
except ValueError, e:
return False
if ((port >= 80 and port <= 90) or
(port >= 440 and port <= 450) or
port >= 1024):
return True
return False
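# A few illustrative values for the check above (the stub mirrors production's
# port policy): _IsAllowedPort(None) -> True (no explicit port),
# _IsAllowedPort('8080') -> True (>= 1024), _IsAllowedPort('443') -> True
# (440-450 range), _IsAllowedPort('22') -> False (privileged port outside the
# allowed ranges).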
class URLFetchServiceStub(apiproxy_stub.APIProxyStub):
"""Stub version of the urlfetch API to be used with apiproxy_stub_map."""
def __init__(self, service_name='urlfetch'):
"""Initializer.
Args:
service_name: Service name expected for all calls.
"""
super(URLFetchServiceStub, self).__init__(service_name,
max_request_size=MAX_REQUEST_SIZE)
def _Dynamic_Fetch(self, request, response):
"""Trivial implementation of URLFetchService::Fetch().
Args:
request: the fetch to perform, a URLFetchRequest
response: the fetch response, a URLFetchResponse
"""
if len(request.url()) >= _MAX_URL_LENGTH:
logging.error('URL is too long: %s...' % request.url()[:50])
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.INVALID_URL)
(protocol, host, path, query, fragment) = urlparse.urlsplit(request.url())
payload = None
if request.method() == urlfetch_service_pb.URLFetchRequest.GET:
method = 'GET'
elif request.method() == urlfetch_service_pb.URLFetchRequest.POST:
method = 'POST'
payload = request.payload()
elif request.method() == urlfetch_service_pb.URLFetchRequest.HEAD:
method = 'HEAD'
elif request.method() == urlfetch_service_pb.URLFetchRequest.PUT:
method = 'PUT'
payload = request.payload()
elif request.method() == urlfetch_service_pb.URLFetchRequest.DELETE:
method = 'DELETE'
else:
logging.error('Invalid method: %s', request.method())
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.INVALID_URL)
if not (protocol == 'http' or protocol == 'https'):
logging.error('Invalid protocol: %s', protocol)
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.INVALID_URL)
if not host:
logging.error('Missing host.')
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.INVALID_URL)
self._SanitizeHttpHeaders(_UNTRUSTED_REQUEST_HEADERS,
request.header_list())
deadline = _API_CALL_DEADLINE
if request.has_deadline():
deadline = request.deadline()
validate_certificate = _API_CALL_VALIDATE_CERTIFICATE_DEFAULT
if request.has_mustvalidateservercertificate():
validate_certificate = request.mustvalidateservercertificate()
self._RetrieveURL(request.url(), payload, method,
request.header_list(), request, response,
follow_redirects=request.followredirects(),
deadline=deadline,
validate_certificate=validate_certificate)
def _RetrieveURL(self, url, payload, method, headers, request, response,
follow_redirects=True, deadline=_API_CALL_DEADLINE,
validate_certificate=_API_CALL_VALIDATE_CERTIFICATE_DEFAULT):
"""Retrieves a URL.
Args:
url: String containing the URL to access.
payload: Request payload to send, if any; None if no payload.
If the payload is unicode, we assume it is utf-8.
method: HTTP method to use (e.g., 'GET')
headers: List of additional header objects to use for the request.
request: Request object from original request.
response: Response object to populate with the response data.
follow_redirects: optional setting (defaulting to True) for whether or not
we should transparently follow redirects (up to MAX_REDIRECTS)
deadline: Number of seconds to wait for the urlfetch to finish.
validate_certificate: If true, do not send request to server unless the
certificate is valid, signed by a trusted CA and the hostname matches
the certificate.
Raises:
Raises an apiproxy_errors.ApplicationError exception with
INVALID_URL_ERROR in cases where:
- The protocol of the redirected URL is bad or missing.
- The port is not in the allowable range of ports.
Raises an apiproxy_errors.ApplicationError exception with
TOO_MANY_REDIRECTS in cases when MAX_REDIRECTS is exceeded
"""
last_protocol = ''
last_host = ''
if isinstance(payload, unicode):
payload = payload.encode('utf-8')
for redirect_number in xrange(MAX_REDIRECTS + 1):
parsed = urlparse.urlsplit(url)
protocol, host, path, query, fragment = parsed
port = urllib.splitport(urllib.splituser(host)[1])[1]
if not _IsAllowedPort(port):
logging.error(
'urlfetch received %s ; port %s is not allowed in production!' %
(url, port))
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.INVALID_URL)
if protocol and not host:
logging.error('Missing host on redirect; target url is %s' % url)
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.INVALID_URL)
if not host and not protocol:
host = last_host
protocol = last_protocol
adjusted_headers = {
'User-Agent':
'AppEngine-Google; (+http://code.google.com/appengine)',
'Host': host,
'Accept-Encoding': 'gzip',
}
if payload is not None:
adjusted_headers['Content-Length'] = str(len(payload))
if method == 'POST' and payload:
adjusted_headers['Content-Type'] = 'application/x-www-form-urlencoded'
passthrough_content_encoding = False
for header in headers:
if header.key().title().lower() == 'user-agent':
adjusted_headers['User-Agent'] = (
'%s %s' %
(header.value(), adjusted_headers['User-Agent']))
else:
if header.key().lower() == 'accept-encoding':
passthrough_content_encoding = True
adjusted_headers[header.key().title()] = header.value()
if payload is not None:
escaped_payload = payload.encode('string_escape')
else:
escaped_payload = ''
logging.debug('Making HTTP request: host = %r, '
'url = %r, payload = %.1000r, headers = %r',
host, url, escaped_payload, adjusted_headers)
try:
if protocol == 'http':
connection_class = httplib.HTTPConnection
elif protocol == 'https':
if (validate_certificate and _CanValidateCerts() and
CERT_PATH):
connection_class = fancy_urllib.create_fancy_connection(
ca_certs=CERT_PATH)
else:
connection_class = httplib.HTTPSConnection
else:
error_msg = 'Redirect specified invalid protocol: "%s"' % protocol
logging.error(error_msg)
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.INVALID_URL, error_msg)
if _CONNECTION_SUPPORTS_TIMEOUT:
connection = connection_class(host, timeout=deadline)
else:
connection = connection_class(host)
last_protocol = protocol
last_host = host
if query != '':
full_path = path + '?' + query
else:
full_path = path
if not _CONNECTION_SUPPORTS_TIMEOUT:
orig_timeout = socket.getdefaulttimeout()
try:
if not _CONNECTION_SUPPORTS_TIMEOUT:
socket.setdefaulttimeout(deadline)
connection.request(method, full_path, payload, adjusted_headers)
http_response = connection.getresponse()
if method == 'HEAD':
http_response_data = ''
else:
http_response_data = http_response.read()
finally:
if not _CONNECTION_SUPPORTS_TIMEOUT:
socket.setdefaulttimeout(orig_timeout)
connection.close()
except (_fancy_urllib_InvalidCertException,
_fancy_urllib_SSLError), e:
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.SSL_CERTIFICATE_ERROR,
str(e))
except socket.timeout, e:
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.DEADLINE_EXCEEDED, str(e))
except (httplib.error, socket.error, IOError), e:
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, str(e))
if http_response.status in REDIRECT_STATUSES and follow_redirects:
url = http_response.getheader('Location', None)
if url is None:
error_msg = 'Redirecting response was missing "Location" header'
logging.error(error_msg)
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.MALFORMED_REPLY,
error_msg)
else:
response.set_statuscode(http_response.status)
if (http_response.getheader('content-encoding') == 'gzip' and
not passthrough_content_encoding):
gzip_stream = StringIO.StringIO(http_response_data)
gzip_file = gzip.GzipFile(fileobj=gzip_stream)
http_response_data = gzip_file.read()
response.set_content(http_response_data[:MAX_RESPONSE_SIZE])
for header_key in http_response.msg.keys():
for header_value in http_response.msg.getheaders(header_key):
if (header_key.lower() == 'content-encoding' and
header_value == 'gzip' and
not passthrough_content_encoding):
continue
if header_key.lower() == 'content-length' and method != 'HEAD':
header_value = str(len(response.content()))
header_proto = response.add_header()
header_proto.set_key(header_key)
header_proto.set_value(header_value)
if len(http_response_data) > MAX_RESPONSE_SIZE:
response.set_contentwastruncated(True)
if request.url() != url:
response.set_finalurl(url)
break
else:
error_msg = 'Too many repeated redirects'
logging.error(error_msg)
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.TOO_MANY_REDIRECTS,
error_msg)
def _SanitizeHttpHeaders(self, untrusted_headers, headers):
"""Cleans "unsafe" headers from the HTTP request, in place.
Args:
untrusted_headers: Set of untrusted headers names (all lowercase).
headers: List of Header objects. The list is modified in place.
"""
prohibited_headers = [h.key() for h in headers
if h.key().lower() in untrusted_headers]
if prohibited_headers:
logging.warn('Stripped prohibited headers from URLFetch request: %s',
prohibited_headers)
for index in reversed(xrange(len(headers))):
if headers[index].key().lower() in untrusted_headers:
del headers[index]
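# Hedged usage sketch (not part of the stub itself; kept as comments so importing
# this module has no side effects): in a local test harness the stub is typically
# registered on the API proxy map so that urlfetch.fetch() calls are routed
# through URLFetchServiceStub._Dynamic_Fetch() above.
#
#   from google.appengine.api import apiproxy_stub_map
#   apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
#   apiproxy_stub_map.apiproxy.RegisterStub('urlfetch', URLFetchServiceStub())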
|
|
from Base_Applet import Base_Applet
import Tkinter as Tk
from widgets import *
# ICP Widgets
from . import *
class v_BDPC(Base_Applet):
"""
Base BDPC View. Collects and displays the following information:
Parameters:
* Closed-Loop MMC Parameters (P/V/I)
* Phase Shift from SYNC
Operation:
* Operation Mode (Switching On/Off)
* Data Refresh interval
Primary:
* Sensors (Current, Voltage)
* Power
Secondary:
* Sensors (Current, Voltage)
* Power
Diagnostics:
* Efficiency
* Conversion Ratio
"""
info = {
# View revision author
'author': 'KKENNEDY',
# View version
'version': '1.0',
# Revision date of View version
'date': '2015-02-11',
# List of compatible models
'validDrivers': ['drivers.UPEL.BDPC.m_BDPC_BR2',
'drivers.UPEL.BDPC.m_BDPC_BR32',
'drivers.UPEL.BDPC.m_BDPC_SRC6']
}
def run(self):
# TODO: Make this work
# self.instr = self.getModelObject()
self.wm_title("BDPC Controller")
self.instr = self.getInstrument()
self.frame_left = Tk.Frame(self)
#=======================================================================
# Control
#=======================================================================
self.frame_control = Tk.LabelFrame(self.frame_left,
text="Control",
padx=5, pady=5)
self.ops_control = vw_state.vw_BinaryFields(self.frame_control,
cb_get=self.methodWrapper(self.instr, 'getOption'),
cb_set=self.methodWrapper(self.instr, 'setOption'),
fields=self.methodWrapper(self.instr, 'getOptionFields')(),
names=self.methodWrapper(self.instr, 'getOptionDescriptions')() )
self.ops_control.pack()
self.frame_control.pack()
#=======================================================================
# Status
#=======================================================================
self.frame_status = Tk.LabelFrame(self.frame_left, text="Status", padx=5, pady=5)
self.ops_status = vw_state.vw_BinaryFields(self.frame_status,
cb_get=self.methodWrapper(self.instr, 'getStatus'),
cb_set=None,
fields=self.methodWrapper(self.instr, 'getStatusFields')(),
names=self.methodWrapper(self.instr, 'getStatusDescriptions')(),
update_interval=1000 )
self.ops_status.pack()
self.frame_status.pack()
#=======================================================================
# Parameters
#=======================================================================
self.frame_param = Tk.LabelFrame(self.frame_left, text="Parameters", padx=5, pady=5)
self.param_v = vw_entry.vw_Text(self.frame_param,
get_cb=self.methodWrapper(self.instr, 'getVoltageReference'),
set_cb=self.methodWrapper(self.instr, 'setVoltageReference'),
label="Voltage", units="V")
self.param_v.pack()
self.param_i = vw_entry.vw_Text(self.frame_param,
get_cb=self.methodWrapper(self.instr, 'getCurrentReference'),
set_cb=self.methodWrapper(self.instr, 'setCurrentReference'),
label="Current", units="A")
self.param_i.pack()
self.param_p = vw_entry.vw_Text(self.frame_param,
get_cb=self.methodWrapper(self.instr, 'getPowerReference'),
set_cb=self.methodWrapper(self.instr, 'setPowerReference'),
label="Power", units="W")
self.param_p.pack()
self.param_setall = vw_state.vw_Trigger(self.frame_param,
cb_func=self.cb_setAllParams,
label="Set All", button_label="Set")
self.param_setall.pack()
self.frame_param.pack()
self.frame_left.grid(row=0, column=0, rowspan=2)
#-----------------------------------------------------------------------
self.frame_middle = Tk.Frame(self)
#=======================================================================
# Sensors
#=======================================================================
self.sensor_widgets = []
self.frame_sensors = Tk.LabelFrame(self.frame_middle, text="Sensors", padx=5, pady=5)
sensor1 = BDPC_Sensor(self.frame_sensors, self.instr, 'PrimaryVoltage',
)#update_interval=1000)
sensor1.pack()
self.sensor_widgets.append(sensor1)
sensor2 = BDPC_Sensor(self.frame_sensors, self.instr, 'SecondaryVoltage',
)#update_interval=1000)
sensor2.pack()
self.sensor_widgets.append(sensor2)
sensor3 = BDPC_Sensor(self.frame_sensors, self.instr, 'PrimaryCurrent',
)#update_interval=1000)
sensor3.pack()
self.sensor_widgets.append(sensor3)
sensor4 = BDPC_Sensor(self.frame_sensors, self.instr, 'SecondaryCurrent',
)#update_interval=1000)
sensor4.pack()
self.sensor_widgets.append(sensor4)
self.frame_sensors.pack()
self.frame_zvs = Tk.LabelFrame(self.frame_middle, text="Zero Voltage Switching (ZVS)", padx=5, pady=5)
sensor1 = BDPC_Sensor(self.frame_zvs, self.instr, 'ZVSCurrentA')
sensor1.pack()
self.sensor_widgets.append(sensor1)
sensor2 = BDPC_Sensor(self.frame_zvs, self.instr, 'ZVSCurrentB')
sensor2.pack()
self.sensor_widgets.append(sensor2)
sensor3 = BDPC_Sensor(self.frame_zvs, self.instr, 'ZVSCurrentC')
sensor3.pack()
self.sensor_widgets.append(sensor3)
sensor4 = BDPC_Sensor(self.frame_zvs, self.instr, 'ZVSCurrentD')
sensor4.pack()
self.sensor_widgets.append(sensor4)
self.frame_zvs.pack()
#=======================================================================
# Diagnostics
#=======================================================================
self.frame_diag = Tk.LabelFrame(self.frame_middle, text="Diagnostics", padx=5, pady=5)
self.pri_power = vw_entry.vw_Text(self.frame_diag,
label="Input Power", units="W",
get_cb=self.methodWrapper(self.instr, 'getPrimaryPower'))
self.pri_power.pack()
self.sec_power = vw_entry.vw_Text(self.frame_diag,
label="Output Power", units="W",
get_cb=self.methodWrapper(self.instr, 'getSecondaryPower'))
self.sec_power.pack()
self.diag_efficiency = vw_entry.vw_Text(self.frame_diag,
get_cb=self.methodWrapper(self.instr, 'getEfficiency'),
label="Efficiency", units="%",
)#update_interval=5000)
self.diag_efficiency.pack()
self.diag_convRatio = vw_entry.vw_Text(self.frame_diag,
get_cb=self.methodWrapper(self.instr, 'getConversionRatioCalc'),
label="Conversion Ratio", units="")
self.diag_convRatio.pack()
self.diag_pcmd = vw_entry.vw_Text(self.frame_diag,
get_cb=self.methodWrapper(self.instr, 'getPowerCommand'),
label="Power Command", units="%")
self.diag_pcmd.pack()
self.frame_diag.pack()
self.frame_middle.grid(row=0, column=1, rowspan=2)
#=======================================================================
# Graphs
#=======================================================================
self.graph_input = vw_plots.vw_Plot(self, title="Input")
self.graph_input.addCollectorPlot('Voltage', self.instr, method='getInputVoltage')
self.graph_input.addCollectorPlot('Current', self.instr, method='getInputCurrent')
self.graph_input.grid(row=0, column=2)
self.graph_output = vw_plots.vw_Plot(self, title="Output")
self.graph_output.addCollectorPlot('Voltage', self.instr, method='getOutputVoltage')
self.graph_output.addCollectorPlot('Current', self.instr, method='getOutputCurrent')
self.graph_output.grid(row=1, column=2)
def cb_refreshToggle(self):
pass
def cb_setAllParams(self):
v = float(self.param_v.get())
i = float(self.param_i.get())
p = float(self.param_p.get())
new_v = self.instr.setVoltage(v)
new_i = self.instr.setCurrent(i)
new_p = self.instr.setPower(p)
self.instr.commitParameters()
self.param_v.set(new_v)
self.param_i.set(new_i)
self.param_p.set(new_p)
class BDPC_Sensor(vw.vw_Base):
"""
ICP Sensor widget: a specialised register display for a single sensor,
showing its description, current value and units.
TODO: Add Calibration editor
"""
def __init__(self, master, model, sensor, **kwargs):
vw.vw_Base.__init__(self, master, 8, 2)
self.instr = model
self.sensor = sensor
name = self.instr.getSensorDescription(sensor)
units = self.instr.getSensorUnits(sensor)
self.f_top = Tk.Frame(self)
self.l_name = Tk.Label(self.f_top, width=25, font=("Purisa", 12), text=name, anchor=Tk.W, justify=Tk.LEFT)
self.l_name.pack(side=Tk.LEFT)
self.f_top.grid(row=0, column=0, sticky=Tk.N+Tk.E+Tk.W)
self.f_bottom = Tk.Frame(self)
self.l_units = Tk.Label(self.f_bottom, width=2, font=("Purisa", 12), text=units)
self.l_units.pack(side=Tk.RIGHT)
self.val = Tk.StringVar()
self.val.set("0")
self.l_data = Tk.Label(self.f_bottom, width=6, font=("Purisa", 10), textvariable=self.val, relief=Tk.RIDGE)
self.l_data.pack(side=Tk.RIGHT)
self.l_calibrate = Tk.Button(self.f_bottom, text="Calibrate", font=("Purisa", 10), state=Tk.DISABLED)
self.l_calibrate.pack(side=Tk.LEFT)
self.f_bottom.grid(row=1, column=0, sticky=Tk.E+Tk.S+Tk.W)
self.update_interval = kwargs.get('update_interval', None)
self._schedule_update()
def cb_update(self):
try:
val = self.instr.getSensorValue(self.sensor)
val = "{:.2f}".format(val)
self.val.set(val)
except Exception:  # flag a failed read without breaking the update loop
self.l_data.config(bg="red")
def get(self):
return self.val.get()
class BDPC_MainController(Tk.Toplevel):
"""
Main Controller:
* Closed-Loop Controller Gain
* Power Command
* MMC Droop Resistance
* Dead Time (Td)
Minimum Current Trajectory (MCT):
* Power Command
* Conversion Ratio
* Phi AB, AD, DC
"""
pass
class BDPC_AuxController(Tk.Toplevel):
"""
Auxiliary Leg Controllers:
* Angle Command (ACMD or Phi')
* Desired ZVS current
* Measured ZVS current
* Minimum Angle Command
Aux Controller Programmable Parameters:
* Dead Time (Tda)
* Open Loop Angle Command
"""
pass
|