code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the main global admin page."""
import copy
import config
import view_tests_base
class AdminGlobalIndexViewTests(view_tests_base.ViewTestsBase):
    """Tests the global admin index view."""

    # Configuration seeded into the global ('*') config domain before each
    # test.  JSON-valued settings are stored as JSON strings.
    _PRIOR_CONFIG = {
        'sms_number_to_repo': '{"+15551234567": "haiti"}',
        'repo_aliases': '{"h": "haiti"}',
        'brand': 'none',
        'privacy_policy_url': 'www.example.com/privacy',
        'tos_url': 'www.example.com/tos',
        'feedback_url': 'www.example.com/feedback',
        'captcha_site_key': 'captcha-key',
        'captcha_secret_key': 'captcha-secret-key',
        'analytics_id': 'analytics-id',
        'amp_gtm_id': 'amp-gtm-id',
        'maps_api_key': 'maps-api-key',
        'translate_api_key': 'translate-api-key',
        'notification_email': 'notifications@example.com',
        'unreviewed_notes_threshold': 12,
    }

    # Baseline POST parameters.  Mirrors _PRIOR_CONFIG, except that every
    # value is a string, the way a browser form would submit it.
    _BASE_POST_PARAMS = {
        'sms_number_to_repo': '{"+15551234567": "haiti"}',
        'repo_aliases': '{"h": "haiti"}',
        'brand': 'none',
        'privacy_policy_url': 'www.example.com/privacy',
        'tos_url': 'www.example.com/tos',
        'feedback_url': 'www.example.com/feedback',
        'captcha_site_key': 'captcha-key',
        'captcha_secret_key': 'captcha-secret-key',
        'analytics_id': 'analytics-id',
        'amp_gtm_id': 'amp-gtm-id',
        'maps_api_key': 'maps-api-key',
        'translate_api_key': 'translate-api-key',
        'notification_email': 'notifications@example.com',
        'unreviewed_notes_threshold': '12',
    }

    def setUp(self):
        """Creates a repo, seeds the global config, and logs in as superadmin."""
        super(AdminGlobalIndexViewTests, self).setUp()
        self.data_generator.repo()
        config.set_for_repo('*', **AdminGlobalIndexViewTests._PRIOR_CONFIG)
        self.login_as_superadmin()

    def test_get(self):
        """Tests GET requests."""
        resp = self.client.get('/global/admin/', secure=True)
        # JSON-valued settings are re-encoded for display in the form, hence
        # the extra layer of quoting/escaping in the expected values.
        self.assertEqual(
            resp.context.get('sms_config'), {
                'sms_number_to_repo': '"{\\"+15551234567\\": \\"haiti\\"}"',
            })
        self.assertEqual(
            resp.context.get('repo_alias_config'), {
                'repo_aliases': '"{\\"h\\": \\"haiti\\"}"',
            })
        self.assertEqual(
            resp.context.get('site_info_config'), {
                'brand': 'none',
                'privacy_policy_url': 'www.example.com/privacy',
                'tos_url': 'www.example.com/tos',
                'feedback_url': 'www.example.com/feedback',
            })
        self.assertEqual(
            resp.context.get('recaptcha_config'), {
                'captcha_site_key': 'captcha-key',
                'captcha_secret_key': 'captcha-secret-key',
            })
        self.assertEqual(
            resp.context.get('ganalytics_config'), {
                'analytics_id': 'analytics-id',
                'amp_gtm_id': 'amp-gtm-id',
            })
        self.assertEqual(
            resp.context.get('gmaps_config'), {
                'maps_api_key': 'maps-api-key',
            })
        self.assertEqual(
            resp.context.get('gtranslate_config'), {
                'translate_api_key': 'translate-api-key',
            })
        self.assertEqual(
            resp.context.get('notification_config'), {
                'notification_email': 'notifications@example.com',
                'unreviewed_notes_threshold': '12',
            })

    def test_edit_sms_config(self):
        self._post_with_params(sms_number_to_repo='{"+1800pfhaiti": "haiti"}')
        conf = config.Configuration('*')
        self.assertEqual(conf.sms_number_to_repo, {'+1800pfhaiti': 'haiti'})

    def test_edit_repo_alias_config(self):
        self._post_with_params(repo_aliases='{"e": "ecuador"}')
        conf = config.Configuration('*')
        self.assertEqual(conf.repo_aliases, {'e': 'ecuador'})

    def test_edit_site_info_config(self):
        self._post_with_params(
            brand='google',
            privacy_policy_url='othersite.org/privacy',
            tos_url='othersite.org/tos',
            feedback_url='othersite.org/feedback')
        conf = config.Configuration('*')
        self.assertEqual(conf.brand, 'google')
        self.assertEqual(conf.privacy_policy_url, 'othersite.org/privacy')
        self.assertEqual(conf.tos_url, 'othersite.org/tos')
        self.assertEqual(conf.feedback_url, 'othersite.org/feedback')

    def test_edit_recaptcha_config(self):
        self._post_with_params(
            captcha_site_key='NEW-captcha-key',
            captcha_secret_key='NEW-captcha-secret-key')
        conf = config.Configuration('*')
        self.assertEqual(conf.captcha_site_key, 'NEW-captcha-key')
        self.assertEqual(conf.captcha_secret_key, 'NEW-captcha-secret-key')

    def test_edit_ganalytics_config(self):
        self._post_with_params(
            analytics_id='NEW-analytics-id',
            amp_gtm_id='NEW-amp-gtm-id')
        conf = config.Configuration('*')
        self.assertEqual(conf.analytics_id, 'NEW-analytics-id')
        self.assertEqual(conf.amp_gtm_id, 'NEW-amp-gtm-id')

    def test_edit_gmaps_config(self):
        self._post_with_params(maps_api_key='NEW-maps-api-key')
        conf = config.Configuration('*')
        self.assertEqual(conf.maps_api_key, 'NEW-maps-api-key')

    def test_edit_gtranslate_config(self):
        self._post_with_params(translate_api_key='NEW-translate-api-key')
        conf = config.Configuration('*')
        self.assertEqual(conf.translate_api_key, 'NEW-translate-api-key')

    def test_edit_notification_config(self):
        # The threshold is posted as a string but stored as an int.
        self._post_with_params(
            notification_email='notifications@othersite.org',
            unreviewed_notes_threshold='86')
        conf = config.Configuration('*')
        self.assertEqual(conf.notification_email, 'notifications@othersite.org')
        self.assertEqual(conf.unreviewed_notes_threshold, 86)

    def _post_with_params(self, **kwargs):
        """POSTs the base params, plus overrides and a fresh XSRF token.

        NOTE(review): this GET uses '/global/admin' (no trailing slash) while
        test_get and the POST below use '/global/admin/' — presumably both
        resolve to the same view; confirm the mismatch is intentional.
        """
        get_doc = self.to_doc(self.client.get('/global/admin', secure=True))
        xsrf_token = get_doc.cssselect_one('input[name="xsrf_token"]').get(
            'value')
        post_params = copy.deepcopy(AdminGlobalIndexViewTests._BASE_POST_PARAMS)
        post_params['xsrf_token'] = xsrf_token
        post_params.update(kwargs)
        return self.client.post('/global/admin/', post_params, secure=True)
| google/personfinder | tests/views/test_admin_global_index.py | Python | apache-2.0 | 6,931 |
#!/usr/bin/env python3
from GModServer import Variables
import os
def StartGarrysModServer(steanApiAuthKey=Variables.SteamApiAuthKey, steamWorkShopID=Variables.SteamWorkShopId,
                         serverGamemode=Variables.ServerGamemode, serverDefaultMap=Variables.ServerDefaultMap,
                         serverPort=Variables.ServerPort, serverMaxPlayer=Variables.ServerMaxPlayer,
                         serverRunFile=Variables.ServerRunFile, debug=False):
    """Build the srcds command line for a Garry's Mod server and run it.

    Blocks until the server process exits (os.system).  When ``debug`` is
    true, the full command line is printed before launching.

    NOTE(review): ``steanApiAuthKey`` is a typo for ``steamApiAuthKey`` but is
    part of the public keyword interface, so it is kept for compatibility.

    HACK: the command is passed to the shell as one string via os.system;
    values containing shell metacharacters would be interpreted by the shell.
    Consider subprocess.run with an argument list if inputs are untrusted.
    """
    command = ('%s -game garrysmod +maxplayers %s -authkey %s '
               '+host_workshop_collection %s +map %s +gamemode %s +port %s'
               % (serverRunFile, serverMaxPlayer, steanApiAuthKey,
                  steamWorkShopID, serverDefaultMap, serverGamemode,
                  serverPort))
    if debug:
        print(command)
    os.system(command)  # start the gMod server; blocks until it exits
if __name__ == '__main__':
    # This module is a library; executing it directly is an error by design.
    from PythonServerKernel.Exceptions import RunnedFromFalseFile
    raise RunnedFromFalseFile('GModServer_StartServer_py')
import keras | jadsonjs/DataScience | DeepLearning/keras/hello_world.py | Python | apache-2.0 | 12 |
# Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import os
import mock
from cinder import exception
from cinder.image import image_utils
from cinder import test
from cinder.volume.drivers import smbfs
class SmbFsTestCase(test.TestCase):
    """Unit tests for the SMBFS volume driver (cinder.volume.drivers.smbfs).

    The driver's collaborators (remotefs client, qemu-img helpers, config)
    are replaced with mocks; the ``_test_*`` helpers are parameterized and
    fanned out into the individual ``test_*`` methods below them.
    """

    # Fixture data shared by all tests.
    _FAKE_SHARE = '//1.2.3.4/share1'
    _FAKE_MNT_BASE = '/mnt'
    _FAKE_VOLUME_NAME = 'volume-4f711859-4928-4cb7-801a-a50c37ceaccc'
    _FAKE_TOTAL_SIZE = '2048'
    _FAKE_TOTAL_AVAILABLE = '1024'
    _FAKE_TOTAL_ALLOCATED = 1024
    _FAKE_VOLUME = {'id': '4f711859-4928-4cb7-801a-a50c37ceaccc',
                    'size': 1,
                    'provider_location': _FAKE_SHARE,
                    'name': _FAKE_VOLUME_NAME,
                    'status': 'available'}
    _FAKE_MNT_POINT = os.path.join(_FAKE_MNT_BASE, 'fake_hash')
    _FAKE_VOLUME_PATH = os.path.join(_FAKE_MNT_POINT, _FAKE_VOLUME_NAME)
    _FAKE_SNAPSHOT_ID = '5g811859-4928-4cb7-801a-a50c37ceacba'
    _FAKE_SNAPSHOT = {'id': _FAKE_SNAPSHOT_ID,
                      'volume': _FAKE_VOLUME,
                      'status': 'available',
                      'volume_size': 1}
    _FAKE_SNAPSHOT_PATH = (
        _FAKE_VOLUME_PATH + '-snapshot' + _FAKE_SNAPSHOT_ID)
    _FAKE_SHARE_OPTS = '-o username=Administrator,password=12345'
    _FAKE_OPTIONS_DICT = {'username': 'Administrator',
                          'password': '12345'}
    _FAKE_LISTDIR = [_FAKE_VOLUME_NAME, _FAKE_VOLUME_NAME + '.vhd',
                     _FAKE_VOLUME_NAME + '.vhdx', 'fake_folder']
    # A mock driver configuration; individual tests copy and tweak it.
    _FAKE_SMBFS_CONFIG = mock.MagicMock()
    _FAKE_SMBFS_CONFIG.smbfs_oversub_ratio = 2
    _FAKE_SMBFS_CONFIG.smbfs_used_ratio = 0.5
    _FAKE_SMBFS_CONFIG.smbfs_shares_config = '/fake/config/path'
    _FAKE_SMBFS_CONFIG.smbfs_default_volume_format = 'raw'
    _FAKE_SMBFS_CONFIG.smbfs_sparsed_volumes = False

    def setUp(self):
        """Builds a driver instance with mocked-out collaborators."""
        super(SmbFsTestCase, self).setUp()
        self._smbfs_driver = smbfs.SmbfsDriver(configuration=mock.Mock())
        self._smbfs_driver._remotefsclient = mock.Mock()
        self._smbfs_driver._local_volume_dir = mock.Mock(
            return_value=self._FAKE_MNT_POINT)
        self._smbfs_driver._execute = mock.Mock()
        self._smbfs_driver.base = self._FAKE_MNT_BASE

    def test_delete_volume(self):
        """delete_volume mounts the share and deletes both image and info file."""
        drv = self._smbfs_driver
        fake_vol_info = self._FAKE_VOLUME_PATH + '.info'
        drv._ensure_share_mounted = mock.MagicMock()
        fake_ensure_mounted = drv._ensure_share_mounted
        drv._local_volume_dir = mock.Mock(
            return_value=self._FAKE_MNT_POINT)
        drv.get_active_image_from_info = mock.Mock(
            return_value=self._FAKE_VOLUME_NAME)
        drv._delete = mock.Mock()
        drv._local_path_volume_info = mock.Mock(
            return_value=fake_vol_info)
        with mock.patch('os.path.exists', lambda x: True):
            drv.delete_volume(self._FAKE_VOLUME)
            fake_ensure_mounted.assert_called_once_with(self._FAKE_SHARE)
            drv._delete.assert_any_call(
                self._FAKE_VOLUME_PATH)
            drv._delete.assert_any_call(fake_vol_info)

    @mock.patch('os.path.exists')
    @mock.patch.object(image_utils, 'check_qemu_img_version')
    def _test_setup(self, mock_check_qemu_img_version,
                    mock_exists, config, share_config_exists=True):
        """Exercises do_setup; invalid config must raise SmbfsException."""
        mock_exists.return_value = share_config_exists
        fake_ensure_mounted = mock.MagicMock()
        self._smbfs_driver._ensure_shares_mounted = fake_ensure_mounted
        self._smbfs_driver.configuration = config
        # Setup is valid only with a shares config file present, a positive
        # oversubscription ratio and a used ratio within [0, 1].
        if not (config.smbfs_shares_config and share_config_exists and
                config.smbfs_oversub_ratio > 0 and
                0 <= config.smbfs_used_ratio <= 1):
            self.assertRaises(exception.SmbfsException,
                              self._smbfs_driver.do_setup,
                              None)
        else:
            self._smbfs_driver.do_setup(mock.sentinel.context)
            mock_check_qemu_img_version.assert_called_once_with()
            self.assertEqual(self._smbfs_driver.shares, {})
            fake_ensure_mounted.assert_called_once_with()

    def test_setup_missing_shares_config_option(self):
        fake_config = copy.copy(self._FAKE_SMBFS_CONFIG)
        fake_config.smbfs_shares_config = None
        self._test_setup(config=fake_config,
                         share_config_exists=False)

    def test_setup_missing_shares_config_file(self):
        self._test_setup(config=self._FAKE_SMBFS_CONFIG,
                         share_config_exists=False)

    def test_setup_invlid_oversub_ratio(self):
        fake_config = copy.copy(self._FAKE_SMBFS_CONFIG)
        fake_config.smbfs_oversub_ratio = -1
        self._test_setup(config=fake_config)

    def test_setup_invalid_used_ratio(self):
        fake_config = copy.copy(self._FAKE_SMBFS_CONFIG)
        fake_config.smbfs_used_ratio = -1
        self._test_setup(config=fake_config)

    def _test_create_volume(self, volume_exists=False, volume_format=None):
        """Checks _do_create_volume dispatches to the right creation helper.

        vhd/vhdx go to _create_windows_image (vhd uses the legacy 'vpc'
        qemu name); other formats go to _create_<fmt>_file; no format means
        a regular file.  An already-existing volume must raise.
        """
        fake_method = mock.MagicMock()
        self._smbfs_driver.configuration = copy.copy(self._FAKE_SMBFS_CONFIG)
        self._smbfs_driver._set_rw_permissions_for_all = mock.MagicMock()
        fake_set_permissions = self._smbfs_driver._set_rw_permissions_for_all
        self._smbfs_driver.get_volume_format = mock.MagicMock()

        windows_image_format = False
        fake_vol_path = self._FAKE_VOLUME_PATH
        self._smbfs_driver.get_volume_format.return_value = volume_format

        if volume_format:
            if volume_format in ('vhd', 'vhdx'):
                windows_image_format = volume_format
                if volume_format == 'vhd':
                    # qemu-img calls the legacy VHD format 'vpc'.
                    windows_image_format = 'vpc'
                method = '_create_windows_image'
                fake_vol_path += '.' + volume_format
            else:
                method = '_create_%s_file' % volume_format
                if volume_format == 'sparsed':
                    self._smbfs_driver.configuration.smbfs_sparsed_volumes = (
                        True)
        else:
            method = '_create_regular_file'

        setattr(self._smbfs_driver, method, fake_method)

        with mock.patch('os.path.exists', new=lambda x: volume_exists):
            if volume_exists:
                self.assertRaises(exception.InvalidVolume,
                                  self._smbfs_driver._do_create_volume,
                                  self._FAKE_VOLUME)
                return

            self._smbfs_driver._do_create_volume(self._FAKE_VOLUME)
            if windows_image_format:
                fake_method.assert_called_once_with(
                    fake_vol_path,
                    self._FAKE_VOLUME['size'],
                    windows_image_format)
            else:
                fake_method.assert_called_once_with(
                    fake_vol_path, self._FAKE_VOLUME['size'])
            fake_set_permissions.assert_called_once_with(fake_vol_path)

    def test_create_existing_volume(self):
        self._test_create_volume(volume_exists=True)

    def test_create_vhdx(self):
        self._test_create_volume(volume_format='vhdx')

    def test_create_qcow2(self):
        self._test_create_volume(volume_format='qcow2')

    def test_create_sparsed(self):
        self._test_create_volume(volume_format='sparsed')

    def test_create_regular(self):
        self._test_create_volume()

    def _test_find_share(self, existing_mounted_shares=True,
                         eligible_shares=True):
        """_find_share picks the eligible share with least allocated space."""
        if existing_mounted_shares:
            mounted_shares = ('fake_share1', 'fake_share2', 'fake_share3')
        else:
            mounted_shares = None

        self._smbfs_driver._mounted_shares = mounted_shares
        self._smbfs_driver._is_share_eligible = mock.Mock(
            return_value=eligible_shares)
        # (total, available, allocated) per share; share3 has least allocated.
        fake_capacity_info = ((2, 1, 5), (2, 1, 4), (2, 1, 1))
        self._smbfs_driver._get_capacity_info = mock.Mock(
            side_effect=fake_capacity_info)

        if not mounted_shares:
            self.assertRaises(exception.SmbfsNoSharesMounted,
                              self._smbfs_driver._find_share,
                              self._FAKE_VOLUME['size'])
        elif not eligible_shares:
            self.assertRaises(exception.SmbfsNoSuitableShareFound,
                              self._smbfs_driver._find_share,
                              self._FAKE_VOLUME['size'])
        else:
            ret_value = self._smbfs_driver._find_share(
                self._FAKE_VOLUME['size'])
            # The eligible share with the minimum allocated space
            # will be selected
            self.assertEqual(ret_value, 'fake_share3')

    def test_find_share(self):
        self._test_find_share()

    def test_find_share_missing_mounted_shares(self):
        self._test_find_share(existing_mounted_shares=False)

    def test_find_share_missing_eligible_shares(self):
        self._test_find_share(eligible_shares=False)

    def _test_is_share_eligible(self, capacity_info, volume_size):
        """Returns _is_share_eligible for GiB-denominated capacity_info."""
        self._smbfs_driver._get_capacity_info = mock.Mock(
            return_value=[float(x << 30) for x in capacity_info])
        self._smbfs_driver.configuration = self._FAKE_SMBFS_CONFIG
        return self._smbfs_driver._is_share_eligible(self._FAKE_SHARE,
                                                     volume_size)

    def test_share_volume_above_used_ratio(self):
        fake_capacity_info = (4, 1, 1)
        fake_volume_size = 2
        ret_value = self._test_is_share_eligible(fake_capacity_info,
                                                 fake_volume_size)
        self.assertFalse(ret_value)

    def test_eligible_share(self):
        fake_capacity_info = (4, 4, 0)
        fake_volume_size = 1
        ret_value = self._test_is_share_eligible(fake_capacity_info,
                                                 fake_volume_size)
        self.assertTrue(ret_value)

    def test_share_volume_above_oversub_ratio(self):
        fake_capacity_info = (4, 4, 7)
        fake_volume_size = 2
        ret_value = self._test_is_share_eligible(fake_capacity_info,
                                                 fake_volume_size)
        self.assertFalse(ret_value)

    def test_share_reserved_above_oversub_ratio(self):
        fake_capacity_info = (4, 4, 10)
        fake_volume_size = 1
        ret_value = self._test_is_share_eligible(fake_capacity_info,
                                                 fake_volume_size)
        self.assertFalse(ret_value)

    def test_parse_options(self):
        (opt_list,
         opt_dict) = self._smbfs_driver.parse_options(
            self._FAKE_SHARE_OPTS)
        expected_ret = ([], self._FAKE_OPTIONS_DICT)
        self.assertEqual(expected_ret, (opt_list, opt_dict))

    def test_parse_credentials(self):
        # Domain prefixes ('MyDomain\\') are stripped from the username.
        fake_smb_options = r'-o user=MyDomain\Administrator,noperm'
        expected_flags = '-o username=Administrator,noperm'
        flags = self._smbfs_driver.parse_credentials(fake_smb_options)
        self.assertEqual(expected_flags, flags)

    @mock.patch.object(smbfs.SmbfsDriver, '_get_local_volume_path_template')
    @mock.patch.object(smbfs.SmbfsDriver, '_lookup_local_volume_path')
    @mock.patch.object(smbfs.SmbfsDriver, 'get_volume_format')
    def _test_get_volume_path(self, mock_get_volume_format, mock_lookup_volume,
                              mock_get_path_template, volume_exists=True,
                              volume_format='raw'):
        """local_path appends a .vhd/.vhdx extension for Windows formats."""
        drv = self._smbfs_driver
        mock_get_path_template.return_value = self._FAKE_VOLUME_PATH
        expected_vol_path = self._FAKE_VOLUME_PATH
        if volume_format in (drv._DISK_FORMAT_VHD, drv._DISK_FORMAT_VHDX):
            expected_vol_path += '.' + volume_format

        mock_lookup_volume.return_value = (
            expected_vol_path if volume_exists else None)
        mock_get_volume_format.return_value = volume_format

        ret_val = drv.local_path(self._FAKE_VOLUME)

        if volume_exists:
            # Existing volumes are found by lookup; no format query needed.
            self.assertFalse(mock_get_volume_format.called)
        else:
            mock_get_volume_format.assert_called_once_with(self._FAKE_VOLUME)
        self.assertEqual(expected_vol_path, ret_val)

    def test_get_existing_volume_path(self):
        self._test_get_volume_path()

    def test_get_new_raw_volume_path(self):
        self._test_get_volume_path(volume_exists=False)

    def test_get_new_vhd_volume_path(self):
        self._test_get_volume_path(volume_exists=False, volume_format='vhd')

    @mock.patch.object(smbfs.SmbfsDriver, '_local_volume_dir')
    def test_get_local_volume_path_template(self, mock_get_local_dir):
        mock_get_local_dir.return_value = self._FAKE_MNT_POINT
        ret_val = self._smbfs_driver._get_local_volume_path_template(
            self._FAKE_VOLUME)
        self.assertEqual(self._FAKE_VOLUME_PATH, ret_val)

    @mock.patch('os.path.exists')
    def test_lookup_local_volume_path(self, mock_exists):
        """The lookup probes '', '.vhd' and '.vhdx' extensions in order."""
        expected_path = self._FAKE_VOLUME_PATH + '.vhdx'
        mock_exists.side_effect = lambda x: x == expected_path

        ret_val = self._smbfs_driver._lookup_local_volume_path(
            self._FAKE_VOLUME_PATH)

        possible_paths = [self._FAKE_VOLUME_PATH + ext
                          for ext in ('', '.vhd', '.vhdx')]
        mock_exists.assert_has_calls(
            [mock.call(path) for path in possible_paths])
        self.assertEqual(expected_path, ret_val)

    @mock.patch.object(smbfs.SmbfsDriver, '_get_local_volume_path_template')
    @mock.patch.object(smbfs.SmbfsDriver, '_lookup_local_volume_path')
    @mock.patch.object(smbfs.SmbfsDriver, '_qemu_img_info')
    @mock.patch.object(smbfs.SmbfsDriver, '_get_volume_format_spec')
    def _mock_get_volume_format(self, mock_get_format_spec, mock_qemu_img_info,
                                mock_lookup_volume, mock_get_path_template,
                                qemu_format=False, volume_format='raw',
                                volume_exists=True):
        """get_volume_format: qemu-img probe for existing files, spec otherwise."""
        mock_get_path_template.return_value = self._FAKE_VOLUME_PATH
        mock_lookup_volume.return_value = (
            self._FAKE_VOLUME_PATH if volume_exists else None)

        mock_qemu_img_info.return_value.file_format = volume_format
        mock_get_format_spec.return_value = volume_format

        ret_val = self._smbfs_driver.get_volume_format(self._FAKE_VOLUME,
                                                       qemu_format)

        if volume_exists:
            mock_qemu_img_info.assert_called_once_with(self._FAKE_VOLUME_PATH,
                                                       self._FAKE_VOLUME_NAME)
            self.assertFalse(mock_get_format_spec.called)
        else:
            mock_get_format_spec.assert_called_once_with(self._FAKE_VOLUME)
            self.assertFalse(mock_qemu_img_info.called)

        return ret_val

    def test_get_existing_raw_volume_format(self):
        fmt = self._mock_get_volume_format()
        self.assertEqual(fmt, 'raw')

    def test_get_new_vhd_volume_format(self):
        expected_fmt = 'vhd'
        fmt = self._mock_get_volume_format(volume_format=expected_fmt,
                                           volume_exists=False)
        self.assertEqual(expected_fmt, fmt)

    def test_get_new_vhd_legacy_volume_format(self):
        # With qemu_format=True, 'vhd' is reported under qemu's name 'vpc'.
        img_fmt = 'vhd'
        expected_fmt = 'vpc'
        ret_val = self._mock_get_volume_format(volume_format=img_fmt,
                                               volume_exists=False,
                                               qemu_format=True)
        self.assertEqual(expected_fmt, ret_val)

    def test_initialize_connection(self):
        self._smbfs_driver.get_active_image_from_info = mock.Mock(
            return_value=self._FAKE_VOLUME_NAME)
        self._smbfs_driver._get_mount_point_base = mock.Mock(
            return_value=self._FAKE_MNT_BASE)
        self._smbfs_driver.shares = {self._FAKE_SHARE: self._FAKE_SHARE_OPTS}
        self._smbfs_driver._qemu_img_info = mock.Mock(
            return_value=mock.Mock(file_format='raw'))

        fake_data = {'export': self._FAKE_SHARE,
                     'format': 'raw',
                     'name': self._FAKE_VOLUME_NAME,
                     'options': self._FAKE_SHARE_OPTS}
        expected = {
            'driver_volume_type': 'smbfs',
            'data': fake_data,
            'mount_point_base': self._FAKE_MNT_BASE}
        ret_val = self._smbfs_driver.initialize_connection(
            self._FAKE_VOLUME, None)

        self.assertEqual(expected, ret_val)

    def _test_extend_volume(self, extend_failed=False, image_format='raw'):
        """_extend_volume resizes in place for raw; vhd/vhdx go via a raw tmp.

        A size mismatch after resizing must raise ExtendVolumeError.
        """
        drv = self._smbfs_driver

        drv.local_path = mock.Mock(
            return_value=self._FAKE_VOLUME_PATH)
        drv._check_extend_volume_support = mock.Mock(
            return_value=True)
        drv._is_file_size_equal = mock.Mock(
            return_value=not extend_failed)
        drv._qemu_img_info = mock.Mock(
            return_value=mock.Mock(file_format=image_format))
        drv._delete = mock.Mock()

        with mock.patch.object(image_utils, 'resize_image') as fake_resize, \
                mock.patch.object(image_utils, 'convert_image') as \
                fake_convert:
            if extend_failed:
                self.assertRaises(exception.ExtendVolumeError,
                                  drv._extend_volume,
                                  self._FAKE_VOLUME, mock.sentinel.new_size)
            else:
                drv._extend_volume(
                    self._FAKE_VOLUME,
                    mock.sentinel.new_size)
                if image_format in (drv._DISK_FORMAT_VHDX,
                                    drv._DISK_FORMAT_VHD_LEGACY):
                    # Windows formats: convert to raw, resize, convert back.
                    fake_tmp_path = self._FAKE_VOLUME_PATH + '.tmp'
                    fake_convert.assert_any_call(self._FAKE_VOLUME_PATH,
                                                 fake_tmp_path, 'raw')
                    fake_resize.assert_called_once_with(
                        fake_tmp_path, mock.sentinel.new_size)
                    fake_convert.assert_any_call(fake_tmp_path,
                                                 self._FAKE_VOLUME_PATH,
                                                 image_format)
                else:
                    fake_resize.assert_called_once_with(
                        self._FAKE_VOLUME_PATH, mock.sentinel.new_size)

    def test_extend_volume(self):
        self._test_extend_volume()

    def test_extend_volume_failed(self):
        self._test_extend_volume(extend_failed=True)

    def test_extend_vhd_volume(self):
        self._test_extend_volume(image_format='vpc')

    def _test_check_extend_support(self, has_snapshots=False,
                                   is_eligible=True):
        """Extend is refused for snapshotted volumes or ineligible shares."""
        self._smbfs_driver.local_path = mock.Mock(
            return_value=self._FAKE_VOLUME_PATH)

        if has_snapshots:
            active_file_path = self._FAKE_SNAPSHOT_PATH
        else:
            active_file_path = self._FAKE_VOLUME_PATH

        self._smbfs_driver.get_active_image_from_info = mock.Mock(
            return_value=active_file_path)
        self._smbfs_driver._is_share_eligible = mock.Mock(
            return_value=is_eligible)

        if has_snapshots:
            self.assertRaises(exception.InvalidVolume,
                              self._smbfs_driver._check_extend_volume_support,
                              self._FAKE_VOLUME, 2)
        elif not is_eligible:
            self.assertRaises(exception.ExtendVolumeError,
                              self._smbfs_driver._check_extend_volume_support,
                              self._FAKE_VOLUME, 2)
        else:
            self._smbfs_driver._check_extend_volume_support(
                self._FAKE_VOLUME, 2)
            self._smbfs_driver._is_share_eligible.assert_called_once_with(
                self._FAKE_SHARE, 1)

    def test_check_extend_support(self):
        self._test_check_extend_support()

    def test_check_extend_volume_with_snapshots(self):
        self._test_check_extend_support(has_snapshots=True)

    def test_check_extend_volume_uneligible_share(self):
        self._test_check_extend_support(is_eligible=False)

    def test_create_volume_from_in_use_snapshot(self):
        fake_snapshot = {'status': 'in-use'}
        self.assertRaises(
            exception.InvalidSnapshot,
            self._smbfs_driver.create_volume_from_snapshot,
            self._FAKE_VOLUME, fake_snapshot)

    def test_copy_volume_from_snapshot(self):
        drv = self._smbfs_driver

        fake_volume_info = {self._FAKE_SNAPSHOT_ID: 'fake_snapshot_file_name'}
        fake_img_info = mock.MagicMock()
        fake_img_info.backing_file = self._FAKE_VOLUME_NAME

        drv.get_volume_format = mock.Mock(
            return_value='raw')
        drv._local_path_volume_info = mock.Mock(
            return_value=self._FAKE_VOLUME_PATH + '.info')
        drv._local_volume_dir = mock.Mock(
            return_value=self._FAKE_MNT_POINT)
        drv._read_info_file = mock.Mock(
            return_value=fake_volume_info)
        drv._qemu_img_info = mock.Mock(
            return_value=fake_img_info)
        # Destination path deliberately differs from the snapshot's
        # backing file path so the convert call can be distinguished.
        drv.local_path = mock.Mock(
            return_value=self._FAKE_VOLUME_PATH[:-1])
        drv._extend_volume = mock.Mock()
        drv._set_rw_permissions_for_all = mock.Mock()

        with mock.patch.object(image_utils, 'convert_image') as (
                fake_convert_image):
            drv._copy_volume_from_snapshot(
                self._FAKE_SNAPSHOT, self._FAKE_VOLUME,
                self._FAKE_VOLUME['size'])
            drv._extend_volume.assert_called_once_with(
                self._FAKE_VOLUME, self._FAKE_VOLUME['size'])
            fake_convert_image.assert_called_once_with(
                self._FAKE_VOLUME_PATH, self._FAKE_VOLUME_PATH[:-1], 'raw')

    def test_ensure_mounted(self):
        self._smbfs_driver.shares = {self._FAKE_SHARE: self._FAKE_SHARE_OPTS}

        self._smbfs_driver._ensure_share_mounted(self._FAKE_SHARE)
        self._smbfs_driver._remotefsclient.mount.assert_called_once_with(
            self._FAKE_SHARE, self._FAKE_SHARE_OPTS.split())

    def _test_copy_image_to_volume(self, wrong_size_after_fetch=False):
        """copy_image_to_volume fetches, validates size, then extends.

        If the fetched image is larger than the volume, ImageUnacceptable
        must be raised.
        """
        drv = self._smbfs_driver

        vol_size_bytes = self._FAKE_VOLUME['size'] << 30

        fake_img_info = mock.MagicMock()

        if wrong_size_after_fetch:
            fake_img_info.virtual_size = 2 * vol_size_bytes
        else:
            fake_img_info.virtual_size = vol_size_bytes

        drv.get_volume_format = mock.Mock(
            return_value=drv._DISK_FORMAT_VHDX)
        drv.local_path = mock.Mock(
            return_value=self._FAKE_VOLUME_PATH)
        drv._do_extend_volume = mock.Mock()
        drv.configuration = mock.MagicMock()
        drv.configuration.volume_dd_blocksize = (
            mock.sentinel.block_size)

        with mock.patch.object(image_utils, 'fetch_to_volume_format') as \
                fake_fetch, mock.patch.object(image_utils, 'qemu_img_info') as \
                fake_qemu_img_info:

            fake_qemu_img_info.return_value = fake_img_info

            if wrong_size_after_fetch:
                self.assertRaises(
                    exception.ImageUnacceptable,
                    drv.copy_image_to_volume,
                    mock.sentinel.context, self._FAKE_VOLUME,
                    mock.sentinel.image_service,
                    mock.sentinel.image_id)
            else:
                drv.copy_image_to_volume(
                    mock.sentinel.context, self._FAKE_VOLUME,
                    mock.sentinel.image_service,
                    mock.sentinel.image_id)
                fake_fetch.assert_called_once_with(
                    mock.sentinel.context, mock.sentinel.image_service,
                    mock.sentinel.image_id, self._FAKE_VOLUME_PATH,
                    drv._DISK_FORMAT_VHDX,
                    mock.sentinel.block_size)
                drv._do_extend_volume.assert_called_once_with(
                    self._FAKE_VOLUME_PATH,
                    self._FAKE_VOLUME['size'],
                    self._FAKE_VOLUME['name'])

    def test_copy_image_to_volume(self):
        self._test_copy_image_to_volume()

    def test_copy_image_to_volume_wrong_size_after_fetch(self):
        self._test_copy_image_to_volume(wrong_size_after_fetch=True)

    def test_get_capacity_info(self):
        """Capacity is parsed from mocked 'df'-style and 'du'-style output."""
        fake_block_size = 4096.0
        fake_total_blocks = 1024
        fake_avail_blocks = 512
        fake_total_allocated = fake_total_blocks * fake_block_size
        fake_df = ('%s %s %s' % (fake_block_size, fake_total_blocks,
                                 fake_avail_blocks), None)
        fake_du = (str(fake_total_allocated), None)

        self._smbfs_driver._get_mount_point_for_share = mock.Mock(
            return_value=self._FAKE_MNT_POINT)
        self._smbfs_driver._execute = mock.Mock(
            side_effect=(fake_df, fake_du))

        ret_val = self._smbfs_driver._get_capacity_info(self._FAKE_SHARE)
        expected = (fake_block_size * fake_total_blocks,
                    fake_block_size * fake_avail_blocks,
                    fake_total_allocated)
        self.assertEqual(expected, ret_val)
| saeki-masaki/cinder | cinder/tests/unit/test_smbfs.py | Python | apache-2.0 | 25,862 |
import sys
import logging
import pexpect
class Session(object):
NO_SESSION = 1
SESSION_AVAILABLE = 2
PRIVILEDGE_MODE = 3
CONFIGURATION_MODE = 4
def __init__(self, hostname, username='', password='', enable_username='', enable_password=''):
''' Sets up configuration to be transfered '''
self.hostname = hostname
self.username = username
self.password = password
self.enable_username = enable_username
self.enable_password = enable_password
self.session = None
self.session_state = Session.NO_SESSION
self.session_lf = ''
self.session_prompt = ''
self._login_status = False
self._enable_status = False
    def login(self):
        ''' Attempt to Login to Device '''
        # Idempotent: a second call returns immediately.
        if(self._login_status):
            return True
        COMMAND = "ssh %s@%s" % (self.username, self.hostname)
        self.session = pexpect.spawn(COMMAND)
        # NOTE(review): this log file handle is never closed and requires
        # write access to /var/log — confirm intended.
        self.session.logfile = open('/var/log/campus_ztp_icx_sshlog', 'w')
        # First phase: handle connection outcomes and host-key prompt.
        i = self.session.expect(['timed out', 'assword:', 'yes/no', 'failed', pexpect.TIMEOUT],
                                timeout=30)
        if i == 0:
            print("SSH Connection to '%s' timed out" % self.hostname)
            self._login_status = False
            return False
        if i == 1:
            self.session.sendline(self.password)
        if i == 2:
            # Unknown host key: accept it, then send the password.
            self.session.sendline('yes')
            self.session.expect('assword:')
            self.session.sendline(self.password)
        if i == 3:
            print("Known key failed to match device key!\r\n")
            self._login_status = False
            return False
        if i == 4:
            print("Failed to connect!\r\n")
            self._login_status = False
            return False
        # Should be logged in at this point
        # Second phase: '>' means user EXEC, '#' means privileged mode.
        i = self.session.expect(['assword:', '>', '#', pexpect.TIMEOUT], timeout=15)
        if i == 0:
            # incorrect credentials
            # TODO: Terminate Login
            print("Invalid login username/password for '%s'" % self.hostname)
            self._login_status = False
            return False
        if i == 1:
            self.session_state = Session.SESSION_AVAILABLE
        if i == 2:
            self.session_state = Session.PRIVILEDGE_MODE
        if i == 3:
            print("Failed to connect!\r\n")
            self._login_status = False
            return False
        # The last whitespace-separated token before the match is the
        # device's prompt (hostname) — remembered for later expects.
        self.session_prompt = "%s" % self.session.before.split()[-1]
        self._login_status = True
        return True
def sendline(self, line):
''' Wrapper function to add LF or not '''
self.session.sendline('%s%s' % (line, self.session_lf))
def enter_enable_mode(self):
''' enters enable mode '''
if(self._enable_status):
return True
if self.session_state == Session.SESSION_AVAILABLE:
prompt = self.session_prompt
self.sendline('enable')
c = self.session.expect(['assword:', 'Name:', '%s#' % prompt, pexpect.TIMEOUT])
if c == 0:
# is just asking for enable password
self.sendline(self.enable_password)
if c == 1:
# is asking for username and password
self.sendline(self.enable_username)
self.session.expect('assword:')
self.sendline(self.enable_password)
if c == 2:
# there is no enable password
self.session_state = Session.PRIVILEDGE_MODE
self._enable_status = True
return True
if c == 3:
sys.stderr.write("Timeout trying to enter enable mode\r\n")
self._enable_status = False
return False
# double check we are in enable mode
i = self.session.expect(['assword:', 'Name:', '%s>' % prompt, '%s#' % prompt])
if i < 3:
# incorrect credentials
# TODO: Terminate Login
sys.stderr.write("Invalid enable username/password!\r\n")
self._enable_status = False
return False
self.session_state = Session.PRIVILEDGE_MODE
self._enable_status = True
return True
if self.session_state == Session.PRIVILEDGE_MODE:
self._enable_status = True
return True
raise Exception("Trying to enter enable mode while State is not "
"Available or already in priviledge mode")
return False
    def enter_configuration_mode(self):
        ''' enters configuration mode '''
        if self.session_state == Session.PRIVILEDGE_MODE:
            # Regex for the '(config)#' prompt shown after 'configure terminal'.
            prompt = "\(config\)#"
            sys.stdout.write("Entering Configuration mode on %s\r\n" % self.hostname)
            self.sendline('configure terminal')
            i = self.session.expect([prompt, pexpect.TIMEOUT], timeout=5)
            if i == 0:
                self.session_state = Session.CONFIGURATION_MODE
                return True
            sys.stderr.write("Failed to enter configuration mode")
            return False
        else:
            raise Exception("Attempted to enter configuration mode when device "
                            "was not in priviledge mode on '%s'" % self.hostname)
    def exit_configuration_mode(self):
        ''' exits configuration mode '''
        if self.session_state == Session.CONFIGURATION_MODE:
            sys.stdout.write("Exiting Configuration mode on %s\r\n" % self.hostname)
            self.sendline('end')
            self.session.expect('#')
            # Send an empty line to flush and re-confirm the '#' prompt.
            self.sendline('')
            self.session.expect('#')
            sys.stdout.write("Exiting Configuration mode successful\r\n")
            self.session_state = Session.PRIVILEDGE_MODE
        else:
            raise Exception("Attempted to exit configuration mode when device "
                            "was not in configuration mode on '%s'" % self.hostname)
def create_crypto_keys(self, keytype='rsa', modulus=2048):
'''generates ssh keys. keytype can be either rsa or dsa, modules can be 1024 or 2048'''
assert (modulus == 1024 or modulus == 2048)
assert (keytype == 'dsa' or keytype == 'rsa')
if self.session_state == Session.CONFIGURATION_MODE:
sys.stdout.write("Configuring crypto keys on %s\r\n" % self.hostname)
self.sendline('crypto key generate %s modulus %d' % (keytype, modulus))
i = self.session.expect(['zeroize it', 'created', pexpect.TIMEOUT], timeout=120)
if i == 0:
self.sendline('crypto key zeroize rsa')
self.session.expect('deleted')
self.sendline('crypto key generate %s modulus %d' % (keytype, modulus))
j = self.session.expect(['created', pexpect.TIMEOUT], timeout=120)
if j == 0:
self.sendline('')
return True
sys.stderr.write("Timed out creating keys\r\n")
if i == 1:
self.sendline('')
return True
sys.stderr.write("Timed out creating keys\r\n")
return False
else:
raise Exception("Attempted to configuration crypto keys when device "
"was not in configuration mode on '%s'" % self.hostname)
def page_on(self):
if self.session_state == Session.PRIVILEDGE_MODE:
prompt = "%s#" % self.session_prompt
self.sendline('page')
self.session.expect(prompt)
def page_skip(self):
if self.session_state == Session.PRIVILEDGE_MODE:
prompt = "%s#" % self.session_prompt
self.sendline('skip')
self.session.expect(prompt)
def send_line(self, line):
''' Set an arbitrary cli command - output is sent to stdout'''
prompt = "%s#" % self.session_prompt
if self.session_state == Session.CONFIGURATION_MODE:
prompt = r'%s\((.).+\)#' % self.session_prompt
self.sendline(line)
i = 0
# Record any output from the command
output = []
c = self.session.expect([prompt, pexpect.EOF, pexpect.TIMEOUT])
# skip first line, as it's just a repeat of the command
output.append(self.session.before)
return output
def set_hostname(self, hostname):
''' Set Hostname for testing '''
sys.stdout.write("Setting hostname on %s\r\n" % self.hostname)
if self.session_state == Session.CONFIGURATION_MODE:
self.sendline("hostname %s" % hostname)
self.session.expect('#')
return True
else:
raise Exception("Attempted to configuration hostname while device "
"was not in configuration mode on '%s'" % self.hostname)
def upgrade_code_by_tftp(self, tftp_server, filename, towhere):
''' Upgrades code to a location specified by 'towhere' '''
assert(towhere == 'primary' or towhere == 'secondary' or towhere == 'bootrom')
sys.stdout.write("Upgrading %s on %s\r\n" % (towhere, self.hostname))
if self.session_state == Session.PRIVILEDGE_MODE:
self.session.sendline('copy tftp flash %s %s %s' % (tftp_server, filename, towhere))
self.session.sendline('\r\n')
i = self.session.expect(['Done.', 'Error', 'please wait', pexpect.TIMEOUT],
timeout=300)
if i == 1:
sys.stderr.write("TFTP error occurred trying to update %s code on %s\r\n" %
(towhere, self.hostname))
return False
if i == 2:
sys.stderr.write("Flash is busy during %s code upgrade on %s\r\n" %
(towhere, self.hostname))
return False
if i == 3:
sys.stderr.write("Timeout trying to update %s code on %s\r\n" %
(towhere, self.hostname))
return False
sys.stdout.write("Upgrade of %s code successful on %s\r\n" % (towhere, self.hostname))
return True
raise Exception("Attempted to upgrade %s code while device was "
"not in priviledge mode on '%s'" % (towhere, self.hostname))
return False
def upgrade_bootcode_by_tftp(self, tftp_server, filename):
''' Upgrades boot code '''
return self.upgrade_code_by_tftp(tftp_server, filename, 'bootrom')
    def reload(self, writemem=True):
        '''Reloads (reboots) the device, optionally saving the config first.

        :param writemem: when True, issue 'write memory' before reloading.
        Closes the pexpect session and sets the state to NO_SESSION.
        Logs a warning (and does nothing) if not in priviledge mode.
        '''
        logging.debug("Reloading '%s'" % self.hostname)
        if self.session_state == Session.PRIVILEDGE_MODE:
            if writemem:
                self.session.sendline('write memory')
                self.session.expect('#')
            self.session.sendline('reload')
            # Wait for a confirmation prompt ending in '):'.
            i = self.session.expect(['\):',pexpect.TIMEOUT],timeout=2)
            if i == 1: # FCX FIX
                # Some platforms (FCX) need an extra newline before showing
                # the confirmation prompt.
                self.session.send('\r\n')
                self.session.expect('\):',timeout=2)
            self.session.send('y')
            # A second '):'-style prompt may appear on some platforms and is
            # also answered with 'y' -- exact prompt text unconfirmed.
            i = self.session.expect(['\):', pexpect.EOF],timeout=2)
            if i == 0:
                self.session.sendline('y')
                self.session.sendline('')
            self.session.close()
            self.session_state = Session.NO_SESSION
        else:
            logging.warning("Attempted to logout when device was not priviledge "
                            "mode on '%s'" % self.hostname)
def logout(self):
''' logout of device '''
self.sendline('exit')
self.sendline('exit')
self.session.close()
self.session_state = Session.NO_SESSION
self._login_status = False
self._enable_status = False
return
# Or is this better?
if self.session_state == Session.PRIVILEDGE_MODE:
self.sendline('logout')
self.session.expect(pexpect.EOF)
self.session_state = Session.NO_SESSION
else:
logging.warning("Attempted to logout when device was not priviledge "
"mode on '%s'" % self.hostname)
| pjimmybrcd/campus_ztp_nps | actions/lib/Session.py | Python | apache-2.0 | 12,262 |
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/django_coverage_plugin/blob/master/NOTICE.txt
"""
Pytest auto configuration.
This module is run automatically by pytest, to define and enable fixtures.
"""
import re
import warnings
import django.utils.deprecation
import pytest
@pytest.fixture(autouse=True)
def set_warnings():
    """Make warnings visible while tests run, minus known noise."""
    warnings.simplefilter("default")
    warnings.simplefilter("once", DeprecationWarning)
    # Warnings to suppress.  Oddly these suppressions work here but not when
    # configured in setup.cfg.
    # Overriding DATABASES is something this suite does on purpose, so don't
    # warn about it.
    warnings.filterwarnings(
        "ignore",
        category=UserWarning,
        message=r"Overriding setting DATABASES can lead to unexpected behavior.",
    )
    # Django deprecation classes (RemovedInDjangoNNWarning) change name every
    # release, so discover whatever the installed Django defines and ignore
    # each of them.
    removed_in_django = re.compile(r"RemovedInDjango\d+Warning")
    for attr_name, attr in vars(django.utils.deprecation).items():
        if removed_in_django.match(attr_name):
            warnings.filterwarnings("ignore", category=attr)
| nedbat/django_coverage_plugin | tests/conftest.py | Python | apache-2.0 | 1,344 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import pkgutil
from collections import defaultdict
from twitter.common.collections import OrderedSet
from pants.backend.jvm.tasks.ide_gen import IdeGen
from pants.base.build_environment import get_buildroot
from pants.base.generator import Generator, TemplateData
from pants.util.dirutil import safe_delete, safe_mkdir, safe_open
# Root of the bundled mustache templates for Eclipse project files.
_TEMPLATE_BASEDIR = os.path.join('templates', 'eclipse')
# Maps each supported --version value to the template version to render with.
_VERSIONS = {
  '3.5': '3.7', # 3.5-3.7 are .project/.classpath compatible
  '3.6': '3.7',
  '3.7': '3.7',
}
# Static settings resources copied verbatim into the project's .settings dir.
_SETTINGS = (
  'org.eclipse.core.resources.prefs',
  'org.eclipse.jdt.ui.prefs',
)
class EclipseGen(IdeGen):
  """IdeGen task that emits Eclipse project metadata (.project, .classpath,
  .factorypath, JDT settings and, for python projects, .pydevproject) by
  rendering mustache templates chosen by the configured Eclipse version."""

  @classmethod
  def register_options(cls, register):
    """Registers --version, which selects compatible template files."""
    super(EclipseGen, cls).register_options(register)
    register('--version', choices=sorted(list(_VERSIONS.keys())), default='3.6',
             help='The Eclipse version the project configuration should be generated for.')

  def __init__(self, *args, **kwargs):
    """Resolves template paths and output file paths for the chosen version."""
    super(EclipseGen, self).__init__(*args, **kwargs)
    version = _VERSIONS[self.get_options().version]
    self.project_template = os.path.join(_TEMPLATE_BASEDIR, 'project-%s.mustache' % version)
    self.classpath_template = os.path.join(_TEMPLATE_BASEDIR, 'classpath-%s.mustache' % version)
    self.apt_template = os.path.join(_TEMPLATE_BASEDIR, 'factorypath-%s.mustache' % version)
    self.pydev_template = os.path.join(_TEMPLATE_BASEDIR, 'pydevproject-%s.mustache' % version)
    self.debug_template = os.path.join(_TEMPLATE_BASEDIR, 'debug-launcher-%s.mustache' % version)
    self.coreprefs_template = os.path.join(_TEMPLATE_BASEDIR,
                                           'org.eclipse.jdt.core.prefs-%s.mustache' % version)
    # Eclipse dot-files are written into the working directory.
    self.project_filename = os.path.join(self.cwd, '.project')
    self.classpath_filename = os.path.join(self.cwd, '.classpath')
    self.apt_filename = os.path.join(self.cwd, '.factorypath')
    self.pydev_filename = os.path.join(self.cwd, '.pydevproject')
    self.coreprefs_filename = os.path.join(self.cwd, '.settings', 'org.eclipse.jdt.core.prefs')

  def generate_project(self, project):
    """Renders every Eclipse config file for `project`.

    Builds TemplateData views of the project's source sets, classpath,
    python paths and annotation-processor factory path, applies the mustache
    templates, and copies the static settings resources.
    """
    # Each source base becomes an Eclipse "linked folder"; dots replace path
    # separators to form a folder id.
    def linked_folder_id(source_set):
      return source_set.source_base.replace(os.path.sep, '.')
    def base_path(source_set):
      return os.path.join(source_set.root_dir, source_set.source_base)
    def create_source_base_template(source_set):
      source_base = base_path(source_set)
      return source_base, TemplateData(
        id=linked_folder_id(source_set),
        path=source_base
      )
    # Map source base path -> linked-folder template entry (deduplicated).
    source_bases = dict(map(create_source_base_template, project.sources))
    if project.has_python:
      source_bases.update(map(create_source_base_template, project.py_sources))
      source_bases.update(map(create_source_base_template, project.py_libs))
    def create_source_template(base_id, includes=None, excludes=None):
      return TemplateData(
        base=base_id,
        includes='|'.join(OrderedSet(includes)) if includes else None,
        excludes='|'.join(OrderedSet(excludes)) if excludes else None,
      )
    def create_sourcepath(base_id, sources):
      # Eclipse path patterns for directories need a trailing slash.
      def normalize_path_pattern(path):
        return '%s/' % path if not path.endswith('/') else path
      includes = [normalize_path_pattern(src_set.path) for src_set in sources if src_set.path]
      excludes = []
      for source_set in sources:
        excludes.extend(normalize_path_pattern(exclude) for exclude in source_set.excludes)
      return create_source_template(base_id, includes, excludes)
    pythonpaths = []
    if project.has_python:
      for source_set in project.py_sources:
        pythonpaths.append(create_source_template(linked_folder_id(source_set)))
      for source_set in project.py_libs:
        # Eggs are referenced as files; everything else as a directory.
        lib_path = source_set.path if source_set.path.endswith('.egg') else '%s/' % source_set.path
        pythonpaths.append(create_source_template(linked_folder_id(source_set),
                                                  includes=[lib_path]))
    configured_project = TemplateData(
      name=self.project_name,
      java=TemplateData(
        jdk=self.java_jdk,
        language_level=('1.%d' % self.java_language_level)
      ),
      python=project.has_python,
      scala=project.has_scala and not project.skip_scala,
      source_bases=source_bases.values(),
      pythonpaths=pythonpaths,
      debug_port=project.debug_port,
    )
    outdir = os.path.abspath(os.path.join(self.gen_project_workdir, 'bin'))
    safe_mkdir(outdir)
    # Group source sets by their linked-folder id to build classpath entries.
    source_sets = defaultdict(OrderedSet) # base_id -> source_set
    for source_set in project.sources:
      source_sets[linked_folder_id(source_set)].add(source_set)
    sourcepaths = [create_sourcepath(base_id, sources) for base_id, sources in source_sets.items()]
    libs = list(project.internal_jars)
    libs.extend(project.external_jars)
    configured_classpath = TemplateData(
      sourcepaths=sourcepaths,
      has_tests=project.has_tests,
      libs=libs,
      scala=project.has_scala,
      # Eclipse insists the outdir be a relative path unlike other paths
      outdir=os.path.relpath(outdir, get_buildroot()),
    )
    # Renders a packaged mustache template into `output_path`.
    def apply_template(output_path, template_relpath, **template_data):
      with safe_open(output_path, 'w') as output:
        Generator(pkgutil.get_data(__name__, template_relpath), **template_data).write(output)
    apply_template(self.project_filename, self.project_template, project=configured_project)
    apply_template(self.classpath_filename, self.classpath_template, classpath=configured_classpath)
    apply_template(os.path.join(self.gen_project_workdir,
                                'Debug on port %d.launch' % project.debug_port),
                   self.debug_template, project=configured_project)
    apply_template(self.coreprefs_filename, self.coreprefs_template, project=configured_project)
    # Static settings files are copied verbatim (no templating).
    for resource in _SETTINGS:
      with safe_open(os.path.join(self.cwd, '.settings', resource), 'w') as prefs:
        prefs.write(pkgutil.get_data(__name__, os.path.join(_TEMPLATE_BASEDIR, resource)))
    factorypath = TemplateData(
      project_name=self.project_name,
      # The easiest way to make sure eclipse sees all annotation processors is to put all libs on
      # the apt factorypath - this does not seem to hurt eclipse performance in any noticeable way.
      jarpaths=libs
    )
    apply_template(self.apt_filename, self.apt_template, factorypath=factorypath)
    if project.has_python:
      apply_template(self.pydev_filename, self.pydev_template, project=configured_project)
    else:
      # Remove any stale pydev config from a previous python-enabled run.
      safe_delete(self.pydev_filename)
    print('\nGenerated project at %s%s' % (self.gen_project_workdir, os.sep))
| tejal29/pants | src/python/pants/backend/jvm/tasks/eclipse_gen.py | Python | apache-2.0 | 6,935 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.channel_v1.types import common
from google.protobuf import timestamp_pb2 # type: ignore
# Registers this module's generated messages/enums under the
# google.cloud.channel.v1 proto package.
__protobuf__ = proto.module(
    package="google.cloud.channel.v1",
    manifest={
        "ChannelPartnerLinkView",
        "ChannelPartnerLinkState",
        "ChannelPartnerLink",
    },
)
class ChannelPartnerLinkView(proto.Enum):
    r"""The level of granularity the
    [ChannelPartnerLink][google.cloud.channel.v1.ChannelPartnerLink]
    will display.
    """
    # Numeric values mirror the wire values of the proto enum definition.
    UNSPECIFIED = 0
    BASIC = 1
    FULL = 2
class ChannelPartnerLinkState(proto.Enum):
    r"""ChannelPartnerLinkState represents state of a channel partner
    link.
    """
    # Numeric values mirror the wire values of the proto enum definition.
    CHANNEL_PARTNER_LINK_STATE_UNSPECIFIED = 0
    INVITED = 1
    ACTIVE = 2
    REVOKED = 3
    SUSPENDED = 4
class ChannelPartnerLink(proto.Message):
    r"""Entity representing a link between distributors and their
    indirect resellers in an n-tier resale channel.
    Attributes:
        name (str):
            Output only. Resource name for the channel partner link, in
            the format accounts/{account_id}/channelPartnerLinks/{id}.
        reseller_cloud_identity_id (str):
            Required. Cloud Identity ID of the linked
            reseller.
        link_state (google.cloud.channel_v1.types.ChannelPartnerLinkState):
            Required. State of the channel partner link.
        invite_link_uri (str):
            Output only. URI of the web page where
            partner accepts the link invitation.
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. Timestamp of when the channel
            partner link is created.
        update_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. Timestamp of when the channel
            partner link is updated.
        public_id (str):
            Output only. Public identifier that a
            customer must use to generate a transfer token
            to move to this distributor-reseller
            combination.
        channel_partner_cloud_identity_info (google.cloud.channel_v1.types.CloudIdentityInfo):
            Output only. Cloud Identity info of the
            channel partner (IR).
    """
    # Field `number=` values are the proto tag numbers; do not renumber.
    name = proto.Field(proto.STRING, number=1,)
    reseller_cloud_identity_id = proto.Field(proto.STRING, number=2,)
    link_state = proto.Field(proto.ENUM, number=3, enum="ChannelPartnerLinkState",)
    invite_link_uri = proto.Field(proto.STRING, number=4,)
    create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,)
    update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,)
    public_id = proto.Field(proto.STRING, number=7,)
    channel_partner_cloud_identity_info = proto.Field(
        proto.MESSAGE, number=8, message=common.CloudIdentityInfo,
    )
__all__ = tuple(sorted(__protobuf__.manifest))
| googleapis/python-channel | google/cloud/channel_v1/types/channel_partner_links.py | Python | apache-2.0 | 3,498 |
import re
class Solution:
    def isPalindrome(self, s: str) -> bool:
        """Return True if `s` is a palindrome when only letters a-z/A-Z and
        digits are considered, ignoring case."""
        # Strip every character outside the allowed class, then lowercase.
        cleaned = re.sub(r'[^\da-zA-Z]', '', s).lower()
        # Palindrome iff the string equals itself read back-to-front.
        return all(a == b for a, b in zip(cleaned, reversed(cleaned)))
| fy0/my-leetcode | 125. Valid Palindrome/main.py | Python | apache-2.0 | 188 |
# -*- coding: utf-8 -*-
class AutocompleteMeta:
"""
Simple meta class to allow the model to define aspects of the autocomplete.
:var name: used for the named url
:var path: the path to autocomplete view
:var follow_fks: when searching should ForeignKey fields be followed.
:var fields: list of fields, if empty then all searchable fields are used
:var permissions: bool, string or iter
* if ``permissions`` ``False`` (default) no authentication is checked.
* if ``permissions`` ``True`` then request.user must be authenticated.
* if ``permissions`` ``string`` then request.user must have the permission defined by ``string``.
* if ``permissions`` ``iter`` then request.user must have all the permissionis defined in the ``iter``
See :class:`django_autocomplete.views.AutocompleteView` for more clarification.
For example as a simple object:
>>> from django_autocomplete.meta import AutocompleteMeta
>>> class TestModel(object):
... autocomplete = AutocompleteMeta(
... name='silly',
... path='api/filter/silly',
... )
The model autocomplete configures the model for use:
>>> m = TestModel()
>>> m.autocomplete
<django_autocomplete.meta.AutocompleteMeta object at 0x...>
>>> m.autocomplete.path
'api/filter/silly'
>>> m.autocomplete.name
'silly'
>>> m.autocomplete.follow_fks
True
>>> m.autocomplete.fields
[]
"""
name = ''
path = ''
fields = []
permissions = None
follow_fks = True
def __init__(self, autocomplete=None, **kwargs):
if autocomplete:
autocomplete_attrs = autocomplete.__dict__
else:
autocomplete_attrs = kwargs
for attr in self.__class__.__dict__:
if attr in autocomplete_attrs:
self.__dict__[attr] = autocomplete_attrs[attr]
| darrylcousins/django-autocomplete | django_autocomplete/meta.py | Python | apache-2.0 | 1,979 |
# coding=utf-8
# Copyright 2020 The PI-SAC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Project inputs to a tanh-squashed MultivariateNormalDiag distribution."""
import gin
import tensorflow as tf
import tensorflow_probability as tfp
from tf_agents.distributions import utils as distribution_utils
from tf_agents.networks import network
from tf_agents.networks import utils as network_utils
from tf_agents.specs import distribution_spec
from tf_agents.specs import tensor_spec
@gin.configurable
class TanhNormalProjectionNetwork(network.DistributionNetwork):
  """Generates a tanh-squashed MultivariateNormalDiag distribution."""

  def __init__(self,
               sample_spec,
               activation_fn=None,
               kernel_initializer=None,
               std_transform=tf.exp,
               min_std=None,
               max_std=None,
               name='TanhNormalProjectionNetwork'):
    """Creates an instance of TanhNormalProjectionNetwork.

    Args:
      sample_spec: A `tensor_spec.BoundedTensorSpec` detailing the shape and
        dtypes of samples pulled from the output distribution.
      activation_fn: Activation function to use in dense layer.
      kernel_initializer: Initializer to use for the kernels of the conv and
        dense layers. If none is provided a default glorot_uniform
      std_transform: Transformation function to apply to the stddevs.
      min_std: Minimum std.
      max_std: Maximum std.
      name: A string representing name of the network.
    """
    if len(tf.nest.flatten(sample_spec)) != 1:
      raise ValueError('Tanh Normal Projection network only supports single'
                       ' spec samples.')
    output_spec = self._output_distribution_spec(sample_spec, name)
    super(TanhNormalProjectionNetwork, self).__init__(
        # We don't need these, but base class requires them.
        input_tensor_spec=None,
        state_spec=(),
        output_spec=output_spec,
        name=name)
    self._sample_spec = sample_spec
    self._std_transform = std_transform
    self._min_std = min_std
    self._max_std = max_std
    if kernel_initializer is None:
      kernel_initializer = 'glorot_uniform'
    # A single dense layer emits both the means and the raw (pre-transform)
    # stddevs; they are split apart in `call`.
    self._projection_layer = tf.keras.layers.Dense(
        sample_spec.shape.num_elements() * 2,
        activation=activation_fn,
        kernel_initializer=kernel_initializer,
        name='projection_layer')

  def _output_distribution_spec(self, sample_spec, network_name):
    """Builds the DistributionSpec for a spec-scaled MultivariateNormalDiag."""
    input_param_shapes = {
        'loc': sample_spec.shape,
        'scale_diag': sample_spec.shape
    }
    input_param_spec = { # pylint: disable=g-complex-comprehension
        name: tensor_spec.TensorSpec(
            shape=shape,
            dtype=sample_spec.dtype,
            name=network_name + '_' + name)
        for name, shape in input_param_shapes.items()
    }

    def distribution_builder(*args, **kwargs):
      # Scale/squash the normal so that samples land inside the bounded spec.
      distribution = tfp.distributions.MultivariateNormalDiag(*args, **kwargs)
      return distribution_utils.scale_distribution_to_spec(
          distribution, sample_spec)

    return distribution_spec.DistributionSpec(
        distribution_builder, input_param_spec, sample_spec=sample_spec)

  def call(self, inputs, outer_rank, training=False, mask=None):
    """Projects `inputs` to the output distribution.

    Returns:
      A (distribution, network_state) tuple; the state is always empty.
    """
    if inputs.dtype != self._sample_spec.dtype:
      raise ValueError('Inputs to TanhNormalProjectionNetwork must match the '
                       'sample_spec.dtype.')
    if mask is not None:
      raise NotImplementedError(
          'TanhNormalProjectionNetwork does not yet implement action masking; '
          'got mask={}'.format(mask))
    # outer_rank is needed because the projection is not done on the raw
    # observations so getting the outer rank is hard as there is no spec to
    # compare to.
    batch_squash = network_utils.BatchSquash(outer_rank)
    inputs = batch_squash.flatten(inputs)
    means_and_stds = self._projection_layer(inputs, training=training)
    means, stds = tf.split(means_and_stds, num_or_size_splits=2, axis=-1)
    means = tf.reshape(means, [-1] + self._sample_spec.shape.as_list())
    means = tf.cast(means, self._sample_spec.dtype)
    if self._std_transform is not None:
      stds = self._std_transform(stds)
    # Optionally clamp the stddevs into [min_std, max_std].
    if self._min_std is not None:
      stds = tf.maximum(stds, self._min_std)
    if self._max_std is not None:
      stds = tf.minimum(stds, self._max_std)
    stds = tf.cast(stds, self._sample_spec.dtype)
    means = batch_squash.unflatten(means)
    stds = batch_squash.unflatten(stds)
    return self.output_spec.build_distribution(loc=means, scale_diag=stds), ()
| google-research/pisac | pisac/tanh_normal_projection_network.py | Python | apache-2.0 | 5,102 |
"""Model for an access log."""
import functools
import logging
import numpy as np
from django.conf import settings
from django.db import models
from django.utils import timezone
logger = logging.getLogger(__name__)
class AccessLogMixin(models.Model):
    """Base class which logs access of information."""

    # The user which accessed the data.
    user = models.ForeignKey(settings.AUTH_USER_MODEL,
                             db_index=True,
                             on_delete=models.CASCADE)

    # Timestamp of the access.
    timestamp = models.DateTimeField(db_index=True)

    class Meta:
        abstract = True
        # Composite index backs the per-user, time-ranged queries below.
        index_together = (('user', 'timestamp'), )

    def __init__(self, *args, **kwargs):
        super(AccessLogMixin, self).__init__(*args, **kwargs)
        # Default the access time to "now" when not supplied explicitly.
        if self.timestamp is None:
            self.timestamp = timezone.now()

    @classmethod
    def by_user(cls, user, start_time=None, end_time=None):
        """Gets the time-sorted list of access log for the given user.

        Args:
            user: The user to get the access log for.
            start_time: Optional. Inclusive start time.
            end_time: Optional. Exclusive end time.
        Returns:
            A list of access log objects for the given user sorted by timestamp.
        """
        query = cls.objects.filter(user_id=user.pk)
        if start_time:
            query = query.filter(timestamp__gte=start_time)
        if end_time:
            query = query.filter(timestamp__lt=end_time)
        return query.order_by('timestamp')

    @classmethod
    def last_for_user(cls, user, start_time=None, end_time=None):
        """Gets the last access log for the user.

        Args:
            user: The user to get the access log for.
            start_time: Optional. Inclusive start time.
            end_time: Optional. Exclusive end time.
        Returns:
            The last access log for the user.
        """
        return cls.by_user(user, start_time, end_time).last()

    @classmethod
    def by_time_period(cls, user, time_periods):
        """Gets a list of time-sorted lists of access logs for each time period.

        The method returns the full sets of AccessLogMixins for each TimePeriod. If
        overlapping TimePeriods are provided, the results may contain duplicate
        logs.

        Args:
            user: The user to get the access log for.
            time_periods: A list of TimePeriod objects.
        Returns:
            A list of AccessLogMixin lists, where each AccessLogMixin list contains all
            AccessLogMixins corresponding to the related TimePeriod.
        """
        return [cls.by_user(user, p.start, p.end) for p in time_periods]

    @classmethod
    def rates(cls, user, time_periods, time_period_logs=None):
        """Gets the access log rates.

        Args:
            user: The user to get the access log rates for.
            time_periods: A list of TimePeriod objects. Note: to avoid
                computing rates with duplicate logs, ensure that all
                time periods are non-overlapping.
            time_period_logs: Optional. A sequence of AccessLogMixin sequences,
                where each AccessLogMixin sequence contains all AccessLogMixins
                corresponding to the related TimePeriod. If None, will obtain
                by calling by_time_period().
        Returns:
            A (max, avg) tuple. The max is the max time between logs, and avg
            is the avg time between logs.
        """
        # Check that time periods were provided.
        if not time_periods:
            return (None, None)
        # Check that all time periods are closed.
        for time_period in time_periods:
            if time_period.duration() is None:
                return (None, None)
        # If logs were not provided, obtain.
        if not time_period_logs:
            time_period_logs = cls.by_time_period(user, time_periods)
        # Utility generator for time durations.
        # Yields gaps (seconds) between consecutive logs, including the gap
        # from each period's start to its first log and last log to its end.
        def time_between_logs(time_periods, time_period_logs):
            for ix, period in enumerate(time_periods):
                prev_time = period.start
                for log in time_period_logs[ix]:
                    yield (log.timestamp - prev_time).total_seconds()
                    prev_time = log.timestamp
                yield (period.end - prev_time).total_seconds()
        # Calculate max, sum, count for time durations.
        # Accumulator r = (max_so_far, sum_so_far, count_so_far).
        (m, s, c) = functools.reduce(
            lambda r, d: (max(r[0], d), r[1] + d, r[2] + 1),
            time_between_logs(time_periods, time_period_logs), (0.0, 0.0, 0))
        # Convert to max and average.
        return (m, s / c)
| auvsi-suas/interop | server/auvsi_suas/models/access_log.py | Python | apache-2.0 | 4,701 |
import numpy as np
from ray.rllib.models.modelv2 import ModelV2
from ray.rllib.models.tf.misc import normc_initializer
from ray.rllib.models.tf.tf_modelv2 import TFModelV2
from ray.rllib.models.torch.misc import SlimFC, normc_initializer as \
torch_normc_initializer
from ray.rllib.models.torch.torch_modelv2 import TorchModelV2
from ray.rllib.utils.annotations import override
from ray.rllib.utils.framework import try_import_tf, try_import_torch
# Deep-learning frameworks are optional in RLlib; try_import_* presumably
# returns None placeholders when a framework is not installed -- confirm
# against ray.rllib.utils.framework.
tf1, tf, tfv = try_import_tf()
torch, nn = try_import_torch()
class BatchNormModel(TFModelV2):
    """Example of a TFModelV2 that is built w/o using tf.keras.

    NOTE: This example does not work when using a keras-based TFModelV2 due
    to a bug in keras related to missing values for input placeholders, even
    though these input values have been provided in a forward pass through the
    actual keras Model.

    All Model logic (layers) is defined in the `forward` method (incl.
    the batch_normalization layers). Also, all variables are registered
    (only once) at the end of `forward`, so an optimizer knows which tensors
    to train on. A standard `value_function` override is used.
    """
    capture_index = 0

    def __init__(self, obs_space, action_space, num_outputs, model_config,
                 name):
        super().__init__(obs_space, action_space, num_outputs, model_config,
                         name)
        # Have we registered our vars yet (see `forward`)?
        self._registered = False

    @override(ModelV2)
    def forward(self, input_dict, state, seq_lens):
        """Builds (on first call) and runs the layers; returns (logits, state)."""
        last_layer = input_dict["obs"]
        hiddens = [256, 256]
        # AUTO_REUSE makes repeated forward calls share the same variables.
        with tf1.variable_scope("model", reuse=tf1.AUTO_REUSE):
            for i, size in enumerate(hiddens):
                last_layer = tf1.layers.dense(
                    last_layer,
                    size,
                    kernel_initializer=normc_initializer(1.0),
                    activation=tf.nn.tanh,
                    name="fc{}".format(i))
                # Add a batch norm layer
                last_layer = tf1.layers.batch_normalization(
                    last_layer,
                    training=input_dict["is_training"],
                    name="bn_{}".format(i))
            output = tf1.layers.dense(
                last_layer,
                self.num_outputs,
                kernel_initializer=normc_initializer(0.01),
                activation=None,
                name="out")
            # Value branch shares the hidden trunk with the policy head.
            self._value_out = tf1.layers.dense(
                last_layer,
                1,
                kernel_initializer=normc_initializer(1.0),
                activation=None,
                name="vf")
            # Register trainable variables exactly once, after they exist.
            if not self._registered:
                self.register_variables(
                    tf1.get_collection(
                        tf1.GraphKeys.TRAINABLE_VARIABLES, scope=".+/model/.+"))
                self._registered = True
        return output, []

    @override(ModelV2)
    def value_function(self):
        """Returns the flattened value output from the last forward pass."""
        return tf.reshape(self._value_out, [-1])
class KerasBatchNormModel(TFModelV2):
    """Keras version of above BatchNormModel with exactly the same structure.

    IMPORTANT NOTE: This model will not work with PPO due to a bug in keras
    that surfaces when having more than one input placeholder (here: `inputs`
    and `is_training`) AND using the `make_tf_callable` helper (e.g. used by
    PPO), in which auto-placeholders are generated, then passed through the
    tf.keras. models.Model. In this last step, the connection between 1) the
    provided value in the auto-placeholder and 2) the keras `is_training`
    Input is broken and keras complains.
    Use the above `BatchNormModel` (a non-keras based TFModelV2), instead.
    """
    def __init__(self, obs_space, action_space, num_outputs, model_config,
                 name):
        super().__init__(obs_space, action_space, num_outputs, model_config,
                         name)
        inputs = tf.keras.layers.Input(shape=obs_space.shape, name="inputs")
        # Second input feeds the train/eval flag to the BatchNorm layers.
        is_training = tf.keras.layers.Input(
            shape=(), dtype=tf.bool, batch_size=1, name="is_training")
        last_layer = inputs
        hiddens = [256, 256]
        for i, size in enumerate(hiddens):
            label = "fc{}".format(i)
            last_layer = tf.keras.layers.Dense(
                units=size,
                kernel_initializer=normc_initializer(1.0),
                activation=tf.nn.tanh,
                name=label)(last_layer)
            # Add a batch norm layer
            last_layer = tf.keras.layers.BatchNormalization()(
                last_layer, training=is_training[0])
        output = tf.keras.layers.Dense(
            units=self.num_outputs,
            kernel_initializer=normc_initializer(0.01),
            activation=None,
            name="fc_out")(last_layer)
        # Value branch shares the hidden trunk with the policy head.
        value_out = tf.keras.layers.Dense(
            units=1,
            kernel_initializer=normc_initializer(0.01),
            activation=None,
            name="value_out")(last_layer)
        self.base_model = tf.keras.models.Model(
            inputs=[inputs, is_training], outputs=[output, value_out])
        self.register_variables(self.base_model.variables)

    @override(ModelV2)
    def forward(self, input_dict, state, seq_lens):
        """Runs the keras model; returns (logits, empty state)."""
        out, self._value_out = self.base_model(
            [input_dict["obs"], input_dict["is_training"]])
        return out, []

    @override(ModelV2)
    def value_function(self):
        """Returns the flattened value output from the last forward pass."""
        return tf.reshape(self._value_out, [-1])
class TorchBatchNormModel(TorchModelV2, nn.Module):
    """Example of a TorchModelV2 using batch normalization."""
    capture_index = 0

    def __init__(self, obs_space, action_space, num_outputs, model_config,
                 name, **kwargs):
        TorchModelV2.__init__(self, obs_space, action_space, num_outputs,
                              model_config, name)
        nn.Module.__init__(self)
        layers = []
        prev_layer_size = int(np.product(obs_space.shape))
        self._logits = None
        # Create layers 0 to second-last.
        # NOTE(review): hidden activations here are ReLU, while the TF
        # variants above use tanh -- confirm this difference is intentional.
        for size in [256, 256]:
            layers.append(
                SlimFC(
                    in_size=prev_layer_size,
                    out_size=size,
                    initializer=torch_normc_initializer(1.0),
                    activation_fn=nn.ReLU))
            prev_layer_size = size
            # Add a batch norm layer.
            layers.append(nn.BatchNorm1d(prev_layer_size))
        self._logits = SlimFC(
            in_size=prev_layer_size,
            out_size=self.num_outputs,
            initializer=torch_normc_initializer(0.01),
            activation_fn=None)
        # Value branch shares the hidden trunk with the policy head.
        self._value_branch = SlimFC(
            in_size=prev_layer_size,
            out_size=1,
            initializer=torch_normc_initializer(1.0),
            activation_fn=None)
        self._hidden_layers = nn.Sequential(*layers)
        self._hidden_out = None

    @override(ModelV2)
    def forward(self, input_dict, state, seq_lens):
        """Runs the hidden trunk + logits head; returns (logits, empty state)."""
        # Set the correct train-mode for our hidden module (only important
        # b/c we have some batch-norm layers).
        self._hidden_layers.train(mode=input_dict.get("is_training", False))
        self._hidden_out = self._hidden_layers(input_dict["obs"])
        logits = self._logits(self._hidden_out)
        return logits, []

    @override(ModelV2)
    def value_function(self):
        """Returns the value estimate computed from the cached hidden output."""
        assert self._hidden_out is not None, "must call forward first!"
        return torch.reshape(self._value_branch(self._hidden_out), [-1])
| richardliaw/ray | rllib/examples/models/batch_norm_model.py | Python | apache-2.0 | 7,538 |
# -*- coding: utf-8 -*-
# Copyright 2015-2019 grafana-dashboard-builder contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
import errno
import json
import logging
import os
__author__ = 'Jakub Plichta <jakub.plichta@gmail.com>'
logger = logging.getLogger(__name__)
class DashboardExporter(object):
    """Base class for dashboard sinks.

    Subclasses override process_dashboard() to persist one generated
    dashboard; the default implementation intentionally does nothing.
    """

    def process_dashboard(self, project_name, dashboard_name, dashboard_data):
        """Handle a single generated dashboard (no-op by default)."""
        pass
class ProjectProcessor(object):
    """Generates dashboards for a set of projects and fans the resulting
    JSON out to every configured exporter."""

    def __init__(self, dashboard_processors):
        """
        :type dashboard_processors: list[grafana_dashboards.builder.DashboardExporter]
        """
        super(ProjectProcessor, self).__init__()
        self._dashboard_processors = dashboard_processors

    def process_projects(self, projects, parent_context=None):
        """
        :type projects: list[grafana_dashboards.components.projects.Project]
        :type parent_context: dict
        """
        for proj in projects:
            logger.info("Processing project '%s'", proj.name)
            for ctx in proj.get_contexts(parent_context):
                for dash in proj.get_dashboards():
                    rendered = dash.gen_json(ctx)
                    expanded_name = ctx.expand_placeholders(dash.name)
                    for sink in self._dashboard_processors:
                        sink.process_dashboard(proj.name, expanded_name, rendered)
class FileExporter(DashboardExporter):
    """Exporter writing each dashboard to
    ``<output_folder>/<project_name>/<dashboard_name>.json``."""

    def __init__(self, output_folder):
        super(FileExporter, self).__init__()
        self._output_folder = output_folder
        # Create the folder race-free: attempting makedirs() and tolerating
        # EEXIST closes the exists()/makedirs() TOCTOU window and matches
        # the pattern already used in process_dashboard() below.
        try:
            os.makedirs(self._output_folder)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        if not os.path.isdir(self._output_folder):
            raise Exception("'{0}' must be a directory".format(self._output_folder))

    def process_dashboard(self, project_name, dashboard_name, dashboard_data):
        """Serialize one dashboard as pretty-printed JSON under the
        project's sub-folder."""
        super(FileExporter, self).process_dashboard(project_name, dashboard_name, dashboard_data)
        dirname = os.path.join(self._output_folder, project_name)
        try:
            os.makedirs(dirname)
        except OSError as e:
            # The project folder may already exist from a previous run.
            if e.errno != errno.EEXIST:
                raise
        dashboard_path = os.path.join(dirname, dashboard_name + '.json')
        logger.info("Saving dashboard '%s' to '%s'", dashboard_name, os.path.abspath(dashboard_path))
        with open(dashboard_path, 'w') as f:
            json.dump(dashboard_data, f, sort_keys=True, indent=2, separators=(',', ': '))
| jakubplichta/grafana-dashboard-builder | grafana_dashboards/exporter.py | Python | apache-2.0 | 3,057 |
#!/usr/bin/python
import os
import sys

# Options forwarded to setup(); adjusted below for old interpreters.
extra_opts = {'test_suite': 'tests'}
# Extra install/test dependencies needed only on Python 2.6.
extra_deps = []
extra_test_deps = []
if sys.version_info[:2] == (2, 6):
    # Python 2.6 lacks argparse and a fast json module, and its unittest
    # cannot discover tests, so fall back to the backport packages.
    extra_deps.append('argparse')
    extra_deps.append('simplejson')
    extra_test_deps.append('unittest2')
    extra_opts['test_suite'] = 'unittest2.collector'
try:
    from setuptools import setup, find_packages
except ImportError:
    # Bootstrap setuptools when it is missing (legacy ez_setup flow).
    from ez_setup import use_setuptools
    use_setuptools()
    from setuptools import setup, find_packages
try:
    # Use the README as the PyPI long description when available.
    with open('README.rst', 'r') as fd:
        extra_opts['long_description'] = fd.read()
except IOError:
    pass  # Install without README.rst
setup(
    name='mongo-orchestration',
    version='0.4.dev0',
    author='MongoDB, Inc.',
    author_email='mongodb-user@googlegroups.com',
    description='Restful service for managing MongoDB servers',
    keywords=['mongo-orchestration', 'mongodb', 'mongo', 'rest', 'testing'],
    license="http://www.apache.org/licenses/LICENSE-2.0.html",
    platforms=['any'],
    url='https://github.com/10gen/mongo-orchestration',
    install_requires=['pymongo>=3.0.2',
                      'bottle>=0.12.7',
                      'CherryPy>=3.5.0'] + extra_deps,
    tests_require=['coverage>=3.5'] + extra_test_deps,
    packages=find_packages(exclude=('tests',)),
    # Ship the bundled JSON presets for servers/replica sets/sharded
    # clusters plus the client certificate used for SSL setups.
    package_data={
        'mongo_orchestration': [
            os.path.join('configurations', config_dir, '*.json')
            for config_dir in ('servers', 'replica_sets', 'sharded_clusters')
        ] + [os.path.join('lib', 'client.pem')]
    },
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: Microsoft :: Windows",
        "Operating System :: POSIX",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: Implementation :: CPython"
    ],
    entry_points={
        'console_scripts': [
            'mongo-orchestration = mongo_orchestration.server:main'
        ]
    },
    **extra_opts
)
| agilemobiledev/mongo-orchestration | setup.py | Python | apache-2.0 | 2,438 |
import decimal
import json
from datetime import date, datetime, time
import dateutil.parser
from django.conf import settings
from django.db.models import Model
from versions.models import Versionable
# Registry of all known settings. Maps each setting key to its default
# value in serialized (database string) form plus the Python type that
# SettingsProxy.get() should unserialize it to. Note that booleans are
# stored as the strings 'True'/'False' and lists as JSON.
DEFAULTS = {
    'user_mail_required': {
        'default': 'False',
        'type': bool
    },
    'max_items_per_order': {
        'default': '10',
        'type': int
    },
    'attendee_names_asked': {
        'default': 'True',
        'type': bool
    },
    'attendee_names_required': {
        'default': 'False',
        'type': bool
    },
    'reservation_time': {
        'default': '30',
        'type': int
    },
    'payment_term_days': {
        'default': '14',
        'type': int
    },
    'payment_term_last': {
        'default': None,
        'type': datetime,
    },
    'payment_term_accept_late': {
        'default': 'True',
        'type': bool
    },
    'presale_start_show_date': {
        'default': 'True',
        'type': bool
    },
    'show_items_outside_presale_period': {
        'default': 'True',
        'type': bool
    },
    'timezone': {
        'default': settings.TIME_ZONE,
        'type': str
    },
    'locales': {
        'default': json.dumps([settings.LANGUAGE_CODE]),
        'type': list
    },
    'locale': {
        'default': settings.LANGUAGE_CODE,
        'type': str
    },
    'show_date_to': {
        'default': 'True',
        'type': bool
    },
    'show_times': {
        'default': 'True',
        'type': bool
    },
    'ticket_download': {
        'default': 'False',
        'type': bool
    },
    'ticket_download_date': {
        'default': None,
        'type': datetime
    },
    'last_order_modification_date': {
        'default': None,
        'type': datetime
    },
    'contact_mail': {
        'default': None,
        'type': str
    },
    'imprint_url': {
        'default': None,
        'type': str
    },
    'mail_prefix': {
        'default': None,
        'type': str
    },
    'mail_from': {
        'default': settings.MAIL_FROM,
        'type': str
    }
}
class SettingsProxy:
    """
    This objects allows convenient access to settings stored in the
    EventSettings/OrganizerSettings database model. It exposes all settings as
    properties and it will do all the nasty inheritance and defaults stuff for
    you. It will return None for non-existing properties.
    """

    def __init__(self, obj, parent=None, type=None):
        # NOTE: ``type`` shadows the builtin here; it is the setting model
        # class (a Versionable) used to create new rows in set().
        self._obj = obj
        self._parent = parent
        self._cached_obj = None
        self._type = type

    def _cache(self):
        # Lazily load all current setting rows for ``_obj`` into a dict
        # keyed by setting key; later calls reuse the cache.
        if self._cached_obj is None:
            self._cached_obj = {}
            for setting in self._obj.setting_objects.current.all():
                self._cached_obj[setting.key] = setting
        return self._cached_obj

    def _flush(self):
        # Drop the cache so the next access reloads from the database.
        self._cached_obj = None

    def _unserialize(self, value, as_type):
        """Convert a stored (string) value back into ``as_type``.

        Values that already have the requested type and ``None`` pass
        through unchanged; model instances are looked up by pk/identity.
        """
        if as_type is not None and isinstance(value, as_type):
            return value
        elif value is None:
            return None
        elif as_type == int or as_type == float or as_type == decimal.Decimal:
            return as_type(value)
        elif as_type == dict or as_type == list:
            return json.loads(value)
        elif as_type == bool or value in ('True', 'False'):
            # Booleans are stored as the strings 'True'/'False'.
            return value == 'True'
        elif as_type == datetime:
            return dateutil.parser.parse(value)
        elif as_type == date:
            return dateutil.parser.parse(value).date()
        elif as_type == time:
            return dateutil.parser.parse(value).time()
        elif as_type is not None and issubclass(as_type, Versionable):
            return as_type.objects.current.get(identity=value)
        elif as_type is not None and issubclass(as_type, Model):
            return as_type.objects.get(pk=value)
        return value

    def _serialize(self, value):
        """Convert ``value`` into the string form stored in the database.

        :raises TypeError: for unsupported value types.
        """
        if isinstance(value, str):
            return value
        elif isinstance(value, int) or isinstance(value, float) \
                or isinstance(value, bool) or isinstance(value, decimal.Decimal):
            return str(value)
        elif isinstance(value, list) or isinstance(value, dict):
            return json.dumps(value)
        elif isinstance(value, datetime) or isinstance(value, date) or isinstance(value, time):
            return value.isoformat()
        elif isinstance(value, Versionable):
            return value.identity
        elif isinstance(value, Model):
            return value.pk
        raise TypeError('Unable to serialize %s into a setting.' % str(type(value)))

    def get(self, key, default=None, as_type=None):
        """
        Get a setting specified by key 'key'. Normally, settings are strings, but
        if you put non-strings into the settings object, you can request unserialization
        by specifying 'as_type'
        """
        # Resolution order: own DB rows -> parent object's settings ->
        # DEFAULTS registry -> the ``default`` argument.
        if as_type is None and key in DEFAULTS:
            as_type = DEFAULTS[key]['type']
        if key in self._cache():
            return self._unserialize(self._cache()[key].value, as_type)
        value = None
        if self._parent:
            value = self._parent.settings.get(key)
        if value is None and key in DEFAULTS:
            return self._unserialize(DEFAULTS[key]['default'], as_type)
        if value is None and default is not None:
            return self._unserialize(default, as_type)
        return self._unserialize(value, as_type)

    def __getitem__(self, key):
        return self.get(key)

    def __getattr__(self, key):
        # Any attribute not found on the proxy itself is a setting lookup.
        return self.get(key)

    def __setattr__(self, key, value):
        # Underscore-prefixed names are real attributes of the proxy.
        if key.startswith('_'):
            return super().__setattr__(key, value)
        self.set(key, value)

    def __setitem__(self, key, value):
        self.set(key, value)

    def set(self, key, value):
        # Clone an existing row (versioned update) or create a fresh one,
        # then persist the serialized value and refresh the cache entry.
        if key in self._cache():
            s = self._cache()[key]
            s = s.clone()
        else:
            s = self._type(object=self._obj, key=key)
        s.value = self._serialize(value)
        s.save()
        self._cache()[key] = s

    def __delattr__(self, key):
        if key.startswith('_'):
            return super().__delattr__(key)
        return self.__delitem__(key)

    def __delitem__(self, key):
        # Deleting an unknown key is a silent no-op.
        if key in self._cache():
            self._cache()[key].delete()
            del self._cache()[key]
class SettingsSandbox:
    """
    Transparently proxied access to event settings, handling your domain-
    prefixes for you.

    A sandbox built with ``type='payment'`` and ``key='paypal'`` maps
    ``sandbox.method`` to the setting ``payment_paypal_method`` on the
    event's settings object. Attribute and item access are equivalent.
    """

    def __init__(self, type, key, event):
        self._event = event
        self._type = type
        self._key = key

    def _convert_key(self, key):
        # Namespace the key so different sandboxes cannot collide.
        return '%s_%s_%s' % (self._type, self._key, key)

    def get(self, key, default=None, as_type=str):
        return self._event.settings.get(self._convert_key(key), default=default, as_type=as_type)

    def set(self, key, value):
        self._event.settings.set(self._convert_key(key), value)

    def __getitem__(self, key):
        return self.get(key)

    # Attribute reads are plain setting lookups. __getattr__ is only
    # invoked for names not found normally, so _event/_type/_key
    # (set in __init__) are unaffected.
    __getattr__ = __getitem__

    def __setitem__(self, key, value):
        self.set(key, value)

    def __setattr__(self, key, value):
        # Underscore names belong to the sandbox object itself.
        if key.startswith('_'):
            return super().__setattr__(key, value)
        self.set(key, value)

    def __delitem__(self, key):
        del self._event.settings[self._convert_key(key)]

    # Deleting an attribute deletes the underlying setting.
    __delattr__ = __delitem__
| lab2112/pretix | src/pretix/base/settings.py | Python | apache-2.0 | 7,483 |
# ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import math
# ============= enthought library imports =======================
from chaco.array_data_source import ArrayDataSource
from chaco.tools.broadcaster import BroadcasterTool
from chaco.tools.data_label_tool import DataLabelTool
from numpy import Inf, vstack, zeros_like, ma
from traits.api import HasTraits, Any, Int, Str, Property, \
Event, cached_property, List, Float, Instance, TraitError
from uncertainties import std_dev, nominal_value, ufloat
from pychron.core.filtering import filter_ufloats, sigma_filter
from pychron.core.helpers.formatting import floatfmt, format_percent_error, standard_sigfigsfmt
from pychron.graph.error_bar_overlay import ErrorBarOverlay
from pychron.graph.ticks import SparseLogTicks
from pychron.graph.ticks import SparseTicks
from pychron.graph.tools.analysis_inspector import AnalysisPointInspector
from pychron.graph.tools.point_inspector import PointInspectorOverlay
from pychron.graph.tools.rect_selection_tool import RectSelectionOverlay, \
RectSelectionTool
from pychron.pipeline.plot.flow_label import FlowDataLabel, FlowPlotLabel
from pychron.pipeline.plot.overlays.points_label_overlay import PointsLabelOverlay
from pychron.pipeline.plot.point_move_tool import OverlayMoveTool
from pychron.processing.analyses.analysis_group import AnalysisGroup
from pychron.pychron_constants import PLUSMINUS, format_mswd
class SelectionFigure(HasTraits):
    """Base figure that maps a plot's graphical point selection onto the
    temporary omit/ok status of the underlying analysis records."""

    graph = Any  # the owning Graph instance (assigned by the caller)

    def _set_selected(self, ans, sel):
        # Analyses whose index appears in ``sel`` are marked omitted,
        # restoring a previous omit tag when one exists; all others 'ok'.
        for i, a in enumerate(ans):
            if i in sel:
                a.set_temp_status(a.otemp_status if a.otemp_status else 'omit')
            else:
                a.set_temp_status('ok')

    def _filter_metadata_changes(self, obj, ans, func=None):
        # Sync the datasource's selection metadata onto the analyses and
        # invoke the optional callback with the selected indices.
        sel = obj.metadata.get('selections', [])
        self._set_selected(ans, sel)
        if func:
            func(sel)
        return sel
class BaseArArFigure(SelectionFigure):
analyses = Any
sorted_analyses = Property(depends_on='analyses')
analysis_group = Property(depends_on='analyses, _analysis_group')
_analysis_group = Instance(AnalysisGroup)
_analysis_group_klass = AnalysisGroup
group_id = Int
ytitle = Str
title = Str
xtitle = Str
replot_needed = Event
recalculate_event = Event
options = Any
refresh_unknowns_table = Event
suppress_ylimits_update = False
suppress_xlimits_update = False
xpad = None
ymas = List
ymis = List
xmi = Float
xma = Float
data_xma = 0
_has_formatting_hash = None
_reverse_sorted_analyses = False
def get_update_dict(self):
return {}
def build(self, plots, plot_dict=None):
"""
make plots
"""
graph = self.graph
vertical_resize = not all([p.height for p in plots])
graph.vertical_resize = vertical_resize
graph.clear_has_title()
title = self.title
if not title:
title = self.options.title
for i, po in enumerate(plots):
kw = {'ytitle': po.name}
if plot_dict:
kw.update(plot_dict)
if po.height:
kw['bounds'] = [50, po.height]
if i == (len(plots) - 1):
kw['title'] = title
if i == 0 and self.ytitle:
kw['ytitle'] = self.ytitle
if not po.ytitle_visible:
kw['ytitle'] = ''
if self.xtitle:
kw['xtitle'] = self.xtitle
kw['padding'] = self.options.get_paddings()
p = graph.new_plot(**kw)
if i == (len(plots) - 1):
p.title_font = self.options.title_font
# set a tag for easy identification
p.y_axis.tag = po.name
self._setup_plot(i, p, po)
def post_make(self):
self._fix_log_axes()
def post_plot(self, plots):
graph = self.graph
for (plotobj, po) in zip(graph.plots, plots):
self._apply_aux_plot_options(plotobj, po)
def plot(self, *args, **kw):
pass
def replot(self, *args, **kw):
if self.options:
self.plot(self.options.get_plotable_aux_plots())
def max_x(self, *args):
    # Fallback x-range bounds; subclasses override to report the real
    # extent of their plotted data.
    return -Inf

def min_x(self, *args):
    return Inf

def mean_x(self, *args):
    return 0
# private
def _fix_log_axes(self):
    # A log-scaled value axis cannot have a negative lower bound; clamp
    # it to the largest power of ten below the smallest plotted y value.
    for i, p in enumerate(self.graph.plots):
        if p.value_scale == 'log':
            if p.value_mapper.range.low < 0:
                ys = self.graph.get_data(plotid=i, axis=1)
                m = 10 ** math.floor(math.log10(min(ys)))
                p.value_mapper.range.low = m
def _setup_plot(self, i, pp, po):
# add limit tools
self.graph.add_limit_tool(pp, 'x', self._handle_xlimits)
self.graph.add_limit_tool(pp, 'y', self._handle_ylimits)
self.graph.add_axis_tool(pp, pp.x_axis)
self.graph.add_axis_tool(pp, pp.y_axis)
pp.value_range.on_trait_change(lambda: self.update_options_limits(i), 'updated')
pp.index_range.on_trait_change(lambda: self.update_options_limits(i), 'updated')
pp.value_range.tight_bounds = False
self._apply_aux_plot_options(pp, po)
def _apply_aux_plot_options(self, pp, po):
options = self.options
for k, axis in (('x', pp.x_axis), ('y', pp.y_axis)):
for attr in ('title_font', 'tick_in', 'tick_out', 'tick_label_formatter'):
value = getattr(options, '{}{}'.format(k, attr))
try:
setattr(axis, attr, value)
except TraitError:
pass
axis.tick_label_font = getattr(options, '{}tick_font'.format(k))
# pp.x_axis.title_font = options.xtitle_font
# pp.x_axis.tick_label_font = options.xtick_font
# pp.x_axis.tick_in = options.xtick_in
# pp.x_axis.tick_out = options.xtick_out
#
# pp.y_axis.title_font = options.ytitle_font
# pp.y_axis.tick_label_font = options.ytick_font
# pp.y_axis.tick_in = options.ytick_in
# pp.y_axis.tick_out = options.ytick_out
pp.bgcolor = options.plot_bgcolor
pp.x_grid.visible = options.use_xgrid
pp.y_grid.visible = options.use_ygrid
if po:
if not po.ytick_visible:
pp.y_axis.tick_visible = False
pp.y_axis.tick_label_formatter = lambda x: ''
if po.y_axis_right:
pp.y_axis.orientation = 'right'
pp.y_axis.axis_line_visible = False
pp.value_scale = po.scale
if po.scale == 'log':
if po.use_sparse_yticks:
st = SparseLogTicks(step=po.sparse_yticks_step)
pp.value_axis.tick_generator = st
pp.value_grid.tick_generator = st
else:
pp.value_axis.tick_interval = po.ytick_interval
if po.use_sparse_yticks:
st = SparseTicks(step=po.sparse_yticks_step)
pp.value_axis.tick_generator = st
pp.value_grid.tick_generator = st
def _set_options_format(self, pp):
# print 'using options format'
pass
def _set_selected(self, ans, sel):
super(BaseArArFigure, self)._set_selected(ans, sel)
self.refresh_unknowns_table = True
def _cmp_analyses(self, x):
    # Sort key for analyses: timestamp, falling back to 0 when unset.
    return x.timestamp or 0
def _unpack_attr(self, attr, scalar=1, exclude_omit=False, nonsorted=False, ans=None):
    """Yield ``attr`` (as a ufloat) for each analysis, scaled by ``scalar``.

    NOTE(review): ``nonsorted=True`` replaces even an explicitly passed
    ``ans`` with ``self.analyses`` -- confirm that precedence is intended.
    """
    if ans is None:
        ans = self.sorted_analyses
    if nonsorted:
        ans = self.analyses

    def gen():
        for ai in ans:
            if exclude_omit and ai.is_omitted():
                continue
            v = ai.get_value(attr)
            if v is None:
                # Missing values become 0+/-0 so downstream math works.
                v = ufloat(0, 0)
            yield v * scalar

    return gen()
def _set_y_limits(self, a, b, min_=None, max_=None, pid=0, pad=None):
mi, ma = self.graph.get_y_limits(plotid=pid)
mi = min_ if min_ is not None else min(mi, a)
ma = max_ if max_ is not None else max(ma, b)
self.graph.set_y_limits(min_=mi, max_=ma, pad=pad, plotid=pid, pad_style='upper')
def update_options_limits(self, pid):
if not self.suppress_xlimits_update:
if hasattr(self.options, 'aux_plots'):
# n = len(self.options.aux_plots)
xlimits = self.graph.get_x_limits(pid)
for ap in self.options.aux_plots:
ap.xlimits = xlimits
if not self.suppress_ylimits_update:
if hasattr(self.options, 'aux_plots'):
# n = len(self.options.aux_plots)
ylimits = self.graph.get_y_limits(pid)
for i, ap in enumerate(self.options.get_plotable_aux_plots()):
if i == pid:
ap.ylimits = ylimits
break
# for ap in self.options.aux_plots:
# ap.ylimits = ylimits
# ap = self.options.aux_plots[n - pid - 1]
# if not self.suppress_ylimits_update:
# ap.ylimits = self.graph.get_y_limits(pid)
# if not self.suppress_xlimits_update:
# ap.xlimits = self.graph.get_x_limits(pid)
# print('asdfpasdf', id(self.options), id(ap), ap.xlimits)
def get_valid_xbounds(self):
pass
# ===========================================================================
# aux plots
# ===========================================================================
def _do_aux_plot_filtering(self, scatter, po, vs, es):
    """Apply the aux plot's filters to ``vs``/``es`` and tag every
    analysis with the resulting temp status.

    Returns the (omits, invalids, outliers) index lists.
    NOTE(review): filtering only runs when ``po.filter_str`` is set, so
    a sigma-only filter configuration is skipped here -- confirm.
    """
    omits, invalids, outliers = [], [], []
    if po.filter_str:
        omits, invalids, outliers = self._get_aux_plot_filtered(po, vs, es)
    # Precedence mirrors the original checks: omit > invalid > outlier.
    tagged = ((omits, 'omit'), (invalids, 'invalid'), (outliers, 'outlier'))
    for idx, item in enumerate(self.sorted_analyses):
        status = next((tag for idxs, tag in tagged if idx in idxs), 'ok')
        item.set_temp_status(status)
    return omits, invalids, outliers
def _get_aux_plot_filtered(self, po, vs, es=None):
    """Return (omits, invalids, outliers) index lists for values ``vs``.

    Two filters apply: the plot option's expression filter
    (``filter_str``) and an n-sigma filter; each routes its matches to
    the list selected by the corresponding tag option.
    """
    omits = []
    invalids = []
    outliers = []
    fs = po.filter_str
    nsigma = po.sigma_filter_n
    if fs or nsigma:
        if es is None:
            # No errors supplied: treat every value as exact (0 error).
            es = zeros_like(vs)
        ufs = vstack((vs, es)).T
        filter_str_idx = None
        if fs:
            filter_str_idx = filter_ufloats(ufs, fs)
            ftag = po.filter_str_tag.lower()
            if ftag == 'invalid':
                invalids.extend(filter_str_idx)
            elif ftag == 'outlier':
                outliers.extend(filter_str_idx)
            else:
                omits.extend(filter_str_idx)
        if nsigma:
            # Mask values already caught by the expression filter --
            # presumably so they do not bias the sigma statistics
            # (confirm against pychron.core.filtering.sigma_filter).
            vs = ma.array(vs, mask=False)
            if filter_str_idx is not None:
                vs.mask[filter_str_idx] = True
            sigma_idx = sigma_filter(vs, nsigma)
            stag = po.sigma_filter_tag.lower()
            if stag == 'invalid':
                invalids.extend(sigma_idx)
            elif stag == 'outlier':
                outliers.extend(sigma_idx)
            else:
                omits.extend(sigma_idx)
    return omits, invalids, outliers
# Predefined aux plots: each helper maps a plot name to the analysis
# attribute it displays and delegates to self._plot_aux().
def _plot_raw_40_36(self, po, pid):
    # NOTE(review): unlike the sibling _plot_* helpers this one lacks a
    # ``pobj`` parameter -- confirm how callers dispatch to it.
    k = 'uAr40/Ar36'
    return self._plot_aux('noncor. <sup>40</sup>Ar/<sup>36</sup>Ar', k, po, pid)

def _plot_ic_40_36(self, po, pobj, pid):
    k = 'Ar40/Ar36'
    return self._plot_aux('<sup>40</sup>Ar/<sup>36</sup>Ar', k, po, pid)

def _plot_icf_40_36(self, po, pobj, pid):
    k = 'icf_40_36'
    return self._plot_aux('ifc <sup>40</sup>Ar/<sup>36</sup>Ar', k, po, pid)

def _plot_radiogenic_yield(self, po, pobj, pid):
    k = 'radiogenic_yield'
    return self._plot_aux('%<sup>40</sup>Ar*', k, po, pid)

def _plot_kcl(self, po, pobj, pid):
    k = 'kcl'
    return self._plot_aux('K/Cl', k, po, pid)

def _plot_kca(self, po, pobj, pid):
    k = 'kca'
    return self._plot_aux('K/Ca', k, po, pid)

def _plot_signal_k39(self, po, pobj, pid):
    k = 'k39'
    return self._plot_aux('<sup>39</sup>Ar<sub>K</sub>(fA)', k, po, pid)

def _plot_moles_k39(self, po, pobj, pid):
    k = 'moles_k39'
    return self._plot_aux('<sup>39</sup>Ar<sub>K</sub>(mol)', k, po, pid)

def _plot_moles_ar40(self, po, pobj, pid):
    k = 'Ar40'
    return self._plot_aux('<sup>40</sup>Ar<sub>tot</sub>(fA)', k, po, pid)

def _plot_moles_ar36(self, po, pobj, pid):
    k = 'Ar36'
    return self._plot_aux('<sup>36</sup>Ar<sub>tot</sub>(fA)', k, po, pid)

def _plot_extract_value(self, po, pobj, pid):
    k = 'extract_value'
    return self._plot_aux('Extract Value', k, po, pid)

def _get_aux_plot_data(self, k, scalar=1):
    # Split the ufloats for attribute ``k`` into parallel nominal-value
    # and std-dev lists for plotting.
    vs = list(self._unpack_attr(k, scalar=scalar))
    return [nominal_value(vi) for vi in vs], [std_dev(vi) for vi in vs]
def _handle_ylimits(self):
    # Subclass hook wired to the y-limit tool in _setup_plot(); no-op.
    pass

def _handle_xlimits(self):
    # Subclass hook wired to the x-limit tool in _setup_plot(); no-op.
    pass
def _add_point_labels(self, scatter, ans=None):
labels = []
f = self.options.analysis_label_format
if not f:
f = '{aliquot:02d}{step:}'
if ans is None:
ans = self.sorted_analyses
labels = [f.format(aliquot=si.aliquot, step=si.step, sample=si.sample, name=si.name,
label_name=si.label_name, runid=si.record_id) for si in ans]
font = self.options.label_font
ov = PointsLabelOverlay(component=scatter,
labels=labels,
label_box=self.options.label_box,
font=font)
scatter.underlays.append(ov)
def _add_error_bars(self, scatter, errors, axis, nsigma,
end_caps=True,
visible=True):
ebo = ErrorBarOverlay(component=scatter,
orientation=axis,
nsigma=nsigma,
visible=visible,
use_end_caps=end_caps)
scatter.underlays.append(ebo)
setattr(scatter, '{}error'.format(axis), ArrayDataSource(errors))
return ebo
def _add_scatter_inspector(self,
scatter,
inspector=None,
add_tool=True,
add_selection=True,
value_format=None,
additional_info=None,
index_tag=None,
index_attr=None,
convert_index=None,
items=None,
update_meta_func=None):
if add_tool:
broadcaster = BroadcasterTool()
scatter.tools.append(broadcaster)
if add_selection:
rect_tool = RectSelectionTool(scatter)
rect_overlay = RectSelectionOverlay(component=scatter,
tool=rect_tool)
scatter.overlays.append(rect_overlay)
broadcaster.tools.append(rect_tool)
if inspector is None:
if value_format is None:
def value_format(x):
return '{:0.5f}'.format(x)
if convert_index is None:
def convert_index(x):
return '{:0.3f}'.format(x)
if items is None:
items = self.sorted_analyses
inspector = AnalysisPointInspector(scatter,
use_pane=False,
analyses=items,
convert_index=convert_index,
index_tag=index_tag,
index_attr=index_attr,
value_format=value_format,
additional_info=additional_info)
pinspector_overlay = PointInspectorOverlay(component=scatter,
tool=inspector)
scatter.overlays.append(pinspector_overlay)
broadcaster.tools.append(inspector)
else:
if not isinstance(inspector, (list, tuple)):
inspector = (inspector,)
for i in inspector:
broadcaster.tools.append(i)
# # pinspector_overlay = PointInspectorOverlay(component=scatter,
# # tool=point_inspector)
# # print 'fff', inspector
#
# event_queue = {}
# for i in inspector:
# i.event_queue = event_queue
# i.on_trait_change(self._handle_inspection, 'inspector_item')
# # scatter.overlays.append(pinspector_overlay)
# broadcaster.tools.append(i)
if update_meta_func is None:
update_meta_func = self.update_graph_metadata
# u = lambda a, b, c, d: self.update_graph_metadata(a, b, c, d)
scatter.index.on_trait_change(update_meta_func, 'metadata_changed')
def update_graph_metadata(self, obj, name, old, new):
pass
# ===============================================================================
# labels
# ===============================================================================
def _add_info_label(self, plot, text_lines, font=None):
if font is None:
font = self.options.error_info_font
ov = FlowPlotLabel(text='\n'.join(text_lines),
overlay_position='inside top',
hjustify='left',
bgcolor=plot.bgcolor,
font=font,
component=plot)
plot.overlays.append(ov)
plot.tools.append(OverlayMoveTool(component=ov))
def _add_data_label(self, s, text, point, bgcolor='transparent',
label_position='top right', color=None, append=True, **kw):
if color is None:
color = s.color
label = FlowDataLabel(component=s, data_point=point,
label_position=label_position,
label_text=text,
border_visible=False,
bgcolor=bgcolor,
show_label_coords=False,
marker_visible=False,
text_color=color,
# setting the arrow to visible causes an error when reading with illustrator
# if the arrow is not drawn
arrow_visible=False,
**kw)
s.overlays.append(label)
tool = DataLabelTool(label)
if append:
label.tools.append(tool)
else:
label.tools.insert(0, tool)
label.on_trait_change(self._handle_overlay_move, 'label_position')
return label
def _build_label_text(self, x, we, n,
                      total_n=None,
                      mswd_args=None,
                      display_n=True,
                      display_mswd=True,
                      display_mswd_pvalue=False,
                      percent_error=False,
                      sig_figs=3,
                      mswd_sig_figs=3):
    """Compose the summary label: value +/- error [units] [MSWD] [n].

    :param x: nominal value (age or ratio)
    :param we: its error
    :param n: number of analyses included; rendered 'n= x' or 'n= x/total'
    :param mswd_args: (mswd, valid_mswd, _, pvalue) tuple or None
    :param sig_figs: significant figures, or 'Std' for standard formatting
    """
    # An MSWD is only meaningful with at least two analyses.
    display_mswd = n >= 2 and display_mswd
    if display_n:
        if total_n and n != total_n:
            n = 'n= {}/{}'.format(n, total_n)
        else:
            n = 'n= {}'.format(n)
    else:
        n = ''
    if mswd_args and display_mswd:
        mswd, valid_mswd, _, pvalue = mswd_args
        mswd = format_mswd(mswd, valid_mswd, n=mswd_sig_figs, include_tag=True)
        if display_mswd_pvalue:
            mswd = '{} pvalue={:0.2f}'.format(mswd, pvalue)
    else:
        mswd = ''
    if sig_figs == 'Std':
        sx, swe = standard_sigfigsfmt(x, we)
    else:
        sx = floatfmt(x, sig_figs)
        swe = floatfmt(we, sig_figs)
    # Ratio-type index attributes get no unit suffix; everything else is
    # treated as an age with units and an optional percent error.
    if self.options.index_attr in ('uF', 'Ar40/Ar36'):
        me = u'{} {}{}'.format(sx, PLUSMINUS, swe)
    else:
        age_units = self._get_age_units()
        pe = ''
        if percent_error:
            pe = '({})'.format(format_percent_error(x, we, include_percent_sign=True))
        me = u'{} {}{}{} {}'.format(sx, PLUSMINUS, swe, pe, age_units)
    return u'{} {} {}'.format(me, mswd, n)
def _get_age_units(self):
    """Age units taken from the first analysis' constants; 'Ma' when no
    analyses are present."""
    if self.analyses:
        return self.analyses[0].arar_constants.age_units
    return 'Ma'
def _set_renderer_selection(self, rs, sel):
    # Apply the same selection index list to each renderer's index
    # datasource so every renderer reflects the selection.
    meta = {'selections': sel}
    for rend in rs:
        rend.index.trait_set(metadata=meta)
def _handle_label_move(self, obj, name, old, new):
    """Trait listener: persist a data label's new position into the
    options of the aux plot that owns it.

    Mirrors _handle_overlay_move(); the trailing ``break`` (previously
    missing here but present in the twin handler) stops scanning once the
    owning plot is found, so later plots cannot index ``axps`` out of
    range or overwrite the stored position.
    """
    axps = [a for a in self.options.aux_plots if a.plot_enabled][::-1]
    for i, p in enumerate(self.graph.plots):
        # Find the plot container whose first renderer hosts this label.
        if next((pp for pp in p.plots.values()
                 if obj.component == pp[0]), None):
            axp = axps[i]
            # Coerce chaco's position value(s) to plain floats so they
            # can be stored with the plot options.
            if hasattr(new, '__iter__'):
                new = [float(ni) for ni in new]
            else:
                new = float(new)
            axp.set_overlay_position(obj.id, new)
            break
def _handle_overlay_move(self, obj, name, old, new):
    """Trait listener: persist an overlay's new position into the
    options of the aux plot that owns it."""
    # Enabled aux plots reversed -- presumably to align with the graph's
    # plot stacking order; confirm against how plots are built.
    axps = [a for a in self.options.aux_plots if a.plot_enabled][::-1]
    for i, p in enumerate(self.graph.plots):
        # Find the plot container whose first renderer hosts this overlay.
        if next((pp for pp in p.plots.values()
                 if obj.component == pp[0]), None):
            axp = axps[i]
            # Coerce chaco's position value(s) to plain floats so they
            # can be stored with the plot options.
            if hasattr(new, '__iter__'):
                new = [float(ni) for ni in new]
            else:
                new = float(new)
            axp.set_overlay_position(obj.id, new)
            break
def _analysis_group_hook(self, ag):
    # Subclass hook: customize a freshly created analysis group.
    pass

# ===============================================================================
# property get/set
# ===============================================================================
@cached_property
def _get_sorted_analyses(self):
    # Traits getter for ``sorted_analyses``: analyses ordered by
    # timestamp, optionally reversed via _reverse_sorted_analyses.
    return sorted(self.analyses,
                  key=self._cmp_analyses,
                  reverse=self._reverse_sorted_analyses)

@cached_property
def _get_analysis_group(self):
    # Traits getter for ``analysis_group``: lazily build a group from the
    # sorted analyses when none has been assigned explicitly.
    ag = self._analysis_group
    if ag is None:
        ag = self._analysis_group_klass(group_id=self.group_id,
                                        analyses=self.sorted_analyses,
                                        omit_by_tag=self.options.omit_by_tag)
        self._analysis_group_hook(ag)
    return ag

def _set_analysis_group(self, v):
    # Traits setter for ``analysis_group``.
    self._analysis_group = v
# ============= EOF =============================================
| UManPychron/pychron | pychron/pipeline/plot/plotter/arar_figure.py | Python | apache-2.0 | 24,577 |
# Copyright 2016 Matthew Egan Odendahl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module exports a set of expression replacement functions.
`In` substitutes for generator expressions (thus comprehensions also).
`generator` substitutes for `yield from` and `yield` in cases where
it would be incompatible with the statement module.
`Elif/Else` substitutes for nested `if`/`else`. (The expression form of
`if` lacks `elif`.)
Operator functions are already available in Python's included
`operator` module, so they are not provided here.
`entuple`, `enlist`, `enset`, and `edict` substitute for tuple, list,
set, and dictionary displays. `efset` makes a frozenset, and `ordict`
makes an OrderedDict.
Unlike statements, expressions already work in lambdas and eval,
so why replace them too?
Besides being easier to use with higher-order functions, the stack
and s-expression modules work primarily with function calls, so these
substitutes have uses in metaprogramming. In many cases you can use
expressions directly anyway, or convert a non-call expression to a
call with a lambda, but sometimes you need to manipulate the code of
the expression itself, in which case it must be made of calls to
begin with.
The simple case of addition illustrates the three styles.
>>> from core import identity
>>> from s_expression import S
>>> from operator import add
When used directly it's like a constant as far as S is concerned.
>>> S(identity,1+2)()
3
Wrap in lambda and you can change the arguments
>>> S(lambda x,y:x+y,1,2)()
3
function call version is more natural for s-expressions
>>> S(add,1,2)()
3
A more advanced case with generator expressions.
>>> from expression import entuple; from macro import L1
Direct use acts like a constant
>>> S(identity,[(x,y) for x in (1,2) for y in 'abc'])()
[(1, 'a'), (1, 'b'), (1, 'c'), (2, 'a'), (2, 'b'), (2, 'c')]
lambda version is adjustable with arguments.
>>> S(lambda z:[(x,y) for x in (1,2) for y in z],'abc')()
[(1, 'a'), (1, 'b'), (1, 'c'), (2, 'a'), (2, 'b'), (2, 'c')]
>>> S(list, # function call version using expression.In
... S(In,(1,2),S(L1,S.x,
... S(In,'abc',S(L1,S.y,
... S(entuple,S(entuple,S.x,S.y)))))))()
[(1, 'a'), (1, 'b'), (1, 'c'), (2, 'a'), (2, 'b'), (2, 'c')]
Why use the function call version when it's so much harder? Besides
the new `whilst` feature, the main advantage here is that you can
simplify it with a macro.
>>> from s_expression import macro
>>> @macro
... def genx(expr,*specs):
... if specs:
... return S(In,specs[1],S(L1,specs[0],S(genx,expr,*specs[2:])))
... else:
... return S(entuple,expr)
Now we've got generator s-expressions with arguments in familiar
Python order.
>>> S(list,
... S(genx, S(entuple, S.x, S.y), S.x, (1, 2), S.y, 'abc'))()
[(1, 'a'), (1, 'b'), (1, 'c'), (2, 'a'), (2, 'b'), (2, 'c')]
A more advanced macro could include Python's other features like `if`
filters and unpacking. But more importantly, since you can
metaprogram this, you can add new features in the macro that raw
Python lacks, like whilst.
"""
import threading
import weakref
from collections import OrderedDict
from functools import wraps
from itertools import chain
import sys
from drython.core import partition
from drython.statement import Atom, Pass, Print
if sys.version_info[0] == 2: # pragma: no cover
import Queue as Q
else: # pragma: no cover
import queue as Q
# entuple = unstar(tuple)
def entuple(*args):
    """
    returns args as a tuple
    >>> entuple(1, 2, 3)
    (1, 2, 3)
    """
    # *args is already collected into a fresh tuple per call.
    return args
# enlist = unstar(list)
def enlist(*args):
    """
    returns args as a list
    >>> enlist(1, 2, 3)
    [1, 2, 3]
    """
    return [*args]
# enset = unstar(set)
def enset(*args):
    """
    returns args as a set
    >>> enset(1, 2, 3) == {1, 2, 3}
    True
    """
    return {*args}
# efset = unstar(frozenset)
def efset(*args):
    """
    return args as a frozenset
    >>> efset(1, 2, 3) == frozenset([1, 2, 3])
    True
    """
    return frozenset({*args})
def edict(*args, **kwargs):
    """
    pairs consecutive positional args into a dictionary; keyword args
    are merged in afterwards (as string keys) and win on collision.
    >>> edict(1, 2)
    {1: 2}
    >>> edict(1, 2, 3, 4, 5, 6)[3]
    4
    >>> edict(1, 2,
    ...       3, 4) == {1: 2, 3: 4}
    True
    >>> edict(1, 2, c=3) == {1: 2, 'c': 3}
    True
    """
    result = dict(partition(args))
    result.update(kwargs)
    return result
def ordict(*args):
    """
    pairs args (in order) and makes an OrderedDict with them
    >>> ordict(1,2, 3,4)
    OrderedDict([(1, 2), (3, 4)])
    """
    # partition() groups the flat argument list into consecutive
    # (key, value) pairs; OrderedDict preserves their order.
    return OrderedDict(partition(args))
def In(target_list, comp_lambda):
    """
    Function-call counterpart of Python's generator expressions,
    similar to the list monad found in functional languages.

    Unlike Python comprehensions, which name the loop variable first,
    lambda scoping forces the loop body to come last. To allow
    nesting, comp_lambda must ALWAYS return an iterable, even at the
    innermost level -- wrap single results in a tuple.

    >>> list(In('abc', lambda c:
    ...      In('xyz', lambda d:
    ...          (c+d,) )))
    ['ax', 'ay', 'az', 'bx', 'by', 'bz', 'cx', 'cy', 'cz']

    Set comprehensions translate the same way:
    >>> set(In('abc', lambda c: (c,))) == {'a', 'b', 'c'}
    True

    Dict comprehensions need pair tuples (and star() for unpacking):
    >>> from drython.core import star
    >>> dict(In([('one', 1)], star(lambda k, v: ((k, v),) )))
    {'one': 1}
    """
    # The nested for/yield is a flatten. itertools.chain.from_iterable
    # is deliberately not used: `whilst` signals early exit by raising
    # StopIteration, which chain cannot pass through.
    for element in target_list:
        for produced in comp_lambda(element):
            yield produced
# the name "While" was already taken.
def whilst(b, x):
"""
Like using a takewhile in comprehensions. It aborts the remainder
of the iterable.
But unlike a StopIteration, the remaining other loops continue.
>>> from itertools import takewhile
>>> [(x,y) for x in takewhile(lambda x:x<3,range(10))
... for y in takewhile(lambda y:y<2,range(10))]
[(0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1)]
>>> list(In(range(10),lambda x:
... whilst(x<3, In(range(10), lambda y:
... whilst(y<2,((x,y),))))))
[(0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1)]
Notice that y has to be bound twice in the
list-comprehension/takewhile version, but not using In/whilst.
>>> [x+y for x in 'abc' for y in takewhile(lambda y: x!=y,'zabc')]
['az', 'bz', 'ba', 'cz', 'ca', 'cb']
>>> list(In('abc',lambda x:
... In('zabc',lambda y:
... whilst(x!=y, (x+y,) ))))
['az', 'bz', 'ba', 'cz', 'ca', 'cb']
This is different than if (or `when` inside `In`), which keeps
checking
>>> [x+y for x in 'abc' for y in 'zabc' if x!=y]
['az', 'ab', 'ac', 'bz', 'ba', 'bc', 'cz', 'ca', 'cb']
"""
if b:
return x
else:
raise StopIteration
def when(b, x):
    """
    The function-call analogue of the `if` filter in comprehensions,
    named for Clojure's :when keyword, which serves the same role in
    its comprehensions. Returns x when b is truthy, otherwise an
    empty tuple so an enclosing `In` loop simply skips this element.
    >>> list(x+y for x in 'zazbzcz' if x!='z' for y in 'abc' if x!=y)
    ['ab', 'ac', 'ba', 'bc', 'ca', 'cb']
    >>> list(In('zazbzcz', lambda x:
    ...     when(x!='z', In('abc', lambda y:
    ...         when(x!=y, (x+y,) )))))
    ['ab', 'ac', 'ba', 'bc', 'ca', 'cb']
    """
    if b:
        return x
    return ()
def generator(f):
    """
    Coroutine expression decorator
    The generator decorator injects the yield point function as the
    first argument, conventionally named `Yield`. Because it's named,
    it can cut though nested generators without `yield from`
    >>> @generator
    ... def foo(Yield):
    ...     Yield(1)
    ...     def subgen():
    ...         Yield(2)
    ...     subgen()
    ...     subgen()
    >>> list(foo())
    [1, 2, 2]
    The generator decorator can also do coroutines like the following.
    >>> def echo():
    ...     reply = yield
    ...     while True:
    ...         reply = yield reply
    >>> my_echo = echo()
    >>> my_echo.send(None)
    >>> my_echo.send(1)
    1
    >>> my_echo.send(2)
    2
    The @generator version of the above works the same way.
    >>> @generator
    ... def echo2(Yield):
    ...     reply = Yield()
    ...     while True:
    ...         reply = Yield(reply)
    >>> my_echo2 = echo2()
    >>> my_echo2.send(None)
    >>> my_echo2.send(1)
    1
    >>> my_echo2.send(2)
    2
    Now you can make coroutines out of pure expressions with the help
    of the statement module. This is the expression-only equivalent
    of the generator above.
    >>> from drython.statement import While,let,Atom,loop
    >>> echo3 = generator(lambda Yield:
    ...     let(lambda reply=Atom(Yield()):
    ...         While(lambda:True, lambda:
    ...             reply.swap(Yield))))()
    >>> echo3.send(None)
    >>> echo3.send(1)
    1
    >>> echo3.send(2)
    2
    and the more concise version using loop.
    >>> echo4 = generator(lambda Yield:
    ...     loop(lambda recur, reply=Yield():
    ...         recur(Yield(reply)))())()
    >>> echo4.send(None)
    >>> echo4.send(1)
    1
    >>> echo4.send(2)
    2
    """
    # Just for id. This can be shared between instances.
    raise_signal = object()

    @wraps(f)
    def wrapper(*args, **kwargs):
        # Two rendezvous queues connect the consumer (the genr
        # generator below) with the producer thread running f.
        # Values flow: Yield -> yield_q -> genr; send() -> send_q -> Yield.
        yield_q = Q.Queue(maxsize=2)
        send_q = Q.Queue(maxsize=2)

        # takes from send_q
        def Yield(arg=None):
            yield_q.put(arg)
            res = send_q.get()
            # raise_signal is a marker meaning "the next item is an
            # exception to re-raise on this side".
            if res is raise_signal:
                raise send_q.get()
            return res

        def run():
            # Producer thread body: run f to completion, then forward
            # a StopIteration (or any other exception) to the consumer.
            try:
                f(Yield, *args, **kwargs)
                raise StopIteration
            except BaseException as be:
                yield_q.put_nowait(raise_signal)
                yield_q.put_nowait(be)

        t = threading.Thread(target=run, name='@generator')
        t.daemon = True
        _terminator = Atom(None)

        def genr():
            # kills zombie thread when this is gc'd
            thread_terminator = _terminator
            t.start()
            # takes from yield_q
            while True:
                yielded = yield_q.get()
                if yielded is raise_signal:
                    raise yield_q.get()
                try:
                    sent = (yield yielded)
                except BaseException as be:
                    # Forward throw()/close() into the producer thread.
                    send_q.put(raise_signal)
                    send_q.put(be)
                else:
                    send_q.put(sent)

        the_generator = genr()

        def terminate(ref):
            # Weakref callback: unblock the producer thread with
            # GeneratorExit once the consumer generator is collected.
            send_q.put_nowait(raise_signal)
            send_q.put_nowait(GeneratorExit)

        _terminator.reset(weakref.ref(the_generator, terminate))
        return the_generator
    return wrapper
# a Smalltalk-like implementation of Lisp's COND.
# noinspection PyPep8Naming
def Elif(*thunks, **Else):
    """
    Cascading conditional expression.

    The positional thunks form (predicate, consequent) pairs, checked
    left to right: the first predicate to evaluate truthy has its
    consequent called and returned; later pairs are not evaluated.
    If no predicate matches, the optional Else thunk is called
    (default Pass, which returns None).

    >>> Elif()  # no pairs, default Else -> None
    >>> Elif(lambda: True, lambda: 1)
    1
    >>> Elif(Pass, lambda: 1)  # Pass() is None, so falsy
    >>> Elif(Pass, lambda: Print('a'),
    ...      Else=lambda: Print('b'))
    b
    >>> Elif(lambda: True, lambda: Print('a'),
    ...      Pass, lambda: Print('b'))
    a

    Recall that `a if b else c` is already an expression. These can
    be nested, but Elif may be easier to use for deep nesting.
    """
    assert len(thunks) % 2 == 0
    # Only the keyword 'Else' is accepted.
    assert set(Else) <= efset('Else')
    pairs = iter(thunks)
    for predicate in pairs:
        consequent = next(pairs)
        if predicate():
            return consequent()
    return Else.get('Else', Pass)()
| gilch/drython | drython/expression.py | Python | apache-2.0 | 13,683 |
# This file is part of the GBI project.
# Copyright (C) 2015 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| omniscale/gbi-server | app/gbi_server/search/__init__.py | Python | apache-2.0 | 654 |
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import warnings
from builtins import object, str
from contextlib import contextmanager
import mock
from packaging.version import Version
from pants.base.deprecated import (BadDecoratorNestingError, BadSemanticVersionError,
CodeRemovedError, InvalidSemanticVersionOrderingError,
MissingSemanticVersionError, NonDevSemanticVersionError,
deprecated, deprecated_conditional, deprecated_module,
warn_or_error)
from pants.util.collections import assert_single_element
from pants_test.test_base import TestBase
# Version used to fake "the currently running pants version" in tests
# that patch pants.base.deprecated.PANTS_SEMVER.
_FAKE_CUR_VERSION = '2.0.0.dev0'
class DeprecatedTest(TestBase):
    """Unit tests for the deprecation helpers in pants.base.deprecated."""

    # A removal version far enough in the future that deprecations
    # against it warn instead of raising CodeRemovedError.
    FUTURE_VERSION = '9999.9.9.dev0'

    @contextmanager
    def _test_deprecation(self, deprecation_expected=True):
        # Records warnings raised inside the block and yields a checker
        # that asserts exactly one DeprecationWarning was (or none were)
        # seen; the checker returns the message when one is expected.
        with warnings.catch_warnings(record=True) as seen_warnings:
            def assert_deprecation_warning():
                if deprecation_expected:
                    warning = assert_single_element(seen_warnings)
                    self.assertEqual(warning.category, DeprecationWarning)
                    return warning.message
                else:
                    self.assertEqual(0, len(seen_warnings))

            warnings.simplefilter('always')
            self.assertEqual(0, len(seen_warnings))
            yield assert_deprecation_warning

        # The deprecation (if any) must have fired by block exit.
        assert_deprecation_warning()

    def test_deprecated_function(self):
        expected_return = 'deprecated_function'

        @deprecated(self.FUTURE_VERSION)
        def deprecated_function():
            return expected_return

        with self._test_deprecation():
            self.assertEqual(expected_return, deprecated_function())

    def test_deprecated_method(self):
        expected_return = 'deprecated_method'

        class Test(object):
            @deprecated(self.FUTURE_VERSION)
            def deprecated_method(self):
                return expected_return

        with self._test_deprecation():
            self.assertEqual(expected_return, Test().deprecated_method())

    def test_deprecated_conditional_true(self):
        predicate = lambda: True
        with self._test_deprecation():
            deprecated_conditional(predicate, self.FUTURE_VERSION, "test hint message", stacklevel=0)

    def test_deprecated_conditional_false(self):
        predicate = lambda: False
        with self._test_deprecation(deprecation_expected=False):
            deprecated_conditional(predicate, self.FUTURE_VERSION, "test hint message", stacklevel=0)

    def test_deprecated_property(self):
        expected_return = 'deprecated_property'

        class Test(object):
            @property
            @deprecated(self.FUTURE_VERSION)
            def deprecated_property(self):
                return expected_return

        with self._test_deprecation():
            self.assertEqual(expected_return, Test().deprecated_property)

    def test_deprecated_module(self):
        with self._test_deprecation() as extract_deprecation_warning:
            # Note: Attempting to import here a dummy module that just calls deprecated_module() does not
            # properly trigger the deprecation, due to a bad interaction with pytest that I've not fully
            # understood. But we trust python to correctly execute modules on import, so just testing a
            # direct call of deprecated_module() here is fine.
            deprecated_module(self.FUTURE_VERSION, hint_message='Do not use me.')
            warning_message = str(extract_deprecation_warning())
            self.assertIn('module will be removed', warning_message)
            self.assertIn('Do not use me', warning_message)

    def test_deprecation_hint(self):
        hint_message = 'Find the foos, fast!'
        expected_return = 'deprecated_function'

        @deprecated(self.FUTURE_VERSION, hint_message=hint_message)
        def deprecated_function():
            return expected_return

        with self._test_deprecation() as extract_deprecation_warning:
            self.assertEqual(expected_return, deprecated_function())
            self.assertIn(hint_message, str(extract_deprecation_warning()))

    def test_deprecation_subject(self):
        subject = '`./pants blah`'
        expected_return = 'deprecated_function'

        @deprecated(self.FUTURE_VERSION, subject=subject)
        def deprecated_function():
            return expected_return

        with self._test_deprecation() as extract_deprecation_warning:
            self.assertEqual(expected_return, deprecated_function())
            self.assertIn(subject, str(extract_deprecation_warning()))

    def test_removal_version_required(self):
        with self.assertRaises(MissingSemanticVersionError):
            @deprecated(None)
            def test_func():
                pass

    def test_removal_version_bad(self):
        # Non-parseable, non-string, and partially-numeric versions all fail.
        with self.assertRaises(BadSemanticVersionError):
            warn_or_error('a.a.a', 'dummy description')

        with self.assertRaises(BadSemanticVersionError):
            @deprecated('a.a.a')
            def test_func0():
                pass

        with self.assertRaises(BadSemanticVersionError):
            warn_or_error(1.0, 'dummy description')

        with self.assertRaises(BadSemanticVersionError):
            @deprecated(1.0)
            def test_func1():
                pass

        with self.assertRaises(BadSemanticVersionError):
            warn_or_error('1.a.0', 'dummy description')

        with self.assertRaises(BadSemanticVersionError):
            @deprecated('1.a.0')
            def test_func1a():
                pass

    def test_removal_version_non_dev(self):
        # Removal versions must be .devN pre-releases.
        with self.assertRaises(NonDevSemanticVersionError):
            @deprecated('1.0.0')
            def test_func1a():
                pass

    @mock.patch('pants.base.deprecated.PANTS_SEMVER', Version(_FAKE_CUR_VERSION))
    def test_removal_version_same(self):
        # Reaching the removal version exactly raises CodeRemovedError.
        with self.assertRaises(CodeRemovedError):
            warn_or_error(_FAKE_CUR_VERSION, 'dummy description')

        @deprecated(_FAKE_CUR_VERSION)
        def test_func():
            pass

        with self.assertRaises(CodeRemovedError):
            test_func()

    def test_removal_version_lower(self):
        # A removal version already in the past raises CodeRemovedError.
        with self.assertRaises(CodeRemovedError):
            warn_or_error('0.0.27.dev0', 'dummy description')

        @deprecated('0.0.27.dev0')
        def test_func():
            pass

        with self.assertRaises(CodeRemovedError):
            test_func()

    def test_bad_decorator_nesting(self):
        # @deprecated must be applied below @property, not above it.
        with self.assertRaises(BadDecoratorNestingError):
            class Test(object):
                @deprecated(self.FUTURE_VERSION)
                @property
                def test_prop(this):
                    pass

    def test_deprecation_start_version_validation(self):
        with self.assertRaises(BadSemanticVersionError):
            warn_or_error(removal_version='1.0.0.dev0',
                          deprecated_entity_description='dummy',
                          deprecation_start_version='1.a.0')

        # The start version must not come after the removal version.
        with self.assertRaises(InvalidSemanticVersionOrderingError):
            warn_or_error(removal_version='0.0.0.dev0',
                          deprecated_entity_description='dummy',
                          deprecation_start_version='1.0.0.dev0')

    @mock.patch('pants.base.deprecated.PANTS_SEMVER', Version(_FAKE_CUR_VERSION))
    def test_deprecation_start_period(self):
        with self.assertRaises(CodeRemovedError):
            warn_or_error(removal_version=_FAKE_CUR_VERSION,
                          deprecated_entity_description='dummy',
                          deprecation_start_version='1.0.0.dev0')

        with self.warnings_catcher() as w:
            warn_or_error(removal_version='999.999.999.dev999',
                          deprecated_entity_description='dummy',
                          deprecation_start_version=_FAKE_CUR_VERSION)
            self.assertWarning(w, DeprecationWarning,
                               'DEPRECATED: dummy will be removed in version 999.999.999.dev999.')

        # A start version in the future produces no warning at all.
        self.assertIsNone(
            warn_or_error(removal_version='999.999.999.dev999',
                          deprecated_entity_description='dummy',
                          deprecation_start_version='500.0.0.dev0'))
| twitter/pants | tests/python/pants_test/base/test_deprecated.py | Python | apache-2.0 | 7,818 |
#!/usr/bin/env python
# Copyright 2014-2019 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
Non-relativistic nuclear spin-rotation tensors for UKS
'''
from pyscf.prop.nsr import uhf as uhf_nsr
from pyscf.prop.nmr import uks as uks_nmr
class NSR(uhf_nsr.NSR):
    '''Nuclear-spin rotation tensors for UKS'''
    # Reuse the UKS NMR machinery for the Fock response and the
    # first-order MO coefficients; everything else comes from the
    # UHF NSR base class.
    get_fock = uks_nmr.get_fock
    solve_mo1 = uks_nmr.solve_mo1
from pyscf import lib
from pyscf import dft
# Expose the tensor calculation as dft.UKS(...).NSR() on both the
# plain and the symmetry-adapted UKS classes.
dft.uks.UKS.NSR = dft.uks_symm.UKS.NSR = lib.class_as_method(NSR)
if __name__ == '__main__':
    from pyscf import gto
    from pyscf import dft
    from pyscf import lib

    # HF molecule at B3LYP: kernel() computes the NSR tensor m.
    mol = gto.Mole()
    mol.verbose = 7
    mol.output = '/dev/null'
    mol.atom = '''h , 0. 0. 0.917
                  f , 0. 0. 0.
                  '''
    mol.basis = 'dzp'
    mol.build()
    mf = dft.UKS(mol).run(xc='b3lyp')
    rotg = mf.NSR()
    m = rotg.kernel()
    # NOTE(review): the subtracted constants look like stored regression
    # reference values -- the printed differences should be ~0; confirm.
    print(m[1,0,0] - -301.49652448221707)
    print(lib.finger(m) - 28.57893850199683)
    # Repeat with a fixed gauge origin instead of GIAOs.
    rotg.gauge_orig = (0,0,.917/lib.param.BOHR)
    m = rotg.kernel()
    print(m[0,0,0] - 277.173892536396)
    print(lib.finger(m) - 96.92616726791988)

    # CO with the 13C isotope selected via nucprop, at BP86.
    mol.atom = '''C , 0. 0. 0.
                  O , 0. 0. 1.1283
                  '''
    mol.basis = 'ccpvdz'
    mol.nucprop = {'C': {'mass': 13}}
    mol.build()
    mf = dft.UKS(mol).run(xc='bp86')
    rotg = NSR(mf)
    m = rotg.kernel()
    print(m[0,0,0] - -32.23298865237305)
    print(lib.finger(m) - -11.278686427378966)

    # Water with default LDA functional.
    mol.atom = '''O 0. 0. 0.
                  H 0. -0.757 0.587
                  H 0. 0.757 0.587'''
    mol.basis = 'ccpvdz'
    mol.build()
    mf = dft.UKS(mol).run()
    rotg = NSR(mf)
    m = rotg.kernel()
    print(lib.finger(m) - -66.94250282318671)
| gkc1000/pyscf | pyscf/prop/nsr/uks.py | Python | apache-2.0 | 2,350 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the storage file CLI arguments helper."""
import argparse
import unittest
from plaso.cli import tools
from plaso.cli.helpers import storage_file
from plaso.lib import errors
from tests.cli import test_lib as cli_test_lib
class StorageFileArgumentsHelperTest(cli_test_lib.CLIToolTestCase):
  """Tests for the storage file CLI arguments helper."""

  # pylint: disable=no-member,protected-access

  # Expected argparse help text produced by AddArguments().
  _EXPECTED_OUTPUT = """\
usage: cli_helper.py [STORAGE_FILE]

Test argument parser.

positional arguments:
  STORAGE_FILE  Path to a storage file.
"""

  def testAddArguments(self):
    """Tests the AddArguments function."""
    argument_parser = argparse.ArgumentParser(
        prog='cli_helper.py', description='Test argument parser.',
        add_help=False,
        formatter_class=cli_test_lib.SortedArgumentsHelpFormatter)

    storage_file.StorageFileArgumentsHelper.AddArguments(argument_parser)

    # The generated help output must match the expected layout exactly.
    output = self._RunArgparseFormatHelp(argument_parser)
    self.assertEqual(output, self._EXPECTED_OUTPUT)

  def testParseOptions(self):
    """Tests the ParseOptions function."""
    test_tool = tools.CLITool()

    options = cli_test_lib.TestOptions()
    options.storage_file = self._GetTestFilePath(['test.plaso'])

    storage_file.StorageFileArgumentsHelper.ParseOptions(options, test_tool)
    self.assertEqual(test_tool._storage_file_path, options.storage_file)

    # Passing a non-tool object must raise BadConfigObject.
    with self.assertRaises(errors.BadConfigObject):
      storage_file.StorageFileArgumentsHelper.ParseOptions(options, None)
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
| Onager/plaso | tests/cli/helpers/storage_file.py | Python | apache-2.0 | 1,615 |
import datetime
import json
import re
from unittest import mock
import pytest
from multidict import CIMultiDict
from aiohttp import hdrs, signals
from aiohttp.protocol import (HttpVersion, HttpVersion10, HttpVersion11,
RawRequestMessage)
from aiohttp.web import (ContentCoding, Request, Response, StreamResponse,
json_response)
def make_request(method, path, headers=None,
                 version=HttpVersion11, **kwargs):
    """Build a mock-backed Request for the given method/path.

    Extra keyword arguments are forwarded to request_from_message()
    (e.g. app=..., writer=...).
    """
    # Fix: the original used a mutable default argument
    # (headers=CIMultiDict()); that single shared instance would leak
    # state between calls if any caller mutated it. Use a None
    # sentinel and build a fresh empty CIMultiDict per call instead.
    if headers is None:
        headers = CIMultiDict()
    # raw_headers mirrors `headers` as encoded (name, value) pairs.
    message = RawRequestMessage(method, path, version, headers,
                                [(k.encode('utf-8'), v.encode('utf-8'))
                                 for k, v in headers.items()],
                                False, False)
    return request_from_message(message, **kwargs)
def request_from_message(message, **kwargs):
    """Wrap a RawRequestMessage in a Request backed by mocks.

    Recognized kwargs: `app` (defaults to a Mock with debug off and a
    real on_response_prepare signal) and `writer` (defaults to a Mock).
    """
    app = kwargs.get('app') or mock.Mock()
    app._debug = False
    # A real Signal so tests can append callbacks to on_response_prepare.
    app.on_response_prepare = signals.Signal(app)
    payload = mock.Mock()
    transport = mock.Mock()
    reader = mock.Mock()
    writer = kwargs.get('writer') or mock.Mock()
    req = Request(app, message, payload,
                  transport, reader, writer)
    return req
# --- StreamResponse defaults, Content-Length/Content-Type/charset and
# --- Last-Modified handling ---


def test_stream_response_ctor():
    resp = StreamResponse()
    assert 200 == resp.status
    assert resp.keep_alive is None


def test_content_length():
    resp = StreamResponse()
    assert resp.content_length is None


def test_content_length_setter():
    resp = StreamResponse()

    resp.content_length = 234
    assert 234 == resp.content_length


def test_drop_content_length_header_on_setting_len_to_None():
    resp = StreamResponse()

    resp.content_length = 1
    assert "1" == resp.headers['Content-Length']
    resp.content_length = None
    assert 'Content-Length' not in resp.headers


def test_set_content_length_to_None_on_non_set():
    # Setting to None twice is idempotent and never creates the header.
    resp = StreamResponse()

    resp.content_length = None
    assert 'Content-Length' not in resp.headers
    resp.content_length = None
    assert 'Content-Length' not in resp.headers


def test_setting_content_type():
    resp = StreamResponse()

    resp.content_type = 'text/html'
    assert 'text/html' == resp.headers['content-type']


def test_setting_charset():
    resp = StreamResponse()

    resp.content_type = 'text/html'
    resp.charset = 'koi8-r'
    assert 'text/html; charset=koi8-r' == resp.headers['content-type']


def test_default_charset():
    resp = StreamResponse()

    assert resp.charset is None


def test_reset_charset():
    resp = StreamResponse()

    resp.content_type = 'text/html'
    resp.charset = None
    assert resp.charset is None


def test_reset_charset_after_setting():
    resp = StreamResponse()

    resp.content_type = 'text/html'
    resp.charset = 'koi8-r'
    resp.charset = None
    assert resp.charset is None


def test_charset_without_content_type():
    # A charset needs a content type to attach to.
    resp = StreamResponse()

    with pytest.raises(RuntimeError):
        resp.charset = 'koi8-r'


def test_last_modified_initial():
    resp = StreamResponse()
    assert resp.last_modified is None


def test_last_modified_string():
    resp = StreamResponse()

    dt = datetime.datetime(1990, 1, 2, 3, 4, 5, 0, datetime.timezone.utc)
    resp.last_modified = 'Mon, 2 Jan 1990 03:04:05 GMT'
    assert resp.last_modified == dt


def test_last_modified_timestamp():
    # Both int and float epoch timestamps are accepted.
    resp = StreamResponse()

    dt = datetime.datetime(1970, 1, 1, 0, 0, 0, 0, datetime.timezone.utc)

    resp.last_modified = 0
    assert resp.last_modified == dt

    resp.last_modified = 0.0
    assert resp.last_modified == dt


def test_last_modified_datetime():
    resp = StreamResponse()

    dt = datetime.datetime(2001, 2, 3, 4, 5, 6, 0, datetime.timezone.utc)
    resp.last_modified = dt
    assert resp.last_modified == dt


def test_last_modified_reset():
    resp = StreamResponse()

    resp.last_modified = 0
    resp.last_modified = None
    assert resp.last_modified is None
# --- prepare()/start(), chunked encoding and compression negotiation ---
# These are old-style aiohttp coroutines: plain generator functions
# driven by the pytest.mark.run_loop plugin via `yield from`.


@pytest.mark.run_loop
def test_start():
    req = make_request('GET', '/')
    resp = StreamResponse()
    assert resp.keep_alive is None

    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        msg = yield from resp.prepare(req)
        assert msg.send_headers.called
        # prepare() is idempotent for the same request.
        msg2 = yield from resp.prepare(req)
        assert msg is msg2

    assert resp.keep_alive

    # Preparing the same response against a second request is an error.
    req2 = make_request('GET', '/')
    with pytest.raises(RuntimeError):
        yield from resp.prepare(req2)


@pytest.mark.run_loop
def test_chunked_encoding():
    req = make_request('GET', '/')
    resp = StreamResponse()
    assert not resp.chunked

    resp.enable_chunked_encoding()
    assert resp.chunked

    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        msg = yield from resp.prepare(req)
        assert msg.chunked


@pytest.mark.run_loop
def test_chunk_size():
    req = make_request('GET', '/')
    resp = StreamResponse()
    assert not resp.chunked

    resp.enable_chunked_encoding(chunk_size=8192)
    assert resp.chunked

    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        msg = yield from resp.prepare(req)
        assert msg.chunked
        msg.add_chunking_filter.assert_called_with(8192)
        assert msg.filter is not None


@pytest.mark.run_loop
def test_chunked_encoding_forbidden_for_http_10():
    # HTTP/1.0 has no chunked transfer encoding.
    req = make_request('GET', '/', version=HttpVersion10)
    resp = StreamResponse()
    resp.enable_chunked_encoding()

    with pytest.raises(RuntimeError) as ctx:
        yield from resp.prepare(req)
    assert re.match("Using chunked encoding is forbidden for HTTP/1.0",
                    str(ctx.value))


@pytest.mark.run_loop
def test_compression_no_accept():
    # Without an Accept-Encoding header no compression filter is added.
    req = make_request('GET', '/')
    resp = StreamResponse()
    assert not resp.chunked

    assert not resp.compression
    resp.enable_compression()
    assert resp.compression

    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        msg = yield from resp.prepare(req)
        assert not msg.add_compression_filter.called


@pytest.mark.run_loop
def test_force_compression_no_accept_backwards_compat():
    # force=True compresses even without Accept-Encoding.
    req = make_request('GET', '/')
    resp = StreamResponse()
    assert not resp.chunked

    assert not resp.compression
    resp.enable_compression(force=True)
    assert resp.compression

    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        msg = yield from resp.prepare(req)
        assert msg.add_compression_filter.called
        assert msg.filter is not None


@pytest.mark.run_loop
def test_force_compression_false_backwards_compat():
    req = make_request('GET', '/')
    resp = StreamResponse()

    assert not resp.compression
    resp.enable_compression(force=False)
    assert resp.compression

    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        msg = yield from resp.prepare(req)
        assert not msg.add_compression_filter.called


@pytest.mark.run_loop
def test_compression_default_coding():
    # With both codings offered, deflate is the default choice.
    req = make_request(
        'GET', '/',
        headers=CIMultiDict({hdrs.ACCEPT_ENCODING: 'gzip, deflate'}))
    resp = StreamResponse()
    assert not resp.chunked

    assert not resp.compression
    resp.enable_compression()
    assert resp.compression

    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        msg = yield from resp.prepare(req)

    msg.add_compression_filter.assert_called_with('deflate')
    assert 'deflate' == resp.headers.get(hdrs.CONTENT_ENCODING)
    assert msg.filter is not None


@pytest.mark.run_loop
def test_force_compression_deflate():
    req = make_request(
        'GET', '/',
        headers=CIMultiDict({hdrs.ACCEPT_ENCODING: 'gzip, deflate'}))
    resp = StreamResponse()

    resp.enable_compression(ContentCoding.deflate)
    assert resp.compression

    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        msg = yield from resp.prepare(req)
    msg.add_compression_filter.assert_called_with('deflate')
    assert 'deflate' == resp.headers.get(hdrs.CONTENT_ENCODING)


@pytest.mark.run_loop
def test_force_compression_no_accept_deflate():
    req = make_request('GET', '/')
    resp = StreamResponse()

    resp.enable_compression(ContentCoding.deflate)
    assert resp.compression

    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        msg = yield from resp.prepare(req)
    msg.add_compression_filter.assert_called_with('deflate')
    assert 'deflate' == resp.headers.get(hdrs.CONTENT_ENCODING)


@pytest.mark.run_loop
def test_force_compression_gzip():
    req = make_request(
        'GET', '/',
        headers=CIMultiDict({hdrs.ACCEPT_ENCODING: 'gzip, deflate'}))
    resp = StreamResponse()

    resp.enable_compression(ContentCoding.gzip)
    assert resp.compression

    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        msg = yield from resp.prepare(req)
    msg.add_compression_filter.assert_called_with('gzip')
    assert 'gzip' == resp.headers.get(hdrs.CONTENT_ENCODING)


@pytest.mark.run_loop
def test_force_compression_no_accept_gzip():
    req = make_request('GET', '/')
    resp = StreamResponse()

    resp.enable_compression(ContentCoding.gzip)
    assert resp.compression

    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        msg = yield from resp.prepare(req)
    msg.add_compression_filter.assert_called_with('gzip')
    assert 'gzip' == resp.headers.get(hdrs.CONTENT_ENCODING)


@pytest.mark.run_loop
def test_delete_content_length_if_compression_enabled():
    # Compression invalidates any previously known Content-Length.
    req = make_request('GET', '/')
    resp = Response(body=b'answer')
    assert 6 == resp.content_length

    resp.enable_compression(ContentCoding.gzip)
    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        yield from resp.prepare(req)
    assert resp.content_length is None
# --- write()/write_eof() state machine, cookies and status/reason ---


@pytest.mark.run_loop
def test_write_non_byteish():
    resp = StreamResponse()
    yield from resp.prepare(make_request('GET', '/'))

    # Only bytes-like payloads are accepted.
    with pytest.raises(AssertionError):
        resp.write(123)


def test_write_before_start():
    resp = StreamResponse()

    with pytest.raises(RuntimeError):
        resp.write(b'data')


@pytest.mark.run_loop
def test_cannot_write_after_eof():
    resp = StreamResponse()
    writer = mock.Mock()
    yield from resp.prepare(make_request('GET', '/', writer=writer))

    resp.write(b'data')
    writer.drain.return_value = ()
    yield from resp.write_eof()
    writer.write.reset_mock()

    with pytest.raises(RuntimeError):
        resp.write(b'next data')
    assert not writer.write.called


@pytest.mark.run_loop
def test_cannot_write_eof_before_headers():
    resp = StreamResponse()

    with pytest.raises(RuntimeError):
        yield from resp.write_eof()


@pytest.mark.run_loop
def test_cannot_write_eof_twice():
    # A second write_eof() is a silent no-op, not an error.
    resp = StreamResponse()
    writer = mock.Mock()
    yield from resp.prepare(make_request('GET', '/', writer=writer))

    resp.write(b'data')
    writer.drain.return_value = ()
    yield from resp.write_eof()
    assert writer.write.called

    writer.write.reset_mock()
    yield from resp.write_eof()
    assert not writer.write.called


@pytest.mark.run_loop
def test_write_returns_drain():
    resp = StreamResponse()
    yield from resp.prepare(make_request('GET', '/'))

    assert () == resp.write(b'data')


@pytest.mark.run_loop
def test_write_returns_empty_tuple_on_empty_data():
    resp = StreamResponse()
    yield from resp.prepare(make_request('GET', '/'))

    assert () == resp.write(b'')


def test_force_close():
    resp = StreamResponse()

    assert resp.keep_alive is None
    resp.force_close()
    assert resp.keep_alive is False


def test_response_cookies():
    resp = StreamResponse()

    assert resp.cookies == {}
    assert str(resp.cookies) == ''

    resp.set_cookie('name', 'value')
    assert str(resp.cookies) == 'Set-Cookie: name=value; Path=/'
    resp.set_cookie('name', 'other_value')
    assert str(resp.cookies) == 'Set-Cookie: name=other_value; Path=/'

    resp.cookies['name'] = 'another_other_value'
    resp.cookies['name']['max-age'] = 10
    assert (str(resp.cookies) ==
            'Set-Cookie: name=another_other_value; Max-Age=10; Path=/')

    # Deletion is expressed as an already-expired cookie.
    resp.del_cookie('name')
    expected = ('Set-Cookie: name=("")?; '
                'expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/')
    assert re.match(expected, str(resp.cookies))

    resp.set_cookie('name', 'value', domain='local.host')
    expected = 'Set-Cookie: name=value; Domain=local.host; Path=/'
    assert str(resp.cookies) == expected


def test_response_cookie_path():
    resp = StreamResponse()

    assert resp.cookies == {}

    resp.set_cookie('name', 'value', path='/some/path')
    assert str(resp.cookies) == 'Set-Cookie: name=value; Path=/some/path'
    resp.set_cookie('name', 'value', expires='123')
    assert (str(resp.cookies) ==
            'Set-Cookie: name=value; expires=123; Path=/')
    resp.set_cookie('name', 'value', domain='example.com',
                    path='/home', expires='123', max_age='10',
                    secure=True, httponly=True, version='2.0')
    assert (str(resp.cookies).lower() == 'set-cookie: name=value; '
            'domain=example.com; '
            'expires=123; '
            'httponly; '
            'max-age=10; '
            'path=/home; '
            'secure; '
            'version=2.0')


def test_response_cookie__issue_del_cookie():
    resp = StreamResponse()

    assert resp.cookies == {}
    assert str(resp.cookies) == ''

    resp.del_cookie('name')
    expected = ('Set-Cookie: name=("")?; '
                'expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/')
    assert re.match(expected, str(resp.cookies))


def test_cookie_set_after_del():
    resp = StreamResponse()

    resp.del_cookie('name')
    resp.set_cookie('name', 'val')
    # check for Max-Age dropped
    expected = 'Set-Cookie: name=val; Path=/'
    assert str(resp.cookies) == expected


def test_set_status_with_reason():
    resp = StreamResponse()

    resp.set_status(200, "Everithing is fine!")
    assert 200 == resp.status
    assert "Everithing is fine!" == resp.reason
@pytest.mark.run_loop
def test_start_force_close():
req = make_request('GET', '/')
resp = StreamResponse()
resp.force_close()
assert not resp.keep_alive
msg = yield from resp.prepare(req)
assert not resp.keep_alive
assert msg.closing
@pytest.mark.run_loop
def test___repr__():
req = make_request('GET', '/path/to')
resp = StreamResponse(reason=301)
yield from resp.prepare(req)
assert "<StreamResponse 301 GET /path/to >" == repr(resp)
def test___repr__not_started():
    # repr() must be safe to call before prepare()/start(); no request
    # method or path is available yet, so "not started" is shown instead.
    resp = StreamResponse(reason=301)
    assert "<StreamResponse 301 not started>" == repr(resp)
@pytest.mark.run_loop
def test_keep_alive_http10_default():
message = RawRequestMessage('GET', '/', HttpVersion10, CIMultiDict(),
[], True, False)
req = request_from_message(message)
resp = StreamResponse()
yield from resp.prepare(req)
assert not resp.keep_alive
@pytest.mark.run_loop
def test_keep_alive_http10_switched_on():
headers = CIMultiDict(Connection='keep-alive')
message = RawRequestMessage('GET', '/', HttpVersion10, headers,
[(b'Connection', b'keep-alive')],
False, False)
req = request_from_message(message)
resp = StreamResponse()
yield from resp.prepare(req)
assert resp.keep_alive is True
@pytest.mark.run_loop
def test_keep_alive_http09():
headers = CIMultiDict(Connection='keep-alive')
message = RawRequestMessage('GET', '/', HttpVersion(0, 9), headers,
[(b'Connection', b'keep-alive')],
False, False)
req = request_from_message(message)
resp = StreamResponse()
yield from resp.prepare(req)
assert not resp.keep_alive
def test_start_twice(warning):
req = make_request('GET', '/')
resp = StreamResponse()
with warning(DeprecationWarning):
impl1 = resp.start(req)
impl2 = resp.start(req)
assert impl1 is impl2
@pytest.mark.run_loop
def test_prepare_calls_signal():
app = mock.Mock()
req = make_request('GET', '/', app=app)
resp = StreamResponse()
sig = mock.Mock()
app.on_response_prepare.append(sig)
yield from resp.prepare(req)
sig.assert_called_with(req, resp)
def test_default_nodelay():
resp = StreamResponse()
assert resp.tcp_nodelay
def test_set_tcp_nodelay_before_start():
resp = StreamResponse()
resp.set_tcp_nodelay(False)
assert not resp.tcp_nodelay
resp.set_tcp_nodelay(True)
assert resp.tcp_nodelay
@pytest.mark.run_loop
def test_set_tcp_nodelay_on_start():
req = make_request('GET', '/')
resp = StreamResponse()
with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
resp_impl = yield from resp.prepare(req)
resp_impl.transport.set_tcp_nodelay.assert_called_with(True)
resp_impl.transport.set_tcp_cork.assert_called_with(False)
@pytest.mark.run_loop
def test_set_tcp_nodelay_after_start():
req = make_request('GET', '/')
resp = StreamResponse()
with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
resp_impl = yield from resp.prepare(req)
resp_impl.transport.set_tcp_cork.assert_called_with(False)
resp_impl.transport.set_tcp_nodelay.assert_called_with(True)
resp.set_tcp_nodelay(False)
assert not resp.tcp_nodelay
resp_impl.transport.set_tcp_nodelay.assert_called_with(False)
resp.set_tcp_nodelay(True)
assert resp.tcp_nodelay
resp_impl.transport.set_tcp_nodelay.assert_called_with(True)
def test_default_cork():
    # TCP_CORK must be off by default on a fresh response.
    resp = StreamResponse()
    assert not resp.tcp_cork
def test_set_tcp_cork_before_start():
resp = StreamResponse()
resp.set_tcp_cork(True)
assert resp.tcp_cork
resp.set_tcp_cork(False)
assert not resp.tcp_cork
@pytest.mark.run_loop
def test_set_tcp_cork_on_start():
req = make_request('GET', '/')
resp = StreamResponse()
resp.set_tcp_cork(True)
with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
resp_impl = yield from resp.prepare(req)
resp_impl.transport.set_tcp_nodelay.assert_called_with(False)
resp_impl.transport.set_tcp_cork.assert_called_with(True)
@pytest.mark.run_loop
def test_set_tcp_cork_after_start():
req = make_request('GET', '/')
resp = StreamResponse()
with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
resp_impl = yield from resp.prepare(req)
resp_impl.transport.set_tcp_cork.assert_called_with(False)
resp.set_tcp_cork(True)
assert resp.tcp_cork
resp_impl.transport.set_tcp_cork.assert_called_with(True)
resp.set_tcp_cork(False)
assert not resp.tcp_cork
resp_impl.transport.set_tcp_cork.assert_called_with(False)
# Response class
def test_response_ctor():
resp = Response()
assert 200 == resp.status
assert 'OK' == resp.reason
assert resp.body is None
assert 0 == resp.content_length
assert CIMultiDict([('CONTENT-LENGTH', '0')]) == resp.headers
def test_ctor_with_headers_and_status():
resp = Response(body=b'body', status=201, headers={'Age': '12'})
assert 201 == resp.status
assert b'body' == resp.body
assert 4 == resp.content_length
assert (CIMultiDict([('AGE', '12'), ('CONTENT-LENGTH', '4')]) ==
resp.headers)
def test_ctor_content_type():
resp = Response(content_type='application/json')
assert 200 == resp.status
assert 'OK' == resp.reason
assert (CIMultiDict([('CONTENT-TYPE', 'application/json'),
('CONTENT-LENGTH', '0')]) ==
resp.headers)
def test_ctor_text_body_combined():
with pytest.raises(ValueError):
Response(body=b'123', text='test text')
def test_ctor_text():
resp = Response(text='test text')
assert 200 == resp.status
assert 'OK' == resp.reason
assert (CIMultiDict(
[('CONTENT-TYPE', 'text/plain; charset=utf-8'),
('CONTENT-LENGTH', '9')]) == resp.headers)
assert resp.body == b'test text'
assert resp.text == 'test text'
def test_ctor_charset():
resp = Response(text='текст', charset='koi8-r')
assert 'текст'.encode('koi8-r') == resp.body
assert 'koi8-r' == resp.charset
def test_ctor_charset_default_utf8():
resp = Response(text='test test', charset=None)
assert 'utf-8' == resp.charset
def test_ctor_charset_in_content_type():
with pytest.raises(ValueError):
Response(text='test test', content_type='text/plain; charset=utf-8')
def test_ctor_charset_without_text():
resp = Response(content_type='text/plain', charset='koi8-r')
assert 'koi8-r' == resp.charset
def test_ctor_both_content_type_param_and_header_with_text():
with pytest.raises(ValueError):
Response(headers={'Content-Type': 'application/json'},
content_type='text/html', text='text')
def test_ctor_both_charset_param_and_header_with_text():
with pytest.raises(ValueError):
Response(headers={'Content-Type': 'application/json'},
charset='koi8-r', text='text')
def test_ctor_both_content_type_param_and_header():
with pytest.raises(ValueError):
Response(headers={'Content-Type': 'application/json'},
content_type='text/html')
def test_ctor_both_charset_param_and_header():
with pytest.raises(ValueError):
Response(headers={'Content-Type': 'application/json'},
charset='koi8-r')
def test_assign_nonbyteish_body():
resp = Response(body=b'data')
with pytest.raises(TypeError):
resp.body = 123
assert b'data' == resp.body
assert 4 == resp.content_length
def test_assign_nonstr_text():
resp = Response(text='test')
with pytest.raises(TypeError):
resp.text = b'123'
assert b'test' == resp.body
assert 4 == resp.content_length
@pytest.mark.run_loop
def test_send_headers_for_empty_body():
writer = mock.Mock()
req = make_request('GET', '/', writer=writer)
resp = Response()
writer.drain.return_value = ()
buf = b''
def append(data):
nonlocal buf
buf += data
writer.write.side_effect = append
yield from resp.prepare(req)
yield from resp.write_eof()
txt = buf.decode('utf8')
assert re.match('HTTP/1.1 200 OK\r\nContent-Length: 0\r\n'
'Date: .+\r\nServer: .+\r\n\r\n', txt)
@pytest.mark.run_loop
def test_render_with_body():
writer = mock.Mock()
req = make_request('GET', '/', writer=writer)
resp = Response(body=b'data')
writer.drain.return_value = ()
buf = b''
def append(data):
nonlocal buf
buf += data
writer.write.side_effect = append
yield from resp.prepare(req)
yield from resp.write_eof()
txt = buf.decode('utf8')
assert re.match('HTTP/1.1 200 OK\r\nContent-Length: 4\r\n'
'Date: .+\r\nServer: .+\r\n\r\ndata', txt)
@pytest.mark.run_loop
def test_send_set_cookie_header():
resp = Response()
resp.cookies['name'] = 'value'
writer = mock.Mock()
req = make_request('GET', '/', writer=writer)
writer.drain.return_value = ()
buf = b''
def append(data):
nonlocal buf
buf += data
writer.write.side_effect = append
yield from resp.prepare(req)
yield from resp.write_eof()
txt = buf.decode('utf8')
assert re.match('HTTP/1.1 200 OK\r\nContent-Length: 0\r\n'
'Set-Cookie: name=value\r\n'
'Date: .+\r\nServer: .+\r\n\r\n', txt)
def test_set_text_with_content_type():
resp = Response()
resp.content_type = "text/html"
resp.text = "text"
assert "text" == resp.text
assert b"text" == resp.body
assert "text/html" == resp.content_type
def test_set_text_with_charset():
resp = Response()
resp.content_type = 'text/plain'
resp.charset = "KOI8-R"
resp.text = "текст"
assert "текст" == resp.text
assert "текст".encode('koi8-r') == resp.body
assert "koi8-r" == resp.charset
def test_started_when_not_started():
resp = StreamResponse()
assert not resp.prepared
@pytest.mark.run_loop
def test_started_when_started():
resp = StreamResponse()
yield from resp.prepare(make_request('GET', '/'))
assert resp.prepared
@pytest.mark.run_loop
def test_drain_before_start():
resp = StreamResponse()
with pytest.raises(RuntimeError):
yield from resp.drain()
def test_nonstr_text_in_ctor():
with pytest.raises(TypeError):
Response(text=b'data')
def test_text_in_ctor_with_content_type():
resp = Response(text='data', content_type='text/html')
assert 'data' == resp.text
assert 'text/html' == resp.content_type
def test_text_in_ctor_with_content_type_header():
resp = Response(text='текст',
headers={'Content-Type': 'text/html; charset=koi8-r'})
assert 'текст'.encode('koi8-r') == resp.body
assert 'text/html' == resp.content_type
assert 'koi8-r' == resp.charset
def test_text_with_empty_payload():
resp = Response(status=200)
assert resp.body is None
assert resp.text is None
class TestJSONResponse:
    """Tests for the ``json_response()`` helper."""
    def test_content_type_is_application_json_by_default(self):
        resp = json_response('')
        assert 'application/json' == resp.content_type
    def test_passing_text_only(self):
        # `text` is passed through verbatim -- the caller already did the
        # JSON encoding.
        resp = json_response(text=json.dumps('jaysawn'))
        assert resp.text == json.dumps('jaysawn')
    def test_data_and_text_raises_value_error(self):
        # `data` is mutually exclusive with `text` and `body`.
        with pytest.raises(ValueError) as excinfo:
            json_response(data='foo', text='bar')
        expected_message = (
            'only one of data, text, or body should be specified'
        )
        assert expected_message == excinfo.value.args[0]
    def test_data_and_body_raises_value_error(self):
        with pytest.raises(ValueError) as excinfo:
            json_response(data='foo', body=b'bar')
        expected_message = (
            'only one of data, text, or body should be specified'
        )
        assert expected_message == excinfo.value.args[0]
    def test_text_is_json_encoded(self):
        # A positional `data` argument is serialized with json.dumps.
        resp = json_response({'foo': 42})
        assert json.dumps({'foo': 42}) == resp.text
    def test_content_type_is_overrideable(self):
        resp = json_response({'foo': 42},
                             content_type='application/vnd.json+api')
        assert 'application/vnd.json+api' == resp.content_type
| mind1master/aiohttp | tests/test_web_response.py | Python | apache-2.0 | 26,418 |
__author__ = 'asifj'
import logging
from kafka import KafkaConsumer
import json
import traceback
from bson.json_util import dumps
from kafka import SimpleProducer, KafkaClient
from utils import Utils
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.INFO
)
inputs = []
consumer = KafkaConsumer("SAPEvent", bootstrap_servers=['172.22.147.242:9092', '172.22.147.232:9092', '172.22.147.243:9092'], auto_commit_enable=False, auto_offset_reset="smallest")
message_no = 1
inputs = consumer.fetch_messages()
'''for message in consumer:
topic = message.topic
partition = message.partition
offset = message.offset
key = message.key
message = message.value
print "================================================================================================================="
if message is not None:
try:
document = json.loads(message)
collection = document.keys()[0]
if collection == "customerMaster":
print "customerMaster"
elif collection == "srAttachements":
#print dumps(document, sort_keys=True)
inputs.append(document)
except Exception, err:
print "CustomException"
print "Kafka Message: "+str(message)
print(traceback.format_exc())
print "================================================================================================================="
print "\n"
message_no += 1
'''
# To send messages synchronously
kafka = KafkaClient('172.22.147.232:9092,172.22.147.242:9092,172.22.147.243:9092')
producer = SimpleProducer(kafka)
for i in inputs:
try:
#producer.send_messages(b'SAPEvent', json.dumps(input))
document = json.loads(str(i.value))
type = document.keys()[0]
if type == "srDetails":
print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
row = []
utils = Utils()
row = utils.validate_sr_details( document['srDetails'], row)
print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
print "\n\n"
except Exception:
print "Kafka: "+str(document)
print Exception.message
print(traceback.format_exc())
| asifhj/Python_SOAP_OSSJ_SAP_Fusion_Kafka_Spark_HBase | KafkaCP.py | Python | apache-2.0 | 2,510 |
#!/usr/bin/env finemonkeyrunner
# -*- coding:utf8 -*-
# Smoke-test script for the fineMonkeyRunner wrapper: connect to an emulator
# and inspect a view located by its Android resource id.
import sys
sys.path.append(r'D:\learning\python\auto\fineMonkeyRunner')
from com.fine.android.finemonkeyrunner import fineMonkeyRunner
# Add the package path to sys.path first, otherwise the import above cannot
# be resolved -- note.  (Translated from the original Chinese comment.)
#sys.path.append(r'C:\Users\wangxu\AppData\Local\Android\sdk\tools\testscript')
#sys.path.append(r'D:\learning\python\auto\fineMonkeyRunner')
# 'emulator-5554' is the default adb serial of the first local emulator.
finemonkeyrunner = fineMonkeyRunner('emulator-5554')
#finemonkeyrunner.assertfocusedwindowmame('com.mdsd.wiicare/com.mdsd.wiicare.function.LoginActivity_')
#finemonkeyrunner.assertcurrentactivity('com.mdsd.wiicare/com.mdsd.wiicare.function.LoginActivity_')
# Look up the account input field and print its widget class name.
view = finemonkeyrunner.getviewbyID('id/etAccount')
print finemonkeyrunner.getviewinfo_classname(view)
#print finemonkeyrunner.getelementinfo_locate('id/etAccount')
#print finemonkeyrunner.getviewinfo_visible(view)
#finemonkeyrunner.typebyid('id/etPassword','123')
#ss = finemonkeyrunner.getviewssametext('id/drawerLayout','经鼻气管插管')
#print finemonkeyrunner.viewlist
#finemonkeyrunner.getviewinfo(view)
#finemonkeyrunner.forcestopapp('com.mdsd.wiicare') | william5065/fineMonkeyRunner | test.py | Python | apache-2.0 | 1,099 |
#!/usr/bin/python
#class SendMail via http://www.oschina.net/code/snippet_221343_49994
#A few modifications: Add Cc support ; print -> return string.
# Sample:
# ./pymail.py -f sender@example.com -t recv@somedomain1.com -c "ccrecv1@otherdomain1.com,ccrecv2@otherdomain2.com" -s smtp.somemailserver.com -u sender@example.com -p SendersPassword -S "hello test from pymail" -m "sent via cli" -F attached.file
from email.MIMEText import MIMEText
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email import Utils, Encoders
import mimetypes, sys,smtplib,socket,getopt
class SendMail:
def __init__(self,smtp_server,from_addr,to_addr,cc_addr,user,passwd):
self.mailserver=smtp_server
self.from_addr=from_addr
self.to_addr=to_addr
self.cc_addr=cc_addr
self.username=user
self.password=passwd
def attachment(self,filename):
fd=open(filename,'rb')
filename=filename.split('/')
mimetype,mimeencoding=mimetypes.guess_type(filename[-1])
if (mimeencoding is None) or (mimetype is None):
mimetype='application/octet-stream'
maintype,subtype=mimetype.split('/')
if maintype=='text':
retval=MIMEText(fd.read(), _subtype=subtype, _charset='utf-8')
else:
retval=MIMEBase(maintype,subtype)
retval.set_payload(fd.read())
Encoders.encode_base64(retval)
retval.add_header('Content-Disposition','attachment',filename=filename[-1])
fd.close()
return retval
def msginfo(self,msg,subject,filename):
# message = """Hello, ALL
#This is test message.
#--Anonymous"""
message=msg
msg=MIMEMultipart()
msg['To'] = self.to_addr
msg['From'] = self.from_addr
msg['Cc'] = self.cc_addr
msg['Date'] = Utils.formatdate(localtime=1)
msg['Message-ID'] = Utils.make_msgid()
if subject:
msg['Subject'] = subject
if message:
#utf8-encoding
body=MIMEText(message,_subtype='plain', _charset='utf-8')
msg.attach(body)
#for filename in sys.argv[1:]:
if filename:
msg.attach(self.attachment(filename))
return msg.as_string()
def send(self,msg=None,subject=None,filename=None):
try:
s=smtplib.SMTP(self.mailserver)
try:
s.login(self.username,self.password)
except smtplib.SMTPException,e:
print "Authentication failed:",e
sys.exit(1)
if cc_addr:
to_addrs=self.to_addr+','+self.cc_addr
else:
to_addrs=self.to_addr
s.sendmail(self.from_addr, to_addrs.split(','), self.msginfo(msg,subject,filename))
except (socket.gaierror,socket.error,socket.herror,smtplib.SMTPException),e:
return "*** Your message may not have been sent!\n" + e
else:
return "OK"
#Main
if __name__=='__main__':
def usage():
print """Useage:%s [-h] -s <SMTP Server> -f <FROM_ADDRESS> -t <TO_ADDRESS> -u <USER_NAME> -p <PASSWORD> [-S <MAIL_SUBJECT> -m <MAIL_MESSAGE> -F <ATTACHMENT>]
Mandatory arguments to long options are mandatory for short options too.
-f, --from= Sets the name of the "from" person (i.e., the envelope sender of the mail).
-t, --to= Addressee's address. -t "test@test.com,test1@test.com".
-c, --cc= CC Addressee's address. -c "test2@test.com,test3@test.com".
-u, --user= Login SMTP server username.
-p, --pass= Login SMTP server password.
-S, --subject= Mail subject.
-m, --msg= Mail message.-m "msg, ......."
-F, --file= Attachment file name.
-h, --help Help documen.
""" %sys.argv[0]
sys.exit(3)
try:
options,args=getopt.getopt(sys.argv[1:],"hs:f:t:c:u:p:S:m:F:","--help --server= --from= --to= --cc= --user= --pass= --subject= --msg= --file=",)
except getopt.GetoptError:
usage()
sys.exit(3)
server=None
from_addr=None
to_addr=None
cc_addr=None
username=None
password=None
subject=None
filename=None
msg=None
for name,value in options:
if name in ("-h","--help"):
usage()
if name in ("-s","--server"):
server=value
if name in ("-f","--from"):
from_addr=value
if name in ("-t","--to"):
to_addr=value
if name in ("-c","--cc"):
cc_addr=value
if name in ("-u","--user"):
username=value
if name in ("-p","--pass"):
password=value
if name in ("-S","--subject"):
subject=value
if name in ("-m","--msg"):
msg=value
if name in ("-F","--file"):
filename=value
if server and from_addr and to_addr and username and password:
test=SendMail(server,from_addr,to_addr,cc_addr,username,password)
strResult=test.send(msg,subject,filename)
print strResult
else:
usage()
| Panblack/ezdpl | servers/common/init/files/usr/local/bin/pymail.py | Python | apache-2.0 | 5,118 |
# coding=utf-8
#
# Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""BIG-IP® utility module
REST URI
``http://localhost/mgmt/tm/util/dig``
GUI Path
N/A
REST Kind
``tm:util:dig:*``
"""
from f5.bigip.mixins import CommandExecutionMixin
from f5.bigip.resource import UnnamedResource
class Dig(UnnamedResource, CommandExecutionMixin):
    """BIG-IP® utility command
    .. note::
        This is an unnamed resource so it has no ~Partition~Name pattern
        at the end of its URI.
    """
    def __init__(self, util):
        """Register command metadata for the tm/util/dig endpoint.

        :param util: the parent tm.util organizing collection whose
            _meta_data this resource inherits.
        """
        super(Dig, self).__init__(util)
        # The 'run' command requires its CLI arguments via 'utilCmdArgs'.
        self._meta_data['required_command_parameters'].update(('utilCmdArgs',))
        # Responses must carry this JSON kind to be accepted as valid.
        self._meta_data['required_json_kind'] = 'tm:util:dig:runstate'
        # Enable the 'run' command for this resource.
        self._meta_data['allowed_commands'].append('run')
| F5Networks/f5-common-python | f5/bigip/tm/util/dig.py | Python | apache-2.0 | 1,322 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import scipy.stats as osp_stats
from jax import lax
from jax._src.lax.lax import _const as _lax_const
from jax._src.numpy import lax_numpy as jnp
from jax._src.numpy.util import _wraps
from jax.scipy.special import xlog1py
@_wraps(osp_stats.geom.logpmf, update_doc=False)
def logpmf(k, p, loc=0):
k, p, loc = jnp._promote_args_inexact("geom.logpmf", k, p, loc)
zero = _lax_const(k, 0)
one = _lax_const(k, 1)
x = lax.sub(k, loc)
log_probs = xlog1py(lax.sub(x, one), -p) + lax.log(p)
return jnp.where(lax.le(x, zero), -jnp.inf, log_probs)
@_wraps(osp_stats.geom.pmf, update_doc=False)
def pmf(k, p, loc=0):
  # Exponentiate the log-pmf; out-of-support points (k - loc <= 0) map
  # from -inf to probability 0.
  return jnp.exp(logpmf(k, p, loc))
| google/jax | jax/_src/scipy/stats/geom.py | Python | apache-2.0 | 1,244 |
# Copyright 2021, Google LLC. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for DistributedDiscreteGaussianQuery."""
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
from tensorflow_privacy.privacy.dp_query import test_utils
from distributed_dp import discrete_gaussian_utils
from distributed_dp import distributed_discrete_gaussian_query
ddg_sum_query = distributed_discrete_gaussian_query.DistributedDiscreteGaussianSumQuery
def silence_tf_error_messages(func):
  """Decorator that temporarily changes the TF logging levels."""
  def wrapper(*args, **kwargs):
    cur_verbosity = tf.compat.v1.logging.get_verbosity()
    tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.FATAL)
    try:
      # Bug fix: propagate the wrapped function's return value (the
      # original discarded it, so any decorated callable returned None).
      return func(*args, **kwargs)
    finally:
      # Bug fix: restore verbosity even when the wrapped call raises --
      # a failing test previously left global TF logging silenced at FATAL.
      tf.compat.v1.logging.set_verbosity(cur_verbosity)
  return wrapper
class DistributedDiscreteGaussianQueryTest(tf.test.TestCase,
                                           parameterized.TestCase):
  """Tests for DistributedDiscreteGaussianSumQuery (aliased ddg_sum_query)."""
  def test_sum_no_noise(self):
    # local_scale=0 disables noising, so the query must return the exact sum.
    with self.cached_session() as sess:
      record1 = tf.constant([2, 0], dtype=tf.int32)
      record2 = tf.constant([-1, 1], dtype=tf.int32)
      query = ddg_sum_query(l2_norm_bound=10, local_scale=0.0)
      query_result, _ = test_utils.run_query(query, [record1, record2])
      result = sess.run(query_result)
      expected = [1, 1]
      self.assertAllEqual(result, expected)
  @parameterized.product(sample_size=[1, 3])
  def test_sum_multiple_shapes(self, sample_size):
    # Records may hold several tensors of different shapes; summation is
    # applied per-tensor, elementwise.
    with self.cached_session() as sess:
      t1 = tf.constant([2, 0], dtype=tf.int32)
      t2 = tf.constant([-1, 1, 3], dtype=tf.int32)
      t3 = tf.constant([-2], dtype=tf.int32)
      record = [t1, t2, t3]
      sample = [record] * sample_size
      query = ddg_sum_query(l2_norm_bound=10, local_scale=0.0)
      query_result, _ = test_utils.run_query(query, sample)
      expected = [sample_size * t1, sample_size * t2, sample_size * t3]
      result, expected = sess.run([query_result, expected])
      # Use `assertAllClose` for nested structures equality (with tolerance=0).
      self.assertAllClose(result, expected, atol=0)
  @parameterized.product(sample_size=[1, 3])
  def test_sum_nested_record_structure(self, sample_size):
    # Arbitrarily nested list/dict/tuple record structures are supported.
    with self.cached_session() as sess:
      t1 = tf.constant([1, 0], dtype=tf.int32)
      t2 = tf.constant([1, 1, 1], dtype=tf.int32)
      t3 = tf.constant([1], dtype=tf.int32)
      t4 = tf.constant([[1, 1], [1, 1]], dtype=tf.int32)
      record = [t1, dict(a=t2, b=[t3, (t4, t1)])]
      sample = [record] * sample_size
      query = ddg_sum_query(l2_norm_bound=10, local_scale=0.0)
      query_result, _ = test_utils.run_query(query, sample)
      result = sess.run(query_result)
      s = sample_size
      expected = [t1 * s, dict(a=t2 * s, b=[t3 * s, (t4 * s, t1 * s)])]
      # Use `assertAllClose` for nested structures equality (with tolerance=0)
      self.assertAllClose(result, expected, atol=0)
  def test_sum_raise_on_float_inputs(self):
    # The discrete mechanism only accepts integer records.
    with self.cached_session() as sess:
      record1 = tf.constant([2, 0], dtype=tf.float32)
      record2 = tf.constant([-1, 1], dtype=tf.float32)
      query = ddg_sum_query(l2_norm_bound=10, local_scale=0.0)
      with self.assertRaises(TypeError):
        query_result, _ = test_utils.run_query(query, [record1, record2])
        sess.run(query_result)
  @parameterized.product(l2_norm_bound=[0, 3, 10, 14.1])
  @silence_tf_error_messages
  def test_sum_raise_on_l2_norm_excess(self, l2_norm_bound):
    # ||[10, 10]||_2 ~= 14.14 exceeds every bound above, so preprocessing
    # must reject the record.
    with self.cached_session() as sess:
      record = tf.constant([10, 10], dtype=tf.int32)
      query = ddg_sum_query(l2_norm_bound=l2_norm_bound, local_scale=0.0)
      with self.assertRaises(tf.errors.InvalidArgumentError):
        query_result, _ = test_utils.run_query(query, [record])
        sess.run(query_result)
  def test_sum_float_norm_not_rounded(self):
    """Test that the float L2 norm bound doesn't get rounded/casted to integers."""
    with self.cached_session() as sess:
      # A casted/rounded norm bound would be insufficient.
      l2_norm_bound = 14.2
      record = tf.constant([10, 10], dtype=tf.int32)
      query = ddg_sum_query(l2_norm_bound=l2_norm_bound, local_scale=0.0)
      query_result, _ = test_utils.run_query(query, [record])
      result = sess.run(query_result)
      expected = [10, 10]
      self.assertAllEqual(result, expected)
  @parameterized.named_parameters([('2_local_scale_1_record', 2, 1),
                                   ('10_local_scale_4_records', 10, 4),
                                   ('1000_local_scale_1_record', 1000, 1),
                                   ('1000_local_scale_25_records', 1000, 25)])
  def test_sum_local_noise_shares(self, local_scale, num_records):
    """Test the noise level of the sum of discrete Gaussians applied locally.
    The sum of discrete Gaussians is not a discrete Gaussian, but it will be
    extremely close for sigma >= 2. We will thus compare the aggregated noise
    to a central discrete Gaussian noise with appropriately scaled stddev with
    some reasonable tolerance.
    Args:
      local_scale: The stddev of the local discrete Gaussian noise.
      num_records: The number of records to be aggregated.
    """
    # Aggregated local noises.
    num_trials = 1000
    record = tf.zeros([num_trials], dtype=tf.int32)
    sample = [record] * num_records
    query = ddg_sum_query(l2_norm_bound=10.0, local_scale=local_scale)
    query_result, _ = test_utils.run_query(query, sample)
    # Central discrete Gaussian noise.
    central_scale = np.sqrt(num_records) * local_scale
    central_noise = discrete_gaussian_utils.sample_discrete_gaussian(
        scale=tf.cast(tf.round(central_scale), record.dtype),
        shape=tf.shape(record),
        dtype=record.dtype)
    agg_noise, central_noise = self.evaluate([query_result, central_noise])
    # Standard error of the sample mean over num_trials draws.
    mean_stddev = central_scale * np.sqrt(num_trials) / num_trials
    atol = 3.5 * mean_stddev
    # Use the atol for mean as a rough default atol for stddev/percentile.
    self.assertAllClose(np.mean(agg_noise), np.mean(central_noise), atol=atol)
    self.assertAllClose(np.std(agg_noise), np.std(central_noise), atol=atol)
    self.assertAllClose(
        np.percentile(agg_noise, [25, 50, 75]),
        np.percentile(central_noise, [25, 50, 75]),
        atol=atol)
if __name__ == '__main__':
  tf.test.main()  # Run all test cases in this module.
| google-research/federated | distributed_dp/distributed_discrete_gaussian_query_test.py | Python | apache-2.0 | 6,913 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mayaviviewerwidget.ui'
#
# Created: Mon Nov 11 18:02:00 2013
# by: pyside-uic 0.2.13 running on PySide 1.1.0
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(914, 548)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(Dialog.sizePolicy().hasHeightForWidth())
Dialog.setSizePolicy(sizePolicy)
self.horizontalLayout_2 = QtGui.QHBoxLayout(Dialog)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.widget = QtGui.QWidget(Dialog)
self.widget.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.widget.sizePolicy().hasHeightForWidth())
self.widget.setSizePolicy(sizePolicy)
self.widget.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.widget.setObjectName("widget")
self.gridLayout = QtGui.QGridLayout(self.widget)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
self.widget1 = QtGui.QWidget(self.widget)
self.widget1.setMaximumSize(QtCore.QSize(500, 16777215))
self.widget1.setObjectName("widget1")
self.verticalLayout_3 = QtGui.QVBoxLayout(self.widget1)
self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
self.verticalLayout.setObjectName("verticalLayout")
self.tableWidget = QtGui.QTableWidget(self.widget1)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tableWidget.sizePolicy().hasHeightForWidth())
self.tableWidget.setSizePolicy(sizePolicy)
self.tableWidget.setObjectName("tableWidget")
self.tableWidget.setColumnCount(2)
self.tableWidget.setRowCount(0)
item = QtGui.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(0, item)
item = QtGui.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(1, item)
self.tableWidget.horizontalHeader().setVisible(True)
self.tableWidget.horizontalHeader().setCascadingSectionResizes(False)
self.tableWidget.horizontalHeader().setDefaultSectionSize(100)
self.verticalLayout.addWidget(self.tableWidget)
self.sliceplanegroup = QtGui.QGroupBox(self.widget1)
self.sliceplanegroup.setEnabled(False)
self.sliceplanegroup.setObjectName("sliceplanegroup")
self.horizontalLayout = QtGui.QHBoxLayout(self.sliceplanegroup)
self.horizontalLayout.setObjectName("horizontalLayout")
self.slicePlaneRadioX = QtGui.QRadioButton(self.sliceplanegroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.slicePlaneRadioX.sizePolicy().hasHeightForWidth())
self.slicePlaneRadioX.setSizePolicy(sizePolicy)
self.slicePlaneRadioX.setChecked(False)
self.slicePlaneRadioX.setObjectName("slicePlaneRadioX")
self.horizontalLayout.addWidget(self.slicePlaneRadioX)
self.slicePlaneRadioY = QtGui.QRadioButton(self.sliceplanegroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.slicePlaneRadioY.sizePolicy().hasHeightForWidth())
self.slicePlaneRadioY.setSizePolicy(sizePolicy)
self.slicePlaneRadioY.setChecked(True)
self.slicePlaneRadioY.setObjectName("slicePlaneRadioY")
self.horizontalLayout.addWidget(self.slicePlaneRadioY)
self.slicePlaneRadioZ = QtGui.QRadioButton(self.sliceplanegroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.slicePlaneRadioZ.sizePolicy().hasHeightForWidth())
self.slicePlaneRadioZ.setSizePolicy(sizePolicy)
self.slicePlaneRadioZ.setObjectName("slicePlaneRadioZ")
self.horizontalLayout.addWidget(self.slicePlaneRadioZ)
self.verticalLayout.addWidget(self.sliceplanegroup)
self.screenshotgroup = QtGui.QGroupBox(self.widget1)
self.screenshotgroup.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.screenshotgroup.setObjectName("screenshotgroup")
self.formLayout = QtGui.QFormLayout(self.screenshotgroup)
self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout.setObjectName("formLayout")
self.pixelsXLabel = QtGui.QLabel(self.screenshotgroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pixelsXLabel.sizePolicy().hasHeightForWidth())
self.pixelsXLabel.setSizePolicy(sizePolicy)
self.pixelsXLabel.setObjectName("pixelsXLabel")
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.pixelsXLabel)
self.screenshotPixelXLineEdit = QtGui.QLineEdit(self.screenshotgroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.screenshotPixelXLineEdit.sizePolicy().hasHeightForWidth())
self.screenshotPixelXLineEdit.setSizePolicy(sizePolicy)
self.screenshotPixelXLineEdit.setObjectName("screenshotPixelXLineEdit")
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.screenshotPixelXLineEdit)
self.pixelsYLabel = QtGui.QLabel(self.screenshotgroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pixelsYLabel.sizePolicy().hasHeightForWidth())
self.pixelsYLabel.setSizePolicy(sizePolicy)
self.pixelsYLabel.setObjectName("pixelsYLabel")
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.pixelsYLabel)
self.screenshotPixelYLineEdit = QtGui.QLineEdit(self.screenshotgroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.screenshotPixelYLineEdit.sizePolicy().hasHeightForWidth())
self.screenshotPixelYLineEdit.setSizePolicy(sizePolicy)
self.screenshotPixelYLineEdit.setObjectName("screenshotPixelYLineEdit")
self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.screenshotPixelYLineEdit)
self.screenshotFilenameLabel = QtGui.QLabel(self.screenshotgroup)
self.screenshotFilenameLabel.setObjectName("screenshotFilenameLabel")
self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.screenshotFilenameLabel)
self.screenshotFilenameLineEdit = QtGui.QLineEdit(self.screenshotgroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.screenshotFilenameLineEdit.sizePolicy().hasHeightForWidth())
self.screenshotFilenameLineEdit.setSizePolicy(sizePolicy)
self.screenshotFilenameLineEdit.setObjectName("screenshotFilenameLineEdit")
self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.screenshotFilenameLineEdit)
self.screenshotSaveButton = QtGui.QPushButton(self.screenshotgroup)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.screenshotSaveButton.sizePolicy().hasHeightForWidth())
self.screenshotSaveButton.setSizePolicy(sizePolicy)
self.screenshotSaveButton.setObjectName("screenshotSaveButton")
self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.screenshotSaveButton)
self.verticalLayout.addWidget(self.screenshotgroup)
self.closeButton = QtGui.QPushButton(self.widget1)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.closeButton.sizePolicy().hasHeightForWidth())
self.closeButton.setSizePolicy(sizePolicy)
self.closeButton.setLayoutDirection(QtCore.Qt.LeftToRight)
self.closeButton.setObjectName("closeButton")
self.verticalLayout.addWidget(self.closeButton)
self.verticalLayout_3.addLayout(self.verticalLayout)
self.gridLayout.addWidget(self.widget1, 0, 0, 1, 1)
self.MayaviScene = MayaviSceneWidget(self.widget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(self.MayaviScene.sizePolicy().hasHeightForWidth())
self.MayaviScene.setSizePolicy(sizePolicy)
self.MayaviScene.setObjectName("MayaviScene")
self.gridLayout.addWidget(self.MayaviScene, 0, 1, 1, 1)
self.horizontalLayout_2.addWidget(self.widget)
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "Model Viewer", None, QtGui.QApplication.UnicodeUTF8))
self.tableWidget.setSortingEnabled(False)
self.tableWidget.horizontalHeaderItem(0).setText(QtGui.QApplication.translate("Dialog", "Visible", None, QtGui.QApplication.UnicodeUTF8))
self.tableWidget.horizontalHeaderItem(1).setText(QtGui.QApplication.translate("Dialog", "Type", None, QtGui.QApplication.UnicodeUTF8))
self.sliceplanegroup.setTitle(QtGui.QApplication.translate("Dialog", "Image Slice Plane", None, QtGui.QApplication.UnicodeUTF8))
self.slicePlaneRadioX.setText(QtGui.QApplication.translate("Dialog", "X", None, QtGui.QApplication.UnicodeUTF8))
self.slicePlaneRadioY.setText(QtGui.QApplication.translate("Dialog", "Y", None, QtGui.QApplication.UnicodeUTF8))
self.slicePlaneRadioZ.setText(QtGui.QApplication.translate("Dialog", "Z", None, QtGui.QApplication.UnicodeUTF8))
self.screenshotgroup.setTitle(QtGui.QApplication.translate("Dialog", "Screenshot", None, QtGui.QApplication.UnicodeUTF8))
self.pixelsXLabel.setText(QtGui.QApplication.translate("Dialog", "Pixels X:", None, QtGui.QApplication.UnicodeUTF8))
self.screenshotPixelXLineEdit.setText(QtGui.QApplication.translate("Dialog", "800", None, QtGui.QApplication.UnicodeUTF8))
self.pixelsYLabel.setText(QtGui.QApplication.translate("Dialog", "Pixels Y:", None, QtGui.QApplication.UnicodeUTF8))
self.screenshotPixelYLineEdit.setText(QtGui.QApplication.translate("Dialog", "600", None, QtGui.QApplication.UnicodeUTF8))
self.screenshotFilenameLabel.setText(QtGui.QApplication.translate("Dialog", "Filename:", None, QtGui.QApplication.UnicodeUTF8))
self.screenshotFilenameLineEdit.setText(QtGui.QApplication.translate("Dialog", "screenshot.png", None, QtGui.QApplication.UnicodeUTF8))
self.screenshotSaveButton.setText(QtGui.QApplication.translate("Dialog", "Save Screenshot", None, QtGui.QApplication.UnicodeUTF8))
self.closeButton.setText(QtGui.QApplication.translate("Dialog", "Close", None, QtGui.QApplication.UnicodeUTF8))
from mayaviscenewidget import MayaviSceneWidget
| MusculoskeletalAtlasProject/mapclient-tests | test_resources/updater_test/mayaviviewerstep-master/mapclientplugins/mayaviviewerstep/widgets/ui_mayaviviewerwidget.py | Python | apache-2.0 | 12,774 |
# Copyright (c) 2014. Mount Sinai School of Medicine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
HPV T-cell antigens and MHC ligands from Dana Farber CVC
http://cvc.dfci.harvard.edu/hpv/HTML/help.html
"""
from __future__ import print_function, division, absolute_import
from os.path import join
import pandas as pd
from .common import bad_amino_acids
from .features import make_unlabeled_ngram_dataset_from_args
from .reduced_alphabet import make_alphabet_transformer
from .static_data import DATA_DIR
def _load_dataframe(
        path,
        epitope_column_name,
        mhc_class=None,
        hla_type=None,
        exclude_hla_type=None,  # regex pattern i.e. '(HLA-A2)|(HLA-A\*02)'
        peptide_length=None,
        reduced_alphabet=None,
        nrows=None):
    """Load an epitope/ligand CSV and filter its rows.

    Parameters
    ----------
    path : str
        CSV file to read.
    epitope_column_name : str
        Column holding the peptide sequences.
    mhc_class : int, optional
        1 keeps rows whose 'HLA allele' starts with A/B/C; 2 keeps D*.
    hla_type : str, optional
        Regex; keep only rows whose 'HLA allele' matches.
    exclude_hla_type : str, optional
        Regex; drop rows whose 'HLA allele' matches (missing alleles are
        also dropped, via na=True below).
    peptide_length : int, optional
        Keep only sequences of exactly this length.
    reduced_alphabet : mapping, optional
        Passed to make_alphabet_transformer; applied to the sequences.
    nrows : int, optional
        Read at most this many rows from the CSV.

    Returns
    -------
    pandas.DataFrame
        The filtered (and possibly alphabet-reduced) table.
    """
    df = pd.read_csv(path, skipinitialspace=True, nrows=nrows)
    epitopes = df[epitope_column_name]
    hla = df['HLA allele']
    # Start from rows whose sequence contains no invalid amino acid codes.
    mask = ~(epitopes.str.contains(bad_amino_acids, na=False).astype('bool'))
    if mhc_class == 1:
        a = hla.str.startswith('A')
        b = hla.str.startswith('B')
        c = hla.str.startswith('C')
        mask &= (a | b | c)
    elif mhc_class == 2:
        mask &= hla.str.startswith('D')
    if hla_type:
        mask &= hla.str.contains(hla_type, na=False).astype('bool')
    if exclude_hla_type:
        # na=True so rows with a missing allele are excluded as well.
        mask &= ~(hla.str.contains(exclude_hla_type, na=True).astype('bool'))
    if peptide_length:
        mask &= epitopes.str.len() == peptide_length
    df = df[mask]
    if reduced_alphabet:
        # Bug fix: df is a boolean-mask view of the original frame here;
        # assigning a column into it is chained assignment, which raises
        # SettingWithCopyWarning and may not modify the returned frame.
        # Work on an explicit copy instead.
        df = df.copy()
        df[epitope_column_name] = \
            df[epitope_column_name].map(make_alphabet_transformer(reduced_alphabet))
    return df
def load_tcell(*args, **kwargs):
    """Load the CVC HPV T-cell epitope table as a DataFrame.

    All arguments are forwarded to _load_dataframe (mhc_class, hla_type,
    exclude_hla_type, peptide_length, reduced_alphabet, nrows).
    """
    return _load_dataframe(
        join(DATA_DIR, 'cvc_hpv_tcell.csv'),
        'Epitope sequence',
        *args,
        **kwargs)
def load_tcell_set(*args, **kwargs):
    """Return the filtered T-cell epitope sequences as a set of strings."""
    return set(load_tcell(*args, **kwargs)['Epitope sequence'])
def load_tcell_ngrams(*args, **kwargs):
    """Build an unlabeled n-gram dataset from the T-cell epitope set."""
    dataset = make_unlabeled_ngram_dataset_from_args(
        load_tcell_set,
        *args,
        **kwargs)
    return dataset
def load_mhc(*args, **kwargs):
    """Load the CVC HPV MHC ligand table as a DataFrame.

    All arguments are forwarded to _load_dataframe (mhc_class, hla_type,
    exclude_hla_type, peptide_length, reduced_alphabet, nrows).
    """
    return _load_dataframe(
        join(DATA_DIR, 'cvc_hpv_ligand.csv'),
        'Ligand sequence',
        *args,
        **kwargs)
def load_mhc_set(*args, **kwargs):
    """Return the filtered MHC ligand sequences as a set of strings."""
    return set(load_mhc(*args, **kwargs)['Ligand sequence'])
def load_mhc_ngrams(*args, **kwargs):
    """Build an unlabeled n-gram dataset from the MHC ligand set."""
    return make_unlabeled_ngram_dataset_from_args(load_mhc_set, *args, **kwargs)
from setuptools import setup, find_packages

# Packaging metadata for the PANIC deployer component.
setup(
    name="panic-deployer",
    version="0.1",
    packages=find_packages(),
    # Bug fix: 'requires' is PEP 314 metadata only and is ignored at install
    # time; 'install_requires' is what pip/setuptools actually act on.
    install_requires=[
        'kamaki',
        'paramiko',
    ],
    author='Giannis Giannakopoulos',
    author_email='ggian@cslab.ece.ntua.gr',
)
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing images.
"""
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from horizon_lib import exceptions
from horizon_lib import forms
from horizon_lib import tabs
from horizon_lib.utils import memoized
from openstack_horizon import api
from openstack_horizon.dashboards.project.images.images \
import forms as project_forms
from openstack_horizon.dashboards.project.images.images \
import tables as project_tables
from openstack_horizon.dashboards.project.images.images \
import tabs as project_tabs
class CreateView(forms.ModalFormView):
    """Modal form view for uploading/creating a Glance image."""

    template_name = 'project/images/images/create.html'
    form_class = project_forms.CreateImageForm
    success_url = reverse_lazy("horizon:project:images:index")
    context_object_name = 'image'
class UpdateView(forms.ModalFormView):
    """Modal form view for editing an existing Glance image's metadata."""

    form_class = project_forms.UpdateImageForm
    template_name = 'project/images/images/update.html'
    success_url = reverse_lazy("horizon:project:images:index")

    @memoized.memoized_method
    def get_object(self):
        """Fetch the image being edited; redirect to the index on failure."""
        try:
            return api.glance.image_get(self.request, self.kwargs['image_id'])
        except Exception:
            exceptions.handle(
                self.request,
                _('Unable to retrieve image.'),
                redirect=reverse('horizon:project:images:index'))

    def get_context_data(self, **kwargs):
        """Expose the image object to the template context."""
        ctx = super(UpdateView, self).get_context_data(**kwargs)
        ctx['image'] = self.get_object()
        return ctx

    def get_initial(self):
        """Seed the form with the image's current attributes/properties."""
        image = self.get_object()
        props = getattr(image, 'properties', {})
        initial = {
            'image_id': self.kwargs['image_id'],
            'name': getattr(image, 'name', None) or image.id,
            'disk_format': getattr(image, 'disk_format', None),
            'minimum_ram': getattr(image, 'min_ram', None),
            'minimum_disk': getattr(image, 'min_disk', None),
            'public': getattr(image, 'is_public', None),
            'protected': getattr(image, 'protected', None),
        }
        # Free-form Glance properties default to '' when absent.
        for form_key, prop_key in (('description', 'description'),
                                   ('kernel', 'kernel_id'),
                                   ('ramdisk', 'ramdisk_id'),
                                   ('architecture', 'architecture')):
            initial[form_key] = props.get(prop_key, '')
        return initial
class DetailView(tabs.TabView):
    """Tabbed detail view for a single Glance image."""

    tab_group_class = project_tabs.ImageDetailTabs
    template_name = 'project/images/images/detail.html'

    @staticmethod
    def get_redirect_url():
        """Fallback/return URL: the images index panel."""
        return reverse_lazy('horizon:project:images:index')

    @memoized.memoized_method
    def get_data(self):
        """Fetch the image; on failure redirect back to the index."""
        try:
            return api.glance.image_get(self.request, self.kwargs['image_id'])
        except Exception:
            exceptions.handle(self.request,
                              _('Unable to retrieve image details.'),
                              redirect=self.get_redirect_url())

    def get_context_data(self, **kwargs):
        """Add the image, return URL and row actions to the context."""
        ctx = super(DetailView, self).get_context_data(**kwargs)
        image = self.get_data()
        actions_table = project_tables.ImagesTable(self.request)
        ctx["image"] = image
        ctx["url"] = self.get_redirect_url()
        ctx["actions"] = actions_table.render_row_actions(image)
        return ctx

    def get_tabs(self, request, *args, **kwargs):
        """Instantiate the tab group with the fetched image."""
        return self.tab_group_class(request, image=self.get_data(), **kwargs)
| mrunge/openstack_horizon | openstack_horizon/dashboards/project/images/images/views.py | Python | apache-2.0 | 4,298 |
# Copyright (C) 2013 Cisco Systems Inc.
# All rights reserved
import ipaddress
import re
from .feature import Feature
from .vrf import VRF
from .cisco_secret import CiscoSecret
from .nxcli import *
import nxos_utils
class OSPFSession(Feature):
    '''
    Use this class to configure the OSPF feature.
    '''
    def __init__(self, Instance=None, vrf='default'):
        '''
        Arguments:
            Instance: OSPF instance tag (string). Required.
            vrf: VRF name (string) or ID (int); defaults to 'default'.
        Raises:
            ValueError: if Instance is not supplied.
        '''
        self._name = 'ospf'
        if Instance is None:
            # raise-as-statement ('raise ValueError, ...') is Python-2-only
            # syntax; the call form below works on both 2 and 3.
            raise ValueError('Instance Name is Null.')
        self.Instance = Instance
        # Share the instance tag with the nested helper class so
        # OSPFInterface objects know which 'router ospf' process to join.
        self.OSPFInterface.Instance = self.Instance
        self.set_vrf(vrf)

    def _run_cfg_at_router_ospf_level(self, cmds):
        '''
        Run the given list of commands at router ospf config level.

        Arguments:
            cmds: List of commands to be configured at router ospf config level.

        Returns: True on success
        '''
        string = 'router ospf %s' % self.Instance
        if self.vrf.get_name() != 'default':
            # Create the VRF on first use so the vrf sub-mode exists.
            if not self.vrf.exists():
                self.vrf.create()
            string += ' ; vrf %s' % self.vrf.get_name()
        string += ' ; %s' % cmds
        return NXCLI._run_cfg(string)

    def set_Instance(self, Instance):
        '''Set the OSPF instance tag used by subsequent configuration calls.'''
        self.Instance = Instance

    def set_vrf(self, vrf):
        '''
        Set the VRF (Virtual Routing and Forwarding) context for subsequent API
        calls on this OSPFSession object. Any configuration done on this
        OSPFSession object will be applied to this VRF.

        Arguments:
            vrf: VRF name (string) or the VRF ID (int).

        Returns: Nothing
        '''
        if type(vrf) in [int, str]:
            self.vrf = VRF(vrf)
        elif isinstance(vrf, VRF):
            self.vrf = vrf
        else:
            raise ValueError('Invalid argument type for vrf, acceptable types'
                             ' are VRF, int and str')

    def start(self):
        '''
        Start (or resume) the OSPF routing process for this instance.

        Arguments: None
        Returns: True on success
        '''
        if self.is_shutdown():
            return self._run_cfg_at_router_ospf_level('no shutdown')
        if not self.is_enabled():
            self.enable()
        # Entering 'router ospf <tag>' with no sub-command creates/starts it.
        return self._run_cfg_at_router_ospf_level('')

    def shutdown(self):
        '''
        Shutdown the OSPF routing process. All existing OSPF configurations will
        be preserved.

        Arguments: None
        Returns: True on success
        '''
        return self._run_cfg_at_router_ospf_level('shutdown')

    def is_shutdown(self):
        '''
        Check if the OSPF routing process is shutdown.

        Arguments: None
        Returns:
            True if the OSPF process is enabled and shutdown.
            False if the OSPF process is running or if OSPF is not enabled.
        '''
        if self.is_enabled():
            output = nxos_utils.cli_ex('show run ospf | include shutdown')
            rows = output.split('\n')
            for row in rows:
                if row.strip() == 'shutdown':
                    return True
        return False

    def cfg_router_id(self, router_id, **kwargs):
        '''
        Specify the IP address to use as router-id. To remove this
        configuration set the optional 'no' argument to True.

        Arguments:
            router_id: A string in dotted quad format ('A.B.C.D') representing
                the IP Address of the router.

        Optional Arguments:
            no: A boolean, set to True to remove the router-id.

        Returns: True on success
        '''
        cmd = NXCLI._add_no_if_present(NXCLI._read_arg(router_id, 'router_id',
            'router-id %', {str: 'socket.inet_aton(router_id)'}), kwargs)
        return self._run_cfg_at_router_ospf_level(cmd)

    def cfg_distance(self, dist, **kwargs):
        '''
        Configure administrative distance for this OSPFv2 instance.
        To set the distances back to the default set the optional 'no' argument
        to True.

        Arguments:
            dist: Distance for ospf routes, an integer ranging from 1 to
                255.

        Optional Arguments:
            no: Set to True to set distances back to the default values.

        Returns: True on success
        '''
        cmd = NXCLI._add_no_if_present('distance', kwargs)
        cmd += NXCLI._read_arg(dist, 'dist', ' %', {int:
            'dist >= 1 and dist <= 255'})
        return self._run_cfg_at_router_ospf_level(cmd)

    def log_adjacency_changes(self, **kwargs):
        '''
        Log a message for neighbor up/down event. To disable this behavior set
        the optional 'no' argument to True.

        Optional Arguments:
            no: A boolean, set to True to disable this feature.

        Returns: True on success
        '''
        cmd = NXCLI._add_no_if_present('log-adjacency-changes', kwargs)
        return self._run_cfg_at_router_ospf_level(cmd)

    def cfg_maximum_paths(self, max, **kwargs):
        '''
        Configures the maximum number of parallel routes that the OSPF
        can support. To restore the default number of parallel
        routes, set the optional 'no' argument to True.

        Arguments:
            max: Maximum number of parallel routes that an IP routing protocol
                installs in a routing table. The range is from 1 to 64

        Optional Arguments:
            no: Set to True to restore the default number of parallel routes

        Returns: True on success
        '''
        cmd = NXCLI._add_no_if_present(NXCLI._read_arg(max, 'max',
            'maximum-paths %', {int: 'max >= 1 and max <= 64'}), kwargs)
        return self._run_cfg_at_router_ospf_level(cmd)

    class OSPFInterface(object):
        '''Helper for per-interface OSPFv2 configuration.

        The class attribute `Instance` is assigned by OSPFSession.__init__
        so instances know which OSPF process to attach interfaces to.
        '''
        def __init__(self, if_name, area, **kwargs):
            '''
            Arguments:
                if_name: Interface name, e.g. 'eth1/1' (string).
                area: OSPF area this interface belongs to (string).
            '''
            self.if_name = NXCLI._read_arg(if_name, 'if_name', ' %', {str: None})
            self.area = NXCLI._read_arg(area, 'area', ' %', {str: None})

        def _run_cfg_at_interface_level(self, cmds):
            '''
            Run the given list of commands at interface config level.

            Arguments:
                cmds: List of commands to be configured at interface level.

            Returns: True on success
            '''
            string = 'interface %s' % self.if_name
            string += ' ; %s' % cmds
            return NXCLI._run_cfg(string)

        def add(self):
            '''
            Add this interface to OSPFv2 instance and area.

            Arguments: None
            Returns: True on success
            '''
            cmd = 'ip router ospf %s area %s' % (self.Instance, self.area)
            return self._run_cfg_at_interface_level(cmd)

        def cfg_ospf_cost(self, ospf_cost=60, **kwargs):
            '''
            Configure OSPFv2 cost for this interface.

            Arguments:
                ospf_cost: ip ospf cost (int). Acceptable Range 1 to 65535.

            Optional Arguments:
                no: A boolean, set to True to remove the ip ospf config.

            Returns: True on success
            '''
            cmd = NXCLI._add_no_if_present(NXCLI._read_arg(ospf_cost, 'ospf_cost',
                'ip ospf cost %',
                {int: 'ospf_cost >= 0 and ospf_cost <= 65535'}), kwargs)
            return self._run_cfg_at_interface_level(cmd)

        def cfg_hello_interval(self, hello_interval=60, **kwargs):
            '''
            Configure OSPFv2 hello interval for this interface.

            Arguments:
                hello_interval: ip ospf hello interval in seconds (int).
                    Acceptable Range 1 to 65535.

            Optional Arguments:
                no: A boolean, set to True to remove the hello interval config.

            Returns: True on success
            '''
            cmd = NXCLI._add_no_if_present(NXCLI._read_arg(hello_interval,
                'hello_interval', 'ip ospf hello-interval %',
                {int: 'hello_interval >= 1 and hello_interval <= 65535'}), kwargs)
            return self._run_cfg_at_interface_level(cmd)

        def cfg_dead_interval(self, dead_interval=60, **kwargs):
            '''
            Configure OSPFv2 dead interval for this interface.

            Arguments:
                dead_interval: ip ospf dead interval in seconds (int).
                    Acceptable Range 1 to 65535.

            Optional Arguments:
                no: A boolean, set to True to remove the dead interval config.

            Returns: True on success
            '''
            cmd = NXCLI._add_no_if_present(NXCLI._read_arg(dead_interval,
                'dead_interval', 'ip ospf dead-interval %',
                {int: 'dead_interval >= 1 and dead_interval <= 65535'}), kwargs)
            return self._run_cfg_at_interface_level(cmd)

        def cfg_ospf_priority(self, ospf_priority=60, **kwargs):
            '''
            Configure OSPFv2 priority for this interface. Priority is used to
            determine DR election in area.

            Arguments:
                ospf_priority: ip ospf priority (int). Acceptable Range 0 to 255.

            Optional Arguments:
                no: A boolean, set to True to remove the ip ospf priority config.

            Returns: True on success
            '''
            cmd = NXCLI._add_no_if_present(NXCLI._read_arg(ospf_priority,
                'ospf_priority', 'ip ospf priority %',
                {int: 'ospf_priority >= 0 and ospf_priority <= 255'}), kwargs)
            return self._run_cfg_at_interface_level(cmd)

        def cfg_mtu_ignore(self, **kwargs):
            '''
            Configure OSPFv2 to ignore any IP MTU mismatch with a neighbor.

            Arguments: None

            Optional Arguments:
                no: A boolean, set to True to remove the ip ospf mtu-ignore
                    config.

            Returns: True on success
            '''
            # Bug fix: the original built (and then discarded) a
            # 'passive-interface' command and unconditionally applied
            # 'ip ospf mtu-ignore', so no=True was silently ignored.
            cmd = NXCLI._add_no_if_present('ip ospf mtu-ignore', kwargs)
            return self._run_cfg_at_interface_level(cmd)

        def cfg_passive_interface(self, **kwargs):
            '''
            Supress OSPF routing updates on this interface.

            Arguments: None

            Optional Arguments:
                no: A boolean, set to True to remove the ip ospf
                    passive-interface config.

            Returns: True on success
            '''
            cmd = NXCLI._add_no_if_present('ip ospf passive-interface', kwargs)
            return self._run_cfg_at_interface_level(cmd)

        def shutdown(self, **kwargs):
            '''
            Shutdown the OSPF on this interface. All existing OSPF
            configurations will be preserved.

            Arguments: None

            Optional Arguments:
                no: A boolean, set to True to remove the ip ospf shutdown config.

            Returns: True on success
            '''
            cmd = NXCLI._add_no_if_present('ip ospf shutdown', kwargs)
            return self._run_cfg_at_interface_level(cmd)
| fmichalo/n9k-programmability | Python nxapi scripts/cisco/ospf.py | Python | apache-2.0 | 11,049 |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class authenticationradiuspolicy_systemglobal_binding(base_resource) :
	""" Binding class showing the systemglobal that can be bound to authenticationradiuspolicy.

	Each instance represents one system-global binding of a RADIUS
	authentication policy. `boundto`, `priority` and `activepolicy` are
	read-only fields populated from NITRO API responses.
	"""
	def __init__(self) :
		# Backing fields for the read-only binding attributes.
		self._boundto = ""
		self._priority = 0
		self._activepolicy = 0
		self._name = ""
		# Resource count returned by count queries (note the triple underscore
		# matches the '___count' key in the NITRO response payload).
		self.___count = 0
	@property
	def name(self) :
		"""Name of the RADIUS authentication policy.<br/>Minimum length = 1.
		"""
		try :
			return self._name
		except Exception as e:
			raise e
	@name.setter
	def name(self, name) :
		"""Name of the RADIUS authentication policy.<br/>Minimum length = 1
		"""
		try :
			self._name = name
		except Exception as e:
			raise e
	@property
	def boundto(self) :
		"""The entity name to which policy is bound.
		"""
		try :
			return self._boundto
		except Exception as e:
			raise e
	@boundto.setter
	def boundto(self, boundto) :
		"""The entity name to which policy is bound.
		"""
		try :
			self._boundto = boundto
		except Exception as e:
			raise e
	@property
	def priority(self) :
		"""Priority of the binding (read-only; set from the API response).
		"""
		try :
			return self._priority
		except Exception as e:
			raise e
	@property
	def activepolicy(self) :
		"""Whether the bound policy is currently active (read-only).
		"""
		try :
			return self._activepolicy
		except Exception as e:
			raise e
	def _get_nitro_response(self, service, response) :
		""" converts nitro response into object and returns the object array in case of get request.
		"""
		try :
			result = service.payload_formatter.string_to_resource(authenticationradiuspolicy_systemglobal_binding_response, response, self.__class__.__name__)
			if(result.errorcode != 0) :
				# Error code 444 indicates the session has expired; drop it.
				if (result.errorcode == 444) :
					service.clear_session(self)
				if result.severity :
					if (result.severity == "ERROR") :
						raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
				else :
					raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
			return result.authenticationradiuspolicy_systemglobal_binding
		except Exception as e :
			raise e
	def _get_object_name(self) :
		""" Returns the value of object identifier argument
		"""
		try :
			if (self.name) :
				return str(self.name)
			return None
		except Exception as e :
			raise e
	@classmethod
	def get(cls, service, name) :
		""" Use this API to fetch authenticationradiuspolicy_systemglobal_binding resources.
		"""
		try :
			obj = authenticationradiuspolicy_systemglobal_binding()
			obj.name = name
			response = obj.get_resources(service)
			return response
		except Exception as e:
			raise e
	@classmethod
	def get_filtered(cls, service, name, filter_) :
		""" Use this API to fetch filtered set of authenticationradiuspolicy_systemglobal_binding resources.
		Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = authenticationradiuspolicy_systemglobal_binding()
			obj.name = name
			option_ = options()
			option_.filter = filter_
			response = obj.getfiltered(service, option_)
			return response
		except Exception as e:
			raise e
	@classmethod
	def count(cls, service, name) :
		""" Use this API to count authenticationradiuspolicy_systemglobal_binding resources configured on NetScaler.
		"""
		try :
			obj = authenticationradiuspolicy_systemglobal_binding()
			obj.name = name
			option_ = options()
			option_.count = True
			response = obj.get_resources(service, option_)
			if response :
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e
	@classmethod
	def count_filtered(cls, service, name, filter_) :
		""" Use this API to count the filtered set of authenticationradiuspolicy_systemglobal_binding resources.
		Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = authenticationradiuspolicy_systemglobal_binding()
			obj.name = name
			option_ = options()
			option_.count = True
			option_.filter = filter_
			response = obj.getfiltered(service, option_)
			if response :
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e
class authenticationradiuspolicy_systemglobal_binding_response(base_response) :
	""" Response envelope for authenticationradiuspolicy_systemglobal_binding
	NITRO API calls: standard error fields plus the result array.
	"""
	def __init__(self, length=1) :
		self.authenticationradiuspolicy_systemglobal_binding = []
		self.errorcode = 0
		self.message = ""
		self.severity = ""
		self.sessionid = ""
		# Pre-allocate one binding object per expected result row so the
		# payload formatter can populate them in place.
		self.authenticationradiuspolicy_systemglobal_binding = [authenticationradiuspolicy_systemglobal_binding() for _ in range(length)]
#!/usr/bin/env python
"""Download Wzx gene sequences from Genbank
Example:
$ python download_wzx_genes.py .
"""
import argparse
import logging
import re
from utils import DownloadUtils, SubtypeParser, GeneFilter
# Module metadata.
__author__ = "Matthew Whiteside"
__copyright__ = "Copyright Government of Canada 2012-2015. Funded by the Government of Canada Genomics Research and Development Initiative"
__license__ = "APL"
__version__ = "2.0"
__maintainer__ = "Matthew Whiteside"
__email__ = "mdwhitesi@gmail.com"

# Log everything (DEBUG and up) to a dedicated file; filemode='w'
# overwrites the log from any previous run.
logging.basicConfig(
    filename='download_wzx_genes.log',
    level=logging.DEBUG,
    format='%(asctime)s %(message)s',
    datefmt='%m/%d/%Y %I:%M:%S %p',
    filemode='w')
if __name__ == "__main__":
    # Run the pipeline: download Wzx gene sequences and parse them.

    # Parse command-line args.
    # Bug fix: the name 'parser' was previously reused for both the argparse
    # parser and the SubtypeParser, shadowing the former; use distinct names.
    arg_parser = argparse.ArgumentParser(
        description='Download and store NCBI genes sequences')
    arg_parser.add_argument('output_directory', action="store")
    arg_parser.add_argument('acc_file', action="store")
    args = arg_parser.parse_args()

    # Initialize gene filter: keep only sequences longer than 1000 bp.
    seq_tests = [lambda x: len(x) > 1000]
    gfilter = GeneFilter(sequence_tests=seq_tests)

    # Initialize subtype parser. The regex captures O-serogroup labels
    # (e.g. "O157", "OX3a") from the listed annotation fields.
    opattern = r"(?:\b|serogroup\:)([o0]x?\d+(?:[a-z]{1,2})?)(?:\b|\:)"
    subtype_parser = SubtypeParser(
        [re.compile(opattern, flags=re.IGNORECASE)],
        source_fields=['organism', 'strain', 'serotype', 'serovar', 'note'],
        annotation_fields=['source', 'serotype', 'organism', 'serovar'])

    # Initialize download object.
    dutil = DownloadUtils(args.output_directory, 'Escherichia coli',
                          ['wzx', 'O-antigen flippase'],
                          subtype_parser, gfilter)

    # Download, then parse the genbank files for known intimin types.
    dutil.download_by_accession(args.acc_file)
    dutil.parse()
| superphy/insilico-subtyping | phylotyper/data/download/download_wzx_genes.py | Python | apache-2.0 | 1,774 |
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
##
## Test surrogatepass encoding error handler
##
import unittest
import codecs
from iptest import run_test
class SurrogatePassTest(unittest.TestCase):
    """Tests for the 'surrogatepass' encoding error handler.

    Each case checks both directions: text.encode(codec) must produce the
    given bytes, and decoding those bytes must restore the text, with lone
    and mis-ordered surrogates passed through instead of raising.
    """

    def _check_roundtrip(self, codec, pairs):
        # pairs: iterable of (text, expected encoded bytes).
        for text, data in pairs:
            self.assertEqual(text.encode(codec, errors="surrogatepass"), data)
            self.assertEqual(data.decode(codec, errors="surrogatepass"), text)

    def test_ascii(self):
        self._check_roundtrip("ascii", [("abc", b"abc")])

    def test_utf_7(self):
        self._check_roundtrip("utf_7", [("abc\ud810xyz", b"abc+2BA-xyz")])

    def test_utf_8(self):
        self._check_roundtrip("utf_8", [("abc\ud810xyz", b"abc\xed\xa0\x90xyz")])

    def test_utf_16_le(self):
        self._check_roundtrip("utf_16_le", [
            ("\ud810", b"\x10\xd8"),           # lone high surrogate
            ("\udc0a", b"\n\xdc"),             # lone low surrogate
            ("\ude51\uda2f", b"Q\xde/\xda"),   # invalid pair (low, high)
        ])

    def test_utf_16_be(self):
        self._check_roundtrip("utf_16_be", [
            ("\ud810", b"\xd8\x10"),           # lone high surrogate
            ("\udc0a", b"\xdc\n"),             # lone low surrogate
            ("\ude51\uda2f", b"\xdeQ\xda/"),   # invalid pair (low, high)
        ])

    def test_utf_32_le(self):
        self._check_roundtrip("utf_32_le", [
            ("\ud810", b"\x10\xd8\x00\x00"),   # lone high surrogate
            ("\udc0a", b"\n\xdc\x00\x00"),     # lone low surrogate
            ("\ude51\uda2f", b"Q\xde\x00\x00/\xda\x00\x00"),  # (low, high)
        ])

    def test_utf_32_be(self):
        self._check_roundtrip("utf_32_be", [
            ("\ud810", b"\x00\x00\xd8\x10"),   # lone high surrogate
            ("\udc0a", b"\x00\x00\xdc\n"),     # lone low surrogate
            ("\ude51\uda2f", b"\x00\x00\xdeQ\x00\x00\xda/"),  # (low, high)
        ])
# run_test comes from the project's shared test harness (its import is not
# visible in this chunk); it discovers and runs the test classes above.
run_test(__name__)
| IronLanguages/ironpython3 | Tests/test_surrogatepass.py | Python | apache-2.0 | 4,002 |
# Copyright 2020 Samsung Electronics Co., Ltd
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from urllib import parse as urllib
from oslo_serialization import jsonutils as json
from tempest.lib.common import rest_client
class ProtocolsClient(rest_client.RestClient):
    """REST client for Keystone's OS-FEDERATION protocol resources."""

    def add_protocol_to_identity_provider(self, idp_id, protocol_id,
                                          **kwargs):
        """Add protocol to identity provider.

        For a full list of available parameters, please refer to the official
        API reference:
        https://docs.openstack.org/api-ref/identity/v3-ext/index.html#add-protocol-to-identity-provider
        """
        url = ('OS-FEDERATION/identity_providers/%s/protocols/%s'
               % (idp_id, protocol_id))
        request_body = json.dumps({'protocol': kwargs})
        resp, body = self.put(url, request_body)
        # 201 Created is the only success status for protocol creation.
        self.expected_success(201, resp.status)
        return rest_client.ResponseBody(resp, json.loads(body))

    def list_protocols_of_identity_provider(self, idp_id, **kwargs):
        """List protocols of identity provider.

        For a full list of available parameters, please refer to the official
        API reference:
        https://docs.openstack.org/api-ref/identity/v3-ext/index.html#list-protocols-of-identity-provider
        """
        url = 'OS-FEDERATION/identity_providers/%s/protocols' % idp_id
        if kwargs:
            # Optional filters are forwarded as a query string.
            url = '%s?%s' % (url, urllib.urlencode(kwargs))
        resp, body = self.get(url)
        self.expected_success(200, resp.status)
        return rest_client.ResponseBody(resp, json.loads(body))

    def get_protocol_for_identity_provider(self, idp_id, protocol_id):
        """Get protocol for identity provider.

        For a full list of available parameters, please refer to the official
        API reference:
        https://docs.openstack.org/api-ref/identity/v3-ext/index.html#get-protocol-for-identity-provider
        """
        url = ('OS-FEDERATION/identity_providers/%s/protocols/%s'
               % (idp_id, protocol_id))
        resp, body = self.get(url)
        self.expected_success(200, resp.status)
        return rest_client.ResponseBody(resp, json.loads(body))

    def update_mapping_for_identity_provider(self, idp_id, protocol_id,
                                             **kwargs):
        """Update attribute mapping for identity provider.

        For a full list of available parameters, please refer to the official
        API reference:
        https://docs.openstack.org/api-ref/identity/v3-ext/index.html#update-attribute-mapping-for-identity-provider
        """
        url = ('OS-FEDERATION/identity_providers/%s/protocols/%s'
               % (idp_id, protocol_id))
        request_body = json.dumps({'protocol': kwargs})
        resp, body = self.patch(url, request_body)
        self.expected_success(200, resp.status)
        return rest_client.ResponseBody(resp, json.loads(body))

    def delete_protocol_from_identity_provider(self, idp_id, protocol_id):
        """Delete a protocol from identity provider.

        For a full list of available parameters, please refer to the official
        API reference:
        https://docs.openstack.org/api-ref/identity/v3-ext/index.html#delete-a-protocol-from-identity-provider
        """
        url = ('OS-FEDERATION/identity_providers/%s/protocols/%s'
               % (idp_id, protocol_id))
        resp, body = self.delete(url)
        # DELETE returns 204 No Content; the body is passed through unparsed.
        self.expected_success(204, resp.status)
        return rest_client.ResponseBody(resp, body)
| openstack/tempest | tempest/lib/services/identity/v3/protocols_client.py | Python | apache-2.0 | 4,091 |
# #!/usr/bin/env python
#
# Copyright 2016 Sungard Availability Services
# Copyright 2016 Red Hat
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslotest import base
from oslotest import mockpatch
from ceilometer.agent import manager
from ceilometer.agent import plugin_base
from ceilometer.network import floatingip
class _BaseTestFloatingIPPollster(base.BaseTestCase):
    # Shared fixture for the floating-IP pollster tests: pipeline setup and
    # keystone lookup are stubbed out so no real services are contacted.
    @mock.patch('ceilometer.pipeline.setup_pipeline', mock.MagicMock())
    def setUp(self):
        super(_BaseTestFloatingIPPollster, self).setUp()
        # Safe to instantiate here because setup_pipeline is mocked above.
        self.manager = manager.AgentManager()
        plugin_base._get_keystone = mock.Mock()
class TestFloatingIPPollster(_BaseTestFloatingIPPollster):
    def setUp(self):
        super(TestFloatingIPPollster, self).setUp()
        self.pollster = floatingip.FloatingIPPollster()
        fake_fip = self.fake_get_fip_service()
        # All neutron floating-IP listings in these tests return the canned
        # data below instead of querying a real service.
        self.useFixture(mockpatch.Patch('ceilometer.neutron_client.Client.'
                                        'fip_get_all',
                                        return_value=fake_fip))
    @staticmethod
    def fake_get_fip_service():
        # Three floating IPs: one ACTIVE, one DOWN, one in 'error' state.
        # The assertions below show that only the ACTIVE entry yields a sample.
        return [{'router_id': 'e24f8a37-1bb7-49e4-833c-049bb21986d2',
                 'status': 'ACTIVE',
                 'tenant_id': '54a00c50ee4c4396b2f8dc220a2bed57',
                 'floating_network_id':
                 'f41f399e-d63e-47c6-9a19-21c4e4fbbba0',
                 'fixed_ip_address': '10.0.0.6',
                 'floating_ip_address': '65.79.162.11',
                 'port_id': '93a0d2c7-a397-444c-9d75-d2ac89b6f209',
                 'id': '18ca27bf-72bc-40c8-9c13-414d564ea367'},
                {'router_id': 'astf8a37-1bb7-49e4-833c-049bb21986d2',
                 'status': 'DOWN',
                 'tenant_id': '34a00c50ee4c4396b2f8dc220a2bed57',
                 'floating_network_id':
                 'gh1f399e-d63e-47c6-9a19-21c4e4fbbba0',
                 'fixed_ip_address': '10.0.0.7',
                 'floating_ip_address': '65.79.162.12',
                 'port_id': '453a0d2c7-a397-444c-9d75-d2ac89b6f209',
                 'id': 'jkca27bf-72bc-40c8-9c13-414d564ea367'},
                {'router_id': 'e2478937-1bb7-49e4-833c-049bb21986d2',
                 'status': 'error',
                 'tenant_id': '54a0gggg50ee4c4396b2f8dc220a2bed57',
                 'floating_network_id':
                 'po1f399e-d63e-47c6-9a19-21c4e4fbbba0',
                 'fixed_ip_address': '10.0.0.8',
                 'floating_ip_address': '65.79.162.13',
                 'port_id': '67a0d2c7-a397-444c-9d75-d2ac89b6f209',
                 'id': '90ca27bf-72bc-40c8-9c13-414d564ea367'}]
    def test_default_discovery(self):
        self.assertEqual('endpoint:network', self.pollster.default_discovery)
    def test_fip_get_samples(self):
        # Only the ACTIVE floating IP becomes a sample; its metadata carries
        # the floating and fixed addresses.
        samples = list(self.pollster.get_samples(
            self.manager, {},
            resources=['http://localhost:9696/']))
        self.assertEqual(1, len(samples))
        self.assertEqual('18ca27bf-72bc-40c8-9c13-414d564ea367',
                         samples[0].resource_id)
        self.assertEqual("65.79.162.11", samples[0].resource_metadata[
            "floating_ip_address"])
        self.assertEqual("10.0.0.6", samples[0].resource_metadata[
            "fixed_ip_address"])
    def test_fip_volume(self):
        # Each floating IP counts as volume 1.
        samples = list(self.pollster.get_samples(
            self.manager, {},
            resources=['http://localhost:9696/']))
        self.assertEqual(1, samples[0].volume)
    def test_get_fip_meter_names(self):
        samples = list(self.pollster.get_samples(
            self.manager, {},
            resources=['http://localhost:9696/']))
        self.assertEqual(set(['ip.floating']),
                         set([s.name for s in samples]))
| idegtiarov/ceilometer | ceilometer/tests/unit/network/test_floating_ip.py | Python | apache-2.0 | 4,313 |
#!/usr/bin/env python
#
# Copyright 2001 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Script that generates the build.ninja for ninja itself.
Projects that use ninja themselves should either write a similar script
or use a meta-build system that supports Ninja output."""
from __future__ import print_function
from optparse import OptionParser
import os
import pipes
import string
import subprocess
import sys
sourcedir = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(sourcedir, 'misc'))
import ninja_syntax
class Platform(object):
    """Represents a host/target platform and its specific build attributes."""

    # Ordered (sys.platform prefix, canonical name) pairs used to normalize
    # the autodetected platform string.  The entries are mutually exclusive,
    # so order only documents intent.
    _CANONICAL_PREFIXES = (
        ('linux', 'linux'),
        ('freebsd', 'freebsd'),
        ('gnukfreebsd', 'freebsd'),
        ('openbsd', 'openbsd'),
        ('solaris', 'solaris'),
        ('mingw', 'mingw'),
        ('win', 'msvc'),
        ('bitrig', 'bitrig'),
        ('netbsd', 'netbsd'),
        ('aix', 'aix'),
        ('dragonfly', 'dragonfly'),
    )

    def __init__(self, platform):
        """platform: explicit platform name, or None to autodetect from sys.platform."""
        if platform is not None:
            self._platform = platform
            return
        detected = sys.platform
        if detected == 'sunos5':
            detected = 'solaris'
        else:
            for prefix, canonical in self._CANONICAL_PREFIXES:
                if detected.startswith(prefix):
                    detected = canonical
                    break
        # Unknown values (e.g. 'darwin') are kept verbatim.
        self._platform = detected

    @staticmethod
    def known_platforms():
        return ['linux', 'darwin', 'freebsd', 'openbsd', 'solaris', 'sunos5',
                'mingw', 'msvc', 'gnukfreebsd', 'bitrig', 'netbsd', 'aix',
                'dragonfly']

    def platform(self):
        return self._platform

    def is_linux(self):
        return self._platform == 'linux'

    def is_mingw(self):
        return self._platform == 'mingw'

    def is_msvc(self):
        return self._platform == 'msvc'

    def msvc_needs_fs(self):
        """True when this cl.exe lists the /FS flag in its usage output."""
        proc = subprocess.Popen(['cl', '/nologo', '/?'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        usage, _ = proc.communicate()
        return b'/FS' in usage

    def is_windows(self):
        return self.is_mingw() or self.is_msvc()

    def is_solaris(self):
        return self._platform == 'solaris'

    def is_aix(self):
        return self._platform == 'aix'

    def uses_usr_local(self):
        return self._platform in ('freebsd', 'openbsd', 'bitrig', 'dragonfly', 'netbsd')

    def supports_ppoll(self):
        return self._platform in ('freebsd', 'linux', 'openbsd', 'bitrig',
                                  'dragonfly')

    def supports_ninja_browse(self):
        # The browse tool is not built for Windows, Solaris or AIX.
        return self._platform not in ('mingw', 'msvc', 'solaris', 'aix')

    def can_rebuild_in_place(self):
        # Windows (and AIX) cannot overwrite a running executable; see the
        # bootstrap rename dance at the bottom of this script.
        return not self.is_windows() and not self.is_aix()
class Bootstrap:
    """API shim for ninja_syntax.Writer that instead runs the commands.
    Used to bootstrap Ninja from scratch. In --bootstrap mode this
    class is used to execute all the commands to build an executable.
    It also proxies all calls to an underlying ninja_syntax.Writer, to
    behave like non-bootstrap mode.
    """
    def __init__(self, writer, verbose=False):
        self.writer = writer
        self.verbose = verbose
        # Map of variable name => expanded variable value.
        self.vars = {}
        # Map of rule name => dict of rule attributes.
        self.rules = {
            'phony': {}
        }
    def comment(self, text):
        return self.writer.comment(text)
    def newline(self):
        return self.writer.newline()
    def variable(self, key, val):
        # In bootstrap mode, we have no ninja process to catch /showIncludes
        # output.
        self.vars[key] = self._expand(val).replace('/showIncludes', '')
        return self.writer.variable(key, val)
    def rule(self, name, **kwargs):
        self.rules[name] = kwargs
        return self.writer.rule(name, **kwargs)
    def build(self, outputs, rule, inputs=None, **kwargs):
        # Look up the rule recorded by rule() and run its command eagerly;
        # the edge is still forwarded to the real writer afterwards so the
        # generated build.ninja matches a non-bootstrap run.
        ruleattr = self.rules[rule]
        cmd = ruleattr.get('command')
        if cmd is None: # A phony rule, for example.
            return
        # Implement just enough of Ninja variable expansion etc. to
        # make the bootstrap build work.
        local_vars = {
            'in': self._expand_paths(inputs),
            'out': self._expand_paths(outputs)
        }
        for key, val in kwargs.get('variables', []):
            local_vars[key] = ' '.join(ninja_syntax.as_list(val))
        self._run_command(self._expand(cmd, local_vars))
        return self.writer.build(outputs, rule, inputs, **kwargs)
    def default(self, paths):
        return self.writer.default(paths)
    def _expand_paths(self, paths):
        """Expand $vars in an array of paths, e.g. from a 'build' block."""
        paths = ninja_syntax.as_list(paths)
        return ' '.join(map(self._shell_escape, (map(self._expand, paths))))
    # NOTE(review): the mutable default below is safe because local_vars is
    # only read, never mutated.
    def _expand(self, str, local_vars={}):
        """Expand $vars in a string."""
        return ninja_syntax.expand(str, self.vars, local_vars)
    def _shell_escape(self, path):
        """Quote paths containing spaces."""
        return '"%s"' % path if ' ' in path else path
    def _run_command(self, cmdline):
        """Run a subcommand, quietly. Prints the full command on error."""
        try:
            if self.verbose:
                print(cmdline)
            subprocess.check_call(cmdline, shell=True)
        except subprocess.CalledProcessError:
            print('when running: ', cmdline)
            raise
# Command-line options understood by configure.py.
parser = OptionParser()
profilers = ['gmon', 'pprof']
parser.add_option('--bootstrap', action='store_true',
                  help='bootstrap a ninja binary from nothing')
parser.add_option('--verbose', action='store_true',
                  help='enable verbose build')
parser.add_option('--platform',
                  help='target platform (' +
                       '/'.join(Platform.known_platforms()) + ')',
                  choices=Platform.known_platforms())
parser.add_option('--host',
                  help='host platform (' +
                       '/'.join(Platform.known_platforms()) + ')',
                  choices=Platform.known_platforms())
parser.add_option('--debug', action='store_true',
                  help='enable debugging extras',)
parser.add_option('--profile', metavar='TYPE',
                  choices=profilers,
                  help='enable profiling (' + '/'.join(profilers) + ')',)
parser.add_option('--with-gtest', metavar='PATH', help='ignored')
parser.add_option('--with-python', metavar='EXE',
                  help='use EXE as the Python interpreter',
                  default=os.path.basename(sys.executable))
parser.add_option('--force-pselect', action='store_true',
                  help='ppoll() is used by default where available, '
                       'but some platforms may need to use pselect instead',)
(options, args) = parser.parse_args()
if args:
    print('ERROR: extra unparsed command-line arguments:', args)
    sys.exit(1)
platform = Platform(options.platform)
# The host platform defaults to the target platform unless overridden.
if options.host:
    host = Platform(options.host)
else:
    host = platform
BUILD_FILENAME = 'build.ninja'
ninja_writer = ninja_syntax.Writer(open(BUILD_FILENAME, 'w'))
n = ninja_writer
if options.bootstrap:
    # Make the build directory.
    try:
        os.mkdir('build')
    except OSError:
        pass
    # Wrap ninja_writer with the Bootstrapper, which also executes the
    # commands.
    print('bootstrapping ninja...')
    n = Bootstrap(n, verbose=options.verbose)
n.comment('This file is used to build ninja itself.')
n.comment('It is generated by ' + os.path.basename(__file__) + '.')
n.newline()
n.variable('ninja_required_version', '1.3')
n.newline()
n.comment('The arguments passed to configure.py, for rerunning it.')
configure_args = sys.argv[1:]
if '--bootstrap' in configure_args:
    configure_args.remove('--bootstrap')
n.variable('configure_args', ' '.join(configure_args))
# Capture the build-related environment variables so the generated file can
# rerun configure.py under the same environment.
env_keys = set(['CXX', 'AR', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS'])
configure_env = dict((k, os.environ[k]) for k in os.environ if k in env_keys)
if configure_env:
    config_str = ' '.join([k + '=' + pipes.quote(configure_env[k])
                           for k in configure_env])
    n.variable('configure_env', config_str + '$ ')
n.newline()
CXX = configure_env.get('CXX', 'g++')
objext = '.o'
if platform.is_msvc():
    CXX = 'cl'
    objext = '.obj'
def src(filename):
    """Return the path of *filename* inside the source directory."""
    return os.path.join('$root', 'src', filename)
def built(filename):
    """Return the path of *filename* inside the build directory."""
    return os.path.join('$builddir', filename)
def doc(filename):
    """Return the path of *filename* inside the documentation directory."""
    return os.path.join('$root', 'doc', filename)
def cc(name, **kwargs):
    """Emit a build edge compiling src/<name>.c to an object file via the cxx rule."""
    return n.build(built(name + objext), 'cxx', src(name + '.c'), **kwargs)
def cxx(name, **kwargs):
    """Emit a build edge compiling src/<name>.cc to an object file via the cxx rule."""
    return n.build(built(name + objext), 'cxx', src(name + '.cc'), **kwargs)
def binary(name):
    """Return the executable name for *name*; on Windows also add a phony alias."""
    if platform.is_windows():
        exe = name + '.exe'
        n.build(name, 'phony', exe)
        return exe
    return name
# $root points back at the source tree; simplified to '.' for in-tree builds.
root = sourcedir
if root == os.getcwd():
    # In the common case where we're building directly in the source
    # tree, simplify all the paths to just be cwd-relative.
    root = '.'
n.variable('root', root)
n.variable('builddir', 'build')
n.variable('cxx', CXX)
if platform.is_msvc():
    n.variable('ar', 'link')
else:
    n.variable('ar', configure_env.get('AR', 'ar'))
if platform.is_msvc():
    cflags = ['/showIncludes',
              '/nologo', # Don't print startup banner.
              '/Zi', # Create pdb with debug info.
              '/W4', # Highest warning level.
              '/WX', # Warnings as errors.
              '/wd4530', '/wd4100', '/wd4706', '/wd4244',
              '/wd4512', '/wd4800', '/wd4702', '/wd4819',
              # Disable warnings about constant conditional expressions.
              '/wd4127',
              # Disable warnings about passing "this" during initialization.
              '/wd4355',
              # Disable warnings about ignored typedef in DbgHelp.h
              '/wd4091',
              '/GR-', # Disable RTTI.
              # Disable size_t -> int truncation warning.
              # We never have strings or arrays larger than 2**31.
              '/wd4267',
              '/DNOMINMAX', '/D_CRT_SECURE_NO_WARNINGS',
              '/D_HAS_EXCEPTIONS=0',
              '/DNINJA_PYTHON="%s"' % options.with_python]
    if platform.msvc_needs_fs():
        cflags.append('/FS')
    ldflags = ['/DEBUG', '/libpath:$builddir']
    if not options.debug:
        cflags += ['/Ox', '/DNDEBUG', '/GL']
        ldflags += ['/LTCG', '/OPT:REF', '/OPT:ICF']
else:
    cflags = ['-g', '-Wall', '-Wextra',
              '-Wno-deprecated',
              '-Wno-missing-field-initializers',
              '-Wno-unused-parameter',
              '-fno-rtti',
              '-fno-exceptions',
              '-fvisibility=hidden', '-pipe',
              '-DNINJA_PYTHON="%s"' % options.with_python]
    if options.debug:
        cflags += ['-D_GLIBCXX_DEBUG', '-D_GLIBCXX_DEBUG_PEDANTIC']
        cflags.remove('-fno-rtti') # Needed for above pedanticness.
    else:
        cflags += ['-O2', '-DNDEBUG']
    # Probe whether the compiler accepts -fdiagnostics-color by test-compiling.
    try:
        proc = subprocess.Popen(
            [CXX, '-fdiagnostics-color', '-c', '-x', 'c++', '/dev/null',
             '-o', '/dev/null'],
            stdout=open(os.devnull, 'wb'), stderr=subprocess.STDOUT)
        if proc.wait() == 0:
            cflags += ['-fdiagnostics-color']
    except:
        pass
    if platform.is_mingw():
        cflags += ['-D_WIN32_WINNT=0x0501']
    ldflags = ['-L$builddir']
if platform.uses_usr_local():
    cflags.append('-I/usr/local/include')
    ldflags.append('-L/usr/local/lib')
if platform.is_aix():
    # printf formats for int64_t, uint64_t; large file support
    cflags.append('-D__STDC_FORMAT_MACROS')
    cflags.append('-D_LARGE_FILES')
libs = []
if platform.is_mingw():
    # NOTE(review): the trailing semicolon below is redundant but harmless.
    cflags.remove('-fvisibility=hidden');
    ldflags.append('-static')
elif platform.is_solaris():
    cflags.remove('-fvisibility=hidden')
elif platform.is_aix():
    cflags.remove('-fvisibility=hidden')
elif platform.is_msvc():
    pass
else:
    if options.profile == 'gmon':
        cflags.append('-pg')
        ldflags.append('-pg')
    elif options.profile == 'pprof':
        cflags.append('-fno-omit-frame-pointer')
        libs.extend(['-Wl,--no-as-needed', '-lprofiler'])
if platform.supports_ppoll() and not options.force_pselect:
    cflags.append('-DUSE_PPOLL')
if platform.supports_ninja_browse():
    cflags.append('-DNINJA_HAVE_BROWSE')
# Search for generated headers relative to build dir.
cflags.append('-I.')
def shell_escape(str):
    """Escape str such that it's interpreted as a single argument by
    the shell.

    Not a general-purpose quoter; it covers just enough to make
    NINJA_PYTHON work.
    """
    if platform.is_windows():
        # Windows command lines are emitted untouched.
        return str
    if '"' not in str:
        return str
    # Single-quote the value, escaping embedded single quotes.
    return "'%s'" % str.replace("'", "\\'")
# NOTE(review): CFLAGS/CXXFLAGS are deliberately appended to the link flags
# as well (some flags, e.g. -pg, matter at link time) — confirm before
# changing this.
if 'CFLAGS' in configure_env:
    cflags.append(configure_env['CFLAGS'])
    ldflags.append(configure_env['CFLAGS'])
if 'CXXFLAGS' in configure_env:
    cflags.append(configure_env['CXXFLAGS'])
    ldflags.append(configure_env['CXXFLAGS'])
n.variable('cflags', ' '.join(shell_escape(flag) for flag in cflags))
if 'LDFLAGS' in configure_env:
    ldflags.append(configure_env['LDFLAGS'])
n.variable('ldflags', ' '.join(shell_escape(flag) for flag in ldflags))
n.newline()
if platform.is_msvc():
    n.rule('cxx',
        command='$cxx $cflags -c $in /Fo$out',
        description='CXX $out',
        deps='msvc' # /showIncludes is included in $cflags.
        )
else:
    n.rule('cxx',
        command='$cxx -MMD -MT $out -MF $out.d $cflags -c $in -o $out',
        depfile='$out.d',
        deps='gcc',
        description='CXX $out')
n.newline()
# Archiving uses the *host* platform: the archiver runs on the build machine.
if host.is_msvc():
    n.rule('ar',
           command='lib /nologo /ltcg /out:$out $in',
           description='LIB $out')
elif host.is_mingw():
    n.rule('ar',
           command='cmd /c $ar cqs $out.tmp $in && move /Y $out.tmp $out',
           description='AR $out')
else:
    n.rule('ar',
           command='rm -f $out && $ar crs $out $in',
           description='AR $out')
n.newline()
if platform.is_msvc():
    n.rule('link',
           command='$cxx $in $libs /nologo /link $ldflags /out:$out',
           description='LINK $out')
else:
    n.rule('link',
           command='$cxx $ldflags -o $out $in $libs',
           description='LINK $out')
n.newline()
objs = []
if platform.supports_ninja_browse():
    n.comment('browse_py.h is used to inline browse.py.')
    n.rule('inline',
           command='"%s"' % src('inline.sh') + ' $varname < $in > $out',
           description='INLINE $out')
    n.build(built('browse_py.h'), 'inline', src('browse.py'),
            implicit=src('inline.sh'),
            variables=[('varname', 'kBrowsePy')])
    n.newline()
    objs += cxx('browse', order_only=built('browse_py.h'))
    n.newline()
def has_re2c():
    """Return True when a usable re2c (>= 0.11.3) is available on PATH."""
    try:
        query = subprocess.Popen(['re2c', '-V'], stdout=subprocess.PIPE)
        version_text = query.communicate()[0]
    except OSError:
        # re2c is not installed or not executable.
        return False
    # 're2c -V' prints the version as a base-10 integer (1103 ~ 0.11.3).
    return int(version_text, 10) >= 1103
if has_re2c():
    n.rule('re2c',
           command='re2c -b -i --no-generation-date -o $out $in',
           description='RE2C $out')
    # Generate the .cc files in the source directory so we can check them in.
    n.build(src('depfile_parser.cc'), 're2c', src('depfile_parser.in.cc'))
    n.build(src('lexer.cc'), 're2c', src('lexer.in.cc'))
else:
    print("warning: A compatible version of re2c (>= 0.11.3) was not found; "
          "changes to src/*.in.cc will not affect your build.")
n.newline()
n.comment('Core source files all build into ninja library.')
for name in ['build',
             'build_log',
             'clean',
             'clparser',
             'debug_flags',
             'depfile_parser',
             'deps_log',
             'disk_interface',
             'edit_distance',
             'eval_env',
             'graph',
             'graphviz',
             'lexer',
             'line_printer',
             'manifest_parser',
             'metrics',
             'state',
             'string_piece_util',
             'util',
             'version']:
    objs += cxx(name)
# Platform-specific subprocess/helper sources.
if platform.is_windows():
    for name in ['subprocess-win32',
                 'includes_normalize-win32',
                 'msvc_helper-win32',
                 'msvc_helper_main-win32']:
        objs += cxx(name)
    if platform.is_msvc():
        objs += cxx('minidump-win32')
    objs += cc('getopt')
else:
    objs += cxx('subprocess-posix')
if platform.is_aix():
    # AIX's libc lacks GNU getopt, so bundle our own.
    objs += cc('getopt')
if platform.is_msvc():
    ninja_lib = n.build(built('ninja.lib'), 'ar', objs)
else:
    ninja_lib = n.build(built('libninja.a'), 'ar', objs)
n.newline()
if platform.is_msvc():
    libs.append('ninja.lib')
else:
    libs.append('-lninja')
if platform.is_aix():
    libs.append('-lperfstat')
all_targets = []
n.comment('Main executable is library plus main() function.')
objs = cxx('ninja')
ninja = n.build(binary('ninja'), 'link', objs, implicit=ninja_lib,
                variables=[('libs', libs)])
n.newline()
all_targets += ninja
if options.bootstrap:
    # We've built the ninja binary. Don't run any more commands
    # through the bootstrap executor, but continue writing the
    # build.ninja file.
    n = ninja_writer
n.comment('Tests all build into ninja_test executable.')
objs = []
for name in ['build_log_test',
             'build_test',
             'clean_test',
             'clparser_test',
             'depfile_parser_test',
             'deps_log_test',
             'disk_interface_test',
             'edit_distance_test',
             'graph_test',
             'lexer_test',
             'manifest_parser_test',
             'ninja_test',
             'state_test',
             'string_piece_util_test',
             'subprocess_test',
             'test',
             'util_test']:
    objs += cxx(name)
if platform.is_windows():
    for name in ['includes_normalize_test', 'msvc_helper_test']:
        objs += cxx(name)
ninja_test = n.build(binary('ninja_test'), 'link', objs, implicit=ninja_lib,
                     variables=[('libs', libs)])
n.newline()
all_targets += ninja_test
n.comment('Ancillary executables.')
for name in ['build_log_perftest',
             'canon_perftest',
             'depfile_parser_perftest',
             'hash_collision_bench',
             'manifest_parser_perftest',
             'clparser_perftest']:
    objs = cxx(name)
    all_targets += n.build(binary(name), 'link', objs,
                           implicit=ninja_lib, variables=[('libs', libs)])
n.newline()
n.comment('Generate a graph using the "graph" tool.')
n.rule('gendot',
       command='./ninja -t graph all > $out')
n.rule('gengraph',
       command='dot -Tpng $in > $out')
dot = n.build(built('graph.dot'), 'gendot', ['ninja', 'build.ninja'])
n.build('graph.png', 'gengraph', dot)
n.newline()
n.comment('Generate the manual using asciidoc.')
n.rule('asciidoc',
       command='asciidoc -b docbook -d book -o $out $in',
       description='ASCIIDOC $out')
n.rule('xsltproc',
       command='xsltproc --nonet doc/docbook.xsl $in > $out',
       description='XSLTPROC $out')
docbookxml = n.build(built('manual.xml'), 'asciidoc', doc('manual.asciidoc'))
manual = n.build(doc('manual.html'), 'xsltproc', docbookxml,
                 implicit=[doc('style.css'), doc('docbook.xsl')])
n.build('manual', 'phony',
        order_only=manual)
n.newline()
n.rule('dblatex',
       command='dblatex -q -o $out -p doc/dblatex.xsl $in',
       description='DBLATEX $out')
n.build(doc('manual.pdf'), 'dblatex', docbookxml,
        implicit=[doc('dblatex.xsl')])
n.comment('Generate Doxygen.')
n.rule('doxygen',
       command='doxygen $in',
       description='DOXYGEN $in')
n.variable('doxygen_mainpage_generator',
           src('gen_doxygen_mainpage.sh'))
n.rule('doxygen_mainpage',
       command='$doxygen_mainpage_generator $in > $out',
       description='DOXYGEN_MAINPAGE $out')
mainpage = n.build(built('doxygen_mainpage'), 'doxygen_mainpage',
                   ['README', 'COPYING'],
                   implicit=['$doxygen_mainpage_generator'])
n.build('doxygen', 'doxygen', doc('doxygen.config'),
        implicit=mainpage)
n.newline()
if not host.is_mingw():
    n.comment('Regenerate build files if build script changes.')
    n.rule('configure',
           command='${configure_env}%s $root/configure.py $configure_args' %
               options.with_python,
           generator=True)
    n.build('build.ninja', 'configure',
            implicit=['$root/configure.py',
                      os.path.normpath('$root/misc/ninja_syntax.py')])
    n.newline()
n.default(ninja)
n.newline()
if host.is_linux():
    n.comment('Packaging')
    n.rule('rpmbuild',
           command="misc/packaging/rpmbuild.sh",
           description='Building rpms..')
    n.build('rpm', 'rpmbuild')
    n.newline()
n.build('all', 'phony', all_targets)
n.close()
print('wrote %s.' % BUILD_FILENAME)
if options.bootstrap:
    print('bootstrap complete. rebuilding...')
    rebuild_args = []
    if platform.can_rebuild_in_place():
        rebuild_args.append('./ninja')
    else:
        # On platforms that cannot overwrite a running binary, move the
        # freshly-built ninja aside and rebuild using the copy.
        if platform.is_windows():
            bootstrap_exe = 'ninja.bootstrap.exe'
            final_exe = 'ninja.exe'
        else:
            bootstrap_exe = './ninja.bootstrap'
            final_exe = './ninja'
        if os.path.exists(bootstrap_exe):
            os.unlink(bootstrap_exe)
        os.rename(final_exe, bootstrap_exe)
        rebuild_args.append(bootstrap_exe)
    if options.verbose:
        rebuild_args.append('-v')
    subprocess.check_call(rebuild_args)
| fuchsia-mirror/third_party-ninja | configure.py | Python | apache-2.0 | 22,852 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
try:
from oslo_utils import encodeutils
except ImportError:
from oslo.utils import encodeutils
import six
from .._i18n import _
from . import exceptions
from .. import uuidutils
def find_resource(manager, name_or_id, **find_args):
    """Look for resource in a given manager.

    Tries, in order: integer id, UUID, plain string id (when the manager
    allows alphanumeric ids), ``human_id`` lookup, and finally a lookup by
    the resource's name attribute.  Raises ``exceptions.CommandError`` when
    nothing matches, or when the name matches more than one resource.

    Used as a helper for the _find_* methods.
    Example:
    .. code-block:: python
        def _find_hypervisor(cs, hypervisor):
            #Get a hypervisor by name or ID.
            return cliutils.find_resource(cs.hypervisors, hypervisor)
    """
    # first try to get entity as integer id
    try:
        return manager.get(int(name_or_id))
    except (TypeError, ValueError, exceptions.NotFound):
        pass
    # now try to get entity as uuid
    try:
        # safe_encode/safe_decode normalize bytes vs. text for the uuid check.
        if six.PY2:
            tmp_id = encodeutils.safe_encode(name_or_id)
        else:
            tmp_id = encodeutils.safe_decode(name_or_id)
        if uuidutils.is_uuid_like(tmp_id):
            return manager.get(tmp_id)
    except (TypeError, ValueError, exceptions.NotFound):
        pass
    # for str id which is not uuid
    if getattr(manager, 'is_alphanum_id_allowed', False):
        try:
            return manager.get(name_or_id)
        except exceptions.NotFound:
            pass
    # The outer try catches NoUniqueMatch from either find() call below.
    try:
        try:
            return manager.find(human_id=name_or_id, **find_args)
        except exceptions.NotFound:
            pass
        # finally try to find entity by name
        try:
            # The attribute used as "name" may be customized per resource class.
            resource = getattr(manager, 'resource_class', None)
            name_attr = resource.NAME_ATTR if resource else 'name'
            kwargs = {name_attr: name_or_id}
            kwargs.update(find_args)
            return manager.find(**kwargs)
        except exceptions.NotFound:
            msg = _("No %(name)s with a name or "
                    "ID of '%(name_or_id)s' exists.") % \
                {
                    "name": manager.resource_class.__name__.lower(),
                    "name_or_id": name_or_id
                }
            raise exceptions.CommandError(msg)
    except exceptions.NoUniqueMatch:
        msg = _("Multiple %(name)s matches found for "
                "'%(name_or_id)s', use an ID to be more specific.") % \
            {
                "name": manager.resource_class.__name__.lower(),
                "name_or_id": name_or_id
            }
        raise exceptions.CommandError(msg)
| nttcom/eclcli | eclcli/bare/bareclient/ecl/common/apiclient/utils.py | Python | apache-2.0 | 2,975 |
import pygame
from pygame.colordict import THECOLORS
import data
class Platform(pygame.sprite.Sprite):
    """A solid green rectangular platform sprite of the given size."""

    def __init__(self, width, height):
        super(Platform, self).__init__()
        surface = pygame.Surface([width, height])
        surface.fill(THECOLORS["green"])
        self.image = surface
        # rect starts at the origin; callers position the sprite afterwards.
        self.rect = self.image.get_rect()
class Trampoline(pygame.sprite.Sprite):
    """Sprite whose image is the bundled trampoline artwork."""

    def __init__(self):
        super(Trampoline, self).__init__()
        # data.load_image resolves the path inside the game's asset bundle.
        self.image = data.load_image("trampoline.png")
        self.rect = self.image.get_rect()
| Sveder/pyweek24 | gamelib/platforms.py | Python | apache-2.0 | 537 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_serialization import jsonutils
from magnum.common import urlfetch
from magnum.conductor.monitors import MonitorBase
class MesosMonitor(MonitorBase):
    """Reports memory/CPU utilization for a Mesos bay.

    Metrics are aggregated from the slave list in the leading master's
    /state endpoint.
    """

    def __init__(self, context, bay):
        super(MesosMonitor, self).__init__(context, bay)
        # Aggregated totals populated by pull_data().
        self.data = {}

    @property
    def metrics_spec(self):
        """Map metric name -> its unit and the method that computes it."""
        return {
            'memory_util': {
                'unit': '%',
                'func': 'compute_memory_util',
            },
            'cpu_util': {
                'unit': '%',
                'func': 'compute_cpu_util',
            },
        }

    def _build_url(self, url, protocol='http', port='80', path='/'):
        """Assemble '<protocol>://<url>:<port><path>' from its parts."""
        return protocol + '://' + url + ':' + port + path

    def _is_leader(self, state):
        """True when the master that returned *state* is the elected leader."""
        return state['leader'] == state['pid']

    def pull_data(self):
        """Query each master until the leader is found, then aggregate its
        slaves' total/used mem and cpus resources into self.data."""
        self.data['mem_total'] = 0
        self.data['mem_used'] = 0
        self.data['cpu_total'] = 0
        self.data['cpu_used'] = 0
        for master_addr in self.bay.master_addresses:
            mesos_master_url = self._build_url(master_addr, port='5050',
                                               path='/state')
            master = jsonutils.loads(urlfetch.get(mesos_master_url))
            if self._is_leader(master):
                for slave in master['slaves']:
                    self.data['mem_total'] += slave['resources']['mem']
                    self.data['mem_used'] += slave['used_resources']['mem']
                    self.data['cpu_total'] += slave['resources']['cpus']
                    self.data['cpu_used'] += slave['used_resources']['cpus']
                # Only the leader has authoritative state; stop here.
                break

    def _percentage(self, used_key, total_key):
        """Return used/total as a percentage.

        Returns 0 when the data has not been pulled yet, when nothing is
        used, or when the total is zero (avoids ZeroDivisionError).
        """
        total = self.data.get(total_key, 0)
        used = self.data.get(used_key, 0)
        if not total or not used:
            return 0
        return used * 100 / total

    def compute_memory_util(self):
        """Percentage of the bay's aggregate memory currently in use."""
        return self._percentage('mem_used', 'mem_total')

    def compute_cpu_util(self):
        # Bug fix: the original divided by cpu_total without checking it for
        # zero, unlike the memory path; both now share the same guard.
        """Percentage of the bay's aggregate CPUs currently in use."""
        return self._percentage('cpu_used', 'cpu_total')
| jay-lau/magnum | magnum/conductor/mesos_monitor.py | Python | apache-2.0 | 2,587 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from testtools import skipIf
from heat.common import exception
from heat.common import template_format
from heat.engine import clients
from heat.engine import scheduler
from heat.engine.resources.neutron import loadbalancer
from heat.openstack.common.importutils import try_import
from heat.tests import fakes
from heat.tests import utils
from heat.tests.common import HeatTestCase
from heat.tests.v1_1 import fakes as nova_fakes
neutronclient = try_import('neutronclient.v2_0.client')
health_monitor_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to test load balancer resources",
"Parameters" : {},
"Resources" : {
"monitor": {
"Type": "OS::Neutron::HealthMonitor",
"Properties": {
"type": "HTTP",
"delay": 3,
"max_retries": 5,
"timeout": 10
}
}
}
}
'''
pool_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to test load balancer resources",
"Parameters" : {},
"Resources" : {
"pool": {
"Type": "OS::Neutron::Pool",
"Properties": {
"protocol": "HTTP",
"subnet_id": "sub123",
"lb_method": "ROUND_ROBIN",
"vip": {
"protocol_port": 80
}
}
}
}
}
'''
member_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to test load balancer member",
"Resources" : {
"member": {
"Type": "OS::Neutron::PoolMember",
"Properties": {
"protocol_port": 8080,
"pool_id": "pool123",
"address": "1.2.3.4"
}
}
}
}
'''
lb_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to test load balancer resources",
"Parameters" : {},
"Resources" : {
"lb": {
"Type": "OS::Neutron::LoadBalancer",
"Properties": {
"protocol_port": 8080,
"pool_id": "pool123",
"members": ["1234"]
}
}
}
}
'''
pool_with_session_persistence_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to test load balancer resources wit",
"Parameters" : {},
"Resources" : {
"pool": {
"Type": "OS::Neutron::Pool",
"Properties": {
"protocol": "HTTP",
"subnet_id": "sub123",
"lb_method": "ROUND_ROBIN",
"vip": {
"protocol_port": 80,
"session_persistence": {
"type": "APP_COOKIE",
"cookie_name": "cookie"
}
}
}
}
}
}
'''
@skipIf(neutronclient is None, 'neutronclient unavailable')
class HealthMonitorTest(HeatTestCase):
    """Tests the OS::Neutron::HealthMonitor resource.

    Uses mox record/replay: each test first records the neutronclient
    calls it expects (in order), replays them, drives the resource
    through its lifecycle, and verifies all expectations were met.
    """
    def setUp(self):
        """Stub the neutron health-monitor API and the keystone client."""
        super(HealthMonitorTest, self).setUp()
        self.m.StubOutWithMock(neutronclient.Client, 'create_health_monitor')
        self.m.StubOutWithMock(neutronclient.Client, 'delete_health_monitor')
        self.m.StubOutWithMock(neutronclient.Client, 'show_health_monitor')
        self.m.StubOutWithMock(neutronclient.Client, 'update_health_monitor')
        self.m.StubOutWithMock(clients.OpenStackClients, 'keystone')
        utils.setup_dummy_db()
    def create_health_monitor(self):
        """Record a successful create call and return the parsed resource."""
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_health_monitor({
            'health_monitor': {
                'delay': 3, 'max_retries': 5, 'type': u'HTTP',
                'timeout': 10, 'admin_state_up': True}}
        ).AndReturn({'health_monitor': {'id': '5678'}})
        snippet = template_format.parse(health_monitor_template)
        stack = utils.parse_stack(snippet)
        return loadbalancer.HealthMonitor(
            'monitor', snippet['Resources']['monitor'], stack)
    def test_create(self):
        """A successful create ends in (CREATE, COMPLETE)."""
        rsrc = self.create_health_monitor()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_create_failed(self):
        """A neutron error during create leaves the resource FAILED."""
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_health_monitor({
            'health_monitor': {
                'delay': 3, 'max_retries': 5, 'type': u'HTTP',
                'timeout': 10, 'admin_state_up': True}}
        ).AndRaise(loadbalancer.NeutronClientException())
        self.m.ReplayAll()
        snippet = template_format.parse(health_monitor_template)
        stack = utils.parse_stack(snippet)
        rsrc = loadbalancer.HealthMonitor(
            'monitor', snippet['Resources']['monitor'], stack)
        error = self.assertRaises(exception.ResourceFailure,
                                  scheduler.TaskRunner(rsrc.create))
        self.assertEqual(
            'NeutronClientException: An unknown exception occurred.',
            str(error))
        self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
        self.m.VerifyAll()
    def test_delete(self):
        """Delete succeeds once neutron reports the monitor gone (404)."""
        neutronclient.Client.delete_health_monitor('5678')
        neutronclient.Client.show_health_monitor('5678').AndRaise(
            loadbalancer.NeutronClientException(status_code=404))
        rsrc = self.create_health_monitor()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_delete_already_gone(self):
        """A 404 from delete itself is treated as success."""
        neutronclient.Client.delete_health_monitor('5678').AndRaise(
            loadbalancer.NeutronClientException(status_code=404))
        rsrc = self.create_health_monitor()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_delete_failed(self):
        """A non-404 neutron error during delete leaves the resource FAILED."""
        neutronclient.Client.delete_health_monitor('5678').AndRaise(
            loadbalancer.NeutronClientException(status_code=400))
        rsrc = self.create_health_monitor()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        error = self.assertRaises(exception.ResourceFailure,
                                  scheduler.TaskRunner(rsrc.delete))
        self.assertEqual(
            'NeutronClientException: An unknown exception occurred.',
            str(error))
        self.assertEqual((rsrc.DELETE, rsrc.FAILED), rsrc.state)
        self.m.VerifyAll()
    def test_attribute(self):
        """FnGetAtt resolves attributes from show_health_monitor output."""
        rsrc = self.create_health_monitor()
        neutronclient.Client.show_health_monitor('5678').MultipleTimes(
        ).AndReturn(
            {'health_monitor': {'admin_state_up': True, 'delay': 3}})
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertIs(True, rsrc.FnGetAtt('admin_state_up'))
        self.assertEqual(3, rsrc.FnGetAtt('delay'))
        self.m.VerifyAll()
    def test_attribute_failed(self):
        """FnGetAtt on an unknown attribute raises InvalidTemplateAttribute."""
        rsrc = self.create_health_monitor()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        error = self.assertRaises(exception.InvalidTemplateAttribute,
                                  rsrc.FnGetAtt, 'subnet_id')
        self.assertEqual(
            'The Referenced Attribute (monitor subnet_id) is incorrect.',
            str(error))
        self.m.VerifyAll()
    def test_update(self):
        """Changing 'delay' triggers a matching update_health_monitor call."""
        rsrc = self.create_health_monitor()
        neutronclient.Client.update_health_monitor(
            '5678', {'health_monitor': {'delay': 10}})
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        update_template['Properties']['delay'] = 10
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.m.VerifyAll()
@skipIf(neutronclient is None, 'neutronclient unavailable')
class PoolTest(HeatTestCase):
    """Tests the OS::Neutron::Pool resource (pool + VIP lifecycle).

    Uses mox record/replay: each test records the expected neutronclient
    calls in order, replays them, drives the resource, then verifies all
    expectations were consumed.
    """
    def setUp(self):
        """Stub the neutron pool/VIP/health-monitor APIs and keystone."""
        super(PoolTest, self).setUp()
        self.m.StubOutWithMock(neutronclient.Client, 'create_pool')
        self.m.StubOutWithMock(neutronclient.Client, 'delete_pool')
        self.m.StubOutWithMock(neutronclient.Client, 'show_pool')
        self.m.StubOutWithMock(neutronclient.Client, 'update_pool')
        self.m.StubOutWithMock(neutronclient.Client,
                               'associate_health_monitor')
        self.m.StubOutWithMock(neutronclient.Client,
                               'disassociate_health_monitor')
        self.m.StubOutWithMock(neutronclient.Client, 'create_vip')
        self.m.StubOutWithMock(neutronclient.Client, 'delete_vip')
        self.m.StubOutWithMock(neutronclient.Client, 'show_vip')
        self.m.StubOutWithMock(clients.OpenStackClients, 'keystone')
        utils.setup_dummy_db()
    def create_pool(self):
        """Record a successful pool+VIP creation and return the resource."""
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_pool({
            'pool': {
                'subnet_id': 'sub123', 'protocol': u'HTTP',
                'name': utils.PhysName('test_stack', 'pool'),
                'lb_method': 'ROUND_ROBIN', 'admin_state_up': True}}
        ).AndReturn({'pool': {'id': '5678'}})
        neutronclient.Client.create_vip({
            'vip': {
                'protocol': u'HTTP', 'name': 'pool.vip',
                'admin_state_up': True, 'subnet_id': u'sub123',
                'pool_id': '5678', 'protocol_port': 80}}
        ).AndReturn({'vip': {'id': 'xyz'}})
        neutronclient.Client.show_pool('5678').AndReturn(
            {'pool': {'status': 'ACTIVE'}})
        neutronclient.Client.show_vip('xyz').AndReturn(
            {'vip': {'status': 'ACTIVE'}})
        snippet = template_format.parse(pool_template)
        stack = utils.parse_stack(snippet)
        return loadbalancer.Pool(
            'pool', snippet['Resources']['pool'], stack)
    def test_create(self):
        """A successful create ends in (CREATE, COMPLETE)."""
        rsrc = self.create_pool()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_create_pending(self):
        """Create polls while pool/VIP are PENDING_CREATE, then completes."""
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_pool({
            'pool': {
                'subnet_id': 'sub123', 'protocol': u'HTTP',
                'name': utils.PhysName('test_stack', 'pool'),
                'lb_method': 'ROUND_ROBIN', 'admin_state_up': True}}
        ).AndReturn({'pool': {'id': '5678'}})
        neutronclient.Client.create_vip({
            'vip': {
                'protocol': u'HTTP', 'name': 'pool.vip',
                'admin_state_up': True, 'subnet_id': u'sub123',
                'pool_id': '5678', 'protocol_port': 80}}
        ).AndReturn({'vip': {'id': 'xyz'}})
        neutronclient.Client.show_pool('5678').AndReturn(
            {'pool': {'status': 'PENDING_CREATE'}})
        neutronclient.Client.show_pool('5678').MultipleTimes().AndReturn(
            {'pool': {'status': 'ACTIVE'}})
        neutronclient.Client.show_vip('xyz').AndReturn(
            {'vip': {'status': 'PENDING_CREATE'}})
        neutronclient.Client.show_vip('xyz').AndReturn(
            {'vip': {'status': 'ACTIVE'}})
        snippet = template_format.parse(pool_template)
        stack = utils.parse_stack(snippet)
        rsrc = loadbalancer.Pool(
            'pool', snippet['Resources']['pool'], stack)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_create_failed_unexpected_status(self):
        """A pool entering ERROR status fails the create."""
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_pool({
            'pool': {
                'subnet_id': 'sub123', 'protocol': u'HTTP',
                'name': utils.PhysName('test_stack', 'pool'),
                'lb_method': 'ROUND_ROBIN', 'admin_state_up': True}}
        ).AndReturn({'pool': {'id': '5678'}})
        neutronclient.Client.create_vip({
            'vip': {
                'protocol': u'HTTP', 'name': 'pool.vip',
                'admin_state_up': True, 'subnet_id': u'sub123',
                'pool_id': '5678', 'protocol_port': 80}}
        ).AndReturn({'vip': {'id': 'xyz'}})
        neutronclient.Client.show_pool('5678').AndReturn(
            {'pool': {'status': 'ERROR', 'name': '5678'}})
        snippet = template_format.parse(pool_template)
        stack = utils.parse_stack(snippet)
        rsrc = loadbalancer.Pool(
            'pool', snippet['Resources']['pool'], stack)
        self.m.ReplayAll()
        error = self.assertRaises(exception.ResourceFailure,
                                  scheduler.TaskRunner(rsrc.create))
        self.assertEqual(
            'Error: neutron report unexpected pool '
            'resource[5678] status[ERROR]',
            str(error))
        self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
        self.m.VerifyAll()
    def test_create_failed_unexpected_vip_status(self):
        """A VIP entering ERROR status fails the create."""
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_pool({
            'pool': {
                'subnet_id': 'sub123', 'protocol': u'HTTP',
                'name': utils.PhysName('test_stack', 'pool'),
                'lb_method': 'ROUND_ROBIN', 'admin_state_up': True}}
        ).AndReturn({'pool': {'id': '5678'}})
        neutronclient.Client.create_vip({
            'vip': {
                'protocol': u'HTTP', 'name': 'pool.vip',
                'admin_state_up': True, 'subnet_id': u'sub123',
                'pool_id': '5678', 'protocol_port': 80}}
        ).AndReturn({'vip': {'id': 'xyz'}})
        neutronclient.Client.show_pool('5678').MultipleTimes().AndReturn(
            {'pool': {'status': 'ACTIVE'}})
        neutronclient.Client.show_vip('xyz').AndReturn(
            {'vip': {'status': 'ERROR', 'name': 'xyz'}})
        snippet = template_format.parse(pool_template)
        stack = utils.parse_stack(snippet)
        rsrc = loadbalancer.Pool(
            'pool', snippet['Resources']['pool'], stack)
        self.m.ReplayAll()
        error = self.assertRaises(exception.ResourceFailure,
                                  scheduler.TaskRunner(rsrc.create))
        self.assertEqual(
            'Error: neutron reported unexpected vip '
            'resource[xyz] status[ERROR]',
            str(error))
        self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
        self.m.VerifyAll()
    def test_create_failed(self):
        """A neutron exception from create_pool fails the resource."""
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_pool({
            'pool': {
                'subnet_id': 'sub123', 'protocol': u'HTTP',
                'name': utils.PhysName('test_stack', 'pool'),
                'lb_method': 'ROUND_ROBIN', 'admin_state_up': True}}
        ).AndRaise(loadbalancer.NeutronClientException())
        self.m.ReplayAll()
        snippet = template_format.parse(pool_template)
        stack = utils.parse_stack(snippet)
        rsrc = loadbalancer.Pool(
            'pool', snippet['Resources']['pool'], stack)
        error = self.assertRaises(exception.ResourceFailure,
                                  scheduler.TaskRunner(rsrc.create))
        self.assertEqual(
            'NeutronClientException: An unknown exception occurred.',
            str(error))
        self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
        self.m.VerifyAll()
    def test_create_with_session_persistence(self):
        """Session persistence settings are passed through to create_vip."""
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_pool({
            'pool': {
                'subnet_id': 'sub123', 'protocol': u'HTTP',
                'name': utils.PhysName('test_stack', 'pool'),
                'lb_method': 'ROUND_ROBIN', 'admin_state_up': True}}
        ).AndReturn({'pool': {'id': '5678'}})
        neutronclient.Client.create_vip({
            'vip': {
                'protocol': u'HTTP', 'name': 'pool.vip',
                'admin_state_up': True, 'subnet_id': u'sub123',
                'pool_id': '5678', 'protocol_port': 80,
                'session_persistence': {
                    'type': 'APP_COOKIE',
                    'cookie_name': 'cookie'}}}
        ).AndReturn({'vip': {'id': 'xyz'}})
        neutronclient.Client.show_pool('5678').AndReturn(
            {'pool': {'status': 'ACTIVE'}})
        neutronclient.Client.show_vip('xyz').AndReturn(
            {'vip': {'status': 'ACTIVE'}})
        snippet = template_format.parse(pool_with_session_persistence_template)
        stack = utils.parse_stack(snippet)
        rsrc = loadbalancer.Pool(
            'pool', snippet['Resources']['pool'], stack)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_failing_validation_with_session_persistence(self):
        """APP_COOKIE persistence without a cookie_name fails validation."""
        msg = _('Property cookie_name is required, when '
                'session_persistence type is set to APP_COOKIE.')
        snippet = template_format.parse(pool_with_session_persistence_template)
        pool = snippet['Resources']['pool']
        persistence = pool['Properties']['vip']['session_persistence']
        #When persistence type is set to APP_COOKIE, cookie_name is required
        persistence['type'] = 'APP_COOKIE'
        persistence['cookie_name'] = None
        resource = loadbalancer.Pool('pool', pool, utils.parse_stack(snippet))
        error = self.assertRaises(exception.StackValidationFailed,
                                  resource.validate)
        self.assertEqual(msg, str(error))
    def test_validation_not_failing_without_session_persistence(self):
        """A pool with no session persistence validates cleanly."""
        snippet = template_format.parse(pool_template)
        pool = snippet['Resources']['pool']
        resource = loadbalancer.Pool('pool', pool, utils.parse_stack(snippet))
        self.assertIsNone(resource.validate())
    def test_properties_are_prepared_for_session_persistence(self):
        """HTTP_COOKIE persistence gets a defaulted (None) cookie_name."""
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_pool({
            'pool': {
                'subnet_id': 'sub123', 'protocol': u'HTTP',
                'name': utils.PhysName('test_stack', 'pool'),
                'lb_method': 'ROUND_ROBIN', 'admin_state_up': True}}
        ).AndReturn({'pool': {'id': '5678'}})
        neutronclient.Client.create_vip({
            'vip': {
                'protocol': u'HTTP', 'name': 'pool.vip',
                'admin_state_up': True, 'subnet_id': u'sub123',
                'pool_id': '5678', 'protocol_port': 80,
                'session_persistence': {'type': 'HTTP_COOKIE'}}}
        ).AndReturn({'vip': {'id': 'xyz'}})
        neutronclient.Client.show_pool('5678').AndReturn(
            {'pool': {'status': 'ACTIVE'}})
        neutronclient.Client.show_vip('xyz').AndReturn(
            {'vip': {'status': 'ACTIVE'}})
        snippet = template_format.parse(pool_with_session_persistence_template)
        pool = snippet['Resources']['pool']
        persistence = pool['Properties']['vip']['session_persistence']
        #change persistence type to HTTP_COOKIE that not require cookie_name
        persistence['type'] = 'HTTP_COOKIE'
        del persistence['cookie_name']
        resource = loadbalancer.Pool('pool', pool, utils.parse_stack(snippet))
        #assert that properties contain cookie_name property with None value
        persistence = resource.properties['vip']['session_persistence']
        self.assertIn('cookie_name', persistence)
        self.assertIsNone(persistence['cookie_name'])
        self.m.ReplayAll()
        scheduler.TaskRunner(resource.create)()
        self.assertEqual((resource.CREATE, resource.COMPLETE), resource.state)
        self.m.VerifyAll()
    def test_delete(self):
        """Delete removes the VIP first, then the pool."""
        rsrc = self.create_pool()
        neutronclient.Client.delete_vip('xyz')
        neutronclient.Client.show_vip('xyz').AndRaise(
            loadbalancer.NeutronClientException(status_code=404))
        neutronclient.Client.delete_pool('5678')
        neutronclient.Client.show_pool('5678').AndRaise(
            loadbalancer.NeutronClientException(status_code=404))
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_delete_already_gone(self):
        """404s from VIP and pool deletion are treated as success."""
        neutronclient.Client.delete_vip('xyz').AndRaise(
            loadbalancer.NeutronClientException(status_code=404))
        neutronclient.Client.delete_pool('5678').AndRaise(
            loadbalancer.NeutronClientException(status_code=404))
        rsrc = self.create_pool()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_delete_vip_failed(self):
        """A non-404 error deleting the VIP fails the delete."""
        neutronclient.Client.delete_vip('xyz').AndRaise(
            loadbalancer.NeutronClientException(status_code=400))
        rsrc = self.create_pool()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        error = self.assertRaises(exception.ResourceFailure,
                                  scheduler.TaskRunner(rsrc.delete))
        self.assertEqual(
            'NeutronClientException: An unknown exception occurred.',
            str(error))
        self.assertEqual((rsrc.DELETE, rsrc.FAILED), rsrc.state)
        self.m.VerifyAll()
    def test_delete_failed(self):
        """A non-404 error deleting the pool fails the delete."""
        neutronclient.Client.delete_vip('xyz').AndRaise(
            loadbalancer.NeutronClientException(status_code=404))
        neutronclient.Client.delete_pool('5678').AndRaise(
            loadbalancer.NeutronClientException(status_code=400))
        rsrc = self.create_pool()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        error = self.assertRaises(exception.ResourceFailure,
                                  scheduler.TaskRunner(rsrc.delete))
        self.assertEqual(
            'NeutronClientException: An unknown exception occurred.',
            str(error))
        self.assertEqual((rsrc.DELETE, rsrc.FAILED), rsrc.state)
        self.m.VerifyAll()
    def test_attribute(self):
        """FnGetAtt resolves pool attributes from show_pool output."""
        rsrc = self.create_pool()
        neutronclient.Client.show_pool('5678').MultipleTimes(
        ).AndReturn(
            {'pool': {'admin_state_up': True, 'lb_method': 'ROUND_ROBIN'}})
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertIs(True, rsrc.FnGetAtt('admin_state_up'))
        self.assertEqual('ROUND_ROBIN', rsrc.FnGetAtt('lb_method'))
        self.m.VerifyAll()
    def test_vip_attribute(self):
        """FnGetAtt('vip') returns the full VIP dict from show_vip."""
        rsrc = self.create_pool()
        neutronclient.Client.show_vip('xyz').AndReturn(
            {'vip': {'address': '10.0.0.3', 'name': 'xyz'}})
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual({'address': '10.0.0.3', 'name': 'xyz'},
                         rsrc.FnGetAtt('vip'))
        self.m.VerifyAll()
    def test_attribute_failed(self):
        """FnGetAtt on an unknown attribute raises InvalidTemplateAttribute."""
        rsrc = self.create_pool()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        error = self.assertRaises(exception.InvalidTemplateAttribute,
                                  rsrc.FnGetAtt, 'net_id')
        self.assertEqual(
            'The Referenced Attribute (pool net_id) is incorrect.',
            str(error))
        self.m.VerifyAll()
    def test_update(self):
        """Changing admin_state_up triggers a matching update_pool call."""
        rsrc = self.create_pool()
        neutronclient.Client.update_pool(
            '5678', {'pool': {'admin_state_up': False}})
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        update_template['Properties']['admin_state_up'] = False
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.m.VerifyAll()
    def test_update_monitors(self):
        """Updating 'monitors' disassociates removed and associates added."""
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_pool({
            'pool': {
                'subnet_id': 'sub123', 'protocol': u'HTTP',
                'name': utils.PhysName('test_stack', 'pool'),
                'lb_method': 'ROUND_ROBIN', 'admin_state_up': True}}
        ).AndReturn({'pool': {'id': '5678'}})
        neutronclient.Client.associate_health_monitor(
            '5678', {'health_monitor': {'id': 'mon123'}})
        neutronclient.Client.associate_health_monitor(
            '5678', {'health_monitor': {'id': 'mon456'}})
        neutronclient.Client.create_vip({
            'vip': {
                'protocol': u'HTTP', 'name': 'pool.vip',
                'admin_state_up': True, 'subnet_id': u'sub123',
                'pool_id': '5678', 'protocol_port': 80}}
        ).AndReturn({'vip': {'id': 'xyz'}})
        neutronclient.Client.show_pool('5678').AndReturn(
            {'pool': {'status': 'ACTIVE'}})
        neutronclient.Client.show_vip('xyz').AndReturn(
            {'vip': {'status': 'ACTIVE'}})
        neutronclient.Client.disassociate_health_monitor(
            '5678', {'health_monitor': {'id': 'mon456'}})
        neutronclient.Client.associate_health_monitor(
            '5678', {'health_monitor': {'id': 'mon789'}})
        snippet = template_format.parse(pool_template)
        stack = utils.parse_stack(snippet)
        snippet['Resources']['pool']['Properties']['monitors'] = [
            'mon123', 'mon456']
        rsrc = loadbalancer.Pool(
            'pool', snippet['Resources']['pool'], stack)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        update_template['Properties']['monitors'] = ['mon123', 'mon789']
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.m.VerifyAll()
@skipIf(neutronclient is None, 'neutronclient unavailable')
class PoolMemberTest(HeatTestCase):
    """Tests the OS::Neutron::PoolMember resource.

    Uses mox record/replay against stubbed neutronclient member calls.
    """
    def setUp(self):
        """Stub the neutron member API and the keystone client."""
        super(PoolMemberTest, self).setUp()
        self.fc = nova_fakes.FakeClient()
        self.m.StubOutWithMock(neutronclient.Client, 'create_member')
        self.m.StubOutWithMock(neutronclient.Client, 'delete_member')
        self.m.StubOutWithMock(neutronclient.Client, 'update_member')
        self.m.StubOutWithMock(neutronclient.Client, 'show_member')
        self.m.StubOutWithMock(clients.OpenStackClients, 'keystone')
        utils.setup_dummy_db()
    def create_member(self):
        """Record a successful create_member call and return the resource."""
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_member({
            'member': {
                'pool_id': 'pool123', 'protocol_port': 8080,
                'address': '1.2.3.4', 'admin_state_up': True}}
        ).AndReturn({'member': {'id': 'member5678'}})
        snippet = template_format.parse(member_template)
        stack = utils.parse_stack(snippet)
        return loadbalancer.PoolMember(
            'member', snippet['Resources']['member'], stack)
    def test_create(self):
        """A successful create records the member id as the resource id."""
        rsrc = self.create_member()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.assertEqual('member5678', rsrc.resource_id)
        self.m.VerifyAll()
    def test_create_optional_parameters(self):
        """Optional weight/admin_state_up properties are passed through."""
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_member({
            'member': {
                'pool_id': 'pool123', 'protocol_port': 8080,
                'weight': 100, 'admin_state_up': False,
                'address': '1.2.3.4'}}
        ).AndReturn({'member': {'id': 'member5678'}})
        snippet = template_format.parse(member_template)
        snippet['Resources']['member']['Properties']['admin_state_up'] = False
        snippet['Resources']['member']['Properties']['weight'] = 100
        stack = utils.parse_stack(snippet)
        rsrc = loadbalancer.PoolMember(
            'member', snippet['Resources']['member'], stack)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.assertEqual('member5678', rsrc.resource_id)
        self.m.VerifyAll()
    def test_attribute(self):
        """FnGetAtt resolves member attributes from show_member output."""
        rsrc = self.create_member()
        neutronclient.Client.show_member('member5678').MultipleTimes(
        ).AndReturn(
            {'member': {'admin_state_up': True, 'weight': 5}})
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertIs(True, rsrc.FnGetAtt('admin_state_up'))
        self.assertEqual(5, rsrc.FnGetAtt('weight'))
        self.m.VerifyAll()
    def test_update(self):
        """Changing pool_id triggers a matching update_member call."""
        rsrc = self.create_member()
        neutronclient.Client.update_member(
            'member5678', {'member': {'pool_id': 'pool456'}})
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        update_template['Properties']['pool_id'] = 'pool456'
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.m.VerifyAll()
    def test_delete(self):
        """Delete succeeds once neutron reports the member gone (404)."""
        rsrc = self.create_member()
        neutronclient.Client.delete_member(u'member5678')
        neutronclient.Client.show_member(u'member5678').AndRaise(
            loadbalancer.NeutronClientException(status_code=404))
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_delete_missing_member(self):
        """A 404 from delete_member itself is treated as success."""
        rsrc = self.create_member()
        neutronclient.Client.delete_member(u'member5678').AndRaise(
            loadbalancer.NeutronClientException(status_code=404))
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
@skipIf(neutronclient is None, 'neutronclient unavailable')
class LoadBalancerTest(HeatTestCase):
    """Tests the OS::Neutron::LoadBalancer resource (member management).

    Uses mox record/replay with stubbed neutron member calls and a fake
    nova client to resolve member instance addresses.
    """
    def setUp(self):
        """Stub neutron member calls plus the keystone and nova clients."""
        super(LoadBalancerTest, self).setUp()
        self.fc = nova_fakes.FakeClient()
        self.m.StubOutWithMock(neutronclient.Client, 'create_member')
        self.m.StubOutWithMock(neutronclient.Client, 'delete_member')
        self.m.StubOutWithMock(clients.OpenStackClients, 'keystone')
        self.m.StubOutWithMock(clients.OpenStackClients, 'nova')
        utils.setup_dummy_db()
    def create_load_balancer(self):
        """Record a successful member creation and return the resource."""
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        clients.OpenStackClients.nova("compute").MultipleTimes().AndReturn(
            self.fc)
        neutronclient.Client.create_member({
            'member': {
                'pool_id': 'pool123', 'protocol_port': 8080,
                'address': '1.2.3.4'}}
        ).AndReturn({'member': {'id': 'member5678'}})
        snippet = template_format.parse(lb_template)
        stack = utils.parse_stack(snippet)
        return loadbalancer.LoadBalancer(
            'lb', snippet['Resources']['lb'], stack)
    def test_create(self):
        """A successful create ends in (CREATE, COMPLETE)."""
        rsrc = self.create_load_balancer()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_update(self):
        """Replacing the members list deletes old and creates new members."""
        rsrc = self.create_load_balancer()
        neutronclient.Client.delete_member(u'member5678')
        neutronclient.Client.create_member({
            'member': {
                'pool_id': 'pool123', 'protocol_port': 8080,
                'address': '4.5.6.7'}}
        ).AndReturn({'member': {'id': 'memberxyz'}})
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        update_template['Properties']['members'] = ['5678']
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.m.VerifyAll()
    def test_update_missing_member(self):
        """A 404 while removing a member during update is tolerated."""
        rsrc = self.create_load_balancer()
        neutronclient.Client.delete_member(u'member5678').AndRaise(
            loadbalancer.NeutronClientException(status_code=404))
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        update_template['Properties']['members'] = []
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_delete(self):
        """Delete removes the member created for the load balancer."""
        rsrc = self.create_load_balancer()
        neutronclient.Client.delete_member(u'member5678')
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_delete_missing_member(self):
        """A 404 while deleting the member is treated as success."""
        rsrc = self.create_load_balancer()
        neutronclient.Client.delete_member(u'member5678').AndRaise(
            loadbalancer.NeutronClientException(status_code=404))
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
| ntt-sic/heat | heat/tests/test_neutron_loadbalancer.py | Python | apache-2.0 | 34,298 |
from bing_search_api import BingSearchAPI
my_key = "MEL5FOrb1H5G1E78YY8N5mkfcvUK2hNBYsZl1aAEEbE"
def query(query_string):
    """Return the URLs of the top Bing web results for *query_string*."""
    client = BingSearchAPI(my_key)
    search_params = {
        'ImageFilters': '"Face:Face"',
        '$format': 'json',
        '$top': 10,
        '$skip': 0,
    }
    # requests 1.0+ exposes the response body via .json()
    payload = client.search('web', query_string, search_params).json()
    web_results = payload['d']['results'][0]['Web']
    return [entry['Url'] for entry in web_results]
if __name__ == "__main__":
    # Demo entry point: run a sample search and print the result URLs.
    query_string = "Your Query"
    # Parenthesized print is equivalent for a single argument on Python 2
    # and also parses under Python 3 (the bare statement form does not).
    print(query(query_string))
| mzweilin/HashTag-Understanding | test/test_bing_search.py | Python | apache-2.0 | 529 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-22 22:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a nullable ``custom_virtualenv`` path field to both the
    InventoryUpdate and Job models so each run can record the Python
    virtualenv it executed in."""

    dependencies = [
        ('main', '0055_v340_add_grafana_notification'),
    ]

    operations = [
        migrations.AddField(
            model_name='inventoryupdate',
            name='custom_virtualenv',
            field=models.CharField(blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='job',
            name='custom_virtualenv',
            field=models.CharField(blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True),
        ),
    ]
| GoogleCloudPlatform/sap-deployment-automation | third_party/github.com/ansible/awx/awx/main/migrations/0056_v350_custom_venv_history.py | Python | apache-2.0 | 879 |
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
    # Recreate Python 2 names on Python 3: expose the builtins module
    # under its old name and alias the removed ``long`` type to ``int``.
    import builtins as __builtin__
    long = int
elif six.PY2:
    import __builtin__
class config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/segment-routing/srlbs/srlb/config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration parameters relating to the SRLB.
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__local_id",
"__dataplane_type",
"__mpls_label_block",
"__ipv6_prefix",
)
_yang_name = "config"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__local_id = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="local-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="string",
is_config=True,
)
self.__dataplane_type = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"MPLS": {}, "IPV6": {}},
),
is_leaf=True,
yang_name="dataplane-type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="sr-dataplane-type",
is_config=True,
)
self.__mpls_label_block = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="mpls-label-block",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
self.__ipv6_prefix = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_dict={
"pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
},
),
is_leaf=True,
yang_name="ipv6-prefix",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="inet:ipv6-prefix",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"segment-routing",
"srlbs",
"srlb",
"config",
]
    def _get_local_id(self):
        """
        Getter method for local_id, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/local_id (string)

        Returns the YANGDynClass-wrapped leaf value.

        YANG Description: A unique local identifier used for the Segment Routing Local Block.
        The identifier is used when referencing the SRLB within other
        contexts.
        """
        return self.__local_id
def _set_local_id(self, v, load=False):
"""
Setter method for local_id, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/local_id (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_local_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_local_id() directly.
YANG Description: A unique local identifier used for the Segment Routing Local Block.
The identifier is used when referencing the SRLB within other
contexts.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
is_leaf=True,
yang_name="local-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="string",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """local_id must be of a type compatible with string""",
"defined-type": "string",
"generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="local-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=True)""",
}
)
self.__local_id = t
if hasattr(self, "_set"):
self._set()
    def _unset_local_id(self):
        # Reset local_id to an empty, default-typed YANGDynClass instance.
        self.__local_id = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="local-id",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="string",
            is_config=True,
        )
    def _get_dataplane_type(self):
        """
        Getter method for dataplane_type, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/dataplane_type (sr-dataplane-type)

        Returns the YANGDynClass-wrapped leaf value.

        YANG Description: The dataplane that is to be used for the Segment Routing Local Block.
        When MPLS is specified, the local block corresponds to a block of MPLS
        labels; when IPv6 is specified it corresponds to an IPv6 prefix.
        """
        return self.__dataplane_type
    def _set_dataplane_type(self, v, load=False):
        """
        Setter method for dataplane_type, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/dataplane_type (sr-dataplane-type)

        If this variable is read-only (config: false) in the
        source YANG file, then _set_dataplane_type is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_dataplane_type() directly.

        YANG Description: The dataplane that is to be used for the Segment Routing Local Block.
        When MPLS is specified, the local block corresponds to a block of MPLS
        labels; when IPv6 is specified it corresponds to an IPv6 prefix.
        """
        # Unwrap an already-typed value so it is re-validated below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Only the enumeration keys "MPLS" and "IPV6" are accepted.
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={"MPLS": {}, "IPV6": {}},
                ),
                is_leaf=True,
                yang_name="dataplane-type",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="sr-dataplane-type",
                is_config=True,
            )
        except (TypeError, ValueError):
            # Surface a structured error describing the expected leaf type.
            raise ValueError(
                {
                    "error-string": """dataplane_type must be of a type compatible with sr-dataplane-type""",
                    "defined-type": "openconfig-network-instance:sr-dataplane-type",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'MPLS': {}, 'IPV6': {}},), is_leaf=True, yang_name="dataplane-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='sr-dataplane-type', is_config=True)""",
                }
            )
        self.__dataplane_type = t
        if hasattr(self, "_set"):
            self._set()
    def _unset_dataplane_type(self):
        # Reset dataplane_type to an empty, default-typed YANGDynClass instance.
        self.__dataplane_type = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={"MPLS": {}, "IPV6": {}},
            ),
            is_leaf=True,
            yang_name="dataplane-type",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="sr-dataplane-type",
            is_config=True,
        )
    def _get_mpls_label_block(self):
        """
        Getter method for mpls_label_block, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/mpls_label_block (leafref)

        Returns the YANGDynClass-wrapped leaf value.

        YANG Description: A reference to the MPLS label block that is used to contain the
        SIDs of the SRLB.
        """
        return self.__mpls_label_block
    def _set_mpls_label_block(self, v, load=False):
        """
        Setter method for mpls_label_block, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/mpls_label_block (leafref)

        If this variable is read-only (config: false) in the
        source YANG file, then _set_mpls_label_block is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_mpls_label_block() directly.

        YANG Description: A reference to the MPLS label block that is used to contain the
        SIDs of the SRLB.
        """
        # Unwrap an already-typed value so it is re-validated below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=six.text_type,
                is_leaf=True,
                yang_name="mpls-label-block",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="leafref",
                is_config=True,
            )
        except (TypeError, ValueError):
            # Surface a structured error describing the expected leaf type.
            raise ValueError(
                {
                    "error-string": """mpls_label_block must be of a type compatible with leafref""",
                    "defined-type": "leafref",
                    "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="mpls-label-block", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
                }
            )
        self.__mpls_label_block = t
        if hasattr(self, "_set"):
            self._set()
    def _unset_mpls_label_block(self):
        # Reset mpls_label_block to an empty, default-typed YANGDynClass instance.
        self.__mpls_label_block = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="mpls-label-block",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )
    def _get_ipv6_prefix(self):
        """
        Getter method for ipv6_prefix, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/ipv6_prefix (inet:ipv6-prefix)

        Returns the YANGDynClass-wrapped leaf value.

        YANG Description: The IPv6 prefix that is used for the SRLB.
        """
        return self.__ipv6_prefix
    def _set_ipv6_prefix(self, v, load=False):
        """
        Setter method for ipv6_prefix, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/ipv6_prefix (inet:ipv6-prefix)

        If this variable is read-only (config: false) in the
        source YANG file, then _set_ipv6_prefix is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_ipv6_prefix() directly.

        YANG Description: The IPv6 prefix that is used for the SRLB.
        """
        # Unwrap an already-typed value so it is re-validated below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # The pattern enforces the inet:ipv6-prefix textual form
            # (IPv6 address plus /0-128 prefix length).
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_dict={
                        "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                    },
                ),
                is_leaf=True,
                yang_name="ipv6-prefix",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="inet:ipv6-prefix",
                is_config=True,
            )
        except (TypeError, ValueError):
            # Surface a structured error describing the expected leaf type.
            raise ValueError(
                {
                    "error-string": """ipv6_prefix must be of a type compatible with inet:ipv6-prefix""",
                    "defined-type": "inet:ipv6-prefix",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))'}), is_leaf=True, yang_name="ipv6-prefix", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ipv6-prefix', is_config=True)""",
                }
            )
        self.__ipv6_prefix = t
        if hasattr(self, "_set"):
            self._set()
    def _unset_ipv6_prefix(self):
        # Reset ipv6_prefix to an empty, default-typed YANGDynClass instance.
        self.__ipv6_prefix = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                },
            ),
            is_leaf=True,
            yang_name="ipv6-prefix",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ipv6-prefix",
            is_config=True,
        )
local_id = __builtin__.property(_get_local_id, _set_local_id)
dataplane_type = __builtin__.property(_get_dataplane_type, _set_dataplane_type)
mpls_label_block = __builtin__.property(
_get_mpls_label_block, _set_mpls_label_block
)
ipv6_prefix = __builtin__.property(_get_ipv6_prefix, _set_ipv6_prefix)
_pyangbind_elements = OrderedDict(
[
("local_id", local_id),
("dataplane_type", dataplane_type),
("mpls_label_block", mpls_label_block),
("ipv6_prefix", ipv6_prefix),
]
)
class config(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/segment-routing/srlbs/srlb/config. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: Configuration parameters relating to the SRLB.
    """

    # __slots__ avoids a per-instance __dict__ (generated trees can hold
    # many of these containers) and blocks accidental attribute creation.
    __slots__ = (
        "_path_helper",
        "_extmethods",
        "__local_id",
        "__dataplane_type",
        "__mpls_label_block",
        "__ipv6_prefix",
    )

    _yang_name = "config"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        # Helpers default to disabled until a parent registers this node.
        self._path_helper = False
        self._extmethods = False
        # Each leaf starts as an empty, typed YANGDynClass instance.
        self.__local_id = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="local-id",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="string",
            is_config=True,
        )
        self.__dataplane_type = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={"MPLS": {}, "IPV6": {}},
            ),
            is_leaf=True,
            yang_name="dataplane-type",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="sr-dataplane-type",
            is_config=True,
        )
        self.__mpls_label_block = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="mpls-label-block",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )
        self.__ipv6_prefix = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                },
            ),
            is_leaf=True,
            yang_name="ipv6-prefix",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ipv6-prefix",
            is_config=True,
        )

        # Optional copy-construction: a single positional argument supplies
        # an object whose changed elements are copied into this container.
        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Build the absolute path from the parent when attached; otherwise
        # return the static schema location of this container.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "segment-routing",
                "srlbs",
                "srlb",
                "config",
            ]

    def _get_local_id(self):
        """
        Getter method for local_id, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/local_id (string)

        YANG Description: A unique local identifier used for the Segment Routing Local Block.
        The identifier is used when referencing the SRLB within other
        contexts.
        """
        return self.__local_id

    def _set_local_id(self, v, load=False):
        """
        Setter method for local_id, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/local_id (string)

        If this variable is read-only (config: false) in the
        source YANG file, then _set_local_id is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_local_id() directly.

        YANG Description: A unique local identifier used for the Segment Routing Local Block.
        The identifier is used when referencing the SRLB within other
        contexts.
        """
        # Unwrap an already-typed value so it is re-validated below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=six.text_type,
                is_leaf=True,
                yang_name="local-id",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="string",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """local_id must be of a type compatible with string""",
                    "defined-type": "string",
                    "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="local-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=True)""",
                }
            )
        self.__local_id = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_local_id(self):
        # Reset local_id to an empty, default-typed instance.
        self.__local_id = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="local-id",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="string",
            is_config=True,
        )

    def _get_dataplane_type(self):
        """
        Getter method for dataplane_type, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/dataplane_type (sr-dataplane-type)

        YANG Description: The dataplane that is to be used for the Segment Routing Local Block.
        When MPLS is specified, the local block corresponds to a block of MPLS
        labels; when IPv6 is specified it corresponds to an IPv6 prefix.
        """
        return self.__dataplane_type

    def _set_dataplane_type(self, v, load=False):
        """
        Setter method for dataplane_type, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/dataplane_type (sr-dataplane-type)

        If this variable is read-only (config: false) in the
        source YANG file, then _set_dataplane_type is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_dataplane_type() directly.

        YANG Description: The dataplane that is to be used for the Segment Routing Local Block.
        When MPLS is specified, the local block corresponds to a block of MPLS
        labels; when IPv6 is specified it corresponds to an IPv6 prefix.
        """
        # Unwrap an already-typed value so it is re-validated below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Only the enumeration keys "MPLS" and "IPV6" are accepted.
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={"MPLS": {}, "IPV6": {}},
                ),
                is_leaf=True,
                yang_name="dataplane-type",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="sr-dataplane-type",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """dataplane_type must be of a type compatible with sr-dataplane-type""",
                    "defined-type": "openconfig-network-instance:sr-dataplane-type",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'MPLS': {}, 'IPV6': {}},), is_leaf=True, yang_name="dataplane-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='sr-dataplane-type', is_config=True)""",
                }
            )
        self.__dataplane_type = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_dataplane_type(self):
        # Reset dataplane_type to an empty, default-typed instance.
        self.__dataplane_type = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={"MPLS": {}, "IPV6": {}},
            ),
            is_leaf=True,
            yang_name="dataplane-type",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="sr-dataplane-type",
            is_config=True,
        )

    def _get_mpls_label_block(self):
        """
        Getter method for mpls_label_block, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/mpls_label_block (leafref)

        YANG Description: A reference to the MPLS label block that is used to contain the
        SIDs of the SRLB.
        """
        return self.__mpls_label_block

    def _set_mpls_label_block(self, v, load=False):
        """
        Setter method for mpls_label_block, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/mpls_label_block (leafref)

        If this variable is read-only (config: false) in the
        source YANG file, then _set_mpls_label_block is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_mpls_label_block() directly.

        YANG Description: A reference to the MPLS label block that is used to contain the
        SIDs of the SRLB.
        """
        # Unwrap an already-typed value so it is re-validated below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=six.text_type,
                is_leaf=True,
                yang_name="mpls-label-block",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="leafref",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """mpls_label_block must be of a type compatible with leafref""",
                    "defined-type": "leafref",
                    "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="mpls-label-block", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
                }
            )
        self.__mpls_label_block = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_mpls_label_block(self):
        # Reset mpls_label_block to an empty, default-typed instance.
        self.__mpls_label_block = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="mpls-label-block",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )

    def _get_ipv6_prefix(self):
        """
        Getter method for ipv6_prefix, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/ipv6_prefix (inet:ipv6-prefix)

        YANG Description: The IPv6 prefix that is used for the SRLB.
        """
        return self.__ipv6_prefix

    def _set_ipv6_prefix(self, v, load=False):
        """
        Setter method for ipv6_prefix, mapped from YANG variable /network_instances/network_instance/segment_routing/srlbs/srlb/config/ipv6_prefix (inet:ipv6-prefix)

        If this variable is read-only (config: false) in the
        source YANG file, then _set_ipv6_prefix is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_ipv6_prefix() directly.

        YANG Description: The IPv6 prefix that is used for the SRLB.
        """
        # Unwrap an already-typed value so it is re-validated below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # The pattern enforces the inet:ipv6-prefix textual form.
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_dict={
                        "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                    },
                ),
                is_leaf=True,
                yang_name="ipv6-prefix",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="inet:ipv6-prefix",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """ipv6_prefix must be of a type compatible with inet:ipv6-prefix""",
                    "defined-type": "inet:ipv6-prefix",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))'}), is_leaf=True, yang_name="ipv6-prefix", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ipv6-prefix', is_config=True)""",
                }
            )
        self.__ipv6_prefix = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_ipv6_prefix(self):
        # Reset ipv6_prefix to an empty, default-typed instance.
        self.__ipv6_prefix = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={
                    "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))"
                },
            ),
            is_leaf=True,
            yang_name="ipv6-prefix",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ipv6-prefix",
            is_config=True,
        )

    # Public pyangbind properties: reads go through the getter, writes
    # through the validating setter defined above.
    local_id = __builtin__.property(_get_local_id, _set_local_id)
    dataplane_type = __builtin__.property(_get_dataplane_type, _set_dataplane_type)
    mpls_label_block = __builtin__.property(
        _get_mpls_label_block, _set_mpls_label_block
    )
    ipv6_prefix = __builtin__.property(_get_ipv6_prefix, _set_ipv6_prefix)

    # Ordered map of element name -> property, used by PybindBase to
    # iterate/serialise children in schema order.
    _pyangbind_elements = OrderedDict(
        [
            ("local_id", local_id),
            ("dataplane_type", dataplane_type),
            ("mpls_label_block", mpls_label_block),
            ("ipv6_prefix", ipv6_prefix),
        ]
    )
| napalm-automation/napalm-yang | napalm_yang/models/openconfig/network_instances/network_instance/segment_routing/srlbs/srlb/config/__init__.py | Python | apache-2.0 | 36,989 |
import numpy as np
import os
import sys
import math
#import class files
# sys.path.append('../../../')
from source import bioRead as br
from source import classify as cl
#import PyInsect for measuring similarity
#sys.path.append('../../../../')
from PyINSECT import representations as REP
from PyINSECT import comparators as CMP
from multiprocessing import Pool
import multiprocessing
# Local function
def __getSimilaritiesForIndex(setting):
    # Worker: fill row `i` of the upper triangle of the shared similarity
    # matrix S with NVS similarities between graph i and graphs i..l-1.
    # NOTE(review): relies on the module-global `sop` (CMP.SimilarityNVS),
    # which is only assigned later in this script -- fine for the in-process
    # map() call below, but would break if run in a fresh Pool subprocess.
    i, l, S, ngg = setting # Explode
    for j in range(i,l):
        dTmp = sop.getSimilarityDouble(ngg[i],ngg[j])
        if (math.isnan(dTmp)):
            raise Exception("Invalid similarity! Check similarity implementation.")
        S[i,j] = dTmp
# End local function
# If we have cached the main analysis data
if os.path.exists('SimilaritiesAndDictionaries/UCNE.npz'):
    # Use them
    npz = np.load('SimilaritiesAndDictionaries/UCNE.npz')
    hd = npz['hd']
    cd = npz['cd']
    S = npz['S']
    l1 = npz['l1']
    l2 = npz['l2']
    l = npz['l']
    # Labels are not stored in the cache; rebuild them (0 = human, 1 = chicken).
    L = np.append(np.zeros(l1),np.ones(l2),axis=0)
    print "WARNING: Using cached data!"
else:
    # else start reading
    sr = br.SequenceReader()
    # Get Human UCNE fasta data
    sr.read('./biodata/UCNEs/hg19_UCNEs.fasta')
    # sr.read('./biodata/UCNEs/hg19_UCNEs-10.fasta')
    hd = sr.getDictionary()
    print "Gained Human Dictionary"
    # Get Chicken UCNE fasta data
    sr.read('./biodata/UCNEs/galGal3_UCNEs.fasta')
    # sr.read('./biodata/UCNEs/galGal3_UCNEs-10.fasta')
    cd = sr.getDictionary()
    print "Gained Chicken Dictionary"

    # Set n-gram graph analysis parameters (n-gram size, neighbourhood window)
    n=3
    Dwin=2
    subjectMap = {}
    ngg = {}
    # Get number of UNCEs (for either type of UNCE)
    l1 = len(hd.keys())
    l2 = len(cd.keys())
    l = l1 + l2
    print "Found %d human UNCEs"%(l1)
    print "Found %d chicken UNCEs"%(l2)
    # For every human UNCE
    i = 0
    for key,a in hd.iteritems():
        # Assign appropriate label
        subjectMap[i] = (key,'humans')
        # Create corresponding graph
        ngg[i] = REP.DocumentNGramGraph(n,Dwin,a)
        i += 1
    print "Graphs Created for Humans"
    for key,b in cd.iteritems():
        subjectMap[i] = (key,'chickens')
        ngg[i] = REP.DocumentNGramGraph(n,Dwin,b)
        i += 1
    print "Graphs Created for Chickens"
    # S holds the pairwise graph similarities; only the upper triangle is
    # filled by the workers and it is mirrored further down.
    S = np.empty([l, l])
    L = np.empty([l])
    sop = CMP.SimilarityNVS()
    print "Getting human similarities..."
    # TODO: Examine default (problems with locking S)
    # pThreadPool = Pool(1);
    qToExecute = list() # Reset tasks
    for i in range(0,l1):
        print i," ",
        L[i] = 0 #0 for humans
        qToExecute.append((i,l,S,ngg))
    # pThreadPool.map(__getSimilaritiesForIndex, qToExecute)
    map(__getSimilaritiesForIndex,qToExecute)
    print ""
    print "Getting human similarities... Done."
    qToExecute = list() # Reset tasks
    print "Getting chicken similarities..."
    for i in range(l1,l):
        print i," ",
        L[i] = 1 #1 for chickens
        qToExecute.append((i,l,S,ngg))
    # pThreadPool.map(__getSimilaritiesForIndex, qToExecute)
    map(__getSimilaritiesForIndex, qToExecute)
    # for i in range(l1,l):
    #     print i," ",
    #     L[i] = 1 #1 for chickens
    #     for j in range(i,l):
    #         S[i,j] = sop.getSimilarityDouble(ngg[i],ngg[j])
    print ""
    print "Getting chicken similarities... Done"
    # Update symmetric matrix, based on current findings
    for i in range(0,l):
        for j in range(0,i):
            S[i,j] = S[j,i]
    print "Similarity matrix constructed.."
    if not os.path.exists('SimilaritiesAndDictionaries'):
        os.mkdir('SimilaritiesAndDictionaries')
    np.savez('SimilaritiesAndDictionaries/UCNE.npz', hd=hd, cd=cd, l1=l1, l2=l2,
        l=l, S=S)

# Evaluate an SVM over the (kernelized) similarity matrix, several times,
# once per kernelization strategy.
reps = 10
L1 = L[0:l1]
L2 = L[l1:]
metrics = dict()
cm = dict()
class_types = {0:"No kernelization",1:"Spectrum Clip",2:"Spectrum Flip",3:"Spectrum Shift",4:"Spectrum Square"}
print "Testing for different kernelization methods..\n\n"
for i in range(0, len(class_types)):
    try:
        print class_types[i],"\n"
        evaluator = cl.Evaluator(cl.SVM())
        Sp = cl.kernelization(S,i)
        S1 = Sp[0:l1,:]
        S2 = Sp[l1:,:]
        metrics[class_types[i]],cm[class_types[i]] = evaluator.Randomized_kfold((S1,S2),(L1,L2),reps,verbose=True)
        print ""
    except Exception as e:
        print "Approach %s failed for reason:\n%s"%(class_types[i], str(e))

np.savez('SimilaritiesAndDictionaries/metrics.npz', metrics=metrics, cm=cm)
| ysig/BioClassSim | test/BioTest/UCNEs/UCNES_Class.py | Python | apache-2.0 | 4,554 |
from insights.parsers import df, ParseException
from insights.tests import context_wrap
import pytest
DF_ALP = """
Filesystem 1024-blocks Used Available Capacity Mounted on
/dev/mapper/vg_lxcrhel6sat56-lv_root 98571884 4244032 89313940 5% /
sysfs 0 0 0 - /sys
proc 0 0 0 - /proc
devtmpfs 5988480 0 5988480 0% /dev
securityfs 0 0 0 - /sys/kernel/security
tmpfs 5998736 491660 5507076 9% /dev/shm
devpts 0 0 0 - /dev/pts
tmpfs 5998736 1380 5997356 1% /run
tmpfs 5998736 0 5998736 0% /sys/fs/cgroup
""".strip()
DF_LI = """
Filesystem Inodes IUsed IFree IUse% Mounted on
/dev/mapper/vg_lxcrhel6sat56-lv_root
6275072 124955 6150117 2% /
devtmpfs 1497120 532 1496588 1% /dev
tmpfs 1499684 331 1499353 1% /dev/shm
tmpfs 1499684 728 1498956 1% /run
tmpfs 1499684 16 1499668 1% /sys/fs/cgroup
tmpfs 1499684 54 1499630 1% /tmp
/dev/sda2 106954752 298662 106656090 1% /home
/dev/sda1 128016 429 127587 1% /boot
tmpfs 1499684 6 1499678 1% /V M T o o l s
tmpfs 1499684 15 1499669 1% /VM Tools
""".strip()
DF_AL = """
Filesystem 1K-blocks Used Available Use% Mounted on
/dev/mapper/vg_lxcrhel6sat56-lv_root 98571884 4244032 89313940 5% /
sysfs 0 0 0 - /sys
proc 0 0 0 - /proc
devtmpfs 5988480 0 5988480 0% /dev
securityfs 0 0 0 - /sys/kernel/security
tmpfs 5998736 491660 5507076 9% /dev/shm
devpts 0 0 0 - /dev/pts
tmpfs 5998736 1380 5997356 1% /run
tmpfs 5998736 0 5998736 0% /sys/fs/cgroup
""".strip()
def test_df_li():
    """Parse `df -li` output and verify mount/filesystem lookups and fields."""
    result = df.DiskFree_LI(context_wrap(DF_LI))

    # Overall shape of the parsed data.
    assert len(result) == 10
    assert len(result.mounts) == 10
    assert len(result.filesystems) == 5

    # The /home record is reported exactly as in the fixture.
    expected_home = df.Record(
        filesystem='/dev/sda2',
        total='106954752',
        used='298662',
        available='106656090',
        capacity='1%',
        mounted_on='/home'
    )
    assert '/home' in result.mounts
    assert result.get_mount('/home') == expected_home
    assert '/dev/sda2' in result.filesystems
    sda2_records = result.get_filesystem('/dev/sda2')
    assert len(sda2_records) == 1
    assert sda2_records[0] == expected_home
    assert sda2_records[0].capacity == '1%'
    assert sda2_records[0].available == '106656090'

    # Spot-check individual fields across several mounts.
    assert len(result.get_filesystem('tmpfs')) == 6
    assert result.get_mount('/dev').filesystem == 'devtmpfs'
    assert result.get_mount('/run').total == '1499684'
    assert result.get_mount('/tmp').used == '54'
    assert result.get_mount('/boot').available == '127587'
    assert result.get_filesystem('devtmpfs')[0].mounted_on == '/dev'
    assert result.get_mount('/V M T o o l s').available == '1499678'
    assert result.get_filesystem('/dev/mapper/vg_lxcrhel6sat56-lv_root')[0].mounted_on == '/'

    # Name listings: iteration, mount_names and filesystem_names agree.
    expected_mounts = sorted([
        '/', '/dev', '/dev/shm', '/run', '/sys/fs/cgroup', '/tmp', '/home',
        '/boot', '/V M T o o l s', '/VM Tools'
    ])
    assert sorted(entry.mounted_on for entry in result) == expected_mounts
    assert sorted(result.mount_names) == expected_mounts
    assert sorted(result.filesystem_names) == sorted([
        '/dev/mapper/vg_lxcrhel6sat56-lv_root', 'devtmpfs', 'tmpfs',
        '/dev/sda2', '/dev/sda1'
    ])
def test_df_alP():
    """Check DiskFree_ALP parsing of `df -alP` (POSIX format) output."""
    df_list = df.DiskFree_ALP(context_wrap(DF_ALP))
    assert len(df_list) == 9
    assert len(df_list.mounts) == 9
    assert len(df_list.filesystems) == 7
    assert '/' in df_list.mounts
    # Expected record for the root filesystem line.
    r = df.Record(
        filesystem='/dev/mapper/vg_lxcrhel6sat56-lv_root',
        total='98571884',
        used='4244032',
        available='89313940',
        capacity='5%',
        mounted_on='/'
    )
    assert df_list.get_mount('/') == r
    assert '/dev/mapper/vg_lxcrhel6sat56-lv_root' in df_list.filesystems
    assert len(df_list.get_filesystem('/dev/mapper/vg_lxcrhel6sat56-lv_root')) == 1
    assert df_list.get_filesystem('/dev/mapper/vg_lxcrhel6sat56-lv_root')[0] == r
    assert len(df_list.get_filesystem('tmpfs')) == 3
    # Zero-size pseudo filesystems must still be parsed.
    assert df_list.get_mount('/sys').filesystem == 'sysfs'
    assert df_list.get_mount('/proc').total == '0'
    assert df_list.get_mount('/dev').used == '0'
    assert df_list.get_mount('/run').available == '5997356'
    assert df_list.get_mount('/sys/fs/cgroup').capacity == '0%'
    assert df_list.get_mount('/').filesystem == '/dev/mapper/vg_lxcrhel6sat56-lv_root'
    assert df_list.get_mount('/').capacity == '5%'
def test_df_al():
    """Check DiskFree_AL parsing of `df -al` output (DF_AL sample).

    The expectations intentionally mirror test_df_alP: the DF_AL sample
    carries the same rows, only the output format flag differs.
    """
    df_list = df.DiskFree_AL(context_wrap(DF_AL))
    assert len(df_list) == 9
    assert len(df_list.mounts) == 9
    assert len(df_list.filesystems) == 7
    assert '/' in df_list.mounts
    # Expected record for the root filesystem line.
    r = df.Record(
        filesystem='/dev/mapper/vg_lxcrhel6sat56-lv_root',
        total='98571884',
        used='4244032',
        available='89313940',
        capacity='5%',
        mounted_on='/'
    )
    assert df_list.get_mount('/') == r
    assert '/dev/mapper/vg_lxcrhel6sat56-lv_root' in df_list.filesystems
    assert len(df_list.get_filesystem('/dev/mapper/vg_lxcrhel6sat56-lv_root')) == 1
    assert df_list.get_filesystem('/dev/mapper/vg_lxcrhel6sat56-lv_root')[0] == r
    assert len(df_list.get_filesystem('tmpfs')) == 3
    assert df_list.get_mount('/sys').filesystem == 'sysfs'
    assert df_list.get_mount('/proc').total == '0'
    assert df_list.get_mount('/dev').used == '0'
    assert df_list.get_mount('/run').available == '5997356'
    assert df_list.get_mount('/sys/fs/cgroup').capacity == '0%'
    assert df_list.get_mount('/').filesystem == '/dev/mapper/vg_lxcrhel6sat56-lv_root'
    assert df_list.get_mount('/').capacity == '5%'
# Malformed `df -al` output: the 'sysfs' line is truncated (too few
# columns), which the parser is expected to reject with ParseException.
# NOTE(review): unlike the other samples this string is not .strip()ed, so
# it keeps leading/trailing newlines -- presumably blank lines are ignored
# by the parser; confirm.
DF_AL_BAD = """
Filesystem 1K-blocks Used Available Use% Mounted on
/dev/mapper/vg_lxcrhel6sat56-lv_root 98571884 4244032 89313940 5% /
sysfs 0
"""
def test_df_al_bad():
    """Malformed input must raise ParseException with a useful message.

    BUGFIX: the original asserted ``len(df_list) == 2`` *after* the
    constructor call inside the ``pytest.raises`` block; since the
    constructor raises, that assert was unreachable dead code (and
    ``df_list`` was never bound).  It has been removed.
    """
    with pytest.raises(ParseException) as exc:
        df.DiskFree_AL(context_wrap(DF_AL_BAD))
    assert 'Could not parse line' in str(exc)
| PaulWay/insights-core | insights/parsers/tests/test_df.py | Python | apache-2.0 | 7,089 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hook for SSH connections."""
import os
import warnings
from base64 import decodebytes
from io import StringIO
from typing import Dict, Optional, Tuple, Union
import paramiko
from paramiko.config import SSH_PORT
from sshtunnel import SSHTunnelForwarder
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
try:
from airflow.utils.platform import getuser
except ImportError:
from getpass import getuser
class SSHHook(BaseHook):
    """
    Hook for ssh remote execution using Paramiko.
    ref: https://github.com/paramiko/paramiko
    This hook also lets you create ssh tunnel and serve as basis for SFTP file transfer

    :param ssh_conn_id: :ref:`ssh connection id<howto/connection:ssh>` from airflow
        Connections from where all the required parameters can be fetched like
        username, password or key_file. Though the priority is given to the
        param passed during init
    :type ssh_conn_id: str
    :param remote_host: remote host to connect
    :type remote_host: str
    :param username: username to connect to the remote_host
    :type username: str
    :param password: password of the username to connect to the remote_host
    :type password: str
    :param key_file: path to key file to use to connect to the remote_host
    :type key_file: str
    :param port: port of remote host to connect (Default is paramiko SSH_PORT)
    :type port: int
    :param timeout: timeout for the attempt to connect to the remote_host.
    :type timeout: int
    :param keepalive_interval: send a keepalive packet to remote host every
        keepalive_interval seconds
    :type keepalive_interval: int
    """

    # List of classes to try loading private keys as, ordered (roughly) by most common to least common
    _pkey_loaders = (
        paramiko.RSAKey,
        paramiko.ECDSAKey,
        paramiko.Ed25519Key,
        paramiko.DSSKey,
    )

    # Maps the "<type>" part of an "ssh-<type>" host-key header to the
    # paramiko class used to rebuild the key from its base64 payload.
    _host_key_mappings = {
        'rsa': paramiko.RSAKey,
        'dss': paramiko.DSSKey,
        'ecdsa': paramiko.ECDSAKey,
        'ed25519': paramiko.Ed25519Key,
    }

    conn_name_attr = 'ssh_conn_id'
    default_conn_name = 'ssh_default'
    conn_type = 'ssh'
    hook_name = 'SSH'

    @staticmethod
    def get_ui_field_behaviour() -> Dict:
        """Returns custom field behaviour"""
        return {
            "hidden_fields": ['schema'],
            "relabeling": {
                'login': 'Username',
            },
        }

    def __init__(
        self,
        ssh_conn_id: Optional[str] = None,
        remote_host: Optional[str] = None,
        username: Optional[str] = None,
        password: Optional[str] = None,
        key_file: Optional[str] = None,
        port: Optional[int] = None,
        timeout: int = 10,
        keepalive_interval: int = 30,
    ) -> None:
        super().__init__()
        self.ssh_conn_id = ssh_conn_id
        self.remote_host = remote_host
        self.username = username
        self.password = password
        self.key_file = key_file
        self.pkey = None
        self.port = port
        self.timeout = timeout
        self.keepalive_interval = keepalive_interval

        # Default values, overridable from Connection
        self.compress = True
        self.no_host_key_check = True
        self.allow_host_key_change = False
        self.host_proxy = None
        self.host_key = None
        self.look_for_keys = True

        # Placeholder for deprecated __enter__
        self.client = None

        # Use connection to override defaults.  Explicit __init__ arguments
        # always win over values stored on the Connection.
        if self.ssh_conn_id is not None:
            conn = self.get_connection(self.ssh_conn_id)
            if self.username is None:
                self.username = conn.login
            if self.password is None:
                self.password = conn.password
            if self.remote_host is None:
                self.remote_host = conn.host
            if self.port is None:
                self.port = conn.port
            if conn.extra is not None:
                extra_options = conn.extra_dejson
                if "key_file" in extra_options and self.key_file is None:
                    self.key_file = extra_options.get("key_file")

                private_key = extra_options.get('private_key')
                private_key_passphrase = extra_options.get('private_key_passphrase')
                if private_key:
                    self.pkey = self._pkey_from_private_key(private_key, passphrase=private_key_passphrase)

                if "timeout" in extra_options:
                    # BUGFIX: int(x, 10) only accepts strings, but extra_dejson
                    # may already hold a JSON number, which raised TypeError.
                    # int() without an explicit base accepts both and is
                    # identical for decimal strings.
                    self.timeout = int(extra_options["timeout"])

                if "compress" in extra_options and str(extra_options["compress"]).lower() == 'false':
                    self.compress = False

                host_key = extra_options.get("host_key")
                no_host_key_check = extra_options.get("no_host_key_check")

                if no_host_key_check is not None:
                    no_host_key_check = str(no_host_key_check).lower() == "true"
                    # A pinned host key is pointless if checking is disabled.
                    if host_key is not None and no_host_key_check:
                        raise ValueError("Must check host key when provided")

                    self.no_host_key_check = no_host_key_check

                if (
                    "allow_host_key_change" in extra_options
                    and str(extra_options["allow_host_key_change"]).lower() == 'true'
                ):
                    self.allow_host_key_change = True

                if (
                    "look_for_keys" in extra_options
                    and str(extra_options["look_for_keys"]).lower() == 'false'
                ):
                    self.look_for_keys = False

                if host_key is not None:
                    if host_key.startswith("ssh-"):
                        # "ssh-<type> <base64>" header: pick the matching class.
                        key_type, host_key = host_key.split(None)[:2]
                        key_constructor = self._host_key_mappings[key_type[4:]]
                    else:
                        key_constructor = paramiko.RSAKey
                    decoded_host_key = decodebytes(host_key.encode('utf-8'))
                    self.host_key = key_constructor(data=decoded_host_key)
                    # A pinned key implies host-key checking is on.
                    self.no_host_key_check = False

        if self.pkey and self.key_file:
            raise AirflowException(
                "Params key_file and private_key both provided. Must provide no more than one."
            )

        if not self.remote_host:
            raise AirflowException("Missing required param: remote_host")

        # Auto detecting username values from system
        if not self.username:
            self.log.debug(
                "username to ssh to host: %s is not specified for connection id"
                " %s. Using system's default provided by getpass.getuser()",
                self.remote_host,
                self.ssh_conn_id,
            )
            self.username = getuser()

        # Honour the user's ~/.ssh/config for ProxyCommand and identity file.
        user_ssh_config_filename = os.path.expanduser('~/.ssh/config')
        if os.path.isfile(user_ssh_config_filename):
            ssh_conf = paramiko.SSHConfig()
            with open(user_ssh_config_filename) as config_fd:
                ssh_conf.parse(config_fd)
            host_info = ssh_conf.lookup(self.remote_host)
            if host_info and host_info.get('proxycommand'):
                self.host_proxy = paramiko.ProxyCommand(host_info.get('proxycommand'))

            if not (self.password or self.key_file):
                if host_info and host_info.get('identityfile'):
                    self.key_file = host_info.get('identityfile')[0]

        self.port = self.port or SSH_PORT

    def get_conn(self) -> paramiko.SSHClient:
        """
        Opens a ssh connection to the remote host.

        :rtype: paramiko.client.SSHClient
        """
        self.log.debug('Creating SSH client for conn_id: %s', self.ssh_conn_id)
        client = paramiko.SSHClient()

        if not self.allow_host_key_change:
            self.log.warning(
                'Remote Identification Change is not verified. '
                'This wont protect against Man-In-The-Middle attacks'
            )
            client.load_system_host_keys()

        if self.no_host_key_check:
            self.log.warning('No Host Key Verification. This wont protect against Man-In-The-Middle attacks')
            # Default is RejectPolicy
            client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        else:
            if self.host_key is not None:
                client_host_keys = client.get_host_keys()
                # Non-default ports are stored as "[host]:port" per known_hosts
                # conventions.
                if self.port == SSH_PORT:
                    client_host_keys.add(self.remote_host, self.host_key.get_name(), self.host_key)
                else:
                    client_host_keys.add(
                        f"[{self.remote_host}]:{self.port}", self.host_key.get_name(), self.host_key
                    )
            else:
                pass  # will fallback to system host keys if none explicitly specified in conn extra

        connect_kwargs = dict(
            hostname=self.remote_host,
            username=self.username,
            timeout=self.timeout,
            compress=self.compress,
            port=self.port,
            sock=self.host_proxy,
            look_for_keys=self.look_for_keys,
        )

        if self.password:
            password = self.password.strip()
            connect_kwargs.update(password=password)

        if self.pkey:
            connect_kwargs.update(pkey=self.pkey)

        if self.key_file:
            connect_kwargs.update(key_filename=self.key_file)

        client.connect(**connect_kwargs)

        if self.keepalive_interval:
            client.get_transport().set_keepalive(self.keepalive_interval)

        self.client = client
        return client

    def __enter__(self) -> 'SSHHook':
        warnings.warn(
            'The contextmanager of SSHHook is deprecated.'
            'Please use get_conn() as a contextmanager instead.'
            'This method will be removed in Airflow 2.0',
            category=DeprecationWarning,
        )
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        # Close the client opened by get_conn(), if any.
        if self.client is not None:
            self.client.close()
            self.client = None

    def get_tunnel(
        self, remote_port: int, remote_host: str = "localhost", local_port: Optional[int] = None
    ) -> SSHTunnelForwarder:
        """
        Creates a tunnel between two hosts. Like ssh -L <LOCAL_PORT>:host:<REMOTE_PORT>.

        :param remote_port: The remote port to create a tunnel to
        :type remote_port: int
        :param remote_host: The remote host to create a tunnel to (default localhost)
        :type remote_host: str
        :param local_port: The local port to attach the tunnel to
        :type local_port: int

        :return: sshtunnel.SSHTunnelForwarder object
        """
        if local_port:
            local_bind_address: Union[Tuple[str, int], Tuple[str]] = ('localhost', local_port)
        else:
            # Let sshtunnel pick a free local port.
            local_bind_address = ('localhost',)

        # sshtunnel accepts either a key-file path or a PKey object here;
        # the path (key_file) is preferred when both are set.
        tunnel_kwargs = dict(
            ssh_port=self.port,
            ssh_username=self.username,
            ssh_pkey=self.key_file or self.pkey,
            ssh_proxy=self.host_proxy,
            local_bind_address=local_bind_address,
            remote_bind_address=(remote_host, remote_port),
            logger=self.log,
        )

        if self.password:
            password = self.password.strip()
            tunnel_kwargs.update(
                ssh_password=password,
            )
        else:
            tunnel_kwargs.update(
                host_pkey_directories=[],
            )

        client = SSHTunnelForwarder(self.remote_host, **tunnel_kwargs)

        return client

    def create_tunnel(
        self, local_port: int, remote_port: int, remote_host: str = "localhost"
    ) -> SSHTunnelForwarder:
        """
        Creates tunnel for SSH connection [Deprecated].

        :param local_port: local port number
        :param remote_port: remote port number
        :param remote_host: remote host
        :return:
        """
        warnings.warn(
            'SSHHook.create_tunnel is deprecated, Please'
            'use get_tunnel() instead. But please note that the'
            'order of the parameters have changed'
            'This method will be removed in Airflow 2.0',
            category=DeprecationWarning,
        )

        return self.get_tunnel(remote_port, remote_host, local_port)

    def _pkey_from_private_key(self, private_key: str, passphrase: Optional[str] = None) -> paramiko.PKey:
        """
        Creates appropriate paramiko key for given private key

        :param private_key: string containing private key
        :return: ``paramiko.PKey`` appropriate for given key
        :raises AirflowException: if key cannot be read
        """
        for pkey_class in self._pkey_loaders:
            try:
                key = pkey_class.from_private_key(StringIO(private_key), password=passphrase)
                # Test it acutally works. If Paramiko loads an openssh generated key, sometimes it will
                # happily load it as the wrong type, only to fail when actually used.
                key.sign_ssh_data(b'')
                return key
            except (paramiko.ssh_exception.SSHException, ValueError):
                continue
        raise AirflowException(
            'Private key provided cannot be read by paramiko.'
            'Ensure key provided is valid for one of the following'
            'key formats: RSA, DSS, ECDSA, or Ed25519'
        )
| dhuang/incubator-airflow | airflow/providers/ssh/hooks/ssh.py | Python | apache-2.0 | 14,364 |
# Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np
import tensorflow as tf
from lucid.optvis.param import lowres_tensor
def multi_interpolation_basis(n_objectives=6, n_interp_steps=5, width=128,
                              channels=3):
  """A parameterization for interpolating between each pair of N objectives.

  Sometimes you want to interpolate between optimizing a bunch of objectives,
  in a parameterization that encourages images to align.

  Args:
    n_objectives: number of objectives you want interpolate between
    n_interp_steps: number of interpolation steps
    width: width of interpolated images
    channels: number of image channels

  Returns:
    A [n_objectives, n_objectives, n_interp_steps, width, width, channel]
    shaped tensor, t, where the final [width, width, channel] should be
    seen as images, such that the following properties hold:

     t[a, b]    = t[b, a, ::-1]
     t[a, i, 0] = t[a, j, 0] for all i, j
     t[a, a, i] = t[a, a, j] for all i, j
     t[a, b, i] = t[b, a, -i] for all i
  """
  N, M, W, Ch = n_objectives, n_interp_steps, width, channels

  # Low-frequency background shared by every (a, b, step) cell.
  const_term = sum([lowres_tensor([W, W, Ch], [W//k, W//k, Ch])
                    for k in [1, 2, 4, 8]])
  const_term = tf.reshape(const_term, [1, 1, 1, W, W, Ch])

  # One endpoint image sequence per objective.
  example_interps = [
      sum([lowres_tensor([M, W, W, Ch], [2, W//k, W//k, Ch])
           for k in [1, 2, 4, 8]])
      for _ in range(N)]

  example_basis = []
  for n in range(N):
    col = []
    for m in range(N):
      interp = example_interps[n] + example_interps[m][::-1]
      col.append(interp)
    example_basis.append(col)

  interp_basis = []
  for n in range(N):
    # Mirror previously built rows so paired cells share parameters.
    # NOTE(review): the index N-n looks suspicious -- the symmetry
    # t[a, b] = t[b, a, ::-1] documented above would suggest index n;
    # kept as-is pending confirmation against the original intent.
    col = [interp_basis[m][N-n][::-1] for m in range(n)]
    # BUGFIX: the diagonal placeholder previously hard-coded 3 channels
    # (tf.zeros([M, W, W, 3])), which made tf.stack below fail whenever
    # channels != 3.  Use Ch so all entries of the column share one shape.
    col.append(tf.zeros([M, W, W, Ch]))
    for m in range(n+1, N):
      interp = sum([lowres_tensor([M, W, W, Ch], [M, W//k, W//k, Ch])
                    for k in [1, 2]])
      col.append(interp)
    interp_basis.append(col)

  basis = []
  for n in range(N):
    col_ex = tf.stack(example_basis[n])
    col_in = tf.stack(interp_basis[n])
    basis.append(col_ex + col_in)
  basis = tf.stack(basis)

  return basis + const_term
| tensorflow/lucid | lucid/recipes/image_interpolation_params.py | Python | apache-2.0 | 2,768 |
# ASCII-art banner spelling the letters A through J.
# MODERNIZATION: Python 2 print statements replaced with single-argument
# print() calls, which produce byte-identical output under Python 2 and
# also run under Python 3.
print(" A BBBBBB CCCC DDDDD EEEEEEE FFFFFFF GGGG H H IIIII JJJJJJ")
print(" A A B B C C D D E F G G H H I J")
print(" A A B B C D D E F G H H I J")
print("AAAAAAA BBBBBB C D D EEEEE FFFFF G GGG HHHHHHH I J")
print("A A B B C D D E F G G H H I J")
print("A A B B C C D D E F G G H H I J J")
print("A A BBBBBB CCCC DDDDD EEEEEEE F GGGG H H IIIII JJJJ")
| kelvinongtoronto/numbers | letters.py | Python | artistic-2.0 | 650 |
#!/usr/bin/env python3
from PIL import Image
import os.path
import sys
if __name__ == '__main__':
    # Convert an (animated) indexed image given on the command line into C
    # source: one u_char array per frame plus an AnimSpanT struct tying the
    # frames together, written to stdout.
    img = Image.open(sys.argv[1])
    img.load()
    # The emitted C identifiers are derived from the input file's base name.
    name = os.path.splitext(os.path.basename(sys.argv[1]))[0]
    # Count frames by seeking until PIL signals end-of-sequence.
    frames = 0
    for i in range(65536):
        try:
            img.seek(i)
        except EOFError:
            break
        frames += 1
    for n in range(frames):
        print('static u_char _%s_frame%d[] = {' % (name, n))
        img.seek(n)
        pix = img.load()
        for y in range(img.size[1]):
            # Per row, record the x positions where the palette value changes
            # (run-length style transitions), starting from an assumed value
            # of 1; the emitted line is prefixed with the transition count.
            line = []
            p = 1
            for x in range(img.size[0]):
                if p == pix[x, y]:
                    continue
                p = pix[x, y]
                line.append(x)
            line.insert(0, len(line))
            print(' %s,' % ', '.join(map(str, line)))
        print('};')
        print('')
    # Emit the frame table struct referencing every frame array above.
    print('static AnimSpanT %s = {' % name)
    print(' .width = %d, .height = %d,' % img.size)
    print(' .current = 0, .count = %d,' % frames)
    print(' .frame = {')
    for n in range(frames):
        print(' _%s_frame%d,' % (name, n))
    print(' }')
    print('};')
| cahirwpz/demoscene | effects/anim/data/gen-anim.py | Python | artistic-2.0 | 1,148 |
# -*- coding: utf-8 -*-
# Copyright (C) 2017 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., University of Heidelberg, and University of
# of Connecticut School of Medicine.
# All rights reserved.
# Copyright (C) 2010 - 2016 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., University of Heidelberg, and The University
# of Manchester.
# All rights reserved.
# Copyright (C) 2008 - 2009 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., EML Research, gGmbH, University of Heidelberg,
# and The University of Manchester.
# All rights reserved.
# Copyright (C) 2006 - 2007 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc. and EML Research, gGmbH.
# All rights reserved.
import COPASI
import unittest
from types import *
class Test_CDataObject(unittest.TestCase):
    """Exercise the CDataObject API exposed via the COPASI Python bindings.

    Python 2 style throughout: types.StringType/BooleanType comparisons and
    the deprecated TestCase.assert_ method.  Most tests only verify that the
    accessor returns the expected Python type, not a specific value.
    """
    def setUp(self):
        # Fresh data model with one compartment and one metabolite; the
        # metabolite is the CDataObject instance under test.
        self.datamodel=COPASI.CRootContainer.addDatamodel()
        self.model=self.datamodel.getModel()
        self.compartment=self.model.createCompartment("Comp1")
        self.object=self.model.createMetabolite("Metab1","Comp1")
        self.model.compileIfNecessary()

    def test_getObjectName(self):
        t=self.object.getObjectName()
        self.assert_(type(t)==StringType)

    def test_setObjectName(self):
        NAME="MyObject"
        self.object.setObjectName(NAME)
        self.assert_(self.object.getObjectName()==NAME)

    def test_getObjectDisplayName(self):
        t=self.object.getObjectDisplayName()
        self.assert_(type(t)==StringType)

    def test_getObjectType(self):
        t=self.object.getObjectType()
        self.assert_(type(t)==StringType)

    def test_getObjectParent(self):
        # The metabolite's parent is the compartment's metabolite vector.
        parent=self.object.getObjectParent()
        self.assert_(parent!=None)
        self.assert_(parent.__class__==COPASI.MetabVectorNS)
        self.assert_(parent.getKey()==self.compartment.getMetabolites().getKey())

    def test_getCN(self):
        cn=self.object.getCN()
        self.assert_(cn.__class__==COPASI.CCommonName)

    def test_isContainer(self):
        result=self.object.isContainer()
        self.assert_(type(result)==BooleanType)

    def test_isVector(self):
        result=self.object.isVector()
        self.assert_(type(result)==BooleanType)

    def test_isMatrix(self):
        result=self.object.isMatrix()
        self.assert_(type(result)==BooleanType)

    def test_isNameVector(self):
        result=self.object.isNameVector()
        self.assert_(type(result)==BooleanType)

    def test_isReference(self):
        result=self.object.isReference()
        self.assert_(type(result)==BooleanType)

    def test_isValueBool(self):
        result=self.object.isValueBool()
        self.assert_(type(result)==BooleanType)

    def test_isValueInt(self):
        result=self.object.isValueInt()
        self.assert_(type(result)==BooleanType)

    def test_isValueDbl(self):
        result=self.object.isValueDbl()
        self.assert_(type(result)==BooleanType)

    def test_isNonUniqueName(self):
        result=self.object.isNonUniqueName()
        self.assert_(type(result)==BooleanType)

    def test_isStaticString(self):
        result=self.object.isStaticString()
        self.assert_(type(result)==BooleanType)

    def test_isValueString(self):
        result=self.object.isValueString()
        self.assert_(type(result)==BooleanType)

    def test_isSeparator(self):
        result=self.object.isSeparator()
        self.assert_(type(result)==BooleanType)

    def test_getKey(self):
        key=self.object.getKey()
        self.assert_(type(key)==StringType)
def suite():
    """Assemble the Test_CDataObject tests into a unittest.TestSuite."""
    test_names = [
        'test_getObjectName',
        'test_setObjectName',
        'test_getObjectDisplayName',
        'test_getObjectType',
        'test_getObjectParent',
        'test_getCN',
        'test_isContainer',
        'test_isVector',
        'test_isMatrix',
        'test_isNameVector',
        'test_isReference',
        'test_isValueBool',
        'test_isValueInt',
        'test_isValueDbl',
        'test_isNonUniqueName',
        'test_isStaticString',
        'test_isValueString',
        'test_isSeparator',
        'test_getKey',
    ]
    # One TestCase instance per listed method name.
    return unittest.TestSuite([Test_CDataObject(name) for name in test_names])
if __name__ == '__main__':
    # Run the suite with per-test verbose output when executed directly.
    unittest.TextTestRunner(verbosity=2).run(suite())
| jonasfoe/COPASI | copasi/bindings/python/unittests/Test_CCopasiObject.py | Python | artistic-2.0 | 4,243 |
#!/usr/bin/python
import os, sys, re
# Collect the raw CPUID dump lines and the matching report values from a
# libcpuid test run and append both, separated by dashed delimiter lines,
# to the tests stash file.
# Python 2 script; single-argument print() calls behave identically on
# Python 2 and Python 3.
args = sys.argv
if not len(args) in (3, 4):
    print("Usage: add_test.py <rawdata file> <report file> [tests stash file]")
    print("If the last is not specified, `tests_stash.txt' is assumed")
    sys.exit(1)

# Keep only the raw dump lines that carry CPUID register data.
# (Hoisted out of the loop: the list is invariant per line.)
lookfor = ["basic_cpuid", "ext_cpuid", "intel_fn4", "intel_fn11"]
rawdata = []
for line in open(args[1], "rt").readlines():
    good = False
    for match in lookfor:
        if line.find(match) != -1:
            good = True
            break
    if good:
        rawdata.append(line.strip())

# Extract the expected values from the human-readable report.
repdata = []
rexp = re.compile('(-?[0-9]+).*')
for line in open(args[2], "rt").readlines():
    s = line.strip()
    if s.find(":") == -1:
        continue
    numeric = ["family", "model", "stepping", "ext_family", "ext_model",
               "num_cores", "num_logical", "L1 D cache", "L1 I cache",
               "L2 cache", "L3 cache", "L1D assoc.", "L2 assoc.",
               "L3 assoc.", "L1D line sz", "L2 line sz", "L3 line sz"]
    field = s[:s.find(":")].strip()
    if field in numeric:
        value = s[s.find(":")+1:].strip()
        if not rexp.match(value):
            # BUGFIX: raising a plain string is a TypeError since Python 2.6
            # (string exceptions were removed), which masked the intended
            # message.  Raise a real exception type instead.
            raise ValueError("Bad format of value: [%s]" % s)
        repdata.append(rexp.findall(value)[0])
    if field == "code name":
        # Code name is quoted like `this'.
        value = s[s.find("`") + 1: s.find("'")]
        repdata.append(value)
    if field == "features":
        value = s[s.find(":") + 2:]
        repdata.append(value)
    if field == "SSE units":
        value = s[s.find(":") + 2:]
        # the value here is something like "XX bits (authoritative)". We remove the "bits" part:
        i = value.find("bits")
        if i != -1:
            value = value[:i] + value[i + 5:]
        repdata.append(value)

# Append raw data and report values, each followed by a delimiter line.
stash = "tests_stash.txt"
if len(args) == 4:
    stash = args[3]
fout = open(stash, "at")
delimiter = "-" * 80
lines = rawdata + [delimiter] + repdata + [delimiter]
fout.writelines(map(lambda s: s + "\n", lines))
fout.close()
| joestringer/libcpuid | tests/add_test.py | Python | bsd-2-clause | 1,755 |
# Exploratory/scratch driver for the `linker` package: loads a COFF object
# file, an import library and two PE executables, then experiments with
# symbol consolidation, rebasing and segment relocation.  Python 2 only
# (print statements, the removed file() builtin, and the psyco JIT).
# Branches are toggled by hand-editing the `if True:` / `if False:` guards.
import psyco
psyco.full()

import linker.coff
from linker import store

# Symbol of interest: Py_DecRef as exported by python26.dll.
m,n='python26.dll','Py_DecRef'
localname = None,'__imp__<%s!%s>'%(m,n)

if True:
    # this should import from python26.lib,Py_DecRef
    # this should export ia32.obj,stuff
    a = linker.coff.object.open('~/work/syringe/src/ia32.obj')
    # imports None,Py_DecRef

    # this should import from python26.dll,Py_DecRef
    # this should export Py_DecRef
    b = linker.coff.library.open('~/python26/libs/python26.lib')
    # imports python26.dll,Py_DecRef
    # exports None,Py_DecRef

    # this should import from whatever
    # and export whatever
    c = linker.coff.executable.open('~/../../windows/syswow64/python26.dll')
    # expots python26.dll,Py_DecRef
    d = linker.coff.executable.open('~/../../windows/syswow64/msvcr100.dll')

# raise NotImplementedError("symbol consolidation isn't working")

if True:
    # Rebase the import library, stub out every unresolved symbol with a
    # sentinel address, and dump the relocated segments to the file 'blah'.
    z = b
    z[store.BaseAddress] = 0x10000000
    for x in z.undefined:
        z[x] = 0xbbbbbbbb

    out = file('blah','wb')
    for x in z.segments:
        y = z.getsegment(x)
        y = z.relocatesegment(x, y)
        out.write(y)
    out.close()

if False:
    # Disabled experiment: merge all stores into one linker workspace and
    # resolve symbols across them.
    #print a
    #print c

    if True:
        z = linker.new()
        print a
        z.addstore(a)
        print b
        z.addstore(b)
        print c
        z.addstore(c)
        print d
        z.addstore(d)

    if False:
        # Sanity checks on where _heapmin is visible before/after merging.
        m,n='msvcr100.dll','_heapmin'
        print True,(None,n) in d.globals
        print False,(None,n) in z.globals
        print False,(m,n) in d.globals
        print True,(m,n) in z.globals

    if False:
        # Load a fixed list of system DLLs from the first path that works.
        paths = '~/../../windows/syswow64','~/python26/dlls'
        # dlls = 'ntdll.dll','kernel32.dll','python26.dll','msvcr100.dll','shell32.dll','user32.dll','gdi32.dll','pcwum.dll','advapi32.dll','shlwapi.dll','cryptsp.dll','msvcrt.dll','kernelbase.dll','shunimpl.dll','sspicli.dll'
        dlls = 'msvcr100.dll',
        for filename in dlls:
            print 'loading %s'% filename
            for p in paths:
                try:
                    z.addstore(linker.coff.executable.open('%s/%s'%(p,filename)))
                    break
                except IOError:
                    pass
                continue
            continue

        print [(m,n) for m,n in z.undefined if m is None]

    if False:
        # Chase the remaining undefined symbols by loading their modules.
        modules = set((m for m,n in z.undefined if m is not None))
        print [(m,n) for m,n in z.undefined if m is None]
        for filename in modules:
            if '-' in filename:
                continue
            print 'loading %s'% filename
            for p in paths:
                try:
                    z.addstore(linker.coff.executable.open('%s/%s'%(p,filename)))
                    break
                except IOError:
                    pass
                continue
            continue

    if True:
        z[store.BaseAddress] = 0x10000000
        for x in z.undefined:
            z[x] = 0xbbbbbbbb

    if True:
        print '-'*25
        out = file('blah','wb')
        for x in z.segments:
            y = z.getsegment(x)
            y = z.relocatesegment(x, y)
            out.write(y)
        out.close()

if False:
    # Disabled experiment: relocate just the .text segment of the object file.
    print '-'*25
    for x in a.externals:
        a[x] = 0xbbbbbbbb
    a[store.BaseAddress] = 0x10000000
    b = a.getsegment('.text')
    c = a.relocatesegment('.text',b)

# import ptypes
# print ptypes.hexdump(c, a['.text'])
| arizvisa/syringe | src/hooktest.py | Python | bsd-2-clause | 3,457 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask import Flask, render_template, session, redirect, url_for, flash
from flask_wtf import FlaskForm
from flask_bootstrap import Bootstrap
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager, Shell
# Base directory of this module; anchors the SQLite database file path.
basedir = os.path.abspath(os.path.dirname(__file__))

app = Flask(__name__)
# NOTE(review): the secret key is hard-coded (and looks like a typo of
# "hard to guess string") -- in production it should come from the
# environment, not source code.
app.config['SECRET_KEY'] = 'had to guess string'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(basedir, 'data.sqlite')
# NOTE(review): SQLALCHEMY_COMMIT_ON_TEARDOWN is deprecated in newer
# Flask-SQLAlchemy releases; explicit db.session.commit() is preferred.
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True

# Extensions bound to the application instance.
Bootstrap(app)
db = SQLAlchemy(app)
manager = Manager(app)
migrate = Migrate(app, db)
class NameForm(FlaskForm):
    """Single-field form asking the visitor for their name."""
    # The name is required; an empty submission fails validation.
    name = StringField('What is your name? ', validators=[DataRequired()])
    submit = SubmitField('Submit :233')
# model definition
# model definition
class Role(db.Model):
    """A user role; each role groups any number of User rows."""
    __tablename__ = 'roles'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    # One-to-many: role.users lazily yields the users holding this role,
    # and each User gains a `role` backref.
    users = db.relationship('User', backref='role', lazy='dynamic')

    def __repr__(self):
        return '<Role %r>' % self.name
class User(db.Model):
    """An application user identified by a unique username."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(64), unique=True, index=True)
    # Foreign key backing the Role.users relationship above.
    role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))

    def __repr__(self):
        return '<User %r>' % self.username
# View Functions
# View Functions
@app.route('/', methods=['GET', 'POST'])
def index():
    """Landing page: ask for a name, remember it in the session and DB."""
    form = NameForm()
    if form.validate_on_submit():
        old_name = session.get('name')
        if old_name is not None and old_name != form.name.data:
            # Flash message is Chinese for "The name has been changed!".
            flash('名字已经修改完成!')
        # First-time visitors get a new User row; 'known' records whether
        # the name was already in the database.
        user = User.query.filter_by(username=form.name.data).first()
        if user is None:
            user = User(username=form.name.data)
            db.session.add(user)
            session['known'] = False
        else:
            session['known'] = True
        session['name'] = form.name.data
        form.name.data = ''
        # Post/Redirect/Get: avoid duplicate submissions on refresh.
        return redirect(url_for('index'))
    return render_template('index.html',
                           form=form,
                           name=session.get('name', None),
                           known=session.get('known', False))
def make_shell_context():
    """Expose commonly used objects to the `manager shell` session."""
    return {'app': app, 'db': db, 'User': User, 'Role': Role}
if __name__ == "__main__":
    # app.run(debug=True)
    # Automatically import app, db, User and Role in the `shell` command.
    manager.add_command('shell', Shell(make_context=make_shell_context))
    # Expose Flask-Migrate's `db` management commands.
    manager.add_command('db', MigrateCommand)
    manager.run()
| sharkspeed/dororis | packages/python/flask/flask-dog-book/5-chapter/main.py | Python | bsd-2-clause | 2,710 |
# Copyright (c) 2020, DjaoDjin inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from django.conf.urls import url, include
# API URL routing for the survey app; each prefix delegates to a sub-urlconf.
# NOTE(review): the '^' (matrix) pattern precedes '^sample/'; Django only
# falls through to the sample urls when nothing in the matrix urlconf
# matches -- confirm the matrix patterns cannot shadow '^sample/'.
urlpatterns = [
    url(r'^campaign/', include('survey.urls.api.campaigns')),
    url(r'^', include('survey.urls.api.matrix')),  # No trailing '/'
    # because of PATH_RE.
    url(r'^sample/', include('survey.urls.api.sample')),
]
| djaodjin/djaodjin-survey | survey/urls/api/__init__.py | Python | bsd-2-clause | 1,664 |
import sympy
# Classify the stationary point of the Rosenbrock function
# f(x1, x2) = 100*(x2 - x1^2)^2 + (1 - x1)^2 using the eigenvalues of its
# Hessian.  Single-argument print() calls keep this runnable on both
# Python 2 and Python 3.
x1, x2 = sympy.symbols('x1 x2')
f = 100*(x2 - x1**2)**2 + (1-x1)**2

# First-order conditions and the Hessian matrix.
df_dx1 = sympy.diff(f,x1)
df_dx2 = sympy.diff(f,x2)
H = sympy.hessian(f, (x1, x2))

# Solve grad f = 0 and evaluate the Hessian at the first stationary point.
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[0][0]), (x2,xs[0][1])])
lambda_xs = H_xs.eigenvals()

# Count non-positive eigenvalues: none -> positive definite (minimum);
# all -> non-positive (maximum); mixed signs -> saddle point.
count = 0
for i in lambda_xs.keys():
    if i.evalf() <= 0:
        count += 1

if count == 0:
    print('Local minima')
elif count == len(lambda_xs.keys()):
    # BUGFIX: corrected the misspelled output message ('Lacal maxima').
    print('Local maxima')
else:
    print('Saddle point')
| escorciav/amcs211 | hw3/hw3_2a.py | Python | bsd-2-clause | 502 |
# -----------------------------------------------------------------------------
# Copyright (c) 2014, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
# -----------------------------------------------------------------------------
"""
Hook for cryptography module from the Python Cryptography Authority.
"""
import os.path
import glob
from PyInstaller.hooks.hookutils import (collect_submodules as cs,
get_module_file_attribute)
from PyInstaller.hooks.hookutils import PY_EXTENSION_SUFFIXES
# add the OpenSSL FFI binding modules as hidden imports -- they are loaded
# dynamically through cffi, so PyInstaller's static import analysis cannot
# discover them on its own.
hiddenimports = cs('cryptography.hazmat.bindings.openssl') # pragma: no cover
def hook(mod):
    """
    Ship the cffi extension modules as binaries inside a 'cryptography'
    subfolder of the bundle: the cffi verifier looks for them in the
    package directory of the main module.  hiddenimports would place the
    modules outside the package, so binaries are used instead.
    """
    pkg_dir = os.path.dirname(get_module_file_attribute('cryptography'))
    for suffix in PY_EXTENSION_SUFFIXES:
        pattern = os.path.join(pkg_dir, '*_cffi_*%s*' % suffix)
        for ffimod in glob.glob(pattern):
            dest = os.path.join('cryptography', os.path.basename(ffimod))
            # TODO fix this hook to use attribute 'binaries'.
            mod.pyinstaller_binaries.append((dest, ffimod, 'BINARY'))
    return mod
| timeyyy/PyUpdater | pyupdater/hooks/hook-cryptography.py | Python | bsd-2-clause | 1,600 |
import re
from django.conf.urls import url, patterns, include
from django.conf import settings
from django.contrib import admin
from django.views.generic import TemplateView
from django.template import add_to_builtins
from fabrydb.admin import fadmin
# Make the avocado template tags available in every template.
add_to_builtins('avocado.templatetags.avocado_tags')
admin.autodiscover()
urlpatterns = patterns('',
    # Landing Page
    url(r'^$', 'fabry.views.home', name='home'),
    # NOTE(review): this duplicates the r'^$' pattern above. Django resolves
    # the FIRST match, so 'landing' is unreachable from a request URL
    # (reverse('landing') still works); confirm which view is intended.
    url(r'^$', 'fabrydb.views.landing', name='landing'),
    url(r'^accounts/login/$', 'django.contrib.auth.views.login', name='login'),
    url(r'^accounts/logout/$', 'django.contrib.auth.views.logout', name='logout'),
    # Cilantro Pages
    url(r'^workspace/', TemplateView.as_view(template_name='index.html'), name='workspace'),
    url(r'^query/', TemplateView.as_view(template_name='index.html'), name='query'),
    url(r'^results/', TemplateView.as_view(template_name='index.html'), name='results'),
    # Serrano-compatible Endpoint
    url(r'^api/', include('serrano.urls')),
    # Administrative components
    url(r'^admin/', include(admin.site.urls)),
    url(r'^fadmin/', include(fadmin.urls), name='fadmin'),
)
# NOTE(review): the commented-out guard suggests this was meant to run only
# outside DEBUG, but the static-serve pattern is added unconditionally.
# if not settings.DEBUG:
urlpatterns += patterns(
    '',
    (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)
# In production, these two locations must be served up statically
urlpatterns += patterns('django.views.static',
    url(r'^{0}(?P<path>.*)$'.format(re.escape(settings.MEDIA_URL.lstrip('/'))), 'serve', {
        'document_root': settings.MEDIA_ROOT
    }),
    url(r'^{0}(?P<path>.*)$'.format(re.escape(settings.STATIC_URL.lstrip('/'))), 'serve', {
        'document_root': settings.STATIC_ROOT
    }),
)
| glabilloy/fabrydb | fabrydb/conf/urls.py | Python | bsd-2-clause | 1,723 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
import re
from .utils import normalize_whitespace
HEADINGS_PATTERN = re.compile(r"\bh\d\b")
class Paragraph(object):
    """A single contiguous block of text extracted from an HTML page."""
    def __init__(self, path):
        self.dom_path = path.dom
        self.xpath = path.xpath
        self.text_nodes = []
        self.chars_count_in_links = 0
        self.tags_count = 0
        # Classification assigned later: short | neargood | good | bad
        self.class_type = ""
    @property
    def is_heading(self):
        """True when the DOM path contains a heading tag (h1, h2, ...)."""
        return HEADINGS_PATTERN.search(self.dom_path) is not None
    @property
    def is_boilerplate(self):
        """Everything not classified as "good" counts as boilerplate."""
        return self.class_type != "good"
    @property
    def text(self):
        """The paragraph's text, stripped and whitespace-normalized."""
        joined = "".join(self.text_nodes)
        return normalize_whitespace(joined.strip())
    def __len__(self):
        return len(self.text)
    @property
    def words_count(self):
        """Number of whitespace-separated words in the paragraph."""
        return len(self.text.split())
    def contains_text(self):
        """True when at least one text node has been appended."""
        return len(self.text_nodes) > 0
    def append_text(self, text):
        """Normalize *text*, store it as a new text node and return it."""
        normalized = normalize_whitespace(text)
        self.text_nodes.append(normalized)
        return normalized
    def stopwords_count(self, stopwords):
        """Count the paragraph's words that appear in *stopwords*."""
        return sum(1 for word in self.text.split() if word.lower() in stopwords)
    def stopwords_density(self, stopwords):
        """Ratio of stop words to all words; 0 for an empty paragraph."""
        total = self.words_count
        if not total:
            return 0
        return self.stopwords_count(stopwords) / total
    def links_density(self):
        """Ratio of characters inside links to all characters; 0 if empty."""
        length = len(self.text)
        if not length:
            return 0
        return self.chars_count_in_links / length
| miso-belica/jusText | justext/paragraph.py | Python | bsd-2-clause | 1,667 |
from setuptools import setup, find_packages
# Packaging metadata for the Scrapy project. The 'scrapy' entry point tells
# Scrapy/Scrapyd which settings module to load when this egg is deployed.
setup(
    name = 'project',
    version = '1.0',
    packages = find_packages(),
    entry_points = {'scrapy': ['settings = bgmapi.settings']},
) | wattlebird/Bangumi_Spider | setup_bgmapi.py | Python | bsd-2-clause | 207 |
###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2022, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        # Reference workbook and Shift-JIS encoded input text fixture.
        self.set_filename('unicode_shift_jis.xlsx')
        self.set_text_file('unicode_shift_jis.txt')

    def test_create_file(self):
        """Test example file converting Unicode text."""

        # Open the input file with the correct encoding. A context manager
        # guarantees the handle is closed even if a workbook call or an
        # assertion raises (the original only closed it on success).
        with open(self.txt_filename, mode='r', encoding='shift_jis') as textfile:

            # Create a new Excel file and convert the text data.
            workbook = Workbook(self.got_filename)
            worksheet = workbook.add_worksheet()

            # Widen the first column to make the text clearer.
            worksheet.set_column('A:A', 50)

            # Start from the first cell.
            row = 0
            col = 0

            # Read the text file and write it to the worksheet.
            for line in textfile:
                # Ignore the comments in the sample file.
                if line.startswith('#'):
                    continue

                # Write any other lines to the worksheet.
                worksheet.write(row, col, line.rstrip("\n"))
                row += 1

            workbook.close()

        self.assertExcelEqual()
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2016, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        self.maxDiff = None

        test_dir = 'xlsxwriter/test/comparison/'
        filename = 'rich_string01.xlsx'

        # Output produced by the test and the reference produced by Excel.
        self.got_filename = test_dir + '_test_' + filename
        self.exp_filename = test_dir + 'xlsx_files/' + filename

        self.ignore_files = []
        self.ignore_elements = {}

    def test_create_file(self):
        """Write plain and rich strings and compare against the reference."""

        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()

        formats = {
            'bold': workbook.add_format({'bold': 1}),
            'italic': workbook.add_format({'italic': 1}),
        }

        worksheet.write('A1', 'Foo', formats['bold'])
        worksheet.write('A2', 'Bar', formats['italic'])
        worksheet.write_rich_string('A3', 'a', formats['bold'], 'bc', 'defg')

        workbook.close()

        self.assertExcelEqual()
| jkyeung/XlsxWriter | xlsxwriter/test/comparison/test_rich_string01.py | Python | bsd-2-clause | 1,233 |
from unittest import skip
from ion.services.dm.test.dm_test_case import DMTestCase
from pyon.public import PRED, OT, RT
from pyon.util.log import log
from ion.services.dm.test.test_dm_end_2_end import DatasetMonitor
from ion.services.dm.utility.granule import RecordDictionaryTool
from nose.plugins.attrib import attr
import numpy as np
import calendar
from datetime import datetime
@attr(group='dm')
class TestSiteDataProducts(DMTestCase):
    """Integration tests for site data products across device deployments."""
    def create_device_site_deployment(self, dep_name="Deployment", starting=''):
        """Build a device/site pair joined by a Deployment named *dep_name*.

        Publishes a small granule (4 time/temp samples) to the device data
        product so its dataset has content, and returns the tuple
        (site_id, device_id, dataset_id, deployment_id, param_dict,
        data_product_id).  *starting* is the deployment's start time as a
        string of epoch seconds ('' means unset).
        """
        from interface.objects import StreamConfiguration, StreamConfigurationType, InstrumentDevice
        from interface.objects import InstrumentModel, PlatformAgent, InstrumentSite, TemporalBounds, Deployment
        from interface.objects import RemotePlatformDeploymentContext
        # Device data product with a parsed CTD stream configuration.
        stream_conf = StreamConfiguration(stream_name="CTD 1 Parsed Stream", parameter_dictionary_name='ctd_parsed_param_dict', stream_type=StreamConfigurationType.PARSED)
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict')
        stream_def_id = self.create_stream_definition(name='CTD 1', parameter_dictionary_id=pdict_id)
        data_product_id = self.create_data_product(name="DDP_1", stream_def_id=stream_def_id, stream_configuration=stream_conf)
        self.activate_data_product(data_product_id)
        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
        stream_def = self.resource_registry.find_objects(data_product_id, PRED.hasStreamDefinition)[0][0]
        param_dict = self.resource_registry.find_objects(stream_def._id, PRED.hasParameterDictionary)[0][0]
        # Add data to the DataProduct
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        rdt = self.ph.get_rdt(stream_def._id)
        rdt_ = self.ph.rdt_for_data_product(data_product_id)
        self.assertEquals(rdt.fields, rdt_.fields)
        rdt['time'] = [0, 1, 2, 3]
        rdt['temp'] = [10, 11, 12, 13]
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(dataset_monitor.wait())
        # Create Device
        device = InstrumentDevice(name='Device 1')
        device_id = self.instrument_management.create_instrument_device(device)
        self.data_acquisition_management.register_instrument(device_id)
        self.data_acquisition_management.assign_data_product(device_id, data_product_id)
        # Create Model
        model = InstrumentModel(name='Model 1')
        model_id = self.instrument_management.create_instrument_model(model)
        self.instrument_management.assign_instrument_model_to_instrument_device(model_id, device_id)
        # Create AgentDefinition
        ad = PlatformAgent(stream_configurations=[stream_conf])
        ad_id, _ = self.resource_registry.create(ad)
        # Create Site
        site = InstrumentSite(name='Site 1', stream_configurations=[stream_conf])
        site_id, _ = self.resource_registry.create(site)
        self.resource_registry.create_association(site_id, PRED.hasModel, model_id)
        self.resource_registry.create_association(site_id, PRED.hasAgentDefinition, ad_id)
        # TemporalBounds of the Deployment
        temp_bounds = TemporalBounds(start_datetime=starting, end_datetime='')
        # Create Deployment
        deployment = Deployment(name=dep_name, type="RemotePlatform", context=RemotePlatformDeploymentContext(),
                                constraint_list=[temp_bounds])
        deployment_id = self.observatory_management.create_deployment(deployment=deployment, site_id=site_id, device_id=device_id)
        return site_id, device_id, dataset_id, deployment_id, param_dict, data_product_id
    @attr('PRELOAD')
    def test_preload_creation(self):
        """Verify preload-created deployment/site/device wiring and windows."""
        from interface.objects import DataProductTypeEnum
        self.preload_alpha()
        # IDs from Preload sheets
        deployment_id = "DEP_BTST_1"
        site_id = "IS_BTST_SBE37"
        device_id = "ID_BTST_SBE37"
        #deployment_id = "DEP_BTST_2"
        #site_id = "IS_BTST_CTDSIM0"
        #device_id = "ID_BTST_CTDSIM0"
        deployment_obj = self.container.resource_registry.find_resources_ext(alt_id=deployment_id, alt_id_ns='PRE')[0][0]
        site_obj = self.container.resource_registry.find_resources_ext(alt_id=site_id, alt_id_ns='PRE')[0][0]
        device_obj = self.container.resource_registry.find_resources_ext(alt_id=device_id, alt_id_ns='PRE')[0][0]
        # Check associations
        self.assertEquals(self.resource_registry.find_objects(site_obj._id, PRED.hasDevice, id_only=True)[0][0], device_obj._id)
        self.assertEquals(self.resource_registry.find_objects(site_obj._id, PRED.hasPrimaryDeployment, id_only=True)[0][0], deployment_obj._id)
        self.assertEquals(self.resource_registry.find_objects(site_obj._id, PRED.hasDeployment, id_only=True)[0][0], deployment_obj._id)
        self.assertEquals(self.resource_registry.find_objects(device_obj._id, PRED.hasPrimaryDeployment, id_only=True)[0][0], deployment_obj._id)
        self.assertEquals(self.resource_registry.find_objects(device_obj._id, PRED.withinDeployment, id_only=True)[0][0], deployment_obj._id)
        self.assertEquals(self.resource_registry.find_objects(device_obj._id, PRED.hasDeployment, id_only=True)[0][0], deployment_obj._id)
        # stream_name to dataset_id, for lookup later
        device_stream_names = {}
        device_data_products, _ = self.resource_registry.find_objects(device_obj._id, PRED.hasOutputProduct)
        for ddp in device_data_products:
            stream_def = self.resource_registry.find_objects(ddp._id, PRED.hasStreamDefinition)[0][0]
            dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(ddp._id)
            device_stream_names[stream_def.name] = dataset_id
        site_data_products, _ = self.resource_registry.find_objects(site_obj._id, PRED.hasOutputProduct)
        for sdp in site_data_products:
            self.assertEquals(sdp.category, DataProductTypeEnum.SITE)
            self.assertEquals(len(sdp.dataset_windows), 1)
            stream_def = self.resource_registry.find_objects(sdp._id, PRED.hasStreamDefinition)[0][0]
            assert sdp.dataset_windows[0].dataset_id == device_stream_names.get(stream_def.name)
            assert sdp.dataset_windows[0].bounds.start_datetime == deployment_obj.start_datetime
            assert sdp.dataset_windows[0].bounds.end_datetime == deployment_obj.end_datetime
        self.observatory_management.deactivate_deployment(deployment_id=deployment_obj._id)
        deployment_obj = self.resource_registry.read(deployment_obj._id)
        # NOTE(review): this second loop re-checks the site data products
        # fetched BEFORE deactivation; it does not re-read them from the
        # registry, so it may not observe updates made by deactivation.
        for sdp in site_data_products:
            self.assertEquals(sdp.category, DataProductTypeEnum.SITE)
            self.assertEquals(len(sdp.dataset_windows), 1)
            stream_def = self.resource_registry.find_objects(sdp._id, PRED.hasStreamDefinition)[0][0]
            assert sdp.dataset_windows[0].dataset_id == device_stream_names.get(stream_def.name)
            assert sdp.dataset_windows[0].bounds.start_datetime == deployment_obj.start_datetime
            assert sdp.dataset_windows[0].bounds.end_datetime == deployment_obj.end_datetime
    @attr('INT')
    def test_primary_deployment(self):
        """hasPrimaryDeployment appears on activation, disappears on deactivation."""
        # First deployment
        starting = str(calendar.timegm(datetime(2014, 1, 1, 0).timetuple()))
        site_1_id, device_1_id, dataset_1_id, deployment_1_id, param_dict_a, data_product_1_id = self.create_device_site_deployment(dep_name="Site 1 - Device 1",
                                                                                                                                   starting=starting)
        self.assertEquals([], self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertEquals([], self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertEquals([], self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0])
        self.observatory_management.activate_deployment(deployment_id=deployment_1_id)
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0][0])
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0][0])
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0][0])
        self.observatory_management.deactivate_deployment(deployment_id=deployment_1_id)
        # withinDeployment persists after deactivation; hasPrimaryDeployment
        # is removed.
        self.assertEquals([], self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertEquals([], self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0][0])
    @attr('INT')
    @skip("Multiple deployments of the same device are not functional. State transitions need to be looked at.")
    def test_multiple_deployments(self):
        """Two sequential deployments of one device on one site (skipped)."""
        from interface.objects import DataProductTypeEnum, TemporalBounds, Deployment, RemotePlatformDeploymentContext
        # First deployment
        starting = str(calendar.timegm(datetime(2014, 1, 1, 0).timetuple()))
        site_1_id, device_1_id, dataset_1_id, deployment_1_id, param_dict_a, data_product_1_id = self.create_device_site_deployment(dep_name="Site 1 - Device 1",
                                                                                                                                   starting=starting)
        site = self.resource_registry.read(site_1_id)
        # Create SDPs
        # This logic is also in preload, but testing preload is painful.
        # Testing it manually here for now.
        for i, scfg in enumerate(site.stream_configurations):
            pdict = self.container.resource_registry.find_resources(name=scfg.parameter_dictionary_name,
                                                                    restype=RT.ParameterDictionary, id_only=False)[0][0]
            # Clone/Create the new ParameterDictionary
            del pdict._id
            del pdict._rev
            sdp_pdict_id, _ = self.container.resource_registry.create(pdict)
            stream_def_id = self.create_stream_definition(name='CTD 1 - SDP', parameter_dictionary_id=sdp_pdict_id)
            sdp_id = self.create_data_product(name="SDP_%d" % i, stream_def_id=stream_def_id, stream_configuration=scfg)
            self.activate_data_product(sdp_id)
            self.container.resource_registry.create_association(subject=site_1_id,
                                                                predicate=PRED.hasOutputProduct,
                                                                object=sdp_id,
                                                                assoc_type=RT.DataProduct)
            sdp = self.resource_registry.read(sdp_id)
            sdp.category = DataProductTypeEnum.SITE
            self.resource_registry.update(sdp)
        self.observatory_management.activate_deployment(deployment_id=deployment_1_id)
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0][0])
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0][0])
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0][0])
        self.observatory_management.deactivate_deployment(deployment_id=deployment_1_id)
        self.assertEquals([], self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertEquals([], self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0][0])
        sdps, _ = self.resource_registry.find_objects(site_1_id, PRED.hasOutputProduct)
        for sdp in sdps:
            self.assertEquals(sdp.category, DataProductTypeEnum.SITE)
            self.assertEquals(len(sdp.dataset_windows), 1)
            assert sdp.dataset_windows[0].dataset_id == dataset_1_id
            assert sdp.dataset_windows[0].bounds.start_datetime == starting
            # Deactivation should have closed the window "now" (within 10s).
            assert int(sdp.dataset_windows[0].bounds.end_datetime) - calendar.timegm(datetime.utcnow().timetuple()) < 10
        # Second deployment (same site and device)
        starting2 = str(calendar.timegm(datetime(2014, 1, 5, 0).timetuple()))
        temp_bounds2 = TemporalBounds(start_datetime=starting2, end_datetime='')
        deployment_2 = Deployment(name="Site 1 - Device 1 - v2", type="RemotePlatform", context=RemotePlatformDeploymentContext(),
                                  constraint_list=[temp_bounds2])
        deployment_2_id = self.observatory_management.create_deployment(deployment=deployment_2, site_id=site_1_id, device_id=device_1_id)
        self.observatory_management.activate_deployment(deployment_id=deployment_2_id)
        self.assertEquals(deployment_2_id, self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0][0])
        self.assertEquals(deployment_2_id, self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0][0])
        self.assertItemsEqual([deployment_1_id, deployment_2_id], self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0])
        # NOTE(review): deployment_1 is deactivated here although
        # deployment_2 is the active one — confirm this is intentional.
        self.observatory_management.deactivate_deployment(deployment_id=deployment_1_id)
        self.assertEquals([], self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertEquals([], self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertItemsEqual([deployment_1_id, deployment_2_id], self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0])
        sdps, _ = self.resource_registry.find_objects(site_1_id, PRED.hasOutputProduct)
        for sdp in sdps:
            self.assertEquals(sdp.category, DataProductTypeEnum.SITE)
            self.assertEquals(len(sdp.dataset_windows), 2)
            assert sdp.dataset_windows[0].dataset_id == dataset_1_id
            assert sdp.dataset_windows[0].bounds.start_datetime == starting
            assert int(sdp.dataset_windows[0].bounds.end_datetime) - calendar.timegm(datetime.utcnow().timetuple()) < 10
    @attr('INT')
    def test_single_device_single_site(self):
        """Full lifecycle of one device on one site with a site data product."""
        from interface.objects import DataProductTypeEnum
        starting = str(calendar.timegm(datetime(2014, 1, 1, 0).timetuple()))
        site_1_id, device_1_id, dataset_1_id, deployment_1_id, param_dict_a, data_product_1_id = self.create_device_site_deployment(dep_name="Site 1 - Device 1",
                                                                                                                                   starting=starting)
        site = self.resource_registry.read(site_1_id)
        # Create SDPs
        # This logic is also in preload, but testing preload is painful.
        # Testing it manually here for now.
        for i, scfg in enumerate(site.stream_configurations):
            pdict = self.container.resource_registry.find_resources(name=scfg.parameter_dictionary_name,
                                                                    restype=RT.ParameterDictionary, id_only=False)[0][0]
            # Clone/Create the new ParameterDictionary
            del pdict._id
            del pdict._rev
            sdp_pdict_id, _ = self.container.resource_registry.create(pdict)
            stream_def_id = self.create_stream_definition(name='CTD 1 - SDP', parameter_dictionary_id=sdp_pdict_id)
            sdp_id = self.create_data_product(name="SDP_%d" % i, stream_def_id=stream_def_id, stream_configuration=scfg)
            self.activate_data_product(sdp_id)
            self.container.resource_registry.create_association(subject=site_1_id,
                                                                predicate=PRED.hasOutputProduct,
                                                                object=sdp_id,
                                                                assoc_type=RT.DataProduct)
            sdp = self.resource_registry.read(sdp_id)
            sdp.category = DataProductTypeEnum.SITE
            self.resource_registry.update(sdp)
        self.observatory_management.activate_deployment(deployment_id=deployment_1_id)
        # Get Deployment start time
        deployment_obj = self.resource_registry.read(deployment_1_id)
        for constraint in deployment_obj.constraint_list:
            if constraint.type_ == OT.TemporalBounds:
                assert constraint.start_datetime == starting
        # Get information about the new SiteDataProduct that should have been created
        site_data_product_1_id = self.resource_registry.find_objects(site_1_id, PRED.hasOutputProduct, id_only=True)[0][0]
        stream_def_2_id = self.resource_registry.find_objects(site_data_product_1_id, PRED.hasStreamDefinition, id_only=True)[0][0]
        param_dict_b = self.resource_registry.find_objects(stream_def_2_id, PRED.hasParameterDictionary)[0][0]
        # Check associations
        self.assertEquals(self.resource_registry.find_objects(site_1_id, PRED.hasDevice, id_only=True)[0][0], device_1_id)
        self.assertEquals(self.resource_registry.find_objects(site_1_id, PRED.hasDeployment, id_only=True)[0][0], deployment_1_id)
        self.assertEquals(self.resource_registry.find_objects(device_1_id, PRED.hasDeployment, id_only=True)[0][0], deployment_1_id)
        self.assertEquals(self.resource_registry.find_objects(device_1_id, PRED.hasOutputProduct, id_only=True)[0][0], data_product_1_id)
        self.assertEquals(self.resource_registry.find_objects(site_1_id, PRED.hasOutputProduct, id_only=True)[0][0], site_data_product_1_id)
        site_data_product_1 = self.resource_registry.find_objects(site_1_id, PRED.hasOutputProduct)[0][0]
        self.assertEquals(site_data_product_1.category, DataProductTypeEnum.SITE)
        self.assertEquals(len(site_data_product_1.dataset_windows), 1)
        assert site_data_product_1.dataset_windows[0].dataset_id == dataset_1_id
        assert site_data_product_1.dataset_windows[0].bounds.start_datetime == starting
        # Window is still open while the deployment is active.
        assert site_data_product_1.dataset_windows[0].bounds.end_datetime == ''
        # Check that param dicts have equal members
        self.assertEquals(param_dict_a.name, param_dict_b.name)
        self.observatory_management.deactivate_deployment(deployment_id=deployment_1_id)
        # Verify the window has an ending time
        site_data_product_1 = self.resource_registry.find_objects(site_1_id, PRED.hasOutputProduct)[0][0]
        self.assertEquals(site_data_product_1.category, DataProductTypeEnum.SITE)
        self.assertEquals(len(site_data_product_1.dataset_windows), 1)
        assert site_data_product_1.dataset_windows[0].dataset_id == dataset_1_id
        assert site_data_product_1.dataset_windows[0].bounds.start_datetime == starting
        assert int(site_data_product_1.dataset_windows[0].bounds.end_datetime) - calendar.timegm(datetime.utcnow().timetuple()) < 10
        # Verify that data is there
        granule = self.data_retriever.retrieve(dataset_1_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_allclose(rdt['time'], np.arange(4))
        np.testing.assert_allclose(rdt['temp'], np.arange(10, 14))
| ooici/coi-services | ion/services/dm/test/test_site_data_products.py | Python | bsd-2-clause | 20,250 |
# Power-sweep data-taking script (Python 2) for a ROACH-based KID readout:
# for each DAC attenuation it sweeps the resonators, refits their center
# frequencies, then records demodulated timestreams to a data file.
import matplotlib
from kid_readout.roach import baseband
# Select a non-interactive backend before any pyplot use (headless runs).
matplotlib.use('agg')
import numpy as np
import time
import sys
from kid_readout.utils import data_file,sweeps
from kid_readout.analysis.resonator import fit_best_resonator
ri = baseband.RoachBasebandWide()
ri.initialize()
#ri.set_fft_gain(6)
#f0s = np.load('/home/gjones/workspace/apps/f8_fit_resonances.npy')
#f0s = np.load('/home/gjones/workspace/apps/first_pass_sc3x3_0813f9.npy')
#f0s = np.load('/home/gjones/workspace/apps/sc5x4_0813f10_first_pass.npy')#[:4]
#f0s = np.load('/home/gjones/workspace/readout/apps/sc3x3_0813f9_2014-02-11.npy')
#f0s = np.load('/home/gjones/workspace/readout/apps/sc3x3_0813f5_2014-02-27.npy')
# Resonance frequency list for this device (hard-coded absolute path).
f0s = np.load('/home/gjones/workspace/apps/sc5x4_0813f12.npy')
f0s.sort()
#f0s = f0s*(0.9995)
suffix = "power"
nf = len(f0s)
# Tones are read out in groups of `atonce`; pad the list so it divides evenly.
atonce = 4
if nf % atonce > 0:
    print "extending list of resonators to make a multiple of ",atonce
    f0s = np.concatenate((f0s,np.arange(1,1+atonce-(nf%atonce))+f0s.max()))
offsets = np.linspace(-4882.8125,4638.671875,20)#[5:15]
offsets = offsets
#offsets = np.concatenate(([-40e3,-20e3],offsets,[20e3,40e3]))/1e6
offsets = np.concatenate(([-40e3],offsets,[40e3]))/1e6
#offsets = offsets*4
# Coarse sweep setup: offsets are recomputed below from bin indices, so the
# linspace values above are effectively discarded.
nsamp = 2**18
step = 1
nstep = 80
f0binned = np.round(f0s*nsamp/512.0)*512.0/nsamp
offset_bins = np.arange(-(nstep+1),(nstep+1))*step
offsets = offset_bins*512.0/nsamp
offsets = np.concatenate(([offsets.min()-20e-3,],offsets,[offsets.max()+20e-3]))
print f0s
print offsets*1e6
print len(f0s)
# Optional wait for the cryostat stage to warm past a threshold (disabled).
if False:
    from kid_readout.utils.parse_srs import get_all_temperature_data
    while True:
        temp = get_all_temperature_data()[1][-1]
        print "mk stage at", temp
        if temp > 0.348:
            break
        time.sleep(300)
    time.sleep(600)
start = time.time()
# When True, use the |S21| minimum as the center frequency instead of the fit.
use_fmin = True
attenlist = np.linspace(33,45,5)-6
#attenlist = [44.0]
#attenlist = attenlist[:4]
for atten in attenlist:
    print "setting attenuator to",atten
    ri.set_dac_attenuator(atten)
    # Coarse sweep: locate each resonance and estimate the cable delay.
    measured_freqs = sweeps.prepare_sweep(ri,f0binned,offsets,nsamp=nsamp)
    print "loaded waveforms in", (time.time()-start),"seconds"
    sweep_data = sweeps.do_prepared_sweep(ri, nchan_per_step=atonce, reads_per_step=8)
    orig_sweep_data = sweep_data
    meas_cfs = []
    idxs = []
    delays = []
    for m in range(len(f0s)):
        fr,s21,errors = sweep_data.select_by_freq(f0s[m])
        thiscf = f0s[m]
        res = fit_best_resonator(fr[1:-1],s21[1:-1],errors=errors[1:-1]) #Resonator(fr,s21,errors=errors)
        delay = res.delay
        delays.append(delay)
        # Remove the fitted cable delay and refit.
        s21 = s21*np.exp(2j*np.pi*res.delay*fr)
        res = fit_best_resonator(fr,s21,errors=errors)
        fmin = fr[np.abs(s21).argmin()]
        print "s21 fmin", fmin, "original guess",thiscf,"this fit", res.f_0, "delay",delay,"resid delay",res.delay
        if use_fmin:
            meas_cfs.append(fmin)
        else:
            # Fall back to the original guess or fmin when the fit wanders
            # more than 0.1 (MHz, presumably — confirm units) from the guess.
            if abs(res.f_0 - thiscf) > 0.1:
                if abs(fmin - thiscf) > 0.1:
                    print "using original guess"
                    meas_cfs.append(thiscf)
                else:
                    print "using fmin"
                    meas_cfs.append(fmin)
            else:
                print "using this fit"
                meas_cfs.append(res.f_0)
        idx = np.unravel_index(abs(measured_freqs - meas_cfs[-1]).argmin(),measured_freqs.shape)
        idxs.append(idx)
    delay = np.median(delays)
    print "median delay is ",delay
    # Fine sweep around the refined center frequencies at higher resolution.
    nsamp = 2**22
    step = 1
    f0binned_meas = np.round(f0s*nsamp/512.0)*512.0/nsamp
    offset_bins = np.array([-8,-4,-2,-1,0,1,2,4])#np.arange(-4,4)*step
    offset_bins = np.concatenate(([-40,-20],offset_bins,[20,40]))
    offsets = offset_bins*512.0/nsamp
    meas_cfs = np.array(meas_cfs)
    f0binned_meas = np.round(meas_cfs*nsamp/512.0)*512.0/nsamp
    f0s = f0binned_meas
    measured_freqs = sweeps.prepare_sweep(ri,f0binned_meas,offsets,nsamp=nsamp)
    print "loaded updated waveforms in", (time.time()-start),"seconds"
    sys.stdout.flush()
    time.sleep(1)
    df = data_file.DataFile(suffix=suffix)
    df.log_hw_state(ri)
    sweep_data = sweeps.do_prepared_sweep(ri, nchan_per_step=atonce, reads_per_step=8, sweep_data=orig_sweep_data)
    df.add_sweep(sweep_data)
    meas_cfs = []
    idxs = []
    for m in range(len(f0s)):
        fr,s21,errors = sweep_data.select_by_freq(f0s[m])
        thiscf = f0s[m]
        # Apply the median delay estimated from the coarse sweep.
        s21 = s21*np.exp(2j*np.pi*delay*fr)
        res = fit_best_resonator(fr,s21,errors=errors) #Resonator(fr,s21,errors=errors)
        fmin = fr[np.abs(s21).argmin()]
        print "s21 fmin", fmin, "original guess",thiscf,"this fit", res.f_0
        if use_fmin:
            meas_cfs.append(fmin)
        else:
            if abs(res.f_0 - thiscf) > 0.1:
                if abs(fmin - thiscf) > 0.1:
                    print "using original guess"
                    meas_cfs.append(thiscf)
                else:
                    print "using fmin"
                    meas_cfs.append(fmin)
            else:
                print "using this fit"
                meas_cfs.append(res.f_0)
        idx = np.unravel_index(abs(measured_freqs - meas_cfs[-1]).argmin(),measured_freqs.shape)
        idxs.append(idx)
    print meas_cfs
    # Program the final tones and record demodulated timestreams in groups.
    ri.add_tone_freqs(np.array(meas_cfs))
    ri.select_bank(ri.tone_bins.shape[0]-1)
    ri._sync()
    time.sleep(0.5)
    #raw_input("turn on LED take data")
    df.log_hw_state(ri)
    nsets = len(meas_cfs)/atonce
    tsg = None
    for iset in range(nsets):
        selection = range(len(meas_cfs))[iset::nsets]
        ri.select_fft_bins(selection)
        ri._sync()
        time.sleep(0.2)
        t0 = time.time()
        dmod,addr = ri.get_data_seconds(30,demod=True)
        print nsets,iset,tsg
        tsg = df.add_timestream_data(dmod, ri, t0, tsg=tsg)
        df.sync()
    df.nc.close()
print "completed in",((time.time()-start)/60.0),"minutes"
| ColumbiaCMB/kid_readout | apps/data_taking_scripts/old_scripts/highq_power_sweep_0813f12.py | Python | bsd-2-clause | 5,967 |
"""Terminal management for exposing terminals to a web interface using Tornado.
"""
# Copyright (c) Jupyter Development Team
# Copyright (c) 2014, Ramalingam Saravanan <sarava@sarava.net>
# Distributed under the terms of the Simplified BSD License.
from __future__ import absolute_import, print_function
import asyncio
from collections import deque
import itertools
import logging
import os
import signal
import codecs
import warnings
import select
try:
    from ptyprocess import PtyProcessUnicode
    def preexec_fn():
        # Run in the child before exec: restore default SIGPIPE handling
        # (Python normally ignores it, which breaks pipelines in the shell).
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)
except ImportError:
    # ptyprocess unavailable (e.g. on Windows): fall back to winpty, which
    # has no preexec hook.
    from winpty import PtyProcess as PtyProcessUnicode
    preexec_fn = None
from tornado.ioloop import IOLoop
ENV_PREFIX = "PYXTERM_" # Environment variable prefix
DEFAULT_TERM_TYPE = "xterm"
class PtyWithClients(object):
    """A pty process together with the clients attached to its output."""
    def __init__(self, argv, env=[], cwd=None):
        # NOTE(review): env=[] is a mutable default argument; it is only
        # passed through to spawn(), never mutated, so it is harmless in
        # practice, but env=None would be the safer idiom.
        self.clients = []
        # If you start the process and then construct this object from it,
        # output generated by the process prior to the object's creation
        # is lost.  Hence the change from 0.8.3.
        # Buffer output until a client connects; then let the client
        # drain the buffer.
        # We keep the same read_buffer as before
        self.read_buffer = deque([], maxlen=10)
        self.preopen_buffer = deque([])
        kwargs = dict(argv=argv, env=env, cwd=cwd)
        if preexec_fn is not None:
            # POSIX only: the module-level preexec_fn restores SIGPIPE.
            kwargs["preexec_fn"] = preexec_fn
        self.ptyproc = PtyProcessUnicode.spawn(**kwargs)
        # The output might not be strictly UTF-8 encoded, so
        # we replace the inner decoder of PtyProcessUnicode
        # to allow non-strict decode.
        self.ptyproc.decoder = codecs.getincrementaldecoder('utf-8')(errors='replace')
    def resize_to_smallest(self):
        """Set the terminal size to that of the smallest client dimensions.

        A terminal not using the full space available is much nicer than a
        terminal trying to use more than the available space, so we keep it
        sized to the smallest client.
        """
        # 10001 acts as an "unset" sentinel larger than any plausible size.
        minrows = mincols = 10001
        for client in self.clients:
            rows, cols = client.size
            if rows is not None and rows < minrows:
                minrows = rows
            if cols is not None and cols < mincols:
                mincols = cols
        if minrows == 10001 or mincols == 10001:
            # No client reported a concrete size; leave the pty unchanged.
            return
        rows, cols = self.ptyproc.getwinsize()
        if (rows, cols) != (minrows, mincols):
            self.ptyproc.setwinsize(minrows, mincols)
    def kill(self, sig=signal.SIGTERM):
        """Send a signal to the process in the pty"""
        self.ptyproc.kill(sig)
    def killpg(self, sig=signal.SIGTERM):
        """Send a signal to the process group of the process in the pty"""
        if os.name == 'nt':
            # Windows has no process groups; signal the process itself.
            return self.ptyproc.kill(sig)
        pgid = os.getpgid(self.ptyproc.pid)
        os.killpg(pgid, sig)
    async def terminate(self, force=False):
        '''This forces a child process to terminate. It starts nicely with
        SIGHUP and SIGINT. If "force" is True then moves onto SIGKILL. This
        returns True if the child was terminated. This returns False if the
        child could not be terminated. '''
        if os.name == 'nt':
            signals = [signal.SIGINT, signal.SIGTERM]
        else:
            signals = [signal.SIGHUP, signal.SIGCONT, signal.SIGINT,
                       signal.SIGTERM]
        # NOTE(review): `loop` is unused below; kept as-is for byte
        # compatibility.
        loop = IOLoop.current()
        # Grace period between signals, taken from the pty process object.
        def sleep(): return asyncio.sleep(self.ptyproc.delayafterterminate)
        if not self.ptyproc.isalive():
            return True
        try:
            for sig in signals:
                self.kill(sig)
                await sleep()
                if not self.ptyproc.isalive():
                    return True
            if force:
                self.kill(signal.SIGKILL)
                await sleep()
                if not self.ptyproc.isalive():
                    return True
                else:
                    return False
            return False
        except OSError:
            # I think there are kernel timing issues that sometimes cause
            # this to happen. I think isalive() reports True, but the
            # process is dead to the kernel.
            # Make one last attempt to see if the kernel is up to date.
            await sleep()
            if not self.ptyproc.isalive():
                return True
            else:
                return False
def _update_removing(target, changes):
"""Like dict.update(), but remove keys where the value is None.
"""
for k, v in changes.items():
if v is None:
target.pop(k, None)
else:
target[k] = v
class TermManagerBase(object):
    """Base class for a terminal manager.

    Spawns pty-backed terminals, registers their file descriptors with the
    tornado IOLoop, and forwards pty output to attached clients.
    """
    def __init__(self, shell_command, server_url="", term_settings=None,
                 extra_env=None, ioloop=None):
        """
        :param shell_command: argv list used to launch each terminal.
        :param server_url: server URL, exported to terminals via the environment.
        :param term_settings: dict of terminal settings such as ``type``;
            defaults to an empty dict.  (Previously declared as a mutable
            default argument, which every instance would have shared.)
        :param extra_env: mapping merged into the child environment; a value
            of None removes that variable (see ``_update_removing``).
        :param ioloop: deprecated and ignored.
        """
        self.shell_command = shell_command
        self.server_url = server_url
        self.term_settings = {} if term_settings is None else term_settings
        self.extra_env = extra_env
        self.log = logging.getLogger(__name__)

        # Map of pty fd -> PtyWithClients, used to dispatch IOLoop read events.
        self.ptys_by_fd = {}

        if ioloop is not None:
            warnings.warn(
                f"Setting {self.__class__.__name__}.ioloop is deprecated and ignored",
                DeprecationWarning,
                stacklevel=2,
            )

    def make_term_env(self, height=25, width=80, winheight=0, winwidth=0, **kwargs):
        """Build the environment variables for the process in the terminal."""
        env = os.environ.copy()
        env["TERM"] = self.term_settings.get("type", DEFAULT_TERM_TYPE)
        dimensions = "%dx%d" % (width, height)
        if winwidth and winheight:
            dimensions += ";%dx%d" % (winwidth, winheight)
        env[ENV_PREFIX+"DIMENSIONS"] = dimensions
        env["COLUMNS"] = str(width)
        env["LINES"] = str(height)

        if self.server_url:
            env[ENV_PREFIX+"URL"] = self.server_url

        if self.extra_env:
            _update_removing(env, self.extra_env)

        return env

    def new_terminal(self, **kwargs):
        """Make a new terminal, return a :class:`PtyWithClients` instance."""
        options = self.term_settings.copy()
        options['shell_command'] = self.shell_command
        options.update(kwargs)
        argv = options['shell_command']
        env = self.make_term_env(**options)
        cwd = options.get('cwd', None)
        return PtyWithClients(argv, env, cwd)

    def start_reading(self, ptywclients):
        """Connect a terminal to the tornado event loop to read data from it."""
        fd = ptywclients.ptyproc.fd
        self.ptys_by_fd[fd] = ptywclients
        loop = IOLoop.current()
        loop.add_handler(fd, self.pty_read, loop.READ)

    def on_eof(self, ptywclients):
        """Called when the pty has closed."""
        # Stop trying to read from that terminal
        fd = ptywclients.ptyproc.fd
        self.log.info("EOF on FD %d; stopping reading", fd)
        del self.ptys_by_fd[fd]
        IOLoop.current().remove_handler(fd)

        # This closes the fd, and should result in the process being reaped.
        ptywclients.ptyproc.close()

    def pty_read(self, fd, events=None):
        """Called by the event loop when there is pty data ready to read."""
        # select() guards against spurious wakeups: only read when data is
        # actually available; the 0.1s timeout keeps the loop responsive.
        r, _, _ = select.select([fd], [], [], .1)
        if not r:
            return
        ptywclients = self.ptys_by_fd[fd]
        try:
            s = ptywclients.ptyproc.read(65536)
            client_list = ptywclients.clients
            ptywclients.read_buffer.append(s)
            if not client_list:
                # No one to consume our output: buffer it.
                ptywclients.preopen_buffer.append(s)
                return
            for client in ptywclients.clients:
                client.on_pty_read(s)
        except EOFError:
            self.on_eof(ptywclients)
            for client in ptywclients.clients:
                client.on_pty_died()

    def get_terminal(self, url_component=None):
        """Override in a subclass to give a terminal to a new websocket connection

        The :class:`TermSocket` handler works with zero or one URL components
        (capturing groups in the URL spec regex). If it receives one, it is
        passed as the ``url_component`` parameter; otherwise, this is None.
        """
        raise NotImplementedError

    def client_disconnected(self, websocket):
        """Override this to e.g. kill terminals on client disconnection."""
        pass

    async def shutdown(self):
        """Terminate all terminals; called when the server is going down."""
        await self.kill_all()

    async def kill_all(self):
        """Force-terminate every managed terminal, waiting for all of them."""
        futures = []
        for term in self.ptys_by_fd.values():
            futures.append(term.terminate(force=True))
        # wait for futures to finish
        if futures:
            await asyncio.gather(*futures)
class SingleTermManager(TermManagerBase):
    """All connections to the websocket share a common terminal."""
    def __init__(self, **kwargs):
        super(SingleTermManager, self).__init__(**kwargs)
        self.terminal = None

    def get_terminal(self, url_component=None):
        """Return the shared terminal, spawning it lazily on first use."""
        if self.terminal is None:
            term = self.new_terminal()
            self.start_reading(term)
            self.terminal = term
        return self.terminal

    async def kill_all(self):
        """Terminate the shared terminal and forget it."""
        await super().kill_all()
        self.terminal = None
class MaxTerminalsReached(Exception):
    """Raised when a manager refuses to create another terminal."""

    def __init__(self, max_terminals):
        # Remember the limit so handlers can report it to the user.
        self.max_terminals = max_terminals

    def __str__(self):
        return "Cannot create more than %d terminals" % (self.max_terminals,)
class UniqueTermManager(TermManagerBase):
    """Give each websocket a unique terminal to use."""
    def __init__(self, max_terminals=None, **kwargs):
        super(UniqueTermManager, self).__init__(**kwargs)
        self.max_terminals = max_terminals

    def get_terminal(self, url_component=None):
        """Spawn and return a fresh terminal, enforcing the optional cap."""
        at_capacity = (self.max_terminals
                       and len(self.ptys_by_fd) >= self.max_terminals)
        if at_capacity:
            raise MaxTerminalsReached(self.max_terminals)
        term = self.new_terminal()
        self.start_reading(term)
        return term

    def client_disconnected(self, websocket):
        """Send terminal SIGHUP when client disconnects."""
        self.log.info("Websocket closed, sending SIGHUP to terminal.")
        if not websocket.terminal:
            return
        if os.name == 'nt':
            websocket.terminal.kill()
            # Immediately call the pty reader to process
            # the eof and free up space
            self.pty_read(websocket.terminal.ptyproc.fd)
        else:
            websocket.terminal.killpg(signal.SIGHUP)
class NamedTermManager(TermManagerBase):
    """Share terminals between websockets connected to the same endpoint."""

    # Template used to generate automatic terminal names ("1", "2", ...).
    name_template = "%d"

    def __init__(self, max_terminals=None, **kwargs):
        super(NamedTermManager, self).__init__(**kwargs)
        self.max_terminals = max_terminals
        self.terminals = {}

    def get_terminal(self, term_name):
        """Return the terminal registered under *term_name*, creating it on demand."""
        assert term_name is not None

        try:
            return self.terminals[term_name]
        except KeyError:
            pass

        if self.max_terminals and len(self.terminals) >= self.max_terminals:
            raise MaxTerminalsReached(self.max_terminals)

        # Create new terminal
        self.log.info("New terminal with specified name: %s", term_name)
        term = self.new_terminal()
        term.term_name = term_name
        self.terminals[term_name] = term
        self.start_reading(term)
        return term

    def _next_available_name(self):
        """Return the first generated name not already registered."""
        for counter in itertools.count(start=1):
            candidate = self.name_template % counter
            if candidate not in self.terminals:
                return candidate

    def new_named_terminal(self, **kwargs):
        """Create and register a terminal, generating a name when none is given."""
        name = kwargs['name'] if 'name' in kwargs else self._next_available_name()
        term = self.new_terminal(**kwargs)
        self.log.info("New terminal with automatic name: %s", name)
        term.term_name = name
        self.terminals[name] = term
        self.start_reading(term)
        return name, term

    def kill(self, name, sig=signal.SIGTERM):
        """Signal the named terminal.  This should lead to an EOF."""
        self.terminals[name].kill(sig)

    async def terminate(self, name, force=False):
        """Terminate the named terminal, escalating when *force* is set."""
        await self.terminals[name].terminate(force=force)

    def on_eof(self, ptywclients):
        """Drop the terminal from the registry once its pty closes."""
        super(NamedTermManager, self).on_eof(ptywclients)
        name = ptywclients.term_name
        self.log.info("Terminal %s closed", name)
        self.terminals.pop(name, None)

    async def kill_all(self):
        """Terminate every terminal and clear the registry."""
        await super().kill_all()
        self.terminals = {}
| takluyver/terminado | terminado/management.py | Python | bsd-2-clause | 12,822 |
import time
from Block import Block
from ..ProtectFlags import ProtectFlags
class UserDirBlock(Block):
  """An Amiga filesystem (AFFS) user directory block.

  Type T_SHORT with sub-type ST_USERDIR.  Negative offsets passed to the
  ``_get_*``/``_put_*`` helpers index from the end of the block, as is
  conventional for Amiga filesystem structures.  (Python 2 source:
  uses ``xrange`` and print statements.)
  """
  def __init__(self, blkdev, blk_num):
    Block.__init__(self, blkdev, blk_num, is_type=Block.T_SHORT, is_sub_type=Block.ST_USERDIR)

  def set(self, data):
    """Attach raw block bytes and parse them."""
    self._set_data(data)
    self._read()

  def read(self):
    """Read the block from the device and parse it."""
    self._read_data()
    self._read()

  def _read(self):
    """Parse the UserDir fields from the block data; return validity."""
    Block.read(self)
    if not self.valid:
      return False

    # UserDir fields
    self.own_key = self._get_long(1)
    self.protect = self._get_long(-48)
    self.comment = self._get_bstr(-46, 79)
    self.mod_ts = self._get_timestamp(-23)
    self.name = self._get_bstr(-20, 30)
    self.hash_chain = self._get_long(-4)
    self.parent = self._get_long(-3)
    self.extension = self._get_long(-2)

    # hash table of entries: all longs between the 6 header longs and the
    # 56 trailing longs of the block
    self.hash_table = []
    self.hash_size = self.blkdev.block_longs - 56
    for i in xrange(self.hash_size):
      self.hash_table.append(self._get_long(6+i))

    # a directory block is only valid if it stores its own block number
    self.valid = (self.own_key == self.blk_num)
    return self.valid

  def create(self, parent, name, protect=0, comment=None, mod_ts=None, hash_chain=0, extension=0):
    """Initialize an in-memory UserDir block with an empty hash table."""
    Block.create(self)
    self.own_key = self.blk_num
    self.protect = protect
    if comment == None:
      self.comment = ''
    else:
      self.comment = comment
    # timestamps
    self.mod_ts = mod_ts
    self.name = name
    self.hash_chain = hash_chain
    self.parent = parent
    self.extension = extension
    # empty hash table
    self.hash_table = []
    self.hash_size = self.blkdev.block_longs - 56
    for i in xrange(self.hash_size):
      self.hash_table.append(0)
    self.valid = True
    return True

  def write(self):
    """Serialize all fields back into the block and write it out.

    Offsets mirror those used by :meth:`_read`.
    """
    Block._create_data(self)
    self._put_long(1, self.own_key)
    self._put_long(-48, self.protect)
    self._put_bstr(-46, 79, self.comment)
    self._put_timestamp(-23, self.mod_ts)
    self._put_bstr(-20, 30, self.name)
    self._put_long(-4, self.hash_chain)
    self._put_long(-3, self.parent)
    self._put_long(-2, self.extension)
    # hash table
    for i in xrange(self.hash_size):
      self._put_long(6+i, self.hash_table[i])
    Block.write(self)

  def dump(self):
    """Print a human-readable summary of the block (Python 2 prints)."""
    Block.dump(self,"UserDir")
    print " own_key: %d" % (self.own_key)
    pf = ProtectFlags(self.protect)
    print " protect: 0x%x 0b%s %s" % (self.protect, pf.bin_str(), pf)
    print " comment: '%s'" % self.comment
    print " mod_ts: %s" % self.mod_ts
    print " name: '%s'" % self.name
    print " hash_chain: %d" % self.hash_chain
    print " parent: %d" % self.parent
    print " extension: %s" % self.extension
| alpine9000/amiga_examples | tools/external/amitools/amitools/fs/block/UserDirBlock.py | Python | bsd-2-clause | 2,641 |
"""
KAIST 단일인증서비스 모델.
"""
from django.db import models
from django.utils.translation import ugettext_lazy as _
class PortalInfoManager(models.Manager):
    """Custom manager for :class:`PortalInfo`.

    Users signing in through the KAIST single sign-on service for the first
    time must consent to sharing their information with the student council
    before registration completes; users who decline are automatically
    deregistered.  A user may, however, leave the site on the consent page
    without answering either way.  In that case the temporarily stored
    personal data must not be usable by site administrators, so only users
    who have explicitly consented may be exposed.  This manager performs
    that filtering.
    """
    def get_queryset(self):
        """Return a queryset limited to users who consented to data use."""
        return super().get_queryset().filter(is_signed_up=True)
class PortalInfo(models.Model):
    """Stores a user's KAIST portal (single sign-on) account information.

    Field names mirror the attribute names delivered by the KAIST SSO
    service (``ku_*``, ``ou``, ``mail``, ``mobile``).  Verbose names and
    help texts are user-facing Korean strings and are kept as-is.
    """
    user = models.OneToOneField(
        'auth.User',
        primary_key=True, related_name='portal_info',
        verbose_name=_("유저 인스턴스"))
    kaist_uid = models.CharField(
        _("KAIST UID"),
        max_length=128, unique=True)
    ku_kname = models.CharField(
        _("이름"),
        max_length=128, blank=True)
    ku_acad_prog = models.CharField(
        _("과정"),
        max_length=32, blank=True)
    ku_std_no = models.CharField(
        _("학번"),
        max_length=32, blank=True)
    ku_psft_user_status_kor = models.CharField(
        _("학적상태"),
        max_length=32, blank=True)
    ku_born_date = models.CharField(
        _("생년월일"),
        max_length=32, blank=True)
    ku_sex = models.CharField(
        _("성별"),
        max_length=32, blank=True)
    ou = models.CharField(
        _("학과"),
        max_length=32, blank=True)
    mail = models.CharField(
        _("메일주소"),
        max_length=32, blank=True)
    mobile = models.CharField(
        _("전화번호"),
        max_length=32, blank=True)
    is_signed_up = models.BooleanField(
        _("정보제공 동의여부"),
        default=False,
        help_text=_(
            "정보제공에 반대하면 계정삭제 처리가 되나, 아직 동의여부를 "
            "선택하지 않은 최초가입자의 경우 의사표현 시까지 정보가 "
            "임시저장됩니다. 이 특수경우에는 정보를 활용하지 않아야 합니다."))

    #: Custom manager exposing only users who consented to data use.
    objects = PortalInfoManager()

    #: Default manager covering all users, including unconsented ones.
    all_objects = models.Manager()

    def __str__(self):
        return self.ku_kname

    @classmethod
    def create(cls, user, kaist_uid):
        """Alternate constructor taking the user instance and the SSO UID."""
        return cls(user=user, kaist_uid=kaist_uid)

    @property
    def enter_year(self):
        # Two-digit entrance year parsed from an 8-digit student number
        # (e.g. "20150123" -> "15"); None when the format does not match.
        if self.ku_std_no and len(self.ku_std_no) == 8:
            return self.ku_std_no[2:4]
        return None
| hangpark/kaistusc | apps/ksso/models.py | Python | bsd-2-clause | 3,434 |
import javabridge
import bioformats as bf
import numpy as np
from matplotlib import pyplot as plt, cm
# Demo: write a synthetic multi-dimensional OME-TIFF via Bio-Formats.
# Requires a running JVM (javabridge) because Bio-Formats is Java-based.
javabridge.start_vm(class_path=bf.JARS, run_headless=True)

# Dimension sizes: T (time), C (channel), Z (slice), X/Y (pixels).
NT = 10
NC = 2
NZ = 4
NX = 217
NY = 94

output_path = 'outome.tif'

frames = []

# Write planes in T -> Z -> C order, one call per plane.
for t in range(NT):
    for z in range(NZ):
        for c in range(NC):
            # NOTE(review): casting N(0,1) samples to uint16 wraps the
            # negative values — confirm this is acceptable for test data.
            frame = np.random.randn(NY, NX, 1, 1, 1).astype(np.uint16)
            frames.append(np.squeeze(frame))
            print(frame.shape)
            bf.write_image(output_path, pixels=frame, pixel_type=bf.PT_UINT16, c=c, t=t, z=z, size_c=NC, size_t=NT, size_z=NZ)

# Disabled sanity check: read the OME-XML metadata back and verify sizes.
"""
xml_metadata = bf.get_omexml_metadata(path=output_path)
metadata = bf.OMEXML(xml_metadata)
NXp = metadata.image().Pixels.SizeX
NYp = metadata.image().Pixels.SizeY
NZp = metadata.image().Pixels.SizeZ
NCp = metadata.image().Pixels.SizeC
NTp = metadata.image().Pixels.SizeT
print(NXp, NYp, NZp, NCp, NTp)
assert(NXp == NX)
assert(NYp == NY)
assert(NZp == NZ)
assert(NCp == NC)
assert(NTp == NT)
"""

# Shut down the JVM before plotting.
javabridge.kill_vm()

# Display the first generated plane.
fig = plt.figure(figsize=(10, 8), dpi=100)
ax = fig.add_subplot(111)
cax = ax.imshow(frames[0], interpolation='nearest', cmap=cm.hot)
ax.set_xlabel('X-dimension [pixel]', fontsize=10)
ax.set_ylabel('Y-dimension [pixel]', fontsize=10)
cbar = fig.colorbar(cax)
# show plots
plt.show() | sebi06/BioFormatsRead | ometiff_write.py | Python | bsd-2-clause | 1,302 |
from flask.ext.wtf import Form
from wtforms import TextField, TextAreaField, SubmitField, PasswordField, validators, ValidationError, SelectField
class AWSIEFormFields(Form):
    """WTForms field definitions for building an AWS Import/Export manifest.

    NOTE(review): several validator messages below look copy-pasted from
    unrelated forms, and some validators do not match the field semantics;
    flagged inline — confirm intent before changing validation behavior.
    """
    deviceId = TextField(u'Device ID', validators=[validators.required()])
    # NOTE(review): message "Please enter 7 characters to search" looks
    # copy-pasted from a search form — confirm intended text.
    notificationEmail = TextField("Notification Email", [validators.email("Please enter 7 characters to search")])
    # NOTE(review): 'eu-west-1 ' carries a trailing space in the submitted
    # value — confirm against the Import/Export region list.
    region = SelectField(u'Region', choices=[('us-east-1', 'us-east-1'), ('us-west-1', 'us-west-1'),
    ('us-west-2', 'us-west-2'), ('eu-west-1 ', 'eu-west-1'), ('ap-southeast-1', 'ap-southeast-1')])
    # NOTE(review): an email validator on an ACL field looks wrong — an ACL
    # is typically a canned-ACL string, not an address; confirm.
    acl = TextField("ACL", [validators.email("Please enter an ACL if you like")])
    bucket = TextField("Import Bucket", [validators.required("Please enter the bucket name for importing files")])
    logPrefix = TextField("Log Prefix", [validators.required("Please enter a log prefix")])
    prefix = TextField("Prefix", [validators.required("Please enter a prefix")])
    substitutions = TextField("Replace box1 with box2")
    substitutionsb = TextField("Ignore directories")
    substitutions2 = TextField("Replace box1 with box2")
    substitutions2b = TextField("Ignore directories")
    logBucket = TextField("Log Bucket", [validators.required("Please enter the bucket name for job logs")])
    trueCryptPassword = TextField("TrueCrypt Password")
    pinCode = TextField("Pin Code")
    # Optional HTTP header overrides applied to imported objects.
    cacheControl = TextField("Cache Control")
    contentDisposition = TextField("Content Disposition")
    contentLanguage = TextField("Content Language")
    contentTypes = SelectField(u'Map Content types', choices=[('', ''), ('yes', 'yes')])
    diskTimestampMetadataKey = TextField("Disk Time Stamp Metadata Key")
    expires = TextField("Expires")
    # Exclusion filters for files/directories that should not be imported.
    ignore = SelectField(u'Exclude Lost+Found', choices=[('', ''), ('yes', 'yes')])
    ignore2 = SelectField(u'Exclude Recycle Bin', choices=[('', ''), ('yes', 'yes')])
    ignore3 = SelectField(u'Exclude ~ and .swp files', choices=[('', ''), ('yes', 'yes')])
    ignore4 = TextField("Ignore directories")
    ignore5 = TextField("Ignore directories")
    ignore6 = TextField("Ignore files with specific extension")
    ignore7 = TextField("Ignore files with specific extension")
    setContentEncodingForGzFiles = SelectField(u'Set Encoding for .gz files', choices=[('', ''), ('yes', 'Yes')])
    staticMetadata = TextField("Static Metadata")
    storageClass = SelectField(u'Select Storage Class', choices=[('', ''), ('REDUCED_REDUNDANCY', 'REDUCED_REDUNDANCY')])
    serviceLevel = SelectField(u'Expedite return shipping', choices=[('', ''), ('expeditedShipping', 'Expedited Shipping'), ('standard', 'standard Shipping')])
    # Return-shipping address fields.
    name = TextField("Name", [validators.required("Please enter your name, it's required")])
    company = TextField("Company")
    street1 = TextField("Street1", [validators.required("Please enter your street, it's required")])
    street2 = TextField("Street2")
    street3 = TextField("Street3")
    city = TextField("City", [validators.required("Please enter your city, it's required")])
    stateOrProvince = TextField("State or Province", [validators.required("Please enter your state or province, it's required")])
    postalCode = TextField("Postal Code", [validators.required("Please enter your postal code, it's required")])
    phoneNumber = TextField("Phone Number", [validators.required("Please enter your phone number, it's required")])
    country = TextField("Country", [validators.required("Please enter your country, it's required")])
    # Customs/export declaration fields.
    dataDescription = TextField("Description of The Data", [validators.required("Please enter a description, it's required")])
    encryptedData = SelectField(u'Encrypted Data', choices=[('', ''), ('Yes', 'Yes'), ('No', 'No')])
    exportCertifierName = TextField("Shipper Name", [validators.required("Please enter a name, it's required")])
    requiresExportLicense = SelectField(u'Requires Export License', choices=[('', ''), ('Yes', 'Yes'), ('No', 'No')])
    deviceValue = TextField("Device Value", [validators.required("Please enter a value, it's required")])
    deviceCountryOfOrigin = TextField("Drive Manufacture Country", [validators.required("Please a country, it's required")])
    deviceType = SelectField(u'Device Type', choices=[('', ''), ('externalStorageDevice', 'externalStorageDevice'), ('usbFlashDrive', 'usbFlashDrive'), ('sataDrive', 'sataDrive')])
    typeOfExport = SelectField(u'Type of Export', choices=[('', ''), ('return', 'return'), ('permanent', 'permanent'), ('temporary', 'temporary')])
    archivecomment = TextField("Archive Comment")
    fileSystem = SelectField(u'File System', choices=[('', ''), ('NTFS', 'NTFS'), ('EXT4', 'EXT4')])
    submit = SubmitField("Generate")
| AlperSakarya/AWS-Import-Export-Manifest-Generator | forms.py | Python | bsd-2-clause | 4,728 |
from ctypes import c_void_p
import math
import numpy as np
from OpenGL.GL import *
from OpenGL.GLU import *
from PyEngine3D.Common import logger, COMMAND
from PyEngine3D.Common.Constants import *
from PyEngine3D.Utilities import *
from PyEngine3D.OpenGLContext import InstanceBuffer, FrameBufferManager, RenderBuffer, UniformBlock, CreateTexture
from .PostProcess import AntiAliasing, PostProcess
from . import RenderTargets, RenderOption, RenderingType, RenderGroup, RenderMode
from . import SkeletonActor, StaticActor, ScreenQuad, Line
from . import Spline3D
class Renderer(Singleton):
    def __init__(self):
        """Declare all renderer state.

        Only attribute declarations happen here; real setup (manager wiring,
        material loading, uniform buffer creation) is done in initialize().
        """
        self.initialized = False
        self.view_mode = GL_FILL

        # managers (assigned in initialize())
        self.core_manager = None
        self.viewport_manager = None
        self.resource_manager = None
        self.font_manager = None
        self.scene_manager = None
        self.debug_line_manager = None
        self.render_option_manager = None
        self.rendertarget_manager = None
        self.framebuffer_manager = None
        self.postprocess = None

        # components
        self.viewport = None
        self.debug_texture = None

        # cached GL blend state plus the previous state for rollback
        self.blend_enable = False
        self.blend_equation = GL_FUNC_ADD
        self.blend_func_src = GL_SRC_ALPHA
        self.blend_func_dst = GL_ONE_MINUS_SRC_ALPHA

        self.blend_enable_prev = self.blend_enable
        self.blend_equation_prev = self.blend_equation
        self.blend_func_src_prev = self.blend_func_src
        self.blend_func_dst_prev = self.blend_func_dst

        # scene constants uniform buffer
        self.uniform_scene_buffer = None
        self.uniform_scene_data = None
        self.uniform_view_buffer = None
        self.uniform_view_data = None
        self.uniform_view_projection_buffer = None
        self.uniform_view_projection_data = None
        self.uniform_light_buffer = None
        self.uniform_light_data = None
        self.uniform_point_light_buffer = None
        self.uniform_point_light_data = None
        self.uniform_particle_common_buffer = None
        self.uniform_particle_common_data = None
        self.uniform_particle_infos_buffer = None
        self.uniform_particle_infos_data = None

        # material instances
        self.scene_constants_material = None
        self.debug_bone_material = None
        self.shadowmap_material = None
        self.shadowmap_skeletal_material = None
        self.static_object_id_material = None
        self.skeletal_object_id_material = None
        self.selcted_static_object_material = None
        self.selcted_skeletal_object_material = None
        self.selcted_object_composite_material = None
        self.render_color_material = None
        self.render_heightmap_material = None

        # font
        self.font_instance_buffer = None
        self.font_shader = None

        self.actor_instance_buffer = None

        self.render_custom_translucent_callbacks = []
    def initialize(self, core_manager):
        """Wire up managers, load material instances, and build all uniform
        blocks (scene/view/light/particle) used by the shader pipeline.

        The numpy structured dtypes below mirror the std140 uniform block
        layouts in the shaders; field order and padding fields
        (``*_DUMMY*``) are layout-critical and must not be reordered.
        """
        logger.info("Initialize Renderer")
        self.core_manager = core_manager
        self.viewport_manager = core_manager.viewport_manager
        self.viewport = self.viewport_manager.main_viewport
        self.resource_manager = core_manager.resource_manager
        self.render_option_manager = core_manager.render_option_manager
        self.font_manager = core_manager.font_manager
        self.scene_manager = core_manager.scene_manager
        self.debug_line_manager = core_manager.debug_line_manager
        self.rendertarget_manager = core_manager.rendertarget_manager
        self.postprocess = PostProcess()
        self.postprocess.initialize()

        self.framebuffer_manager = FrameBufferManager.instance()

        # material instances
        self.scene_constants_material = self.resource_manager.get_material_instance('scene_constants_main')
        self.debug_bone_material = self.resource_manager.get_material_instance("debug_bone")
        self.shadowmap_material = self.resource_manager.get_material_instance("shadowmap")
        self.shadowmap_skeletal_material = self.resource_manager.get_material_instance(name="shadowmap_skeletal",
                                                                                       shader_name="shadowmap",
                                                                                       macros={"SKELETAL": 1})
        self.static_object_id_material = self.resource_manager.get_material_instance(name="render_static_object_id",
                                                                                     shader_name="render_object_id")
        self.skeletal_object_id_material = self.resource_manager.get_material_instance(name="render_skeletal_object_id",
                                                                                       shader_name="render_object_id",
                                                                                       macros={"SKELETAL": 1})
        self.selcted_static_object_material = self.resource_manager.get_material_instance("selected_object")
        self.selcted_skeletal_object_material = self.resource_manager.get_material_instance(name="selected_object_skeletal",
                                                                                            shader_name="selected_object",
                                                                                            macros={"SKELETAL": 1})
        self.selcted_object_composite_material = self.resource_manager.get_material_instance("selected_object_composite")
        self.render_color_material = self.resource_manager.get_material_instance(name="render_object_color", shader_name="render_object_color")
        self.render_heightmap_material = self.resource_manager.get_material_instance(name="render_heightmap", shader_name="render_heightmap")

        # font
        self.font_shader = self.resource_manager.get_material_instance("font")
        self.font_instance_buffer = InstanceBuffer(name="font_offset", location_offset=1, element_datas=[FLOAT4_ZERO, ])

        # instance buffer
        self.actor_instance_buffer = InstanceBuffer(name="actor_instance_buffer", location_offset=7, element_datas=[MATRIX4_IDENTITY, ])

        # scene constants uniform buffer (binding points 0..6 below)
        program = self.scene_constants_material.get_program()

        self.uniform_scene_data = np.zeros(1, dtype=[('TIME', np.float32),
                                                     ('JITTER_FRAME', np.float32),
                                                     ('RENDER_SSR', np.int32),
                                                     ('RENDER_SSAO', np.int32),
                                                     ('SCREEN_SIZE', np.float32, 2),
                                                     ('BACKBUFFER_SIZE', np.float32, 2),
                                                     ('MOUSE_POS', np.float32, 2),
                                                     ('DELTA_TIME', np.float32),
                                                     ('SCENE_DUMMY_0', np.int32)])
        self.uniform_scene_buffer = UniformBlock("scene_constants", program, 0, self.uniform_scene_data)

        self.uniform_view_data = np.zeros(1, dtype=[('VIEW', np.float32, (4, 4)),
                                                    ('INV_VIEW', np.float32, (4, 4)),
                                                    ('VIEW_ORIGIN', np.float32, (4, 4)),
                                                    ('INV_VIEW_ORIGIN', np.float32, (4, 4)),
                                                    ('PROJECTION', np.float32, (4, 4)),
                                                    ('INV_PROJECTION', np.float32, (4, 4)),
                                                    ('CAMERA_POSITION', np.float32, 3),
                                                    ('VIEW_DUMMY_0', np.float32),
                                                    ('NEAR_FAR', np.float32, 2),
                                                    ('JITTER_DELTA', np.float32, 2),
                                                    ('JITTER_OFFSET', np.float32, 2),
                                                    ('VIEWCONSTANTS_DUMMY0', np.float32, 2)])
        self.uniform_view_buffer = UniformBlock("view_constants", program, 1, self.uniform_view_data)

        self.uniform_view_projection_data = np.zeros(1, dtype=[('VIEW_PROJECTION', np.float32, (4, 4)),
                                                               ('PREV_VIEW_PROJECTION', np.float32, (4, 4))])
        self.uniform_view_projection_buffer = UniformBlock("view_projection", program, 2,
                                                           self.uniform_view_projection_data)

        self.uniform_light_data = np.zeros(1, dtype=[('SHADOW_MATRIX', np.float32, (4, 4)),
                                                     ('LIGHT_POSITION', np.float32, 3),
                                                     ('SHADOW_EXP', np.float32),
                                                     ('LIGHT_DIRECTION', np.float32, 3),
                                                     ('SHADOW_BIAS', np.float32),
                                                     ('LIGHT_COLOR', np.float32, 3),
                                                     ('SHADOW_SAMPLES', np.int32)])
        self.uniform_light_buffer = UniformBlock("light_constants", program, 3, self.uniform_light_data)

        self.uniform_point_light_data = np.zeros(MAX_POINT_LIGHTS, dtype=[('color', np.float32, 3),
                                                                          ('radius', np.float32),
                                                                          ('pos', np.float32, 3),
                                                                          ('render', np.float32)])
        self.uniform_point_light_buffer = UniformBlock("point_light_constants", program, 4, self.uniform_point_light_data)

        self.uniform_particle_common_data = np.zeros(1, dtype=[
            ('PARTICLE_COLOR', np.float32, 3),
            ('PARTICLE_ALIGN_MODE', np.int32),
            ('PARTICLE_CELL_COUNT', np.int32, 2),
            ('PARTICLE_BLEND_MODE', np.int32),
            ('PARTICLE_COMMON_DUMMY_0', np.int32)
        ])
        self.uniform_particle_common_buffer = UniformBlock("particle_common", program, 5, self.uniform_particle_common_data)

        self.uniform_particle_infos_data = np.zeros(1, dtype=[
            ('PARTICLE_PARENT_MATRIX', np.float32, (4, 4)),
            ('PARTICLE_DELAY', np.float32, 2),
            ('PARTICLE_LIFE_TIME', np.float32, 2),
            ('PARTICLE_TRANSFORM_ROTATION_MIN', np.float32, 3),
            ('PARTICLE_FADE_IN', np.float32),
            ('PARTICLE_TRANSFORM_ROTATION_MAX', np.float32, 3),
            ('PARTICLE_FADE_OUT', np.float32),
            ('PARTICLE_TRANSFORM_SCALE_MIN', np.float32, 3),
            ('PARTICLE_OPACITY', np.float32),
            ('PARTICLE_TRANSFORM_SCALE_MAX', np.float32, 3),
            ('PARTICLE_ENABLE_VECTOR_FIELD', np.int32),
            ('PARTICLE_VELOCITY_POSITION_MIN', np.float32, 3),
            ('PARTICLE_VECTOR_FIELD_STRENGTH', np.float32),
            ('PARTICLE_VELOCITY_POSITION_MAX', np.float32, 3),
            ('PARTICLE_VECTOR_FIELD_TIGHTNESS', np.float32),
            ('PARTICLE_VELOCITY_ROTATION_MIN', np.float32, 3),
            ('PARTICLE_MAX_COUNT', np.uint32),
            ('PARTICLE_VELOCITY_ROTATION_MAX', np.float32, 3),
            ('PARTICLE_SPAWN_COUNT', np.uint32),
            ('PARTICLE_VELOCITY_SCALE_MIN', np.float32, 3),
            ('PARTICLE_VELOCITY_STRETCH', np.float32),
            ('PARTICLE_VELOCITY_SCALE_MAX', np.float32, 3),
            ('PARTICLE_VELOCITY_ACCELERATION', np.float32),
            ('PARTICLE_VECTOR_FIELD_MATRIX', np.float32, (4, 4)),
            ('PARTICLE_VECTOR_FIELD_INV_MATRIX', np.float32, (4, 4)),
            ('PARTICLE_SPAWN_VOLUME_INFO', np.float32, 3),
            ('PARTICLE_SPAWN_VOLUME_TYPE', np.uint32),
            ('PARTICLE_SPAWN_VOLUME_MATRIX', np.float32, (4, 4)),
            ('PARTICLE_VELOCITY_LIMIT', np.float32, 2),
            ('PARTICLE_FORCE_GRAVITY', np.float32),
            ('PARTICLE_PLAY_SPEED', np.float32),
            ('PARTICLE_VELOCITY_TYPE', np.uint32),
            ('PARTICLE_FORCE_ELASTICITY', np.float32),
            ('PARTICLE_FORCE_FRICTION', np.float32),
            ('PARTICLE_DUMMY_0', np.uint32),
        ])
        self.uniform_particle_infos_buffer = UniformBlock("particle_infos", program, 6, self.uniform_particle_infos_data)

        def get_rendering_type_name(rendering_type):
            # Strip the enum class prefix, e.g. "RenderingType.FORWARD" -> "FORWARD".
            rendering_type = str(rendering_type)
            return rendering_type.split('.')[-1] if '.' in rendering_type else rendering_type

        rendering_type_list = [get_rendering_type_name(RenderingType.convert_index_to_enum(x)) for x in range(RenderingType.COUNT.value)]

        self.initialized = True

        # Send to GUI
        self.core_manager.send_rendering_type_list(rendering_type_list)
    def close(self):
        """Shutdown hook; the renderer currently has nothing to release."""
        pass
    def render_custom_translucent(self, render_custom_translucent_callback):
        """Queue a callback to be invoked during the translucent render pass."""
        self.render_custom_translucent_callbacks.append(render_custom_translucent_callback)
    def set_blend_state(self, blend_enable=True, equation=GL_FUNC_ADD, func_src=GL_SRC_ALPHA, func_dst=GL_ONE_MINUS_SRC_ALPHA):
        """Apply GL blend state, caching the previous state for rollback
        via restore_blend_state_prev().

        When disabling blending, the cached equation/func values are left
        untouched (only the enable flag changes), matching the GL calls made.
        """
        # Save the current state so restore_blend_state_prev() can roll back.
        self.blend_enable_prev = self.blend_enable
        self.blend_equation_prev = self.blend_equation
        self.blend_func_src_prev = self.blend_func_src
        self.blend_func_dst_prev = self.blend_func_dst

        self.blend_enable = blend_enable
        if blend_enable:
            self.blend_equation = equation
            self.blend_func_src = func_src
            self.blend_func_dst = func_dst
            glEnable(GL_BLEND)
            glBlendEquation(equation)
            glBlendFunc(func_src, func_dst)
        else:
            glDisable(GL_BLEND)
    def restore_blend_state_prev(self):
        """Re-apply the blend state saved by the last set_blend_state() call."""
        self.set_blend_state(self.blend_enable_prev,
                             self.blend_equation_prev,
                             self.blend_func_src_prev,
                             self.blend_func_dst_prev)
def set_view_mode(self, view_mode):
if view_mode == COMMAND.VIEWMODE_WIREFRAME:
self.view_mode = GL_LINE
elif view_mode == COMMAND.VIEWMODE_SHADING:
self.view_mode = GL_FILL
    def reset_renderer(self):
        """Rebuild projection, framebuffers, render targets and light probes,
        e.g. after a resolution or backend change, then collect garbage."""
        self.scene_manager.update_camera_projection_matrix(aspect=self.core_manager.game_backend.aspect)
        self.framebuffer_manager.clear_framebuffer()
        self.rendertarget_manager.create_rendertargets()
        self.scene_manager.reset_light_probe()
        self.core_manager.gc_collect()
    def ortho_view(self, look_at=True):
        """Set up a legacy fixed-function orthographic projection covering
        the viewport, optionally applying the camera transform."""
        glMatrixMode(GL_PROJECTION)
        glLoadIdentity()
        glOrtho(0, self.viewport.width, 0, self.viewport.height, -1, 1)
        glMatrixMode(GL_MODELVIEW)
        glLoadIdentity()

        if look_at:
            self.look_at()
    def perspective_view(self, look_at=True):
        """Set up a legacy fixed-function perspective projection from the
        main camera, optionally applying the camera transform."""
        camera = self.scene_manager.main_camera
        glMatrixMode(GL_PROJECTION)
        glLoadIdentity()
        gluPerspective(camera.fov, camera.aspect, camera.near, camera.far)
        glMatrixMode(GL_MODELVIEW)
        glLoadIdentity()

        if look_at:
            self.look_at()
    def look_at(self):
        """Apply the main camera's inverse transform to the fixed-function
        modelview matrix (inverse scale, orientation, then translation)."""
        camera = self.scene_manager.main_camera
        camera_target = -camera.transform.front
        camera_up = camera.transform.up
        # Inverse of the camera transform: scale first, then orient from the
        # origin, then translate by the negated camera position.
        glScalef(*(1.0 / camera.transform.get_scale()))
        gluLookAt(0.0, 0.0, 0.0, *camera_target, *camera_up)
        glTranslatef(*(-camera.transform.get_pos()))
def set_debug_texture(self, texture):
if texture is not None and texture is not RenderTargets.BACKBUFFER and type(texture) != RenderBuffer:
self.debug_texture = texture
self.postprocess.is_render_material_instance = False
logger.info("Current texture : %s" % self.debug_texture.name)
else:
self.debug_texture = None
    def bind_uniform_blocks(self):
        """Fill and bind the per-frame uniform blocks (scene, view, light,
        point lights) from the current camera and main light.

        Does nothing when either the main camera or main light is missing.
        """
        camera = self.scene_manager.main_camera
        main_light = self.scene_manager.main_light

        if not camera or not main_light:
            return

        # Jitter frame index cycles over a 16-sample pattern.
        frame_count = self.core_manager.frame_count % 16

        uniform_data = self.uniform_scene_data
        uniform_data['TIME'] = self.core_manager.current_time
        uniform_data['JITTER_FRAME'] = frame_count
        uniform_data['RENDER_SSR'] = self.postprocess.is_render_ssr
        uniform_data['RENDER_SSAO'] = self.postprocess.is_render_ssao
        uniform_data['SCREEN_SIZE'] = (self.core_manager.game_backend.width, self.core_manager.game_backend.height)
        uniform_data['BACKBUFFER_SIZE'] = (RenderTargets.BACKBUFFER.width, RenderTargets.BACKBUFFER.height)
        uniform_data['MOUSE_POS'] = self.core_manager.get_mouse_pos()
        uniform_data['DELTA_TIME'] = self.core_manager.delta
        self.uniform_scene_buffer.bind_uniform_block(data=uniform_data)

        uniform_data = self.uniform_view_data
        uniform_data['VIEW'][...] = camera.view
        uniform_data['INV_VIEW'][...] = camera.inv_view
        uniform_data['VIEW_ORIGIN'][...] = camera.view_origin
        uniform_data['INV_VIEW_ORIGIN'][...] = camera.inv_view_origin
        uniform_data['PROJECTION'][...] = camera.projection_jitter
        uniform_data['INV_PROJECTION'][...] = camera.inv_projection_jitter
        uniform_data['CAMERA_POSITION'][...] = camera.transform.get_pos()
        uniform_data['NEAR_FAR'][...] = (camera.near, camera.far)
        uniform_data['JITTER_DELTA'][...] = self.postprocess.jitter_delta
        uniform_data['JITTER_OFFSET'][...] = self.postprocess.jitter
        self.uniform_view_buffer.bind_uniform_block(data=uniform_data)

        uniform_data = self.uniform_light_data
        uniform_data['SHADOW_MATRIX'][...] = main_light.shadow_view_projection
        uniform_data['SHADOW_EXP'] = main_light.shadow_exp
        uniform_data['SHADOW_BIAS'] = main_light.shadow_bias
        uniform_data['SHADOW_SAMPLES'] = main_light.shadow_samples
        uniform_data['LIGHT_POSITION'][...] = main_light.transform.get_pos()
        uniform_data['LIGHT_DIRECTION'][...] = main_light.transform.front
        uniform_data['LIGHT_COLOR'][...] = main_light.light_color[:3]
        self.uniform_light_buffer.bind_uniform_block(data=uniform_data)

        self.uniform_point_light_buffer.bind_uniform_block(data=self.uniform_point_light_data)
    def render_light_probe(self, light_probe):
        """Render the scene into *light_probe*'s cube map and convolve it for image-based lighting.

        The scene is rendered twice from the probe's position into the six cube
        faces (atmosphere only, then the full scene), mipmaps are generated,
        and every mip level is convolved into a temporary cube texture that
        finally replaces the probe's texture. All camera state and render
        options touched here are saved first and restored at the end.
        Runs at most once per probe (guarded by ``isRendered``).
        """
        if light_probe.isRendered:
            return
        logger.info("Rendering Light Probe")
        # Set Valid
        light_probe.isRendered = True
        camera = self.scene_manager.main_camera
        # Save camera state and render options so they can be restored below.
        old_pos = camera.transform.get_pos().copy()
        old_rot = camera.transform.get_rotation().copy()
        old_fov = camera.fov
        old_aspect = camera.aspect
        old_render_font = RenderOption.RENDER_FONT
        old_render_skeleton = RenderOption.RENDER_SKELETON_ACTOR
        old_render_effect = RenderOption.RENDER_EFFECT
        old_render_collision = RenderOption.RENDER_COLLISION
        # NOTE(review): is_render_ssr is saved and restored but never disabled
        # in this method — confirm that is intended.
        old_render_ssr = self.postprocess.is_render_ssr
        old_render_motion_blur = self.postprocess.is_render_motion_blur
        old_antialiasing = self.postprocess.anti_aliasing
        old_debug_absolute = self.postprocess.debug_absolute
        old_debug_mipmap = self.postprocess.debug_mipmap
        old_debug_intensity_min = self.postprocess.debug_intensity_min
        old_debug_intensity_max = self.postprocess.debug_intensity_max
        # set render light probe
        RenderOption.RENDER_LIGHT_PROBE = True
        RenderOption.RENDER_SKELETON_ACTOR = False
        RenderOption.RENDER_EFFECT = False
        RenderOption.RENDER_FONT = False
        self.postprocess.is_render_motion_blur = False
        self.postprocess.anti_aliasing = AntiAliasing.NONE_AA
        # 90-degree FOV with square aspect covers exactly one cube face per render.
        camera.update_projection(fov=90.0, aspect=1.0)
        def render_cube_face(dst_texture, target_face, pos, rotation):
            # Point the camera at one cube face, render the scene into the HDR
            # target, then blit the result into dst_texture's target_face.
            camera.transform.set_pos(pos)
            camera.transform.set_rotation(rotation)
            camera.update(force_update=True)
            # render
            self.render_scene()
            # copy
            src_framebuffer = self.framebuffer_manager.get_framebuffer(RenderTargets.HDR)
            self.framebuffer_manager.bind_framebuffer(dst_texture, target_face=target_face)
            glClear(GL_COLOR_BUFFER_BIT)
            self.framebuffer_manager.mirror_framebuffer(src_framebuffer)
            return dst_texture
        target_faces = [GL_TEXTURE_CUBE_MAP_POSITIVE_X,
                        GL_TEXTURE_CUBE_MAP_NEGATIVE_X,
                        GL_TEXTURE_CUBE_MAP_POSITIVE_Y,
                        GL_TEXTURE_CUBE_MAP_NEGATIVE_Y,
                        GL_TEXTURE_CUBE_MAP_POSITIVE_Z,
                        GL_TEXTURE_CUBE_MAP_NEGATIVE_Z]
        pos = light_probe.transform.get_pos()
        # Camera euler rotations matching the face order in target_faces above.
        camera_rotations = [[0.0, math.pi * 1.5, 0.0],
                            [0.0, math.pi * 0.5, 0.0],
                            [math.pi * -0.5, math.pi * 1.0, 0.0],
                            [math.pi * 0.5, math.pi * 1.0, 0.0],
                            [0.0, math.pi * 1.0, 0.0],
                            [0.0, 0.0, 0.0]]
        # render atmosphere scene to light_probe textures.
        RenderOption.RENDER_ONLY_ATMOSPHERE = True
        texture_cube = RenderTargets.LIGHT_PROBE_ATMOSPHERE
        for i in range(6):
            render_cube_face(texture_cube, target_faces[i], pos, camera_rotations[i])
        texture_cube.generate_mipmap()
        # render final scene to temp textures.
        RenderOption.RENDER_ONLY_ATMOSPHERE = False
        texture_cube = light_probe.texture_probe
        for i in range(6):
            render_cube_face(texture_cube, target_faces[i], pos, camera_rotations[i])
        texture_cube.generate_mipmap()
        # convolution
        # Clone the probe texture layout for the convolution output.
        texture_info = light_probe.texture_probe.get_texture_info()
        texture_info['name'] = 'temp_cube'
        temp_cube = CreateTexture(**texture_info)
        mipmap_count = temp_cube.get_mipmap_count()
        # Per-face orientation matrices used by the convolution shader.
        face_matrixies = [np.array([[0, 0, 1, 0], [0, 1, 0, 0], [-1, 0, 0, 0], [0, 0, 0, 1]], dtype=np.float32),
                          np.array([[0, 0, -1, 0], [0, 1, 0, 0], [1, 0, 0, 0], [0, 0, 0, 1]], dtype=np.float32),
                          np.array([[1, 0, 0, 0], [0, 0, 1, 0], [0, -1, 0, 0], [0, 0, 0, 1]], dtype=np.float32),
                          np.array([[1, 0, 0, 0], [0, 0, -1, 0], [0, 1, 0, 0], [0, 0, 0, 1]], dtype=np.float32),
                          np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], dtype=np.float32),
                          np.array([[-1, 0, 0, 0], [0, 1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]], dtype=np.float32)]
        convolve_environment = self.resource_manager.get_material_instance('convolve_environment')
        convolve_environment.use_program()
        # Convolve every face at every mip level into temp_cube.
        for i in range(6):
            for lod in range(mipmap_count):
                self.framebuffer_manager.bind_framebuffer(temp_cube, target_face=target_faces[i], target_level=lod)
                glClear(GL_COLOR_BUFFER_BIT)
                convolve_environment.bind_uniform_data("texture_environment", texture_cube)
                convolve_environment.bind_uniform_data("face_matrix", face_matrixies[i])
                convolve_environment.bind_uniform_data("lod", float(lod))
                convolve_environment.bind_uniform_data("mipmap_count", float(mipmap_count))
                self.postprocess.draw_elements()
        light_probe.replace_texture_probe(temp_cube)
        self.rendertarget_manager.get_temporary('temp_cube', light_probe.texture_probe)
        # Restore render options and camera state saved above.
        RenderOption.RENDER_LIGHT_PROBE = False
        RenderOption.RENDER_SKELETON_ACTOR = old_render_skeleton
        RenderOption.RENDER_EFFECT = old_render_effect
        RenderOption.RENDER_FONT = old_render_font
        RenderOption.RENDER_COLLISION = old_render_collision
        self.postprocess.is_render_ssr = old_render_ssr
        self.postprocess.is_render_motion_blur = old_render_motion_blur
        self.postprocess.anti_aliasing = old_antialiasing
        self.postprocess.debug_absolute = old_debug_absolute
        self.postprocess.debug_mipmap = old_debug_mipmap
        self.postprocess.debug_intensity_min = old_debug_intensity_min
        self.postprocess.debug_intensity_max = old_debug_intensity_max
        camera.update_projection(old_fov, old_aspect)
        camera.transform.set_pos(old_pos)
        camera.transform.set_rotation(old_rot)
        camera.update(force_update=True)
    def render_gbuffer(self):
        """Fill the G-Buffer (diffuse/material/world-normal + depth) and the velocity target.

        Terrain and static actors are rendered first, then a screen-space
        velocity pass for static geometry, and finally skeletal actors, which
        write velocity directly as an extra G-Buffer attachment.
        """
        self.framebuffer_manager.bind_framebuffer(RenderTargets.DIFFUSE,
                                                  RenderTargets.MATERIAL,
                                                  RenderTargets.WORLD_NORMAL,
                                                  depth_texture=RenderTargets.DEPTH)
        glClearColor(0.0, 0.0, 0.0, 1.0)
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        # render terrain
        if self.scene_manager.terrain.is_render_terrain:
            self.scene_manager.terrain.render_terrain(RenderMode.GBUFFER)
        # render static actor
        if RenderOption.RENDER_STATIC_ACTOR:
            self.render_actors(RenderGroup.STATIC_ACTOR,
                               RenderMode.GBUFFER,
                               self.scene_manager.static_solid_render_infos)
        # render velocity
        self.framebuffer_manager.bind_framebuffer(RenderTargets.VELOCITY)
        glClear(GL_COLOR_BUFFER_BIT)
        if RenderOption.RENDER_STATIC_ACTOR:
            # Static-geometry velocity is computed from the depth buffer.
            self.postprocess.render_velocity(RenderTargets.DEPTH)
        # render skeletal actor gbuffer
        if RenderOption.RENDER_SKELETON_ACTOR:
            self.framebuffer_manager.bind_framebuffer(RenderTargets.DIFFUSE,
                                                      RenderTargets.MATERIAL,
                                                      RenderTargets.WORLD_NORMAL,
                                                      RenderTargets.VELOCITY,
                                                      depth_texture=RenderTargets.DEPTH)
            self.render_actors(RenderGroup.SKELETON_ACTOR,
                               RenderMode.GBUFFER,
                               self.scene_manager.skeleton_solid_render_infos)
    def render_shadow(self):
        """Render static and dynamic (skeletal) shadow maps from the main light, then composite them.

        The view-projection uniform block is temporarily loaded with the
        light's shadow view-projection for the depth-only passes; callers are
        expected to rebind the camera matrices afterwards (see render_scene).
        """
        light = self.scene_manager.main_light
        self.uniform_view_projection_data['VIEW_PROJECTION'][...] = light.shadow_view_projection
        self.uniform_view_projection_data['PREV_VIEW_PROJECTION'][...] = light.shadow_view_projection
        self.uniform_view_projection_buffer.bind_uniform_block(data=self.uniform_view_projection_data)
        # static shadow
        self.framebuffer_manager.bind_framebuffer(depth_texture=RenderTargets.STATIC_SHADOWMAP)
        glClear(GL_DEPTH_BUFFER_BIT)
        glFrontFace(GL_CCW)
        if self.scene_manager.terrain.is_render_terrain:
            self.scene_manager.terrain.render_terrain(RenderMode.SHADOW)
        if RenderOption.RENDER_STATIC_ACTOR:
            self.render_actors(RenderGroup.STATIC_ACTOR, RenderMode.SHADOW, self.scene_manager.static_shadow_render_infos, self.shadowmap_material)
        # dynamic shadow (skeletal actors)
        self.framebuffer_manager.bind_framebuffer(depth_texture=RenderTargets.DYNAMIC_SHADOWMAP)
        glClear(GL_DEPTH_BUFFER_BIT)
        glFrontFace(GL_CCW)
        if RenderOption.RENDER_SKELETON_ACTOR:
            self.render_actors(RenderGroup.SKELETON_ACTOR, RenderMode.SHADOW, self.scene_manager.skeleton_shadow_render_infos, self.shadowmap_skeletal_material)
        # composite shadow maps into a single target
        self.framebuffer_manager.bind_framebuffer(RenderTargets.COMPOSITE_SHADOWMAP)
        glClearColor(1.0, 1.0, 1.0, 1.0)
        glClear(GL_COLOR_BUFFER_BIT)
        glDisable(GL_CULL_FACE)
        self.postprocess.render_composite_shadowmap(RenderTargets.STATIC_SHADOWMAP, RenderTargets.DYNAMIC_SHADOWMAP)
    def render_preprocess(self):
        """Run screen-space preprocessing passes: linear depth, SSR (optional) and SSAO (optional)."""
        # Linear depth
        self.framebuffer_manager.bind_framebuffer(RenderTargets.LINEAR_DEPTH)
        glClearColor(1.0, 1.0, 1.0, 1.0)
        glClear(GL_COLOR_BUFFER_BIT)
        self.postprocess.render_linear_depth(RenderTargets.DEPTH, RenderTargets.LINEAR_DEPTH)
        # Screen Space Reflection
        if self.postprocess.is_render_ssr:
            self.framebuffer_manager.bind_framebuffer(RenderTargets.SCREEN_SPACE_REFLECTION)
            glClearColor(0.0, 0.0, 0.0, 0.0)
            glClear(GL_COLOR_BUFFER_BIT)
            self.postprocess.render_screen_space_reflection(RenderTargets.HDR,
                                                            RenderTargets.WORLD_NORMAL,
                                                            RenderTargets.MATERIAL,
                                                            RenderTargets.VELOCITY,
                                                            RenderTargets.LINEAR_DEPTH)
            # swap ssr resolve textures (ping-pong: last frame's resolve feeds this frame's)
            RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED, RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED_PREV = \
                RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED_PREV, RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED
            self.framebuffer_manager.bind_framebuffer(RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED)
            glClearColor(0.0, 0.0, 0.0, 0.0)
            glClear(GL_COLOR_BUFFER_BIT)
            self.postprocess.render_screen_space_reflection_resolve(RenderTargets.SCREEN_SPACE_REFLECTION,
                                                                    RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED_PREV,
                                                                    RenderTargets.VELOCITY)
        # SSAO
        if self.postprocess.is_render_ssao:
            temp_ssao = self.rendertarget_manager.get_temporary('temp_ssao', RenderTargets.SSAO)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.SSAO)
            glClearColor(1.0, 1.0, 1.0, 1.0)
            glClear(GL_COLOR_BUFFER_BIT)
            self.postprocess.render_ssao(texture_size=(RenderTargets.SSAO.width, RenderTargets.SSAO.height),
                                         texture_lod=self.rendertarget_manager.texture_lod_in_ssao,
                                         texture_normal=RenderTargets.WORLD_NORMAL,
                                         texture_linear_depth=RenderTargets.LINEAR_DEPTH)
            # Blur the raw SSAO result using a temporary target.
            self.postprocess.render_gaussian_blur(RenderTargets.SSAO, temp_ssao)
def render_solid(self):
if RenderingType.DEFERRED_RENDERING == self.render_option_manager.rendering_type:
self.postprocess.render_deferred_shading(self.scene_manager.get_light_probe_texture(),
self.scene_manager.atmosphere)
elif RenderingType.FORWARD_RENDERING == self.render_option_manager.rendering_type:
self.render_actors(RenderGroup.STATIC_ACTOR,
RenderMode.FORWARD_SHADING,
self.scene_manager.static_solid_render_infos)
self.render_actors(RenderGroup.SKELETON_ACTOR,
RenderMode.FORWARD_SHADING,
self.scene_manager.skeleton_solid_render_infos)
def render_translucent(self):
self.render_actors(RenderGroup.STATIC_ACTOR,
RenderMode.FORWARD_SHADING,
self.scene_manager.static_translucent_render_infos)
self.render_actors(RenderGroup.SKELETON_ACTOR,
RenderMode.FORWARD_SHADING,
self.scene_manager.skeleton_translucent_render_infos)
for render_custom_translucent_callback in self.render_custom_translucent_callbacks:
render_custom_translucent_callback()
self.render_custom_translucent_callbacks.clear()
def render_effect(self):
self.scene_manager.effect_manager.render()
    def render_actors(self, render_group, render_mode, render_infos, scene_material_instance=None):
        """Draw a list of render-info entries for one render group in the given mode.

        :param render_group: RenderGroup.STATIC_ACTOR or RenderGroup.SKELETON_ACTOR.
        :param render_mode: RenderMode selecting which uniforms are bound
            (GBUFFER, FORWARD_SHADING, SHADOW, OBJECT_ID, GIZMO, SELECTED_OBJECT).
        :param render_infos: iterable of objects carrying actor, geometry,
            material and material_instance.
        :param scene_material_instance: optional pass-wide material (e.g. the
            shadow or object-id materials); when given it is bound once up front
            and takes precedence over each actor's own material instance.

        Consecutive entries sharing material / material-instance / actor skip
        the corresponding rebinds, so render_infos should be pre-sorted for
        best batching.
        """
        if len(render_infos) < 1:
            return
        last_actor = None
        last_actor_material = None
        last_actor_material_instance = None
        if scene_material_instance is not None:
            scene_material_instance.use_program()
            scene_material_instance.bind_material_instance()
        # render
        for render_info in render_infos:
            actor = render_info.actor
            geometry = render_info.geometry
            actor_material = render_info.material
            actor_material_instance = render_info.material_instance
            is_instancing = actor.is_instancing()
            if RenderMode.GBUFFER == render_mode or RenderMode.FORWARD_SHADING == render_mode:
                # Bind the actor's own material/material instance, skipping
                # redundant binds when consecutive infos share them.
                if last_actor_material != actor_material and actor_material is not None:
                    actor_material.use_program()
                if last_actor_material_instance != actor_material_instance and actor_material_instance is not None:
                    actor_material_instance.bind_material_instance()
                    actor_material_instance.bind_uniform_data('is_render_gbuffer', RenderMode.GBUFFER == render_mode)
                    if RenderMode.FORWARD_SHADING == render_mode:
                        actor_material_instance.bind_uniform_data('texture_probe', self.scene_manager.get_light_probe_texture())
                        actor_material_instance.bind_uniform_data('texture_shadow', RenderTargets.COMPOSITE_SHADOWMAP)
                        actor_material_instance.bind_uniform_data('texture_ssao', RenderTargets.SSAO)
                        actor_material_instance.bind_uniform_data('texture_scene_reflect', RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED)
                        # Bind Atmosphere
                        self.scene_manager.atmosphere.bind_precomputed_atmosphere(actor_material_instance)
            elif RenderMode.SHADOW == render_mode:
                if last_actor_material_instance != actor_material_instance and actor_material_instance is not None:
                    # get diffuse texture from actor material instance
                    data_diffuse = actor_material_instance.get_uniform_data('texture_diffuse')
                    scene_material_instance.bind_uniform_data('texture_diffuse', data_diffuse)
            if last_actor != actor:
                # Per-actor uniforms: picking id / gizmo color, transform,
                # instancing flag and (for skeletal actors) bone palettes.
                material_instance = scene_material_instance or actor_material_instance
                if RenderMode.OBJECT_ID == render_mode:
                    material_instance.bind_uniform_data('object_id', actor.get_object_id())
                elif RenderMode.GIZMO == render_mode:
                    material_instance.bind_uniform_data('color', actor.get_object_color())
                material_instance.bind_uniform_data('is_instancing', is_instancing)
                material_instance.bind_uniform_data('model', actor.transform.matrix)
                if render_group == RenderGroup.SKELETON_ACTOR:
                    animation_buffer = actor.get_animation_buffer(geometry.skeleton.index)
                    prev_animation_buffer = actor.get_prev_animation_buffer(geometry.skeleton.index)
                    material_instance.bind_uniform_data('bone_matrices', animation_buffer, num=len(animation_buffer))
                    material_instance.bind_uniform_data('prev_bone_matrices', prev_animation_buffer, num=len(prev_animation_buffer))
            # draw
            if is_instancing:
                geometry.draw_elements_instanced(actor.get_instance_render_count(), self.actor_instance_buffer, [actor.instance_matrix, ])
            else:
                geometry.draw_elements()
            last_actor = actor
            last_actor_material = actor_material
            last_actor_material_instance = actor_material_instance
    def render_selected_object(self):
        """Draw a highlight mask for the currently selected object and composite it over the backbuffer.

        The selection (skeletal actor, static actor or 3D spline) is rendered
        into TEMP_RGBA8, then alpha-blended onto BACKBUFFER with the composite
        material. Unsupported selection types are ignored.
        """
        selected_object = self.scene_manager.get_selected_object()
        if selected_object is not None:
            self.framebuffer_manager.bind_framebuffer(RenderTargets.TEMP_RGBA8)
            glDisable(GL_DEPTH_TEST)
            glDepthMask(False)
            glClearColor(0.0, 0.0, 0.0, 0.0)
            glClear(GL_COLOR_BUFFER_BIT)
            self.set_blend_state(False)
            object_type = type(selected_object)
            if SkeletonActor == object_type and RenderOption.RENDER_SKELETON_ACTOR:
                self.render_actors(RenderGroup.SKELETON_ACTOR,
                                   RenderMode.SELECTED_OBJECT,
                                   self.scene_manager.selected_object_render_info,
                                   self.selcted_skeletal_object_material)
            elif StaticActor == object_type and RenderOption.RENDER_STATIC_ACTOR:
                self.render_actors(RenderGroup.STATIC_ACTOR,
                                   RenderMode.SELECTED_OBJECT,
                                   self.scene_manager.selected_object_render_info,
                                   self.selcted_static_object_material)
            elif Spline3D == object_type:
                self.debug_line_manager.bind_render_spline_program()
                self.debug_line_manager.render_spline(selected_object, Float4(1.0, 1.0, 1.0, 1.0))
            else:
                # Unsupported selection type: nothing to highlight.
                return
            # composite
            self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
            self.selcted_object_composite_material.use_program()
            self.selcted_object_composite_material.bind_uniform_data("texture_mask", RenderTargets.TEMP_RGBA8)
            self.postprocess.draw_elements()
    def render_axis_gizmo(self, render_mode):
        """Draw the transform-axis gizmo for the currently selected object.

        :param render_mode: RenderMode.GIZMO (colored display) or
            RenderMode.OBJECT_ID (id pass for mouse picking).

        NOTE(review): for any other render_mode, material_instance stays None
        and use_program() below would raise AttributeError — callers in this
        file only pass the two modes above.
        """
        if self.scene_manager.get_selected_object() is not None:
            axis_gizmo_actor = self.scene_manager.get_axis_gizmo()
            material_instance = None
            if RenderMode.GIZMO == render_mode:
                material_instance = self.render_color_material
            elif RenderMode.OBJECT_ID == render_mode:
                material_instance = self.static_object_id_material
            material_instance.use_program()
            material_instance.bind_uniform_data('is_instancing', False)
            material_instance.bind_uniform_data('model', axis_gizmo_actor.transform.matrix)
            geometries = axis_gizmo_actor.get_geometries()
            for i, geometry in enumerate(geometries):
                # Per-axis color (gizmo view) or per-axis object id (picking).
                if RenderMode.GIZMO == render_mode:
                    material_instance.bind_uniform_data('color', axis_gizmo_actor.get_object_color(i))
                elif RenderMode.OBJECT_ID == render_mode:
                    material_instance.bind_uniform_data('object_id', axis_gizmo_actor.get_object_id(i))
                geometry.draw_elements()
    def render_object_id(self):
        """Render per-pixel object ids into the OBJECT_ID target for mouse picking.

        Covers static/skeletal actors (solid and translucent), splines, spline
        gizmos and finally the transform-axis gizmo, which is drawn after a
        depth clear so it always wins the picking test.
        """
        self.framebuffer_manager.bind_framebuffer(RenderTargets.OBJECT_ID, depth_texture=RenderTargets.OBJECT_ID_DEPTH)
        glDisable(GL_CULL_FACE)
        glEnable(GL_DEPTH_TEST)
        glDepthMask(True)
        glClearColor(0.0, 0.0, 0.0, 0.0)
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        self.set_blend_state(False)
        # render static actor object id
        if RenderOption.RENDER_STATIC_ACTOR:
            self.render_actors(RenderGroup.STATIC_ACTOR,
                               RenderMode.OBJECT_ID,
                               self.scene_manager.static_solid_render_infos,
                               self.static_object_id_material)
            self.render_actors(RenderGroup.STATIC_ACTOR,
                               RenderMode.OBJECT_ID,
                               self.scene_manager.static_translucent_render_infos,
                               self.static_object_id_material)
        # render skeletal actor object id
        if RenderOption.RENDER_SKELETON_ACTOR:
            self.render_actors(RenderGroup.SKELETON_ACTOR,
                               RenderMode.OBJECT_ID,
                               self.scene_manager.skeleton_solid_render_infos,
                               self.skeletal_object_id_material)
            self.render_actors(RenderGroup.SKELETON_ACTOR,
                               RenderMode.OBJECT_ID,
                               self.scene_manager.skeleton_translucent_render_infos,
                               self.skeletal_object_id_material)
        # spline object id (id encoded in the line color, widened to ease picking)
        self.debug_line_manager.bind_render_spline_program()
        for spline in self.scene_manager.splines:
            object_id = spline.get_object_id()
            self.debug_line_manager.render_spline(spline, Float4(object_id, object_id, object_id, 1.0), add_width=10.0)
        # spline gizmo object id
        self.render_actors(RenderGroup.STATIC_ACTOR,
                           RenderMode.OBJECT_ID,
                           self.scene_manager.spline_gizmo_render_infos,
                           self.static_object_id_material)
        # gizmo object id
        glClear(GL_DEPTH_BUFFER_BIT)
        self.render_axis_gizmo(RenderMode.OBJECT_ID)
    def render_heightmap(self, actor):
        """Bake *actor*'s geometry into TEMP_HEIGHT_MAP as a height field.

        Uses GL_MAX blending so overlapping geometry keeps the highest value;
        generates a max-Z mip chain afterwards when the target has mipmaps.
        """
        self.framebuffer_manager.bind_framebuffer(RenderTargets.TEMP_HEIGHT_MAP)
        self.set_blend_state(blend_enable=True, equation=GL_MAX, func_src=GL_ONE, func_dst=GL_ONE)
        glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
        glDisable(GL_CULL_FACE)
        glDisable(GL_DEPTH_TEST)
        glClearColor(0.0, 0.0, 0.0, 1.0)
        self.render_heightmap_material.use_program()
        self.render_heightmap_material.bind_material_instance()
        self.render_heightmap_material.bind_uniform_data('model', actor.transform.matrix)
        # Bounding box lets the shader normalize heights into the target range.
        self.render_heightmap_material.bind_uniform_data('bound_box_min', actor.bound_box.bound_min)
        self.render_heightmap_material.bind_uniform_data('bound_box_max', actor.bound_box.bound_max)
        actor.get_geometry(0).draw_elements()
        if RenderTargets.TEMP_HEIGHT_MAP.enable_mipmap:
            self.postprocess.render_generate_max_z(RenderTargets.TEMP_HEIGHT_MAP)
    def render_bones(self):
        """Debug-draw skeleton bones of static actors as stretched cube segments.

        Recursively walks each skeleton hierarchy, drawing one segment per
        parent/child bone pair using either the current animation pose or the
        bind pose when the mesh has no animation frames.
        """
        glDisable(GL_DEPTH_TEST)
        glDisable(GL_CULL_FACE)
        mesh = self.resource_manager.get_mesh("Cube")
        static_actors = self.scene_manager.static_actors[:]
        if mesh and self.debug_bone_material:
            material_instance = self.debug_bone_material
            material_instance.use_program()
            material_instance.bind()
            # NOTE: draw_bone reads `frame` from the enclosing scope; it is
            # assigned in the actor loop below before draw_bone is invoked.
            def draw_bone(mesh, skeleton_mesh, parent_matrix, material_instance, bone, root_matrix, isAnimation):
                if isAnimation:
                    bone_transform = skeleton_mesh.get_animation_transform(bone.name, frame)
                else:
                    # Bind pose: invert the stored inverse-bind matrix.
                    bone_transform = np.linalg.inv(bone.inv_bind_matrix)
                if bone.children:
                    for child_bone in bone.children:
                        if isAnimation:
                            bone_transform = skeleton_mesh.get_animation_transform(bone.name, frame)
                            child_transform = skeleton_mesh.get_animation_transform(child_bone.name, frame)
                        else:
                            bone_transform = np.linalg.inv(bone.inv_bind_matrix)
                            child_transform = np.linalg.inv(child_bone.inv_bind_matrix)
                        # Segment endpoints: this bone and its child, both in root space.
                        material_instance.bind_uniform_data("mat1", np.dot(bone_transform, root_matrix))
                        material_instance.bind_uniform_data("mat2", np.dot(child_transform, root_matrix))
                        mesh.draw_elements()
                        draw_bone(mesh, skeleton_mesh, bone_transform.copy(), material_instance, child_bone, root_matrix, isAnimation)
                else:
                    # Leaf bone: synthesize an endpoint by offsetting the
                    # translation row by the matrix's second row.
                    material_instance.bind_uniform_data("mat1", np.dot(bone_transform, root_matrix))
                    child_transform = np.dot(bone_transform, root_matrix)
                    child_transform[3, :] += child_transform[1, :]
                    material_instance.bind_uniform_data("mat2", child_transform)
                    mesh.draw_elements()
            for static_actor in static_actors:
                if static_actor.model and static_actor.model.mesh and static_actor.model.mesh.skeletons:
                    skeletons = static_actor.model.mesh.skeletons
                    skeleton_mesh = static_actor.model.mesh
                    frame_count = skeleton_mesh.get_animation_frame_count()
                    # Loop the animation at 30 fps; fall back to frame 0 when unanimated.
                    frame = math.fmod(self.core_manager.current_time * 30.0, frame_count) if frame_count > 0.0 else 0.0
                    isAnimation = frame_count > 0.0
                    for skeleton in skeletons:
                        matrix = static_actor.transform.matrix
                        for bone in skeleton.hierachy:
                            draw_bone(mesh, skeleton_mesh, Matrix4().copy(), material_instance, bone, matrix, isAnimation)
    def render_postprocess(self):
        """Apply the post-process chain: TAA, bloom, light shaft, depth of field, tone map, MSAA resolve, motion blur."""
        # bind frame buffer
        self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
        # copy HDR target
        src_framebuffer = self.framebuffer_manager.get_framebuffer(RenderTargets.HDR)
        self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR_TEMP)
        glClear(GL_COLOR_BUFFER_BIT)
        self.framebuffer_manager.copy_framebuffer(src_framebuffer)
        # Temporal AA
        if AntiAliasing.TAA == self.postprocess.anti_aliasing:
            self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
            glClear(GL_COLOR_BUFFER_BIT)
            self.postprocess.render_temporal_antialiasing(RenderTargets.HDR_TEMP,
                                                          RenderTargets.TAA_RESOLVE,
                                                          RenderTargets.VELOCITY)
            # Keep a copy of the anti-aliased frame as next frame's TAA history.
            src_framebuffer = self.framebuffer_manager.get_framebuffer(RenderTargets.HDR)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.TAA_RESOLVE)
            glClear(GL_COLOR_BUFFER_BIT)
            self.framebuffer_manager.copy_framebuffer(src_framebuffer)
        # Bloom
        if self.postprocess.is_render_bloom:
            self.postprocess.render_bloom(RenderTargets.HDR)
        # Light Shaft
        if self.postprocess.is_render_light_shaft:
            self.framebuffer_manager.bind_framebuffer(RenderTargets.LIGHT_SHAFT)
            self.postprocess.render_light_shaft(RenderTargets.ATMOSPHERE, RenderTargets.DEPTH)
        # Depth Of Field
        if self.postprocess.is_render_depth_of_field:
            self.postprocess.render_depth_of_field()
        self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
        RenderTargets.HDR.generate_mipmap()
        # Tone Map
        self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
        glClear(GL_COLOR_BUFFER_BIT)
        self.postprocess.render_tone_map(RenderTargets.HDR,
                                         RenderTargets.BLOOM_0,
                                         RenderTargets.BLOOM_1,
                                         RenderTargets.BLOOM_2,
                                         RenderTargets.BLOOM_3,
                                         RenderTargets.BLOOM_4,
                                         RenderTargets.LIGHT_SHAFT)
        # MSAA Test
        if AntiAliasing.MSAA == self.postprocess.anti_aliasing:
            src_framebuffer = self.framebuffer_manager.get_framebuffer(RenderTargets.BACKBUFFER)
            # NOTE(review): this glClear runs before HDR is bound, so it clears
            # the previously bound framebuffer — confirm the intended target.
            glClear(GL_COLOR_BUFFER_BIT)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
            # resolve MSAA
            self.framebuffer_manager.copy_framebuffer(src_framebuffer)
        # Motion Blur
        if self.postprocess.is_render_motion_blur:
            backbuffer_copy = self.rendertarget_manager.get_temporary('backbuffer_copy', RenderTargets.BACKBUFFER)
            self.framebuffer_manager.bind_framebuffer(backbuffer_copy)
            glClear(GL_COLOR_BUFFER_BIT)
            self.postprocess.render_motion_blur(RenderTargets.VELOCITY, RenderTargets.BACKBUFFER)
            # copy to backbuffer
            src_framebuffer = self.framebuffer_manager.get_framebuffer(backbuffer_copy)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
            glClear(GL_COLOR_BUFFER_BIT)
            self.framebuffer_manager.copy_framebuffer(src_framebuffer)
def render_log(self):
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
self.font_manager.render_log(self.viewport.width, self.viewport.height)
def render_text(self, text_render_data, offset_x, offset_y, canvas_width, canvas_height):
if 0 < text_render_data.render_count:
self.font_shader.use_program()
self.font_shader.bind_material_instance()
self.font_shader.bind_uniform_data("texture_font", text_render_data.font_data.texture)
self.font_shader.bind_uniform_data("font_size", text_render_data.font_size)
self.font_shader.bind_uniform_data("offset", (offset_x, offset_y))
self.font_shader.bind_uniform_data("inv_canvas_size", (1.0 / canvas_width, 1.0 / canvas_height))
self.font_shader.bind_uniform_data("count_of_side", text_render_data.font_data.count_of_side)
self.postprocess.draw_elements_instanced(text_render_data.render_count, self.font_instance_buffer, [text_render_data.render_queue, ])
def render_axis(self):
camera = self.scene_manager.main_camera
line_thickness = 2.0
line_length = 100.0
line_size = Float2(line_length / self.core_manager.game_backend.width, line_length / self.core_manager.game_backend.height)
line_offset = line_size - 1.0
self.debug_line_manager.draw_debug_line_2d(line_offset, line_offset + camera.view_origin[2][0:2] * line_size, color=Float4(0.0, 0.0, 1.0, 1.0), width=line_thickness)
self.debug_line_manager.draw_debug_line_2d(line_offset, line_offset + camera.view_origin[1][0:2] * line_size, color=Float4(0.0, 1.0, 0.0, 1.0), width=line_thickness)
self.debug_line_manager.draw_debug_line_2d(line_offset, line_offset + camera.view_origin[0][0:2] * line_size, color=Float4(1.0, 0.0, 0.0, 1.0), width=line_thickness)
    def render_scene(self):
        """Render one full frame: gbuffer, shadows, lighting, translucents, post-process and editor overlays.

        Three top-level paths:
        - debug-shader mode: draw the debug material full-screen, then fall
          through to post-processing;
        - light-probe atmosphere preprocess: render atmosphere only and return;
        - normal scene rendering (also used while baking light probes, which
          returns early before post-processing).
        """
        main_camera = self.scene_manager.main_camera
        # bind scene constants uniform blocks
        self.bind_uniform_blocks()
        self.set_blend_state(False)
        glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST)
        glPolygonMode(GL_FRONT_AND_BACK, self.view_mode)
        # glEnable(GL_FRAMEBUFFER_SRGB)
        glEnable(GL_MULTISAMPLE)
        glEnable(GL_TEXTURE_CUBE_MAP_SEAMLESS)
        glDepthFunc(GL_LEQUAL)
        glEnable(GL_CULL_FACE)
        glFrontFace(GL_CCW)
        glEnable(GL_DEPTH_TEST)
        glDepthMask(True)
        glClearColor(0.0, 0.0, 0.0, 1.0)
        glClearDepth(1.0)
        if self.postprocess.is_render_shader() and not RenderOption.RENDER_LIGHT_PROBE:
            """ debug shader """
            self.set_blend_state(False)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
            glClear(GL_COLOR_BUFFER_BIT)
            self.postprocess.render_material_instance()
        elif RenderOption.RENDER_ONLY_ATMOSPHERE and RenderOption.RENDER_LIGHT_PROBE:
            """ render light probe preprocess """
            # Clear shadow to fully lit, normals/depth to defaults, then render
            # only the atmosphere into HDR.
            self.framebuffer_manager.bind_framebuffer(RenderTargets.COMPOSITE_SHADOWMAP)
            glClearColor(1.0, 1.0, 1.0, 1.0)
            glClear(GL_COLOR_BUFFER_BIT)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.WORLD_NORMAL, depth_texture=RenderTargets.DEPTH)
            glClearColor(0.0, 1.0, 0.0, 1.0)
            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.LINEAR_DEPTH)
            glClearColor(1.0, 1.0, 1.0, 1.0)
            glClear(GL_COLOR_BUFFER_BIT)
            self.postprocess.render_linear_depth(RenderTargets.DEPTH, RenderTargets.LINEAR_DEPTH)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
            glClearColor(0.0, 0.0, 0.0, 1.0)
            glClear(GL_COLOR_BUFFER_BIT)
            # render atmosphere
            if self.scene_manager.atmosphere.is_render_atmosphere:
                self.scene_manager.atmosphere.render_precomputed_atmosphere(RenderTargets.LINEAR_DEPTH,
                                                                            RenderTargets.COMPOSITE_SHADOWMAP,
                                                                            RenderOption.RENDER_LIGHT_PROBE)
            # done render light probe preprocess
            return
        else:
            """ render normal scene """
            self.scene_manager.ocean.simulateFFTWaves()
            # render gbuffer & preprocess using the jittered (TAA) projection
            camera = self.scene_manager.main_camera
            self.uniform_view_projection_data['VIEW_PROJECTION'][...] = camera.view_projection_jitter
            self.uniform_view_projection_data['PREV_VIEW_PROJECTION'][...] = camera.prev_view_projection_jitter
            self.uniform_view_projection_buffer.bind_uniform_block(data=self.uniform_view_projection_data)
            self.render_gbuffer()
            self.render_preprocess()
            self.render_shadow()
            # render solid — rebind the jittered matrices (render_shadow
            # overwrote the uniform block with the light's matrices)
            camera = self.scene_manager.main_camera
            self.uniform_view_projection_data['VIEW_PROJECTION'][...] = camera.view_projection_jitter
            self.uniform_view_projection_data['PREV_VIEW_PROJECTION'][...] = camera.prev_view_projection_jitter
            self.uniform_view_projection_buffer.bind_uniform_block(data=self.uniform_view_projection_data)
            glFrontFace(GL_CCW)
            glDepthMask(False) # cause depth prepass and gbuffer
            self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR, depth_texture=RenderTargets.DEPTH)
            glClear(GL_COLOR_BUFFER_BIT)
            self.render_solid()
            # copy HDR Target
            src_framebuffer = self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
            dst_framebuffer = self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR_TEMP)
            glClear(GL_COLOR_BUFFER_BIT)
            dst_framebuffer.copy_framebuffer(src_framebuffer)
            src_framebuffer.bind_framebuffer()
            # set common projection matrix (un-jittered for the remaining passes)
            camera = self.scene_manager.main_camera
            self.uniform_view_projection_data['VIEW_PROJECTION'][...] = camera.view_projection
            self.uniform_view_projection_data['PREV_VIEW_PROJECTION'][...] = camera.prev_view_projection
            self.uniform_view_projection_buffer.bind_uniform_block(data=self.uniform_view_projection_data)
            # render ocean
            if self.scene_manager.ocean.is_render_ocean:
                self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR, depth_texture=RenderTargets.DEPTH)
                glDisable(GL_CULL_FACE)
                glEnable(GL_DEPTH_TEST)
                glDepthMask(True)
                self.scene_manager.ocean.render_ocean(atmosphere=self.scene_manager.atmosphere,
                                                      texture_scene=RenderTargets.HDR_TEMP,
                                                      texture_linear_depth=RenderTargets.LINEAR_DEPTH,
                                                      texture_probe=RenderTargets.LIGHT_PROBE_ATMOSPHERE,
                                                      texture_shadow=RenderTargets.COMPOSITE_SHADOWMAP)
                # re copy Linear depth (ocean wrote new depth)
                self.framebuffer_manager.bind_framebuffer(RenderTargets.LINEAR_DEPTH)
                self.postprocess.render_linear_depth(RenderTargets.DEPTH, RenderTargets.LINEAR_DEPTH)
            # render atmosphere
            if self.scene_manager.atmosphere.is_render_atmosphere:
                self.framebuffer_manager.bind_framebuffer(RenderTargets.ATMOSPHERE,
                                                          RenderTargets.ATMOSPHERE_INSCATTER)
                self.scene_manager.atmosphere.render_precomputed_atmosphere(RenderTargets.LINEAR_DEPTH,
                                                                            RenderTargets.COMPOSITE_SHADOWMAP,
                                                                            RenderOption.RENDER_LIGHT_PROBE)
            glEnable(GL_CULL_FACE)
            glEnable(GL_DEPTH_TEST)
            glDepthMask(False)
            # Composite Atmosphere over the HDR scene
            if self.scene_manager.atmosphere.is_render_atmosphere:
                self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
                self.set_blend_state(True, GL_FUNC_ADD, GL_ONE, GL_ONE_MINUS_SRC_ALPHA)
                composite_atmosphere = self.resource_manager.get_material_instance("precomputed_atmosphere.composite_atmosphere")
                composite_atmosphere.use_program()
                above_the_cloud = self.scene_manager.atmosphere.cloud_altitude < main_camera.transform.get_pos()[1]
                composite_atmosphere.bind_uniform_data("above_the_cloud", above_the_cloud)
                composite_atmosphere.bind_uniform_data("inscatter_power", self.scene_manager.atmosphere.inscatter_power)
                composite_atmosphere.bind_uniform_data("texture_atmosphere", RenderTargets.ATMOSPHERE)
                composite_atmosphere.bind_uniform_data("texture_inscatter", RenderTargets.ATMOSPHERE_INSCATTER)
                composite_atmosphere.bind_uniform_data("texture_linear_depth", RenderTargets.LINEAR_DEPTH)
                self.postprocess.draw_elements()
            # prepare translucent
            self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR, depth_texture=RenderTargets.DEPTH)
            glEnable(GL_DEPTH_TEST)
            # Translucent
            self.render_translucent()
            # render particle
            if RenderOption.RENDER_EFFECT:
                glDisable(GL_CULL_FACE)
                glEnable(GL_BLEND)
                self.render_effect()
                glDisable(GL_BLEND)
                glEnable(GL_CULL_FACE)
        # render probe done — probe baking skips post-process and overlays
        if RenderOption.RENDER_LIGHT_PROBE:
            return
        glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
        self.set_blend_state(False)
        self.render_postprocess()
        if RenderOption.RENDER_OBJECT_ID:
            self.render_object_id()
        self.render_selected_object()
        # debug render target
        if self.debug_texture is not None:
            self.set_blend_state(False)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
            glClear(GL_COLOR_BUFFER_BIT)
            self.postprocess.render_texture(self.debug_texture)
        if RenderOption.RENDER_FONT:
            self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
            self.render_log()
        if RenderOption.RENDER_DEBUG_LINE and self.debug_texture is None:
            # render world axis
            self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER, depth_texture=RenderTargets.DEPTH)
            self.render_axis()
            self.debug_line_manager.bind_render_spline_program()
            for spline in self.scene_manager.splines:
                self.debug_line_manager.render_spline(spline)
            self.debug_line_manager.render_debug_lines()
        if RenderOption.RENDER_GIZMO and self.debug_texture is None:
            self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER, depth_texture=RenderTargets.DEPTH)
            glEnable(GL_DEPTH_TEST)
            glDepthMask(True)
            self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
            # render spline gizmo
            self.render_actors(RenderGroup.STATIC_ACTOR,
                               RenderMode.GIZMO,
                               self.scene_manager.spline_gizmo_render_infos,
                               self.render_color_material)
            # render transform axis gizmo (depth cleared so it draws on top)
            glClear(GL_DEPTH_BUFFER_BIT)
            self.render_axis_gizmo(RenderMode.GIZMO)
| ubuntunux/PyEngine3D | PyEngine3D/Render/Renderer.py | Python | bsd-2-clause | 60,697 |
#!/usr/bin/python
########################################################################
# Copyright (c) 2017
# Daniel Plohmann <daniel.plohmann<at>mailbox<dot>org>
# All rights reserved.
########################################################################
#
# This file is part of apiscout
#
# apiscout is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
import argparse
import json
import logging
from operator import attrgetter
import os
import re
import sys
import platform
import ctypes
import pefile
import config
from ThreadedCommand import ThreadedCommand
LOG = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG, format="%(asctime)-15s %(message)s")
def get_system_info():
    """Return (os_name, os_version) describing the host Windows system.

    Combines platform.uname() with sys.getwindowsversion() (Windows-only)
    into a human-readable name such as "Windows 7 SP1 (AMD64)" plus the
    raw version string.
    """
    platform_info = platform.uname()
    version_info = sys.getwindowsversion()
    # BUG FIX: the original tested ``sys.version > '3'`` -- a *string*
    # comparison that would misclassify interpreter major versions >= 10
    # ("10..." < "3" lexicographically). sys.version_info is robust.
    if sys.version_info[0] >= 3:
        # Python 3: uname() returns a named tuple with attribute access.
        os_name = "%s %s %s (%s)" % (platform_info.system, platform_info.release, version_info.service_pack, platform_info.machine)
        os_version = platform_info.version
    else:
        # Python 2: uname() returns a plain tuple; index positionally.
        os_name = "%s %s %s (%s)" % (platform_info[0], platform_info[2], version_info[4], platform_info[4])
        os_version = platform_info[3]
    return os_name, os_version
# courtesy of http://stackoverflow.com/a/16076661
def loword(dword):
    """Extract the low word (bottom 16 bits) of *dword*."""
    mask = 0xFFFF
    return dword & mask
def hiword(dword):
    """Extract the high word (top 16 bits) of a 32-bit *dword*."""
    shifted = dword >> 16
    return shifted
def get_product_version(pe):
    """Format the PE's ProductVersion as a dotted "a.b.c.d" string.

    Falls back to "0.0.0.0" when the PE carries no VS_FIXEDFILEINFO
    version resource.
    """
    try:
        version_ms = pe.VS_FIXEDFILEINFO.ProductVersionMS
        version_ls = pe.VS_FIXEDFILEINFO.ProductVersionLS
    except AttributeError:
        return "0.0.0.0"
    parts = (hiword(version_ms), loword(version_ms), hiword(version_ls), loword(version_ls))
    return "%d.%d.%d.%d" % parts
def check_aslr():
    """Heuristically determine whether ASLR is active on this Windows host.

    For a few core DLLs, compares the live in-memory module base against
    the preferred ImageBase stored in the PE header on disk; any non-zero
    delta means the loader rebased the module, i.e. ASLR is in effect.
    Windows-only (uses ctypes.windll).
    """
    # first check for a potentially rebased user32.dll
    from ctypes import windll
    from ctypes import wintypes
    check_dlls = ["user32.dll", "kernel32.dll", "ntdll.dll"]
    offsets = []
    is_aslr = False
    # Declare prototypes so ctypes does not truncate 64-bit handles/pointers
    # on Win64 (default restype is a 32-bit int).
    windll.kernel32.GetModuleHandleW.restype = wintypes.HMODULE
    windll.kernel32.GetModuleHandleW.argtypes = [wintypes.LPCWSTR]
    windll.kernel32.GetModuleFileNameW.restype = wintypes.DWORD
    windll.kernel32.GetModuleFileNameW.argtypes = [wintypes.HANDLE, wintypes.LPWSTR, wintypes.DWORD]
    for dll_name in check_dlls:
        # The DLLs above are loaded in every process, so GetModuleHandleW
        # returns the base address they were mapped at in *this* process.
        h_module_base = windll.kernel32.GetModuleHandleW(dll_name)
        # next get the module's file path
        module_path = ctypes.create_unicode_buffer(255)
        windll.kernel32.GetModuleFileNameW(h_module_base, module_path, 255)
        # then the ImageBase from python.exe file
        pe = pefile.PE(module_path.value)
        pe_header_base_addr = pe.OPTIONAL_HEADER.ImageBase
        # Non-zero delta => the loader rebased this module.
        offsets.append(pe_header_base_addr - h_module_base)
    for dll_name, offset in zip(check_dlls, offsets):
        LOG.debug("Memory vs. File ImageBase offset (%s): 0x%x", dll_name, offset)
        is_aslr |= offset != 0
    return is_aslr
class DatabaseBuilder(object):
    """Crawls directories for DLLs and builds an apiscout API database."""

    def _extractPeExports(self, filepath):
        """Parse the PE file at *filepath* and summarize its export table.

        Returns a dict with base address, bitness, product version and the
        list of exports, or None when the file has no export directory or
        cannot be parsed at all (best-effort by design).
        """
        try:
            pe = pefile.PE(filepath)
            if hasattr(pe, "DIRECTORY_ENTRY_EXPORT"):
                dll_entry = {}
                dll_entry["base_address"] = pe.OPTIONAL_HEADER.ImageBase
                # 0x14c == IMAGE_FILE_MACHINE_I386; anything else is treated as 64 bit.
                dll_entry["bitness"] = 32 if pe.FILE_HEADER.Machine == 0x14c else 64
                dll_entry["version"] = get_product_version(pe)
                dll_entry["filepath"] = filepath
                dll_entry["aslr_offset"] = 0
                dll_entry["exports"] = []
                min_addr = sys.maxsize
                max_addr = 0
                for exp in sorted(pe.DIRECTORY_ENTRY_EXPORT.symbols, key=attrgetter("address")):
                    export_info = {}
                    min_addr = min(pe.OPTIONAL_HEADER.ImageBase + exp.address, min_addr)
                    max_addr = max(pe.OPTIONAL_HEADER.ImageBase + exp.address, max_addr)
                    export_info["address"] = exp.address
                    # Ordinal-only exports have no name; store the literal string "None".
                    if exp.name is None:
                        export_info["name"] = "None"
                    else:
                        export_info["name"] = exp.name.decode("utf-8")
                    export_info["ordinal"] = exp.ordinal
                    dll_entry["exports"].append(export_info)
                return dll_entry
        except Exception:
            # Deliberate best-effort: unparsable/corrupt files are skipped.
            return None

    def _buildDllKey(self, dll_info):
        """Build a unique DB key: "<bitness>_<version>_<filename>_0x<base>"."""
        filename = os.path.basename(dll_info["filepath"])
        return "{}_{}_{}_0x{:x}".format(dll_info["bitness"], dll_info["version"], filename, dll_info["base_address"])

    def _isInFilter(self, target_dll, filter_dlls):
        """Case-insensitively check whether *target_dll* is in *filter_dlls*.

        upper().lower() (instead of casefold()) is kept deliberately to
        maintain compatibility with Python 2.7.
        """
        normalized = target_dll.upper().lower()
        return any(normalized == check_dll.upper().lower() for check_dll in filter_dlls)

    def extractRecursively(self, paths, filter_dlls=False):
        """Walk *paths* recursively and collect export info for every DLL.

        paths: directories to crawl; None means config.DEFAULT_FOLDERS.
        filter_dlls: when True, only names in config.DLL_FILTER are examined.
        Returns the assembled API database dict (without ASLR offsets).
        """
        api_count = 0
        pe_count = 0
        duplicate_count = 0
        skipped_count = 0
        num_hit_dlls = 0
        api_db = {"dlls": {}}
        if paths is None:
            paths = config.DEFAULT_FOLDERS
        for base in paths:
            if not os.path.isdir(base):
                # FIX: Logger.warn() is a deprecated alias of warning().
                LOG.warning("%s is not a directory, skipping...", base)
                continue
            for root, _, files in os.walk(base):
                for fn in files:
                    if filter_dlls and not self._isInFilter(fn, config.DLL_FILTER):
                        skipped_count += 1
                        continue
                    elif not fn.lower().endswith((".dll", ".drv", ".mui")):
                        continue
                    pe_count += 1
                    LOG.info("processing: %s %s", root, fn)
                    dll_summary = self._extractPeExports(os.path.join(root, fn))
                    if dll_summary is not None:
                        dll_key = self._buildDllKey(dll_summary)
                        if dll_key not in api_db["dlls"]:
                            api_db["dlls"][dll_key] = dll_summary
                            num_hit_dlls += 1
                            api_count += len(dll_summary["exports"])
                            LOG.info("APIs: %d", len(dll_summary["exports"]))
                        else:
                            duplicate_count += 1
        LOG.info("PEs examined: %d (%d duplicates, %d skipped)", pe_count, duplicate_count, skipped_count)
        LOG.info("Successfully evaluated %d DLLs with %d APIs", num_hit_dlls, api_count)
        api_db["os_name"], api_db["os_version"] = get_system_info()
        api_db["aslr_offsets"] = False
        api_db["num_dlls"] = num_hit_dlls
        api_db["num_apis"] = api_count
        api_db["crawled_paths"] = paths
        api_db["filtered"] = filter_dlls
        return api_db

    def extractAslrOffsets(self, api_db):
        """Augment *api_db* in place with per-DLL ASLR rebase offsets."""
        LOG.info("Now check for ASLR...")
        if check_aslr():
            LOG.info("  looks like ASLR is active, let's extract some offsets!")
            num_offsets = {32: 0, 64: 0}
            for dll_key in api_db["dlls"]:
                dll = api_db["dlls"][dll_key]
                if dll["bitness"] in [32, 64]:
                    offset = self.getAslrOffsetForDll(dll)
                    dll["aslr_offset"] = offset
                    if offset:
                        num_offsets[dll["bitness"]] += 1
            LOG.info("Found %d 32bit and %d 64bit ASLR offsets.", num_offsets[32], num_offsets[64])
            api_db["aslr_offsets"] = True
        return api_db

    def getAslrOffsetForDll(self, dll_entry):
        """Load a DLL via the bundled DllBaseChecker helper and return
        (preferred ImageBase - actual load address), or 0 on failure."""
        this_file = str(os.path.abspath(__file__))
        # Pick the helper matching the DLL's bitness, next to this script.
        basechecker = "DllBaseChecker{}.exe".format(dll_entry["bitness"])
        basechecker_path = os.path.abspath(os.sep.join([this_file, "..", "DllBaseChecker", basechecker]))
        cmds = [basechecker_path, dll_entry["filepath"]]
        threaded_basecheck = ThreadedCommand(cmds)
        # 10 second timeout: a hung loader must not stall the whole crawl.
        result = threaded_basecheck.run(10)
        load_address = 0
        aslr_offset = 0
        if result["std_out"] and result["std_out"].startswith(b"DLL loaded at: 0x"):
            load_address = int(result["std_out"][15:], 16)
        if load_address:
            aslr_offset = dll_entry["base_address"] - load_address
        else:
            LOG.warning("Could not get a load address for %s, ASLR offset left as 0.", dll_entry["filepath"])
        return aslr_offset

    def persistApiDb(self, api_db, filepath=None):
        """Serialize *api_db* as pretty-printed, key-sorted JSON.

        When *filepath* is None, a name is derived from the OS version
        (plus "_filtered" when the DB was built with DLL filtering);
        a ".json" extension is appended when missing.
        """
        if filepath is None:
            filtered = "_filtered" if api_db["filtered"] else ""
            filepath = "." + os.sep + ".".join(api_db["os_version"].split(".")[:2]) + filtered + ".json"
        if not filepath.endswith(".json"):
            filepath += ".json"
        with open(filepath, "w") as f_out:
            f_out.write(json.dumps(api_db, indent=1, sort_keys=True))
def main():
    """Command-line entry point: build and persist an apiscout API DB."""
    parser = argparse.ArgumentParser(description='Build a database to be used by apiscout.')
    parser.add_argument('--filter', dest='filter_dlls', action='store_true',
                        help='(optional) filter DLLs by name (see config.py)')
    parser.add_argument('--auto', dest='auto', action='store_true',
                        help='Use default configuration (filtered DLLs from preconfigured paths (see config.py) and extract ASLR offsets.')
    parser.add_argument('--paths', metavar='P', type=str, nargs='+', default=None,
                        help='the paths to recursively crawl for DLLs (None -> use default, see config.py).')
    parser.add_argument('--outfile', dest='output_file', type=str, default=None,
                        help='(optional) filepath where to put the resulting API DB file.')
    parser.add_argument('--ignore_aslr', dest='ignore_aslr', action='store_true',
                        help='Do not perform extraction of ASLR offsets.')
    parser.add_argument('--aslr_check', dest='aslr_check', action='store_true',
                        help='Only show ASLR offset.')
    args = parser.parse_args()
    builder = DatabaseBuilder()
    if args.aslr_check:
        # Diagnostic mode: just report whether ASLR is active.
        print("OS has ASLR offsets: {}".format(check_aslr()))
        return
    if args.auto:
        # Fully automatic run: filtered crawl of default paths + ASLR offsets.
        api_db = builder.extractRecursively(None, True)
        api_db = builder.extractAslrOffsets(api_db)
        builder.persistApiDb(api_db, args.output_file)
        return
    if args.paths:
        api_db = builder.extractRecursively(args.paths, args.filter_dlls)
        if not args.ignore_aslr:
            api_db = builder.extractAslrOffsets(api_db)
        builder.persistApiDb(api_db, args.output_file)
        return
    # No actionable arguments given.
    parser.print_help()
if __name__ == "__main__":
    sys.exit(main())
| danielplohmann/apiscout | apiscout/db_builder/DatabaseBuilder.py | Python | bsd-2-clause | 11,496 |
from typing import Dict, List, NamedTuple
import pykube.objects
import requests
import pendulum
from urllib.parse import urlparse
from boto import ec2
from ..context import Context
from k8s_snapshots.snapshot import Snapshot
from .abstract import NewSnapshotIdentifier, SnapshotStatus
from ..errors import SnapshotCreateError
def validate_config(config):
    """Check that this backend's configuration is sound.

    Manual volumes are validated by the backend itself; different clouds
    (AWS vs. Google Cloud) would need different data here -- say, region
    or zone -- so there is currently nothing AWS-specific to verify.
    """
    pass
def supports_volume(volume: pykube.objects.PersistentVolume):
    """Tell whether *volume* is backed by an AWS EBS block store."""
    spec = volume.obj['spec']
    return bool(spec.get('awsElasticBlockStore'))
class AWSDiskIdentifier(NamedTuple):
    """Identifies an EBS disk by its AWS region and EC2 volume id."""
    region: str
    volume_id: str
def get_current_region(ctx):
    """Get the current region from the metadata service.

    The result is cached in ctx.config['aws_region']; the first call
    queries the EC2 instance metadata endpoint (only reachable on EC2).
    """
    if not ctx.config['aws_region']:
        response = requests.get(
            'http://169.254.169.254/latest/meta-data/placement/availability-zone',
            timeout=5)
        response.raise_for_status()
        # Strip the trailing zone letter: "eu-west-1a" -> "eu-west-1".
        ctx.config['aws_region'] = response.text[:-1]
    return ctx.config['aws_region']
def get_disk_identifier(volume: pykube.objects.PersistentVolume):
    """Derive (region, volume id) from a PersistentVolume's EBS spec."""
    volume_url = volume.obj['spec'].get('awsElasticBlockStore')['volumeID']
    if not volume_url.startswith('aws://'):
        # Older versions of kube just put the volume id in the volume id field,
        # with the region only available as a node label.
        labels = volume.obj['metadata']['labels']
        return AWSDiskIdentifier(
            region=labels['failure-domain.beta.kubernetes.io/region'],
            volume_id=volume_url)
    # An url such as aws://eu-west-1a/vol-00292b2da3d4ed1e4: the netloc is
    # the availability zone; dropping its final letter yields the region.
    parsed = urlparse(volume_url)
    zone = parsed.netloc
    return AWSDiskIdentifier(region=zone[:-1], volume_id=parsed.path[1:])
def parse_timestamp(date_str: str) -> pendulum.Pendulum:
    """Parse an ISO-ish timestamp string and normalize it to UTC."""
    parsed = pendulum.parse(date_str)
    return parsed.in_timezone('utc')
def validate_disk_identifier(disk_id: Dict):
try:
return AWSDiskIdentifier(
region=disk_id['region'],
volume_id=disk_id['volumeId']
)
except:
raise ValueError(disk_id)
# AWS can filter by volume-id, which means we wouldn't have to match in Python.
# In any case, it might be easier to let the backend handle the matching. Then
# it relies less on the DiskIdentifier object always matching.
#filters={'volume-id': volume.id}
def load_snapshots(ctx: Context, label_filters: Dict[str, str]) -> List[Snapshot]:
    """Fetch all of our own EC2 snapshots matching the given tag filters."""
    connection = get_connection(ctx, region=get_current_region(ctx))
    tag_filters = {f'tag:{key}': value for key, value in label_filters.items()}
    raw_snapshots = connection.get_all_snapshots(
        owner='self',
        filters=tag_filters
    )
    return [
        Snapshot(
            name=raw.id,
            created_at=parse_timestamp(raw.start_time),
            disk=AWSDiskIdentifier(
                volume_id=raw.volume_id,
                region=raw.region.name
            )
        )
        for raw in raw_snapshots
    ]
def create_snapshot(
        ctx: Context,
        disk: AWSDiskIdentifier,
        snapshot_name: str,
        snapshot_description: str
) -> NewSnapshotIdentifier:
    """Start an EBS snapshot of *disk*; return its id and region.

    TODO: Seems like the API doesn't actually allow us to set a snapshot
    name, although it's possible in the UI -- so the name is stored in
    the description field instead.
    """
    connection = get_connection(ctx, disk.region)
    new_snapshot = connection.create_snapshot(
        disk.volume_id,
        description=snapshot_name
    )
    return {
        'id': new_snapshot.id,
        'region': new_snapshot.region.name
    }
def get_snapshot_status(
        ctx: Context,
        snapshot_identifier: NewSnapshotIdentifier
) -> SnapshotStatus:
    """Map the AWS snapshot state onto our SnapshotStatus enum.

    Raises SnapshotCreateError when AWS reports the snapshot errored,
    NotImplementedError for any state outside the documented set.
    """
    connection = get_connection(ctx, snapshot_identifier['region'])
    snapshots = connection.get_all_snapshots(
        [snapshot_identifier['id']]
    )
    snapshot = snapshots[0]
    # Can be pending | completed | error
    if snapshot.status == 'pending':
        return SnapshotStatus.PENDING
    elif snapshot.status == 'completed':
        return SnapshotStatus.COMPLETE
    elif snapshot.status == 'error':
        # FIX: boto snapshot objects expose ``status`` as an attribute;
        # the old ``snapshot['status']`` raised TypeError here instead of
        # the intended SnapshotCreateError.
        raise SnapshotCreateError(snapshot.status)
    else:
        raise NotImplementedError()
def set_snapshot_labels(
        ctx: Context,
        snapshot_identifier: NewSnapshotIdentifier,
        labels: Dict
):
    """Attach *labels* to the given snapshot as EC2 tags."""
    region = snapshot_identifier['region']
    connection = get_connection(ctx, region)
    connection.create_tags([snapshot_identifier['id']], labels)
def delete_snapshot(
        ctx: Context,
        snapshot: Snapshot
):
    """Delete *snapshot* in the region recorded on its disk identifier."""
    conn = get_connection(ctx, snapshot.disk.region)
    conn.delete_snapshot(snapshot.name)
def get_connection(ctx: Context, region):
    """Open a boto EC2 connection for *region* (credentials via boto config)."""
    return ec2.connect_to_region(region)
| EQTPartners/k8s-snapshots | k8s_snapshots/backends/aws.py | Python | bsd-2-clause | 4,710 |
from urlparse import urljoin
from django.conf import settings
def trailing_slash_or_none():
    """
    Return a slash or empty string based on tastypie setting
    """
    allow_missing = getattr(settings, 'TASTYPIE_ALLOW_MISSING_SLASH', False)
    return '' if allow_missing else '/'
def urljoin_forced(base, path, **kwargs):
    """
    urljoin base with path, except append '/' to base if it doesn't exist
    """
    # FIX: the old ``cond and a or b`` idiom silently misbehaves whenever
    # ``a`` is falsy; an explicit conditional is both safe and clearer.
    if not base.endswith('/'):
        base = '%s/' % base
    return urljoin(base, path, **kwargs)
| benthomasson/django-tastypie-swagger | tastypie_swagger/utils.py | Python | bsd-2-clause | 499 |
import pytest
from webtest import TestApp
from pyramid.config import Configurator
from pyramid.testing import DummyRequest
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.authentication import AuthTktAuthenticationPolicy
def make_app(config):
    """Wrap the configurator's finished WSGI app in a WebTest TestApp."""
    wsgi_app = config.make_wsgi_app()
    return TestApp(wsgi_app)
@pytest.mark.parametrize('method', ['delete', 'get', 'post', 'patch', 'put'])
def test_unallowed_method_added(method):
    """A resource exposing no views must reject every HTTP method with 405."""
    config = Configurator()
    config.scan('resource_only')
    app = make_app(config)
    getattr(app, method)('/', status=405)
def test_default_options_method():
    """An OPTIONS view is generated automatically, advertising only OPTIONS."""
    config = Configurator()
    config.scan('resource_only')
    app = make_app(config)
    response = app.options('/')
    assert response.headers['Access-Control-Allow-Methods'] == 'OPTIONS'
def test_request_add_get_view():
    """A resource with a GET view answers GET requests."""
    config = Configurator()
    config.scan('resource_get')
    app = make_app(config)
    app.get('/')
def test_request_default_to_json_renderer():
    """Resource views render JSON by default."""
    config = Configurator()
    config.scan('resource_get')
    app = make_app(config)
    r = app.get('/')
    assert r.content_type == 'application/json'
    assert r.json == {'message': 'hello'}
def test_request_override_renderer():
    """A view-specified renderer overrides the JSON default."""
    config = Configurator()
    config.scan('resource_get_renderer')
    app = make_app(config)
    r = app.get('/')
    assert r.content_type == 'text/plain'
    assert r.unicode_body == 'hello'
def test_add_controller():
    """Controllers respond to POST at their own sub-path."""
    config = Configurator()
    config.scan('controller')
    app = make_app(config)
    app.post('/engage')
def test_nested_controller():
    """Controllers nested under a resource path are routed correctly."""
    # Test for https://github.com/wichert/rest_toolkit/issues/12
    config = Configurator()
    config.scan('controller')
    app = make_app(config)
    app.post('/resource/engage')
def test_controller_default_to_json_renderer():
    """Controller views render JSON by default, like resource views."""
    config = Configurator()
    config.scan('controller')
    app = make_app(config)
    r = app.post('/engage')
    assert r.content_type == 'application/json'
    assert r.json == {'message': 'Ai ai captain'}
def test_set_controller_method():
    """A controller may bind additional HTTP methods (here GET)."""
    config = Configurator()
    config.scan('controller')
    app = make_app(config)
    r = app.get('/engage')
    assert r.json == {'message': 'Warp engine offline'}
@pytest.mark.parametrize('method', ['delete', 'get', 'patch', 'put'])
def test_controller_invalid_method(method):
    """Methods not bound on the controller's resource return 405."""
    config = Configurator()
    config.scan('controller')
    app = make_app(config)
    getattr(app, method)('/', status=405)
def test_default_get_view():
    """An abstract resource gets a default GET view from its base class."""
    config = Configurator()
    config.scan('resource_abc')
    app = make_app(config)
    r = app.get('/')
    assert r.json == {'message': 'Hello, world'}
def test_override_default_view():
    """A resource-defined GET view replaces the inherited default."""
    config = Configurator()
    config.scan('resource_abc_override')
    app = make_app(config)
    r = app.get('/')
    assert r.json == {'message': 'Welcome'}
def test_set_resource_route_name():
    """A resource's explicit route name is registered for URL generation."""
    config = Configurator()
    config.scan('resource_route_name')
    config.make_wsgi_app()
    request = DummyRequest()
    request.registry = config.registry
    assert request.route_path('user', id=15) == '/users/15'
def test_secured_default_view_not_allowed():
    """With real auth policies and no credentials, a secured view is 403."""
    config = Configurator()
    config.set_authentication_policy(AuthTktAuthenticationPolicy('seekrit'))
    config.set_authorization_policy(ACLAuthorizationPolicy())
    config.scan('resource_abc')
    app = make_app(config)
    app.get('/secure', status=403)
def test_secured_default_view_allowed():
    """With a permissive testing security policy, the secured view succeeds."""
    config = Configurator()
    config.testing_securitypolicy(1)
    config.scan('resource_abc')
    app = make_app(config)
    app.get('/secure')
| wichert/rest_toolkit | tests/test_resource.py | Python | bsd-2-clause | 3,615 |
# -*- coding: utf-8 -*-
import sys
import os
from recommonmark.parser import CommonMarkParser
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extension modules to enable (none needed for this project).
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# Parse Markdown sources with recommonmark's CommonMark parser.
source_parsers = {
    '.md': CommonMarkParser,
}
source_suffix = ['.rst', '.md']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Frank'
copyright = u'2017, Kyle Fuller'
author = u'Kyle Fuller'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1'
# The full version, including alpha/beta/rc tags.
release = u'0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar layout: the index page omits the local TOC/relations.
html_sidebars = {
    'index': ['sidebar_intro.html', 'searchbox.html'],
    '**': ['sidebar_intro.html', 'localtoc.html', 'relations.html', 'searchbox.html'],
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
html_show_sourcelink = True
html_show_sphinx = False
html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Frankdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'Frank.tex', u'Frank Documentation',
     u'Kyle Fuller', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'frank', u'Frank Documentation',
     [author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Frank', u'Frank Documentation',
     author, 'Frank', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| nestproject/Frank | docs/conf.py | Python | bsd-2-clause | 8,306 |
import numpy as np
''' Belief Propagation on tree'''
class Node:
    ''' A node in the tree used for belief propagation.

    Messages are passed up from the leaves to the root and back down;
    each node keeps its unary potential (psi), the message to its parent
    (msgUp), the per-child downward messages (msgsDown) and the resulting
    marginal (belief).

    NOTE(review): this class relies on module-level globals -- edgeDict,
    regionLabels, transitionMatrix (and cv2/imLines for drawing) -- that
    are expected to be defined by the importing module; confirm before use.
    '''
    # Class-level defaults kept for backward compatibility; the real state
    # is (re)initialized per instance in __init__.
    children = []
    parent = -1
    pos = -1
    index = -1
    depth = 0
    msgsDown = {}
    msgUp = -1
    psi = -1
    belief = -1
    def __init__(self, parent_=-1, index_=-1, children_=None, pos_=-1, depth_=0):
        # FIX: children_ previously defaulted to a shared mutable list.
        if children_ is None:
            children_ = []
        self.index = index_
        self.pos = pos_
        self.parent = parent_
        self.children = []
        self.depth = depth_
        # FIX: msgsDown used to be written into the *class* dict, so all
        # nodes shared one message table; make it per-instance.
        self.msgsDown = {}
        if len(children_) > 1:
            for i in children_:
                # Skip the edge that leads back to the parent.
                if self.parent == -1 or i != self.parent.index:
                    self.children.append(Node(index_=i, parent_=self,
                                              children_=edgeDict[i],
                                              pos_=regionLabels[i][1],
                                              depth_=self.depth + 1))
                    self.msgsDown[i] = -1
        self.msgUp = -1
        self.psi = -1
        self.belief = -1
    def getLeaves(self, leaves=None):
        '''Collect the leaves of the subtree rooted here.

        Returns a list at the root, the accumulating set otherwise.
        FIX: the former default ``leaves=set()`` was a shared mutable
        default, so leaves accumulated across separate calls.
        '''
        if leaves is None:
            leaves = set()
        if self.children == []:
            leaves.add(self)
        else:
            for child in self.children:
                child.getLeaves(leaves)
        if self.parent == -1:
            return list(leaves)
        return leaves
    def calcPsi(self, hypothesis):
        '''Compute the unary potential from distances to each hypothesis.

        Each row of *hypothesis* is a candidate position; closer rows get
        a larger share of the (normalized) potential.
        '''
        num_hyp = np.shape(hypothesis)[0]
        self.psi = np.empty([num_hyp])
        for i in range(num_hyp):
            self.psi[i] = np.sqrt(np.sum((hypothesis[i] - self.pos)**2))
        # Invert distances into similarities, then normalize to sum to 1.
        self.psi = (np.sum(self.psi) - self.psi)/np.sum(self.psi)
        self.psi /= np.sum(self.psi)
    def setPsi(self, psi):
        # Placeholder: externally supplied potentials are not yet supported.
        pass
    def calcMessageUp(self):
        '''Compute the message to the parent from psi and child messages.

        No-op until every child has produced its upward message.
        '''
        for kid in self.children:
            if np.all(kid.msgUp == -1):
                return
        tmpMsg = 1
        tmpMsg *= self.psi
        if self.children != []:
            for kid in self.children:
                tmpMsg = tmpMsg*kid.msgUp
                tmpMsg /= np.sum(tmpMsg)
        # Propagate through the pairwise potential.
        self.msgUp = np.array(transitionMatrix*np.asmatrix(tmpMsg).T).T
        self.msgUp /= np.sum(self.msgUp)
    def calcMessagesDown(self):
        '''Compute the downward message for each child.

        A child's message combines psi, the parent's message to this node,
        and the upward messages of all *other* children.
        '''
        for kid in self.children:
            tmpMsg = 1
            tmpMsg *= self.psi
            if self.parent != -1:
                for m in self.parent.msgsDown.keys():
                    if m == self.index:
                        tmpMsg = tmpMsg*self.parent.msgsDown[m]
                        tmpMsg /= np.sum(tmpMsg)
                        break
            for kid2 in self.children:
                if kid != kid2:
                    # FIX: this used ``kid.msgUp`` (the target child's own
                    # message) for every sibling; the earlier commented-out
                    # code shows the intent was the *sibling's* message.
                    tmpMsg = tmpMsg*kid2.msgUp
                    tmpMsg /= np.sum(tmpMsg)
            self.msgsDown[kid.index] = np.array(transitionMatrix*np.asmatrix(tmpMsg).T).T
            self.msgsDown[kid.index] /= np.sum(self.msgsDown[kid.index])
    def calcBelief(self):
        '''Combine psi with all incoming messages into this node's marginal.'''
        self.belief = 1
        self.belief *= self.psi
        if self.parent != -1:
            for c in self.parent.msgsDown.keys():
                if c == self.index:
                    self.belief *= self.parent.msgsDown[c][0]
                    break
        if self.children != []:
            for kid in self.children:
                if np.any(kid.msgUp == -1):
                    # A child has not reported yet; belief is undefined.
                    self.belief = -1
                    break
                else:
                    self.belief *= kid.msgUp[0]
        if np.all(self.belief >= 0):
            self.belief /= np.sum(self.belief)
    def updateLeaves(self):
        '''Recompute downward messages and beliefs for this whole subtree.'''
        self.calcMessagesDown()
        self.calcBelief()
        if self.children != []:
            for child in self.children:
                child.updateLeaves()
    def reset(self):
        '''Clear all messages, potentials and beliefs in this subtree.'''
        self.msgUp = -1
        self.psi = -1
        self.belief = -1
        self.msgsDown = {}
        if self.children != []:
            for child in self.children:
                child.reset()
    def calcTotalBelief(self):
        '''Product over all leaves of each leaf's maximum belief value.'''
        if self.children == []:
            return np.max(self.belief)
        belief = 1
        for kid in self.children:
            belief *= kid.calcTotalBelief()
        return belief
    def calcAll(self, hypothesis):
        '''Run one full bottom-up / top-down belief-propagation pass.'''
        if np.any(self.belief >= 0):
            self.reset()
        leaves = self.getLeaves()
        oldLeaves = set()
        # Sweep messages upward level by level until the root's belief
        # becomes computable.
        while np.any(self.belief < 0):
            newLeaves = set()
            for leaf in leaves:
                leaf.calcPsi(hypothesis)
                leaf.calcMessageUp()
                if np.any(leaf.msgUp != -1):
                    oldLeaves.add(leaf)
                if np.any(leaf.parent != -1) and leaf.parent not in oldLeaves:
                    newLeaves.add(leaf.parent)
            leaves = newLeaves
            self.calcBelief()
        # Then sweep downward and refresh every node's marginal.
        self.calcMessagesDown()
        for child in self.children:
            child.updateLeaves()
    def drawClass(self):
        '''Draw a circle colored by the argmax belief class.

        NOTE(review): relies on module-level cv2, imLines and regionLabels
        being provided by the importing module -- confirm before use.
        '''
        pt1 = (regionLabels[self.index][2][1], regionLabels[self.index][2][0])
        color_ = int(np.argmax(self.belief)*30)
        cv2.circle(imLines, pt1, radius=10, color=color_, thickness=1)
    def drawAll(self):
        '''Draw this node and, recursively, all of its descendants.'''
        self.drawClass()
        for kid in self.children:
            kid.drawAll()
# Copyright 2014 Dietrich Epp.
# This file is part of SGLib. SGLib is licensed under the terms of the
# 2-clause BSD license. For more information, see LICENSE.txt.
import uuid as uuid_module
import xml.etree.ElementTree as etree
from ..util import indent_xml
from ..error import ConfigError
import io
import os
# Short local aliases for the ElementTree factories used throughout.
Element = etree.Element
SubElement = etree.SubElement
# The MSBuild project XML namespace.
XMLNS = 'http://schemas.microsoft.com/developer/msbuild/2003'
def condition(variant):
    """Build the MSBuild Condition attribute for a "Config|Platform" pair."""
    template = "'$(Configuration)|$(Platform)'=='{}'"
    return template.format(variant)
# Map file extension -> (sort order, MSBuild item element name). Built by
# enumerating the grouped extension lists below, so every extension in a
# group shares the same ordering index and element name.
SOURCE_TYPES = {kk: (n, v) for n, (k, v) in enumerate([
    ('c c++', 'ClCompile'),
    ('h h++', 'ClInclude'),
    ('rc', 'ResourceCompile'),
    ('vcxproj', 'ProjectReference'),
]) for kk in k.split()}
def proj_import(root, path):
    """Append an MSBuild ``<Import Project="..."/>`` element to *root*."""
    attributes = {'Project': path}
    SubElement(root, 'Import', attributes)
def emit_properties(*, element, props, var=None):
    """Emit one MSBuild property sub-element per entry in *props* (sorted).

    Values may be strings (used verbatim), bools (lowercased for MSBuild),
    or lists (joined with ';' and suffixed with a ``var(key)`` reference so
    MSBuild appends to any inherited value); *var* must be supplied when
    any value is a list. Any other type raises TypeError.
    """
    for key, value in sorted(props.items()):
        if isinstance(value, list):
            assert var is not None
            text = '{};{}({})'.format(';'.join(value), var, key)
        elif isinstance(value, bool):
            text = str(value).lower()
        elif isinstance(value, str):
            text = value
        else:
            raise TypeError('unexpected property type: {}'.format(type(value)))
        SubElement(element, key).text = text
TYPE_CPP = uuid_module.UUID('8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942')
class Project(object):
    """A Visual Studio project."""
    __slots__ = [
        # The project name.
        'name',
        # Path to the project file.
        'path',
        # The project type UUID.
        'type',
        # The project UUID.
        'uuid',
        # Map from solution (config, platform) to project (config, platform).
        'configs',
        # List of project dependencies (other projects).
        'dependencies',
    ]
    @property
    def sourcetype(self):
        # Projects referenced from other projects are treated as
        # 'vcxproj' source items (see SOURCE_TYPES).
        return 'vcxproj'
    def emit(self):
        """Emit project files if necessary."""
        # Base class: externally managed projects have nothing to write.
class UserProject(Project):
    """A user-generated Visual Studio project."""
    __slots__ = [
        # Contents of the project file.
        '_data_project',
        # Contents of the filter file.
        '_data_filter',
        # Contents of the user file.
        '_data_user',
    ]
    def emit(self):
        """Write the project, filters and user files next to each other."""
        outputs = (
            ('.vcxproj', self._data_project),
            ('.vcxproj.filters', self._data_filter),
            ('.vcxproj.user', self._data_user),
        )
        for suffix, data in outputs:
            with open(self.name + suffix, 'wb') as fp:
                fp.write(data)
def read_project(*, path, configs):
    """Read a Visual Studio project.

    path: path to an existing .vcxproj file.
    configs: map from solution (config, platform) to project
        (config, platform); stored verbatim on the returned Project.
    Raises ConfigError when the file is not a parseable .vcxproj.
    """
    if os.path.splitext(path)[1] != '.vcxproj':
        # FIX: this raised UserError, which is never imported in this
        # module (NameError at runtime); ConfigError is the error type
        # this module actually imports and uses below.
        raise ConfigError('invalid Visual Studio project extension')
    with open(path, 'rb') as fp:
        doc = etree.parse(fp)
    def get_uuid():
        """Extract the ProjectGuid from the parsed project document."""
        gtag = etree.QName(XMLNS, 'PropertyGroup')
        ptag = etree.QName(XMLNS, 'ProjectGuid')
        for gelem in doc.getroot():
            if gelem.tag == gtag:
                for pelem in gelem:
                    if pelem.tag == ptag:
                        return uuid_module.UUID(pelem.text)
        raise ConfigError('could not detect project UUID: {}'.format(path))
    def get_configs():
        """Collect the (Configuration, Platform) pairs the project declares."""
        gtag = etree.QName(XMLNS, 'ItemGroup')
        itag = etree.QName(XMLNS, 'ProjectConfiguration')
        ctag = etree.QName(XMLNS, 'Configuration')
        ptag = etree.QName(XMLNS, 'Platform')
        configs = []
        for gelem in doc.getroot():
            if (gelem.tag != gtag or
                    gelem.attrib.get('Label') != 'ProjectConfigurations'):
                continue
            for ielem in gelem:
                if ielem.tag != itag:
                    continue
                cfg = None
                plat = None
                for pelem in ielem:
                    if pelem.tag == ctag:
                        cfg = pelem.text
                    elif pelem.tag == ptag:
                        plat = pelem.text
                if cfg is None or plat is None:
                    raise ConfigError(
                        'could not parse project configurations')
                configs.append((cfg, plat))
        return configs
    obj = Project()
    obj.name = os.path.splitext(os.path.basename(path))[0]
    obj.path = path
    obj.type = TYPE_CPP
    obj.uuid = get_uuid()
    obj.configs = configs
    obj.dependencies = []
    return obj
def xml_data(root):
    """Serialize *root* to UTF-8 XML bytes, pretty-indenting it in place first."""
    indent_xml(root)
    return etree.tostring(root, encoding='UTF-8')
def create_project(*, name, sources, uuid, variants, props, arguments):
    """Create a Visual Studio project.
    name: the project name.
    sources: list of source files in the project.
    uuid: the project UUID.
    variants: list of "config|arch" variants.
    props: map from "config|arch" to map from group to prop dict.
    arguments: default arguments for debugging.

    Returns a UserProject holding the serialized .vcxproj, .vcxproj.filters,
    and .vcxproj.user documents (not yet written to disk).
    """
    # NOTE: this nested helper intentionally shadows the enclosing
    # function's name; it builds the .vcxproj XML document.
    def create_project():
        root = Element('Project', {
            'xmlns': XMLNS,
            'ToolsVersion': '12.0',
            'DefaultTargets': 'Build',
        })
        # Declare every Configuration|Platform combination up front.
        cfgs = SubElement(
            root, 'ItemGroup', {'Label': 'ProjectConfigurations'})
        for variant in variants:
            pc = SubElement(
                cfgs, 'ProjectConfiguration', {'Include': variant})
            configuration, platform = variant.split('|')
            SubElement(pc, 'Configuration').text = configuration
            SubElement(pc, 'Platform').text = platform
        del cfgs, variant, configuration, platform, pc
        pg = SubElement(root, 'PropertyGroup', {'Label': 'Globals'})
        SubElement(pg, 'Keyword').text = 'Win32Proj'
        SubElement(pg, 'ProjectGuid').text = \
            '{{{}}}'.format(str(uuid).upper())
        # RootNamespace
        del pg
        proj_import(root, '$(VCTargetsPath)\\Microsoft.Cpp.Default.props')
        # Per-variant configuration properties (must precede Microsoft.Cpp.props).
        for variant in variants:
            emit_properties(
                element=SubElement(root, 'PropertyGroup', {
                    'Condition': condition(variant),
                    'Label': 'Configuration',
                }),
                props=props[variant]['Config'])
        del variant
        proj_import(root, '$(VCTargetsPath)\\Microsoft.Cpp.props')
        SubElement(root, 'ImportGroup', {'Label': 'ExtensionSettings'})
        # Import the per-user property sheet for each variant, if present.
        for variant in variants:
            ig = SubElement(root, 'ImportGroup', {
                'Label': 'PropertySheets',
                'Condition': condition(variant),
            })
            path = '$(UserRootDir)\\Microsoft.Cpp.$(Platform).user.props'
            i = SubElement(ig, 'Import', {
                'Project': path,
                'Condition': "exists('{}')".format(path),
                'Label': 'LocalAppDataPlatform',
            })
        del variant, ig, path, i
        SubElement(root, 'PropertyGroup', {'Label': 'UserMacros'})
        # Per-variant VC property group ($-style variables).
        for variant in variants:
            emit_properties(
                element=SubElement(root, 'PropertyGroup', {
                    'Condition': condition(variant),
                }),
                props=props[variant]['VC'],
                var='$')
        del variant
        # Per-variant compiler and linker settings (%-style variables).
        for variant in variants:
            ig = SubElement(root, 'ItemDefinitionGroup', {
                'Condition': condition(variant),
            })
            for group in ('ClCompile', 'Link'):
                emit_properties(
                    element=SubElement(ig, group),
                    props=props[variant][group],
                    var='%')
        del variant, ig, group
        # Bucket sources into item groups keyed by SOURCE_TYPES index so
        # they are emitted in a stable, sorted order below.
        groups = {}
        for source in sources:
            try:
                index, tag = SOURCE_TYPES[source.sourcetype]
            except KeyError:
                raise ConfigError(
                    'cannot add file to executable: {}'.format(source.path))
            try:
                group = groups[index]
            except KeyError:
                group = Element('ItemGroup')
                groups[index] = group
            src = SubElement(group, tag, {'Include': source.path})
            if tag == 'ProjectReference':
                SubElement(src, 'Project').text = \
                    '{{{}}}'.format(str(source.uuid).upper())
        del source, index, tag, group, src
        for n, elt in sorted(groups.items()):
            root.append(elt)
        del n, elt
        proj_import(root, '$(VCTargetsPath)\\Microsoft.Cpp.targets')
        SubElement(root, 'ImportGroup', {'Label': 'ExtensionTargets'})
        return root
    # Builds the .vcxproj.filters document mirroring the source layout.
    def create_filter():
        filters = set()
        root = Element('Project', {
            'xmlns': XMLNS,
            'ToolsVersion': '12.0',
        })
        groups = {}
        for source in sources:
            index, tag = SOURCE_TYPES[source.sourcetype]
            if tag == 'ProjectReference':
                continue
            try:
                group = groups[index]
            except KeyError:
                group = Element('ItemGroup')
                groups[index] = group
            elt = SubElement(group, tag, {'Include': source.path})
            dirname, basename = os.path.split(source.path)
            if not dirname:
                continue
            filter = dirname
            SubElement(elt, 'Filter').text = filter
            # Register the filter and all of its ancestor directories.
            while filter and filter not in filters:
                filters.add(filter)
                filter = os.path.dirname(filter)
        for n, elt in sorted(groups.items()):
            root.append(elt)
        fgroup = SubElement(root, 'ItemGroup')
        # Each filter gets a fresh random GUID, as Visual Studio does.
        for filter in sorted(filters):
            elt = SubElement(fgroup, 'Filter', {'Include': filter})
            SubElement(elt, 'UniqueIdentifier').text = \
                '{{{}}}'.format(str(uuid_module.uuid4()).upper())
        return root
    # Quote a single debugger command-line argument.
    def convert_arg(arg):
        return '"{}"'.format(arg)
    # Builds the .vcxproj.user document with per-variant debug settings.
    def create_user():
        root = Element('Project', {
            'xmlns': XMLNS,
            'ToolsVersion': '12.0',
        })
        args = ' '.join(convert_arg(arg) for arg in arguments)
        for variant in variants:
            pg = SubElement(root, 'PropertyGroup', {
                'Condition': condition(variant),
            })
            SubElement(pg, 'LocalDebuggerCommandArguments').text = args
            SubElement(pg, 'DebuggerFlavor').text = 'WindowsLocalDebugger'
            epath = props[variant]['Debug'].get('Path', ())
            if epath:
                SubElement(pg, 'LocalDebuggerEnvironment').text = \
                    'PATH=%PATH%;' + ';'.join(epath)
        return root
    # Assembles the UserProject and serializes all three documents.
    def create_object():
        configs = set(x.split('|')[0] for x in variants)
        obj = UserProject()
        obj.name = name
        obj.path = name + '.vcxproj'
        obj.type = TYPE_CPP
        obj.uuid = uuid
        obj.configs = {c: c for c in configs}
        obj.dependencies = [source for source in sources
                            if source.sourcetype == 'vcxproj']
        obj._data_project = xml_data(create_project())
        obj._data_filter = xml_data(create_filter())
        obj._data_user = xml_data(create_user())
        return obj
    return create_object()
| depp/sglib | script/d3build/msvc/project.py | Python | bsd-2-clause | 11,200 |
# Artificial-dataset regression test: builds a 32-point daily series with a
# polynomial trend, a 12-step cycle and 20 exogenous variables, then fits an
# AR(12) model with no transformation and zero noise (sigma = 0.0).
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 12, transform = "None", sigma = 0.0, exog_count = 20, ar_order = 12);
"""KrakenSystem - objects.kraken_core module.
Classes:
KrakenSystem - Class for constructing the Fabric Engine Core client.
"""
import logging
import os
import sys
import json
import importlib
from collections import OrderedDict
import FabricEngine.Core
# import kraken
from kraken.core.profiler import Profiler
from kraken.plugins import getFabricClient
from kraken.log import getLogger
from kraken.log.utils import fabricCallback
logger = getLogger('kraken')
class KrakenSystem(object):
    """The KrakenSystem is a singleton object used to provide an interface with
    the FabricEngine Core and RTVal system."""
    # Singleton storage; access only through getInstance().
    __instance = None
    def __init__(self):
        """Initializes the Kraken System object."""
        super(KrakenSystem, self).__init__()
        # Fabric Engine Core client; created lazily by loadCoreClient().
        self.client = None
        # Cache of registered type descriptions (client.RT.getRegisteredTypes()).
        self.typeDescs = None
        # Handle to client.RT.types; refreshed after each extension load.
        self.registeredTypes = None
        # Names of KL extensions already loaded, to skip redundant reloads.
        self.loadedExtensions = []
        # Registered config classes, keyed by "module.ClassName".
        self.registeredConfigs = OrderedDict()
        # Registered component classes, keyed by "module.ClassName".
        self.registeredComponents = OrderedDict()
        # self.moduleImportManager = ModuleImportManager()
    def loadCoreClient(self):
        """Loads the Fabric Engine Core Client"""
        if self.client is None:
            Profiler.getInstance().push("loadCoreClient")
            # Reuse a host-provided client (e.g. inside a DCC) when available.
            client = getFabricClient()
            if client is None:
                options = {
                    'reportCallback': fabricCallback,
                    'guarded': True
                }
                client = FabricEngine.Core.createClient(options)
            self.client = client
            # Base extensions required by every Kraken session.
            self.loadExtension('Math')
            self.loadExtension('Kraken')
            self.loadExtension('KrakenForCanvas')
            Profiler.getInstance().pop()
    def getCoreClient(self):
        """Returns the Fabric Engine Core Client owned by the KrakenSystem
        Returns:
            object: The Fabric Engine Core Client
        """
        if self.client is None:
            self.loadCoreClient()
        return self.client
    def loadExtension(self, extension):
        """Loads the given extension and updates the registeredTypes cache.
        Args:
            extension (str): The name of the extension to load.
        """
        if extension not in self.loadedExtensions:
            Profiler.getInstance().push("loadExtension:" + extension)
            self.client.loadExtension(extension)
            self.registeredTypes = self.client.RT.types
            self.typeDescs = self.client.RT.getRegisteredTypes()
            # Cache the loaded extension so that we avoid refreshing the typeDescs cache (costly)
            self.loadedExtensions.append(extension)
            Profiler.getInstance().pop()
    # ==============
    # RTVal Methods
    # ==============
    def convertFromRTVal(self, target, RTTypeName=None):
        """Generates an RTVal object based on the simple type of target
        and passes target to constructor. Converts a property of an RTVal object
        to its own python RTVal object
        Args:
            target (RTVal): The RTVal object or property to cast
            RTTypeName (str): The type of RTVal to convert to
        Returns:
            RTVal: The RTVal object
        """
        self.loadCoreClient()
        if RTTypeName is None:
            # Infer the concrete type name from the value itself.
            RTTypeName = target.type('String').getSimpleType()
        rtValType = getattr(self.client.RT.types, RTTypeName)
        pythonRTVal = rtValType(target)
        return pythonRTVal
    def constructRTVal(self, dataType, defaultValue=None):
        """Constructs a new RTVal using the given name and optional default value.
        Args:
            dataType (str): The name of the data type to construct.
            defaultValue (value): The default value to use to initialize the RTVal
        Returns:
            object: The constructed RTval.
        """
        self.loadCoreClient()
        klType = getattr(self.registeredTypes, dataType)
        if defaultValue is not None:
            # Already-wrapped values carry their RTVal in '_rtval'.
            if hasattr(defaultValue, '_rtval'):
                return defaultValue._rtval
            typeDesc = self.typeDescs[dataType]
            if 'members' in typeDesc:
                # Struct-like type: build it, then copy matching members
                # from defaultValue.
                try:
                    value = klType.create()
                except:
                    try:
                        return klType()
                    except Exception as e:
                        raise Exception("Error constructing RTVal:" + dataType)
                # NOTE(review): uses 'memberName in defaultValue' (container
                # test) but then getattr(defaultValue, memberName) (attribute
                # access) — confirm defaultValue supports both protocols.
                for i in xrange(0, len(typeDesc['members'])):
                    memberName = typeDesc['members'][i]['name']
                    memberType = typeDesc['members'][i]['type']
                    if memberName in defaultValue:
                        setattr(value, memberName, self.constructRTVal(memberType, getattr(defaultValue, memberName)))
                return value
            else:
                return klType(defaultValue)
        else:
            # No default: prefer the type's create() factory, fall back to
            # the bare constructor.
            try:
                return klType.create()
            except:
                try:
                    return klType()
                except Exception as e:
                    raise Exception("Error constructing RTVal:" + dataType)
    def rtVal(self, dataType, defaultValue=None):
        """Constructs a new RTVal using the given name and optional default value.

        Convenience alias for constructRTVal.
        Args:
            dataType (str): The name of the data type to construct.
            defaultValue (value): The default value to use to initialize the RTVal
        Returns:
            object: The constructed RTval.
        """
        return self.constructRTVal(dataType, defaultValue)
    def isRTVal(self, value):
        """Returns true if the given value is an RTVal.
        Args:
            value (value): value to test.
        Returns:
            bool: True if successful.
        """
        return str(type(value)) == "<type 'PyRTValObject'>"
    def getRTValTypeName(self, rtval):
        """Returns the name of the type, handling extracting the name from KL RTVals.
        Args:
            rtval (rtval): The rtval to extract the name from.
        Returns:
            str: The type name, or "None" for non-RTVal inputs.
        """
        if ks.isRTVal(rtval):
            return json.loads(rtval.type("Type").jsonDesc("String").getSimpleType())['name']
        else:
            return "None"
    # ==================
    # Config Methods
    # ==================
    def registerConfig(self, configClass):
        """Registers a config Python class with the KrakenSystem so it can be built by the rig builder.
        Args:
            configClass (str): The Python class of the config
        """
        configModulePath = configClass.__module__ + "." + configClass.__name__
        self.registeredConfigs[configModulePath] = configClass
    def getConfigClass(self, className):
        """Returns the registered Python config class with the given name
        Args:
            className (str): The name of the Python config class
        Returns:
            object: The Python config class
        """
        if className not in self.registeredConfigs:
            raise Exception("Config with that class not registered:" + className)
        return self.registeredConfigs[className]
    def getConfigClassNames(self):
        """Returns the names of the registered Python config classes
        Returns:
            list: The array of config class names.
        """
        return self.registeredConfigs.keys()
    # ==================
    # Component Methods
    # ==================
    def registerComponent(self, componentClass):
        """Registers a component Python class with the KrakenSystem so it can be built by the rig builder.
        Args:
            componentClass (str): The Python class of the component
        """
        componentClassPath = componentClass.__module__ + "." + componentClass.__name__
        if componentClassPath in self.registeredComponents:
            # we allow reregistring of components because as a component's class is edited
            # it will be re-imported by python(in Maya), and the classes reregistered.
            pass
        self.registeredComponents[componentClassPath] = componentClass
    def getComponentClass(self, className):
        """Returns the registered Python component class with the given name
        Args:
            className (str): The name of the Python component class
        Returns:
            object: The Python component class
        """
        if className not in self.registeredComponents:
            raise Exception("Component with that class not registered:" + className)
        return self.registeredComponents[className]
    def getComponentClassNames(self):
        """Returns the names of the registered Python component classes
        Returns:
            list: The array of component class names.
        """
        return self.registeredComponents.keys()
    def loadComponentModules(self):
        """Loads all the component modules and configs specified in the 'KRAKEN_PATHS' environment variable.
        The kraken_components are loaded at all times.
        Returns:
            bool: True if all components loaded, else False.
        """
        # Drop previously-imported component modules so edits are picked up
        # on reload (e.g. inside Maya).
        for componentClassPath in self.registeredComponents:
            componentModulePath = self.registeredComponents[componentClassPath].__module__
            if componentModulePath in sys.modules:
                del(sys.modules[componentModulePath])
        self.registeredComponents = {}
        logger.info("Loading component modules...")
        def __importDirRecursive(path, parentModulePath=''):
            isSuccessful = True
            contents = os.listdir(path)
            moduleFilefound = False
            # A directory is importable only if it contains __init__.py.
            for item in contents:
                if os.path.isfile(os.path.join(path, item)):
                    if item == "__init__.py":
                        if parentModulePath == '':
                            modulePath = os.path.basename(path)
                            moduleParentFolder = os.path.split( path )[0]
                            if moduleParentFolder not in sys.path:
                                sys.path.append(moduleParentFolder)
                        else:
                            modulePath = parentModulePath + '.' + os.path.basename(path)
                        moduleFilefound = True
            if moduleFilefound:
                logger.info(" " + path + ":")
                for i, item in enumerate(contents):
                    if os.path.isfile(os.path.join(path, item)):
                        # Parse all the files of given path and import python
                        # modules. The files in these folders really should be
                        # limited to components, otherwise we are loading more
                        # than modules and that is not clear.
                        # TODO: Figure out a way to limit imports to just rig
                        # component modules.
                        if item.endswith(".py") and item != "__init__.py":
                            module = modulePath + "." + item[:-3]
                            try:
                                logger.info(" " + module)
                                importlib.import_module(module)
                            except ImportError, e:
                                isSuccessful = False
                                logging.exception("Error importing '" + module)
                            except Exception, e:
                                isSuccessful = False
                                logging.exception("Error Loading Modules'" + module)
                logging.info("")
            # Recurse into subdirectories, carrying the package path down
            # only when this level was itself a package.
            for item in contents:
                if os.path.isdir(os.path.join(path, item)):
                    if moduleFilefound:
                        if not __importDirRecursive(os.path.join(path, item), modulePath):
                            isSuccessful = False
                    else:
                        if not __importDirRecursive(os.path.join(path, item)):
                            isSuccessful = False
            return isSuccessful
        # Find the kraken examples module in the same folder as the kraken module.
        default_component_path = os.path.normpath(os.path.join(os.environ.get('KRAKEN_PATH'), 'Python', 'kraken_components'))
        isSuccessful = __importDirRecursive(default_component_path)
        pathsVar = os.getenv('KRAKEN_PATHS')
        if pathsVar is not None:
            pathsList = pathsVar.split(os.pathsep)
            for path in pathsList:
                if path == '':
                    continue
                if not os.path.exists(path):
                    logging.info("Invalid Kraken Path: " + path)
                    continue
                if not __importDirRecursive(path):
                    isSuccessful = False
        return isSuccessful
    @classmethod
    def getInstance(cls):
        """This class method returns the singleton instance for the KrakenSystem
        Returns:
            object: The singleton instance.
        """
        if cls.__instance is None:
            cls.__instance = KrakenSystem()
        return cls.__instance
# Module-level singleton handle; importers share this single KrakenSystem.
ks = KrakenSystem.getInstance()
| oculusstorystudio/kraken | Python/kraken/core/kraken_system.py | Python | bsd-3-clause | 13,241 |
from __future__ import print_function
from __future__ import absolute_import
# System modules
import argparse
import sys
import multiprocessing
import os
import textwrap
# Third-party modules
# LLDB modules
from . import configuration
class ArgParseNamespace(object):
    """Plain attribute container passed to argparse as the parse namespace."""
    pass
def parse_args(parser, argv):
    """Parse *argv* with *parser* and return the resulting namespace.

    If the LLDB_TEST_ARGUMENTS environment variable is set, its
    whitespace-separated contents are parsed first, so command-line
    arguments in *argv* override environment-supplied ones.
    """
    args = ArgParseNamespace()
    env_args = os.environ.get('LLDB_TEST_ARGUMENTS')
    if env_args is not None:
        print(
            "Arguments passed through environment: '%s'" %
            env_args)
        args = parser.parse_args(
            [sys.argv[0]] + env_args.split(), namespace=args)
    return parser.parse_args(args=argv, namespace=args)
def default_thread_count():
    """Return the default number of test worker threads.

    Honors the LLDB_TEST_THREADS environment variable when set to a
    non-empty value; otherwise falls back to the machine's CPU count.
    """
    configured = os.environ.get("LLDB_TEST_THREADS")
    if configured:
        return int(configured)
    return multiprocessing.cpu_count()
def create_parser():
    """Build and return the argparse parser for the LLDB test driver.

    Options are grouped by concern: help, toolchain, test filtering,
    configuration, remote platform, runtime behaviour, parallel execution,
    test results, re-runs, and test directories.  Help printing is handled
    by the caller (add_help=False), and '+-' prefix chars are accepted.
    """
    parser = argparse.ArgumentParser(
        description='description',
        prefix_chars='+-',
        add_help=False)
    group = None
    # Helper function for boolean options (group will point to the current
    # group when executing X)
    X = lambda optstr, helpstr, **kwargs: group.add_argument(
        optstr, help=helpstr, action='store_true', **kwargs)
    group = parser.add_argument_group('Help')
    group.add_argument(
        '-h',
        '--help',
        dest='h',
        action='store_true',
        help="Print this help message and exit. Add '-v' for more detailed help.")
    # C and Python toolchain options
    group = parser.add_argument_group('Toolchain options')
    group.add_argument(
        '-A',
        '--arch',
        metavar='arch',
        dest='arch',
        help=textwrap.dedent('''Specify the architecture(s) to test. This option can be specified more than once'''))
    group.add_argument('-C', '--compiler', metavar='compiler', dest='compiler', help=textwrap.dedent(
        '''Specify the compiler(s) used to build the inferior executables. The compiler path can be an executable basename or a full path to a compiler executable. This option can be specified multiple times.'''))
    if sys.platform == 'darwin':
        group.add_argument('--apple-sdk', metavar='apple_sdk', dest='apple_sdk', default="macosx", help=textwrap.dedent(
            '''Specify the name of the Apple SDK (macosx, macosx.internal, iphoneos, iphoneos.internal, or path to SDK) and use the appropriate tools from that SDK's toolchain.'''))
    # FIXME? This won't work for different extra flags according to each arch.
    group.add_argument(
        '-E',
        metavar='extra-flags',
        help=textwrap.dedent('''Specify the extra flags to be passed to the toolchain when building the inferior programs to be debugged
                           suggestions: do not lump the "-A arch1 -A arch2" together such that the -E option applies to only one of the architectures'''))
    group.add_argument('--dsymutil', metavar='dsymutil', dest='dsymutil', help=textwrap.dedent('Specify which dsymutil to use.'))
    # Test filtering options
    group = parser.add_argument_group('Test filtering options')
    group.add_argument(
        '-f',
        metavar='filterspec',
        action='append',
        help='Specify a filter, which consists of the test class name, a dot, followed by the test method, to only admit such test into the test suite')  # FIXME: Example?
    X('-l', "Don't skip long running tests")
    group.add_argument(
        '-p',
        metavar='pattern',
        help='Specify a regexp filename pattern for inclusion in the test suite')
    group.add_argument('--excluded', metavar='exclusion-file', action='append', help=textwrap.dedent(
        '''Specify a file for tests to exclude. File should contain lists of regular expressions for test files or methods,
        with each list under a matching header (xfail files, xfail methods, skip files, skip methods)'''))
    group.add_argument(
        '-G',
        '--category',
        metavar='category',
        action='append',
        dest='categoriesList',
        help=textwrap.dedent('''Specify categories of test cases of interest. Can be specified more than once.'''))
    group.add_argument(
        '--skip-category',
        metavar='category',
        action='append',
        dest='skipCategories',
        help=textwrap.dedent('''Specify categories of test cases to skip. Takes precedence over -G. Can be specified more than once.'''))
    # Configuration options
    group = parser.add_argument_group('Configuration options')
    group.add_argument(
        '--framework',
        metavar='framework-path',
        help='The path to LLDB.framework')
    group.add_argument(
        '--executable',
        metavar='executable-path',
        help='The path to the lldb executable')
    group.add_argument(
        '--server',
        metavar='server-path',
        help='The path to the debug server executable to use')
    group.add_argument(
        '--out-of-tree-debugserver',
        dest='out_of_tree_debugserver',
        action='store_true',
        help='A flag to indicate an out-of-tree debug server is being used')
    group.add_argument(
        '-s',
        metavar='name',
        help='Specify the name of the dir created to store the session files of tests with errored or failed status. If not specified, the test driver uses the timestamp as the session dir name')
    group.add_argument(
        '-S',
        '--session-file-format',
        default=configuration.session_file_format,
        metavar='format',
        help='Specify session file name format. See configuration.py for a description.')
    group.add_argument(
        '-y',
        type=int,
        metavar='count',
        help="Specify the iteration count used to collect our benchmarks. An example is the number of times to do 'thread step-over' to measure stepping speed.")
    group.add_argument(
        '-#',
        type=int,
        metavar='sharp',
        dest='sharp',
        help='Repeat the test suite for a specified number of times')
    group.add_argument('--channel', metavar='channel', dest='channels', action='append', help=textwrap.dedent(
        "Specify the log channels (and optional categories) e.g. 'lldb all' or 'gdb-remote packets' if no categories are specified, 'default' is used"))
    group.add_argument(
        '--log-success',
        dest='log_success',
        action='store_true',
        help="Leave logs/traces even for successful test runs (useful for creating reference log files during debugging.)")
    group.add_argument(
        '--codesign-identity',
        metavar='Codesigning identity',
        default='lldb_codesign',
        help='The codesigning identity to use')
    group.add_argument(
        '--build-dir',
        dest='test_build_dir',
        metavar='Test build directory',
        default='lldb-test-build.noindex',
        help='The root build directory for the tests. It will be removed before running.')
    # Configuration options
    group = parser.add_argument_group('Remote platform options')
    group.add_argument(
        '--platform-name',
        dest='lldb_platform_name',
        metavar='platform-name',
        help='The name of a remote platform to use')
    group.add_argument(
        '--platform-url',
        dest='lldb_platform_url',
        metavar='platform-url',
        help='A LLDB platform URL to use when connecting to a remote platform to run the test suite')
    group.add_argument(
        '--platform-working-dir',
        dest='lldb_platform_working_dir',
        metavar='platform-working-dir',
        help='The directory to use on the remote platform.')
    # Test-suite behaviour
    group = parser.add_argument_group('Runtime behaviour options')
    X('-d', 'Suspend the process after launch to wait indefinitely for a debugger to attach')
    X('-q', "Don't print extra output from this script.")
    X('-t', 'Turn on tracing of lldb command and other detailed test executions')
    group.add_argument(
        '-u',
        dest='unset_env_varnames',
        metavar='variable',
        action='append',
        help='Specify an environment variable to unset before running the test cases. e.g., -u DYLD_INSERT_LIBRARIES -u MallocScribble')
    group.add_argument(
        '--env',
        dest='set_env_vars',
        metavar='variable',
        action='append',
        help='Specify an environment variable to set to the given value before running the test cases e.g.: --env CXXFLAGS=-O3 --env DYLD_INSERT_LIBRARIES')
    X('-v', 'Do verbose mode of unittest framework (print out each test case invocation)')
    group.add_argument(
        '--enable-crash-dialog',
        dest='disable_crash_dialog',
        action='store_false',
        help='(Windows only) When LLDB crashes, display the Windows crash dialog.')
    group.set_defaults(disable_crash_dialog=True)
    group = parser.add_argument_group('Parallel execution options')
    group.add_argument(
        '--inferior',
        action='store_true',
        help=('specify this invocation is a multiprocess inferior, '
              'used internally'))
    group.add_argument(
        '--no-multiprocess',
        action='store_true',
        help='skip running the multiprocess test runner')
    group.add_argument(
        '--threads',
        type=int,
        dest='num_threads',
        default=default_thread_count(),
        help=('The number of threads/processes to use when running tests '
              'separately, defaults to the number of CPU cores available'))
    group.add_argument(
        '--test-subdir',
        action='store',
        help='Specify a test subdirectory to use relative to the test root dir'
    )
    group.add_argument(
        '--test-runner-name',
        action='store',
        help=('Specify a test runner strategy. Valid values: multiprocessing,'
              ' multiprocessing-pool, serial, threading, threading-pool')
    )
    # Test results support.
    group = parser.add_argument_group('Test results options')
    group.add_argument(
        '--curses',
        action='store_true',
        help='Shortcut for specifying test results using the curses formatter')
    group.add_argument(
        '--results-file',
        action='store',
        help=('Specifies the file where test results will be written '
              'according to the results-formatter class used'))
    group.add_argument(
        '--results-port',
        action='store',
        type=int,
        help=('Specifies the localhost port to which the results '
              'formatted output should be sent'))
    group.add_argument(
        '--results-formatter',
        action='store',
        help=('Specifies the full package/module/class name used to translate '
              'test events into some kind of meaningful report, written to '
              'the designated output results file-like object'))
    group.add_argument(
        '--results-formatter-option',
        '-O',
        action='append',
        dest='results_formatter_options',
        help=('Specify an option to pass to the formatter. '
              'Use --results-formatter-option="--option1=val1" '
              'syntax. Note the "=" is critical, don\'t include whitespace.'))
    group.add_argument(
        '--event-add-entries',
        action='store',
        help=('Specify comma-separated KEY=VAL entries to add key and value '
              'pairs to all test events generated by this test run. VAL may '
              'be specified as VAL:TYPE, where TYPE may be int to convert '
              'the value to an int'))
    # Re-run related arguments
    group = parser.add_argument_group('Test Re-run Options')
    group.add_argument(
        '--rerun-all-issues',
        action='store_true',
        help=('Re-run all issues that occurred during the test run '
              'irrespective of the test method\'s marking as flakey. '
              'Default behavior is to apply re-runs only to flakey '
              'tests that generate issues.'))
    group.add_argument(
        '--rerun-max-file-threshold',
        action='store',
        type=int,
        default=50,
        help=('Maximum number of files requiring a rerun beyond '
              'which the rerun will not occur. This is meant to '
              'stop a catastrophically failing test suite from forcing '
              'all tests to be rerun in the single-worker phase.'))
    # Remove the reference to our helper function
    del X
    group = parser.add_argument_group('Test directories')
    group.add_argument(
        'args',
        metavar='test-dir',
        nargs='*',
        help='Specify a list of directory names to search for test modules named after Test*.py (test discovery). If empty, search from the current working directory instead.')
    return parser
| youtube/cobalt | third_party/llvm-project/lldb/packages/Python/lldbsuite/test/dotest_args.py | Python | bsd-3-clause | 13,110 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import mock
import six
import time
from exam import fixture, patcher
from sentry.quotas.redis import (
is_rate_limited,
RedisQuota,
)
from sentry.testutils import TestCase
from sentry.utils.redis import clusters
def test_is_rate_limited_script():
    """Exercise the rate-limit script directly against a local Redis client.

    Two quota keys are checked together: 'foo' with limit 1 and 'bar' with
    limit 2, expiring at now+60 and now+120 respectively.

    NOTE(review): the trailing assertions compare client.get(...) to native
    str values ('1') — assumes a Python 2 / str-returning Redis client;
    confirm decode behavior before porting.
    """
    now = int(time.time())
    cluster = clusters.get('default')
    client = cluster.get_local_client(six.next(iter(cluster.hosts)))
    # The item should not be rate limited by either key.
    assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))) == \
        [False, False]
    # The item should be rate limited by the first key (1).
    assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))) == \
        [True, False]
    # The item should still be rate limited by the first key (1), but *not*
    # rate limited by the second key (2) even though this is the third time
    # we've checked the quotas. This ensures items that are rejected by a lower
    # quota don't affect unrelated items that share a parent quota.
    assert list(map(bool, is_rate_limited(client, ('foo', 'bar'), (1, now + 60, 2, now + 120)))) == \
        [True, False]
    assert client.get('foo') == '1'
    assert 59 <= client.ttl('foo') <= 60
    assert client.get('bar') == '1'
    assert 119 <= client.ttl('bar') <= 120
class RedisQuotaTest(TestCase):
    """Tests RedisQuota with project/organization quota lookups mocked out."""
    quota = fixture(RedisQuota)
    @patcher.object(RedisQuota, 'get_project_quota')
    def get_project_quota(self):
        # Default project quota: limit 0 over a 60 second window.
        inst = mock.MagicMock()
        inst.return_value = (0, 60)
        return inst
    @patcher.object(RedisQuota, 'get_organization_quota')
    def get_organization_quota(self):
        # Default organization quota: limit 0 over a 60 second window.
        inst = mock.MagicMock()
        inst.return_value = (0, 60)
        return inst
    def test_uses_defined_quotas(self):
        # Quotas returned by get_quotas must mirror the configured
        # per-project and per-organization limits, keyed by id.
        self.get_project_quota.return_value = (200, 60)
        self.get_organization_quota.return_value = (300, 60)
        quotas = self.quota.get_quotas(self.project)
        assert quotas[0].key == 'p:{}'.format(self.project.id)
        assert quotas[0].limit == 200
        assert quotas[0].window == 60
        assert quotas[1].key == 'o:{}'.format(self.project.organization.id)
        assert quotas[1].limit == 300
        assert quotas[1].window == 60
    @mock.patch('sentry.quotas.redis.is_rate_limited')
    @mock.patch.object(RedisQuota, 'get_quotas', return_value=[])
    def test_bails_immediately_without_any_quota(self, get_quotas, is_rate_limited):
        # With no quotas configured the Redis script must not be invoked.
        result = self.quota.is_rate_limited(self.project)
        assert not is_rate_limited.called
        assert not result.is_limited
    @mock.patch('sentry.quotas.redis.is_rate_limited', return_value=(False, False))
    def test_is_not_limited_without_rejections(self, is_rate_limited):
        self.get_organization_quota.return_value = (100, 60)
        self.get_project_quota.return_value = (200, 60)
        assert not self.quota.is_rate_limited(self.project).is_limited
    @mock.patch('sentry.quotas.redis.is_rate_limited', return_value=(True, False))
    def test_is_limited_on_rejections(self, is_rate_limited):
        # A single rejected key is enough to mark the whole check limited.
        self.get_organization_quota.return_value = (100, 60)
        self.get_project_quota.return_value = (200, 60)
        assert self.quota.is_rate_limited(self.project).is_limited
| BuildingLink/sentry | tests/sentry/quotas/redis/tests.py | Python | bsd-3-clause | 3,356 |
from sys import version_info
try:
from collections.abc import Iterable, Container
except ImportError:
from collections import Iterable, Container
from pyvalid import accepts
from pyvalid.validators import AbstractValidator
class NumberValidator(AbstractValidator):
    """Validator for numeric values (int/float, plus long on Python 2)."""
    number_types = (int, float)
    if version_info < (3, 0, 0):
        number_types += (long, )  # noqa: F821
    @classmethod
    def number_type_checker(cls, val, number_type):
        """Checks if the number is of required data type.
        Args:
            val (number):
                Tensor whose type is to be validated.
            number_type (type):
                Expected data type of number.
                Ex: int, float, long.
        Returns (bool):
            True:
                If the type of given number matches the required type.
            False:
                If the type of given number does not match the required type.
        """
        # NOTE(review): exact-type comparison, not isinstance — e.g.
        # type(True) == int is False even though bool subclasses int;
        # confirm this strictness is intended.
        return type(val) == number_type
    @classmethod
    def min_val_checker(cls, val, min_val):
        """Return True if *val* is greater than or equal to *min_val*."""
        return val >= min_val
    @classmethod
    def max_val_checker(cls, val, max_val):
        """Return True if *val* is less than or equal to *max_val*."""
        return val <= max_val
    @classmethod
    def in_range_checker(cls, val, in_range):
        """Return True if *val* occurs in *in_range* (Container or Iterable)."""
        is_valid = False
        # Prefer the container membership protocol; fall back to a linear
        # scan for iterables that are not containers.
        if isinstance(in_range, Container):
            is_valid = val in in_range
        elif isinstance(in_range, Iterable):
            for item in in_range:
                if item == val:
                    is_valid = True
                    break
        return is_valid
    @classmethod
    def not_in_range_checker(cls, val, not_in_range):
        """Return True if *val* does NOT occur in *not_in_range*."""
        return not cls.in_range_checker(val, not_in_range)
    @property
    def checkers(self):
        # Map of checker callables to their configured argument lists.
        return self.__checkers
    @accepts(
        object, min_val=number_types, max_val=number_types,
        in_range=[Iterable, Container], not_in_range=[Iterable, Container]
    )
    def __init__(self, **kwargs):
        """Configure the validator from keyword arguments.

        Supported kwargs: min_val, max_val, number_type, in_range,
        not_in_range.  Raises ValueError when min_val > max_val.

        NOTE(review): 'number_type' is read here but not declared in the
        @accepts decorator above — confirm whether it should be validated
        there as well.
        """
        min_val = kwargs.get('min_val', None)
        max_val = kwargs.get('max_val', None)
        if min_val is not None and max_val is not None and min_val > max_val:
            raise ValueError('Min value can\'t be greater than max value!')
        number_type = kwargs.get('number_type', None)
        in_range = kwargs.get('in_range', None)
        not_in_range = kwargs.get('not_in_range', None)
        self.__checkers = {
            NumberValidator.min_val_checker: [min_val],
            NumberValidator.max_val_checker: [max_val],
            NumberValidator.number_type_checker: [number_type],
            NumberValidator.in_range_checker: [in_range],
            NumberValidator.not_in_range_checker: [not_in_range]
        }
        AbstractValidator.__init__(self, allowed_types=NumberValidator.number_types)
| uzumaxy/pyvalid | pyvalid/validators/__number.py | Python | bsd-3-clause | 2,763 |
# -*- coding: utf-8 -*-
"""
proxy.py
~~~~~~~~
⚡⚡⚡ Fast, Lightweight, Pluggable, TLS interception capable proxy server focused on
Network monitoring, controls & Application development, testing, debugging.
:copyright: (c) 2013-present by Abhinav Singh and contributors.
:license: BSD, see LICENSE for more details.
"""
import os
import logging
from typing import List, Tuple
from ..http.parser import HttpParser
from ..http.server import HttpWebServerBasePlugin, httpProtocolTypes
from ..http.responses import permanentRedirectResponse
logger = logging.getLogger(__name__)
class ProxyDashboard(HttpWebServerBasePlugin):
    """Web-server plugin that serves the bundled proxy.py dashboard."""

    # Alias paths that 301-redirect to the canonical /dashboard/ URL.
    REDIRECT_ROUTES = [
        (httpProtocolTypes.HTTP, r'/dashboard$'),
        (httpProtocolTypes.HTTPS, r'/dashboard$'),
        (httpProtocolTypes.HTTP, r'/dashboard/proxy.html$'),
        (httpProtocolTypes.HTTPS, r'/dashboard/proxy.html$'),
    ]

    # Canonical dashboard index route.
    INDEX_ROUTES = [
        (httpProtocolTypes.HTTP, r'/dashboard/$'),
        (httpProtocolTypes.HTTPS, r'/dashboard/$'),
    ]

    def routes(self) -> List[Tuple[int, str]]:
        """Advertise every route (redirect aliases plus the index)."""
        all_routes = list(ProxyDashboard.REDIRECT_ROUTES)
        all_routes.extend(ProxyDashboard.INDEX_ROUTES)
        return all_routes

    def handle_request(self, request: HttpParser) -> None:
        """Serve the dashboard index page, or redirect aliases to it."""
        if request.path in (
                b'/dashboard',
                b'/dashboard/proxy.html',
        ):
            self.client.queue(permanentRedirectResponse(b'/dashboard/'))
            return
        if request.path == b'/dashboard/':
            index_path = os.path.join(
                self.flags.static_server_dir,
                'dashboard', 'proxy.html',
            )
            self.client.queue(
                self.serve_static_file(
                    index_path,
                    self.flags.min_compression_length,
                ),
            )
| abhinavsingh/proxy.py | proxy/dashboard/dashboard.py | Python | bsd-3-clause | 1,872 |
from uuid import uuid4
from django.test import TestCase
from casexml.apps.case.cleanup import claim_case, get_first_claim
from casexml.apps.case.mock import CaseBlock
from casexml.apps.case.util import post_case_blocks
from corehq.apps.case_search.models import CLAIM_CASE_TYPE
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.ota.utils import get_restore_user
from corehq.apps.users.models import CommCareUser
from corehq.form_processor.exceptions import CaseNotFound
from corehq.form_processor.models import CommCareCase
# Fixed test fixtures shared by every test in this module.
DOMAIN = 'test_domain'
USERNAME = 'lina.stern@ras.ru'
PASSWORD = 'hemato-encephalic'
# https://en.wikipedia.org/wiki/Lina_Stern
def index_to_dict(instance):
    """Serialize a case index into a dict of its four identifying fields,
    stringifying each value so dicts compare cleanly in assertions.
    """
    result = {}
    for key in ('identifier', 'referenced_type', 'referenced_id', 'relationship'):
        result[key] = str(getattr(instance, key))
    return result
class CaseClaimTests(TestCase):
    """Integration tests for claim_case()/get_first_claim().

    Every test runs against a single "host" case created in setUp();
    claiming it should produce an extension case owned by the test user.
    """

    def setUp(self):
        super(CaseClaimTests, self).setUp()
        self.domain = create_domain(DOMAIN)
        self.user = CommCareUser.create(DOMAIN, USERNAME, PASSWORD, None, None)
        self.restore_user = get_restore_user(DOMAIN, self.user, None)
        self.host_case_id = uuid4().hex
        self.host_case_name = 'Dmitri Bashkirov'
        self.host_case_type = 'person'
        self.create_case()

    def tearDown(self):
        # Delete the user before the domain so the deletion audit records
        # the right domain name.
        self.user.delete(self.domain.name, deleted_by=None)
        self.domain.delete()
        super(CaseClaimTests, self).tearDown()

    def create_case(self):
        # Submit a case block creating the host case that tests will claim.
        case_block = CaseBlock.deprecated_init(
            create=True,
            case_id=self.host_case_id,
            case_name=self.host_case_name,
            case_type=self.host_case_type,
            owner_id='in_soviet_russia_the_case_owns_you',
        ).as_xml()
        post_case_blocks([case_block], {'domain': DOMAIN})

    def assert_claim(self, claim=None, claim_id=None):
        # With no claim given, expect exactly one claim case in the domain
        # and load it; otherwise verify the supplied claim object.
        if claim is None:
            claim_ids = CommCareCase.objects.get_case_ids_in_domain(DOMAIN, CLAIM_CASE_TYPE)
            self.assertEqual(len(claim_ids), 1)
            claim = CommCareCase.objects.get_case(claim_ids[0], DOMAIN)
        if claim_id:
            self.assertEqual(claim.case_id, claim_id)
        self.assertEqual(claim.name, self.host_case_name)
        self.assertEqual(claim.owner_id, self.user.user_id)
        # The claim must be an extension case indexed against the host case.
        self.assertEqual([index_to_dict(i) for i in claim.indices], [{
            'identifier': 'host',
            'referenced_type': 'person',
            'referenced_id': self.host_case_id,
            'relationship': 'extension',
        }])

    def test_claim_case(self):
        """
        claim_case should create an extension case
        """
        claim_id = claim_case(DOMAIN, self.restore_user, self.host_case_id,
                              host_type=self.host_case_type, host_name=self.host_case_name)
        self.assert_claim(claim_id=claim_id)

    def test_claim_case_id_only(self):
        """
        claim_case should look up host case details if only ID is passed
        """
        claim_id = claim_case(DOMAIN, self.restore_user, self.host_case_id)
        self.assert_claim(claim_id=claim_id)

    def test_first_claim_one(self):
        """
        get_first_claim should return one claim
        """
        claim_id = claim_case(DOMAIN, self.restore_user, self.host_case_id,
                              host_type=self.host_case_type, host_name=self.host_case_name)
        claim = get_first_claim(DOMAIN, self.user.user_id, self.host_case_id)
        self.assert_claim(claim, claim_id)

    def test_first_claim_none(self):
        """
        get_first_claim should return None if not found
        """
        claim = get_first_claim(DOMAIN, self.user.user_id, self.host_case_id)
        self.assertIsNone(claim)

    def test_closed_claim(self):
        """
        get_first_claim should return None if claim case is closed
        """
        claim_id = claim_case(DOMAIN, self.restore_user, self.host_case_id,
                              host_type=self.host_case_type, host_name=self.host_case_name)
        self._close_case(claim_id)
        first_claim = get_first_claim(DOMAIN, self.user.user_id, self.host_case_id)
        self.assertIsNone(first_claim)

    def test_claim_case_other_domain(self):
        # Claiming from another domain must not expose the host case there:
        # the claim is created in the other domain, but looking it up in
        # that domain should fail.
        malicious_domain = 'malicious_domain'
        domain_obj = create_domain(malicious_domain)
        self.addCleanup(domain_obj.delete)
        claim_id = claim_case(malicious_domain, self.restore_user, self.host_case_id,
                              host_type=self.host_case_type, host_name=self.host_case_name)
        with self.assertRaises(CaseNotFound):
            CommCareCase.objects.get_case(claim_id, malicious_domain)

    def _close_case(self, case_id):
        # Helper: submit a case block that closes the given case.
        case_block = CaseBlock.deprecated_init(
            create=False,
            case_id=case_id,
            close=True
        ).as_xml()
        post_case_blocks([case_block], {'domain': DOMAIN})
| dimagi/commcare-hq | corehq/apps/ota/tests/test_claim.py | Python | bsd-3-clause | 4,929 |
from traceback import format_exc
from django.core.management.base import BaseCommand
from ...toolbox import send_scheduled_messages
class Command(BaseCommand):
    """Management command that pushes out pending/errored scheduled messages."""

    help = 'Sends scheduled messages (both in pending and error statuses).'

    def add_arguments(self, parser):
        # Optional numeric filter; None means "send for all priorities".
        parser.add_argument(
            '--priority', action='store', dest='priority', default=None,
            help='Allows to filter scheduled messages by a priority number. Defaults to None.')

    def handle(self, *args, **options):
        priority = options.get('priority', None)
        priority_str = f'with priority {priority} ' if priority is not None else ''
        self.stdout.write(f'Sending scheduled messages {priority_str} ...\n')
        try:
            send_scheduled_messages(priority=priority)
        except Exception as e:
            # Surface the full traceback on stderr; the command itself
            # deliberately does not re-raise.
            self.stderr.write(self.style.ERROR(f'Error on send: {e}\n{format_exc()}'))
        else:
            self.stdout.write('Sending done.\n')
| idlesign/django-sitemessage | sitemessage/management/commands/sitemessage_send_scheduled.py | Python | bsd-3-clause | 1,014 |
import logging
from cumulusci.cli.config import CliConfig
from cumulusci.core.config import TaskConfig
from cumulusci.core.exceptions import TaskNotFoundError
from cumulusci.core.exceptions import TaskOptionsError
from cumulusci.core.tasks import CURRENT_TASK
from cumulusci.core.utils import import_class
from cumulusci.tasks.robotframework.robotframework import Robot
from robot.api import logger
from robot.libraries.BuiltIn import BuiltIn
from simple_salesforce import Salesforce
class CumulusCI(object):
    """ Library for accessing CumulusCI for the local git project
    This library allows Robot Framework tests to access credentials to a
    Salesforce org created by CumulusCI, including Scratch Orgs. It also
    exposes the core logic of CumulusCI including interactions with the
    Salesforce API's and project specific configuration including custom
    and customized tasks and flows.
    Initialization requires a single argument, the org name for the target
    CumulusCI org. If running your tests via cci's robot task (recommended),
    you can initialize the library in your tests taking advantage of the
    variable set by the robot task:
    | ``*** Settings ***``
    |
    | Library cumulusci.robotframework.CumulusCI ${ORG}
    """
    ROBOT_LIBRARY_SCOPE = "GLOBAL"

    def __init__(self, org_name=None):
        if not org_name:
            org_name = 'dev'
        self.org_name = org_name
        # Lazily-initialized caches; see the corresponding properties.
        self._project_config = None
        self._org = None
        self._sf = None
        self._tooling = None
        # Turn off info logging of all http requests
        logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARN)

    @property
    def project_config(self):
        """Project configuration, borrowed from the running Robot task if any."""
        if self._project_config is None:
            if CURRENT_TASK and isinstance(CURRENT_TASK, Robot):
                # If CumulusCI is running a task, use that task's config
                return CURRENT_TASK.project_config
            else:
                logger.console('Initializing CumulusCI config\n')
                self._project_config = CliConfig().project_config
        return self._project_config

    def set_project_config(self, project_config):
        """Override the cached project config (useful from test fixtures)."""
        logger.console('\n')
        self._project_config = project_config

    @property
    def keychain(self):
        return self.project_config.keychain

    @property
    def org(self):
        """Org config, borrowed from the running Robot task if any."""
        if self._org is None:
            if CURRENT_TASK and isinstance(CURRENT_TASK, Robot):
                # If CumulusCI is running a task, use that task's org
                return CURRENT_TASK.org_config
            else:
                self._org = self.keychain.get_org(self.org_name)
        return self._org

    @property
    def sf(self):
        """Lazily-created simple_salesforce session against the REST API."""
        if self._sf is None:
            self._sf = self._init_api()
        return self._sf

    @property
    def tooling(self):
        """Lazily-created simple_salesforce session against the Tooling API."""
        if self._tooling is None:
            self._tooling = self._init_api('tooling/')
        return self._tooling

    def set_login_url(self):
        """ Sets the LOGIN_URL variable in the suite scope which will
        automatically log into the target Salesforce org.
        Typically, this is run during Suite Setup
        """
        BuiltIn().set_suite_variable('${LOGIN_URL}', self.org.start_url)

    def get_org_info(self):
        """ Returns a dictionary of the org information for the current target
        Salesforce org
        """
        return self.org.config

    def login_url(self, org=None):
        """ Returns the login url which will automatically log into the target
        Salesforce org. By default, the org_name passed to the library
        constructor is used but this can be overridden with the org option
        to log into a different org.
        """
        if org is None:
            org = self.org
        else:
            org = self.keychain.get_org(org)
        return org.start_url

    def run_task(self, task_name, **options):
        """ Runs a named CumulusCI task for the current project with optional
        support for overriding task options via kwargs.
        Examples:
        | =Keyword= | =task_name= | =task_options= | =comment= |
        | Run Task | deploy | | Run deploy with standard options |
        | Run Task | deploy | path=path/to/some/metadata | Run deploy with custom path |
        """
        task_config = self.project_config.get_task(task_name)
        class_path = task_config.class_path
        logger.console('\n')
        task_class, task_config = self._init_task(class_path, options, task_config)
        return self._run_task(task_class, task_config)

    def run_task_class(self, class_path, **options):
        """ Runs a CumulusCI task class with task options via kwargs.
        Use this keyword to run logic from CumulusCI tasks which have not
        been configured in the project's cumulusci.yml file. This is
        most useful in cases where a test needs to use task logic for
        logic unique to the test and thus not worth making into a named
        task for the project
        Examples:
        | =Keyword= | =task_class= | =task_options= |
        | Run Task Class | cumulusci.task.utils.DownloadZip | url=http://test.com/test.zip dir=test_zip |
        """
        logger.console('\n')
        task_class, task_config = self._init_task(class_path, options, TaskConfig())
        return self._run_task(task_class, task_config)

    def _init_api(self, base_url=None):
        """Build a Salesforce API session for the target org; ``base_url``
        (e.g. 'tooling/') is appended to select a sub-API."""
        api_version = self.project_config.project__package__api_version
        rv = Salesforce(
            instance=self.org.instance_url.replace('https://', ''),
            session_id=self.org.access_token,
            version=api_version,
        )
        if base_url is not None:
            rv.base_url += base_url
        return rv

    def _init_task(self, class_path, options, task_config):
        """Import the task class and merge kwarg option overrides in."""
        task_class = import_class(class_path)
        task_config = self._parse_task_options(options, task_class, task_config)
        return task_class, task_config

    def _parse_task_options(self, options, task_class, task_config):
        """Validate option overrides against the task class and apply them."""
        if 'options' not in task_config.config:
            task_config.config['options'] = {}
        # Parse options and add to task config
        if options:
            for name, value in options.items():
                # Validate the option
                if name not in task_class.task_options:
                    # BUGFIX: this previously referenced an undefined
                    # `task_name` variable, which raised NameError instead
                    # of the intended TaskOptionsError.
                    raise TaskOptionsError(
                        'Option "{}" is not available for task {}'.format(
                            name,
                            task_class.__name__,
                        ),
                    )
                # Override the option in the task config
                task_config.config['options'][name] = value
        return task_config

    def _run_task(self, task_class, task_config):
        """Instantiate and execute the task, returning its return_values."""
        task = task_class(self.project_config,
                          task_config, org_config=self.org)
        task()
        return task.return_values
| e02d96ec16/CumulusCI | cumulusci/robotframework/CumulusCI.py | Python | bsd-3-clause | 7,324 |
import inspect
import logging
import threading
from django.apps import apps
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import ImproperlyConfigured
from django.db import connection
from django.db.migrations.operations.base import Operation
from django.utils.translation import lazy
from .exceptions import TemplateSchemaActivation, SchemaNotFound
from .signals import find_schema
LOGGER = logging.getLogger(__name__)
# NullHandler silences "no handlers could be found" warnings for library users.
LOGGER.addHandler(logging.NullHandler())
# Per-thread storage for the currently active schema name.
_thread_locals = threading.local()
def get_schema_model():
    """
    Return the class that is currently set as the schema model.
    """
    try:
        model_path = settings.BOARDINGHOUSE_SCHEMA_MODEL
    except AttributeError:
        # The setting itself is missing.
        raise ImproperlyConfigured("BOARDINGHOUSE_SCHEMA_MODEL is not set: is 'boardinghouse' in your INSTALLED_APPS?")
    try:
        return apps.get_model(model_path)
    except ValueError:
        raise ImproperlyConfigured("BOARDINGHOUSE_SCHEMA_MODEL must be of the form 'app_label.model_name'")
    except LookupError:
        raise ImproperlyConfigured(
            "BOARDINGHOUSE_SCHEMA_MODEL refers to model '{0!s}' that has not been installed".format(
                settings.BOARDINGHOUSE_SCHEMA_MODEL))
def _get_search_path():
    """Return the name of the schema the connection currently has active.

    ``current_schema()`` reports the first (resolvable) schema on the
    connection's search path.
    """
    cursor = connection.cursor()
    try:
        cursor.execute('SELECT current_schema()')
        return cursor.fetchone()[0]
    finally:
        # Previously the cursor leaked if execute()/fetchone() raised.
        cursor.close()
def _set_search_path(search_path):
    """Activate ``search_path`` on the connection, keeping the public
    schema on the path so shared tables stay visible."""
    cursor = connection.cursor()
    statement = 'SET search_path TO %s,{0}'.format(settings.PUBLIC_SCHEMA)
    cursor.execute(statement, [search_path])
    cursor.close()
def _schema_exists(schema_name, cursor=None):
    """Return True when the named schema exists in the database.

    When no cursor is supplied, open one and recurse with it, making
    sure it is closed again afterwards.
    """
    if not cursor:
        cursor = connection.cursor()
        try:
            return _schema_exists(schema_name, cursor)
        finally:
            cursor.close()
    cursor.execute('''SELECT schema_name
           FROM information_schema.schemata
           WHERE schema_name = %s''',
                   [schema_name])
    return bool(cursor.fetchone())
def get_active_schema_name():
    """
    Get the currently active schema.
    This requires a database query to ask it what the current `search_path` is.
    """
    active_schema = getattr(_thread_locals, 'schema', None)
    if not active_schema:
        # _get_search_path() already returns the schema *name*; the previous
        # code indexed [0], which took the first character of the name and
        # could never match a real schema (compare activate_schema(), which
        # compares the full return value).
        reported_schema = _get_search_path()
        if _get_schema(reported_schema):
            active_schema = reported_schema
        else:
            active_schema = None
        _thread_locals.schema = active_schema
    return active_schema
def get_active_schema():
    """
    Get the (internal) name of the currently active schema.
    """
    name = get_active_schema_name()
    return _get_schema(name)
def get_active_schemata():
    """
    Get a (cached) list of all currently active schemata.
    """
    cached = cache.get('active-schemata')
    if cached is not None:
        return cached
    schemata = get_schema_model().objects.active()
    cache.set('active-schemata', schemata)
    return schemata
def _get_schema(schema_name):
    """
    Get the matching active schema object for the given name,
    if it exists.

    The first truthy response from a ``find_schema`` signal receiver wins.
    """
    responses = find_schema.send(sender=None, schema=schema_name)
    for _handler, response in responses:
        if response:
            return response
    return None
def activate_schema(schema_name):
    """
    Activate the named schema: executes, on the database connection,
    something like:

    .. code:: sql

        SET search_path TO "foo",public;

    Signals are sent before and after activation, and the activation is
    verified by reading the search path back.

    Must be passed a string: the internal name of the schema to activate.
    """
    from .signals import schema_pre_activate, schema_post_activate
    # The template schema may never be activated through this path.
    if schema_name == settings.TEMPLATE_SCHEMA:
        raise TemplateSchemaActivation()
    schema_pre_activate.send(sender=None, schema_name=schema_name)
    _set_search_path(schema_name)
    active = _get_search_path()
    if active != schema_name:
        raise SchemaNotFound('Schema activation failed. Expected "{0}", saw "{1}"'.format(
            schema_name, active,
        ))
    schema_post_activate.send(sender=None, schema_name=schema_name)
    _thread_locals.schema = schema_name
def activate_template_schema():
    """
    Switch the connection onto the template schema.

    You probably don't want to do this. Sometimes you do (like for instance
    to apply migrations).
    """
    from .signals import schema_pre_activate, schema_post_activate
    # Clear the per-thread cache first: the template schema is never "active".
    _thread_locals.schema = None
    template = settings.TEMPLATE_SCHEMA
    schema_pre_activate.send(sender=None, schema_name=template)
    _set_search_path(template)
    active = _get_search_path()
    if active != template:
        raise SchemaNotFound('Template schema was not activated. It seems "{0}" is active.'.format(_get_search_path()))
    schema_post_activate.send(sender=None, schema_name=template)
def get_template_schema():
    """Return an (unsaved) schema model instance for the template schema."""
    schema_model = get_schema_model()
    return schema_model(settings.TEMPLATE_SCHEMA)
def deactivate_schema(schema=None):
    """
    Deactivate the provided (or current) schema.

    Resets the search path to the default ``"$user",public`` and clears
    the per-thread active-schema cache. The ``schema`` argument is kept
    for interface compatibility and is not used.
    """
    from .signals import schema_pre_activate, schema_post_activate
    cursor = connection.cursor()
    schema_pre_activate.send(sender=None, schema_name=None)
    command = 'SET search_path TO "$user",{0}'.format(settings.PUBLIC_SCHEMA)
    cursor.execute(command)
    schema_post_activate.send(sender=None, schema_name=None)
    _thread_locals.schema = None
    cursor.close()
#: These models are required to be shared by the system.
REQUIRED_SHARED_MODELS = [
    'auth.user',
    'auth.permission',
    'auth.group',
    'boardinghouse.schema',
    'sites.site',
    'sessions.session',
    'contenttypes.contenttype',
    'admin.logentry',
    'migrations.migration',
    # In the case these are not the default values.
    lazy(lambda: settings.BOARDINGHOUSE_SCHEMA_MODEL.lower())(),
    lazy(lambda: settings.AUTH_USER_MODEL.lower())(),
]
#: Database tables that must always live in the shared (public) schema.
REQUIRED_SHARED_TABLES = [
    'django_migrations',
]
def _is_join_model(model):
    """
    A join model is one with more than one field, where every field is
    either the primary key or a relation.

    This may be a satisfactory definition, as a through model,
    which has non-related fields, must have been explicitly declared,
    and all automatic join models will have just (pk, from, to).
    """
    fields = model._meta.fields
    if len(fields) <= 1:
        return False
    return all(field.primary_key or field.remote_field for field in fields)
def is_shared_model(model):
    """
    Is the model (or instance of a model) one that should be in the
    public/shared schema?
    """
    if model._is_shared_model:
        return True
    app_model = '{m.app_label}.{m.model_name}'.format(m=model._meta).lower()
    # These should be case insensitive!
    if app_model in REQUIRED_SHARED_MODELS:
        return True
    explicitly_shared = [name.lower() for name in settings.SHARED_MODELS]
    if app_model in explicitly_shared:
        return True
    # Sometimes, we want a join table to be private.
    explicitly_private = [name.lower() for name in settings.PRIVATE_MODELS]
    if app_model in explicitly_private:
        return False
    # if all fields are auto or fk, then we are a join model,
    # and if all related objects are shared, then we must
    # also be shared, unless we were explicitly marked as private
    # above.
    if _is_join_model(model):
        related_models = [
            field.remote_field.model
            for field in model._meta.fields
            if field.remote_field
        ]
        return all(is_shared_model(related) for related in related_models)
    return False
def _get_models(apps, stack):
    """
    If we are in a migration operation, we need to look in that for models.
    We really only should be injecting ourselves if we find a frame that contains
    a database_(forwards|backwards) function.
    Otherwise, we can look in the `apps` object passed in.
    """
    for frame in stack:
        frame_locals = frame[0].f_locals
        # A migration Operation.database_forwards() frame is recognized by
        # its function name, the locals it is expected to have, and `self`
        # being an Operation instance.
        if frame[3] == 'database_forwards' and all(
            local in frame_locals for local in ('from_state', 'to_state', 'schema_editor', 'self')
        ) and isinstance(frame_locals['self'], Operation):
            # Should this be from_state, or to_state, or should we look in both?
            from_state = frame_locals['from_state']
            to_state = frame_locals['to_state']
            # Collect models from both historical states, when available.
            models = set()
            if to_state.apps:
                models = models.union(to_state.apps.get_models())
            if from_state.apps:
                models = models.union(from_state.apps.get_models())
            return models
    return apps.get_models()
def _get_join_model(table, table_map):
    """
    Given a database table, and a mapping of tables to models, look for a
    many-to-many field on models that uses that database table.

    Currently, it only looks within models that have a matching prefix.
    """
    for db_table, model in table_map.items():
        if not table.startswith(db_table):
            continue
        for field in model._meta.local_many_to_many:
            # Older Django exposes the relation as `rel`, newer as
            # `remote_field`.
            rel = field.remote_field if hasattr(field, 'remote_field') else field.rel
            through = rel.through
            if through._meta.db_table == table:
                return through
    return None
def is_shared_table(table, apps=apps):
    """
    Is the model from the provided database table name shared?

    We may need to look and see if we can work out which models
    this table joins.
    """
    if table in REQUIRED_SHARED_TABLES:
        return True
    # Build a mapping of table names to (non-proxy) models.
    models = _get_models(apps, inspect.stack())
    table_map = {
        model._meta.db_table: model
        for model in models
        if not model._meta.proxy
    }
    # If we have a match, see if that one is shared.
    if table in table_map:
        return is_shared_model(table_map[table])
    # It may be a join table.
    through = _get_join_model(table, table_map)
    if through:
        return is_shared_model(through)
    # Not a join table: just assume that it's not shared.
    return False
# Internal helper functions.
def _table_exists(table_name, schema=None):
    """Return True when the table exists in ``schema`` (public by default)."""
    target_schema = schema or settings.PUBLIC_SCHEMA
    cursor = connection.cursor()
    cursor.execute("""SELECT *
                        FROM information_schema.tables
                       WHERE table_name = %s
                       AND table_schema = %s""", [table_name, target_schema])
    return bool(cursor.fetchone())
def _schema_table_exists():
    """True when the schema model's own table exists in the public schema."""
    schema_table = get_schema_model()._meta.db_table
    return _table_exists(schema_table)
| schinckel/django-boardinghouse | boardinghouse/schema.py | Python | bsd-3-clause | 10,412 |
from django.contrib.sites.models import RequestSite
from django.shortcuts import render
from django.conf import settings
from django.db.models import Q
from django.core.urlresolvers import reverse
from airmozilla.main.models import Channel, Event
from airmozilla.main.views import is_contributor
from airmozilla.base.utils import (
paginate
)
from airmozilla.main.context_processors import get_featured_events
def categories_feed(request):
    """Render the Roku categories XML feed, filtered to events the current
    viewer is allowed to see (public / contributor / staff)."""
    context = {}
    privacy_filter = {}
    privacy_exclude = {}
    if request.user.is_active:
        if is_contributor(request.user):
            # feed_privacy = 'contributors'
            privacy_exclude = {'privacy': Event.PRIVACY_COMPANY}
        # else:
        #     feed_privacy = 'company'
    else:
        privacy_filter = {'privacy': Event.PRIVACY_PUBLIC}
        # feed_privacy = 'public'
    events = Event.objects.scheduled().approved()
    live_events = Event.objects.live().approved()
    # Apply the privacy rules to both querysets; at most one of
    # privacy_filter / privacy_exclude is non-empty.
    if privacy_filter:
        events = events.filter(**privacy_filter)
        live_events = live_events.filter(**privacy_filter)
    elif privacy_exclude:
        events = events.exclude(**privacy_exclude)
        live_events = live_events.exclude(**privacy_exclude)
    channels = get_channels(events)
    context['channels'] = channels
    context['live_events'] = live_events
    # Build absolute URLs using the scheme the client connected with.
    prefix = request.is_secure() and 'https' or 'http'
    root_url = '%s://%s' % (prefix, RequestSite(request).domain)
    def abs_url_maker(viewname, *args, **kwargs):
        return root_url + reverse(viewname, args=args, kwargs=kwargs)
    context['abs_url'] = abs_url_maker
    context['get_media_info'] = get_media_info
    response = render(request, 'roku/categories.xml', context)
    response['Content-Type'] = 'text/xml'
    return response
def get_channels(events, parent=None):
    """Return channels under ``parent`` (top-level if None) that have at
    least one matching event or at least one sub-channel.

    The default channel always sorts first; the rest sort by name,
    case-insensitively.
    """
    channels = []
    channels_qs = Channel.objects.all()
    if parent is None:
        channels_qs = channels_qs.filter(parent__isnull=True)
    else:
        channels_qs = channels_qs.filter(parent=parent)
    for channel in channels_qs:
        event_count = events.filter(channels=channel).count()
        subchannel_count = Channel.objects.filter(parent=channel).count()
        if event_count or subchannel_count:
            channels.append(channel)
    def sorter(x, y):
        # BUGFIX: the previous comparator only special-cased `x` being the
        # default channel, never `y`, so it was not antisymmetric and the
        # resulting order depended on comparison direction.
        if x.slug == settings.DEFAULT_CHANNEL_SLUG:
            return -1
        if y.slug == settings.DEFAULT_CHANNEL_SLUG:
            return 1
        return cmp(x.name.lower(), y.name.lower())
    channels.sort(sorter)
    return channels
def get_media_info(event):
    """Return a dict with a streamable ``url`` and ``format`` for the event.

    Supports Vid.ly and HLS templates; returns None for any other (or
    missing) template, or when required template environment keys are
    absent.
    """
    template = event.template
    template_name = template.name.lower() if template else ''
    if 'vid.ly' in template_name:
        tag = event.template_environment['tag']
        return {
            # 'url': 'http://vid.ly/%s?content=video&format=webm' % tag,
            # 'format': 'webm'
            # NOTE that it's deliberately set to the HTTP URL. Not HTTPS :(
            'url': 'http://vid.ly/%s?content=video&format=mp4' % tag,
            'format': 'mp4'
        }
    if 'hls' in template_name:
        try:
            file = event.template_environment['file']
        except KeyError:
            return None
        wowzaapp = event.template_environment.get('wowzaapp') or 'Edgecast'
        return {
            # it's important to use HTTP here :(
            'url': (
                'http://wowza1.cdn.mozilla.net/%s/ngrp:%s_all'
                '/playlist.m3u8' % (wowzaapp, file)
            ),
            'format': 'hls',
        }
    return None
def event_feed(request, id):
    """Return a feed containing exactly one event, looked up by id."""
    context = {
        'events': Event.objects.filter(id=id),
        'get_media_info': get_media_info,
    }
    response = render(request, 'roku/channel.xml', context)
    response['Content-Type'] = 'text/xml'
    return response
def channel_feed(request, slug):
    """Feed of archived events for a channel and its direct children,
    filtered by the viewer's privacy level."""
    # The slug might belong to a parent channel; include its children too.
    channels = Channel.objects.filter(
        Q(slug=slug) |
        Q(parent__slug=slug)
    )
    events = Event.objects.archived().approved().filter(channels__in=channels)
    if request.user.is_active:
        if is_contributor(request.user):
            # Contributors see everything except company-private events.
            events = events.exclude(privacy=Event.PRIVACY_COMPANY)
    else:
        # Anonymous viewers only see public events.
        events = events.filter(privacy=Event.PRIVACY_PUBLIC)
    events = events.order_by('-start_time')
    paged = paginate(events, 1, 100)
    return render_channel_events(paged, request)
def trending_feed(request):
    """Feed of currently trending (featured) events across all channels."""
    featured = get_featured_events(
        None,  # across all channels
        request.user,
        length=settings.TRENDING_ROKU_COUNT,
    )
    return render_channel_events(featured, request)
def render_channel_events(events, request):
    """Render the shared Roku channel XML template for the given events."""
    context = {
        'events': events,
        'get_media_info': get_media_info,
    }
    response = render(request, 'roku/channel.xml', context)
    response['Content-Type'] = 'text/xml'
    return response
| kenrick95/airmozilla | airmozilla/roku/views.py | Python | bsd-3-clause | 5,212 |
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Schema of the JSON summary file written out by the GM tool.
This must be kept in sync with the kJsonKey_ constants in gm_expectations.cpp !
"""
__author__ = 'Elliot Poger'
# system-level imports
import io
import json
import os
# Key strings used in GM results JSON files (both expected-results.json and
# actual-results.json).
#
# These constants must be kept in sync with the kJsonKey_ constants in
# gm_expectations.cpp !
JSONKEY_ACTUALRESULTS = 'actual-results'
# Tests whose results failed to match expectations.
JSONKEY_ACTUALRESULTS_FAILED = 'failed'
# Tests whose results failed to match expectations, but IGNOREFAILURE causes
# us to take them less seriously.
JSONKEY_ACTUALRESULTS_FAILUREIGNORED = 'failure-ignored'
# Tests for which we do not have any expectations. They may be new tests that
# we haven't had a chance to check in expectations for yet, or we may have
# consciously decided to leave them without expectations because we are unhappy
# with the results (although we should try to move away from that, and instead
# check in expectations with the IGNOREFAILURE flag set).
JSONKEY_ACTUALRESULTS_NOCOMPARISON = 'no-comparison'
# Tests whose results matched their expectations.
JSONKEY_ACTUALRESULTS_SUCCEEDED = 'succeeded'
# Top-level key holding the per-test expectations dictionary.
JSONKEY_EXPECTEDRESULTS = 'expected-results'
# One or more [HashType/DigestValue] pairs representing valid results for this
# test. Typically, there will just be one pair, but we allow for multiple
# expectations, and the test will pass if any one of them is matched.
JSONKEY_EXPECTEDRESULTS_ALLOWEDDIGESTS = 'allowed-digests'
# Optional: one or more integers listing Skia bugs (under
# https://code.google.com/p/skia/issues/list ) that pertain to this expectation.
JSONKEY_EXPECTEDRESULTS_BUGS = 'bugs'
# If IGNOREFAILURE is set to True, a failure of this test will be reported
# within the FAILUREIGNORED section (thus NOT causing the buildbots to go red)
# rather than the FAILED section (which WOULD cause the buildbots to go red).
JSONKEY_EXPECTEDRESULTS_IGNOREFAILURE = 'ignore-failure'
# Optional: a free-form text string with human-readable information about
# this expectation.
JSONKEY_EXPECTEDRESULTS_NOTES = 'notes'
# Optional: boolean indicating whether this expectation was reviewed/approved
# by a human being.
# If True: a human looked at this image and approved it.
# If False: this expectation was committed blind. (In such a case, please
# add notes indicating why!)
# If absent: this expectation was committed by a tool that didn't enforce human
# review of expectations.
JSONKEY_EXPECTEDRESULTS_REVIEWED = 'reviewed-by-human'
# Allowed hash types for test expectations.
JSONKEY_HASHTYPE_BITMAP_64BITMD5 = 'bitmap-64bitMD5'
# Root directory where the buildbots store their actually-generated images...
# as a publicly readable HTTP URL:
GM_ACTUALS_ROOT_HTTP_URL = (
    'http://chromium-skia-gm.commondatastorage.googleapis.com/gm')
# as a GS URL that allows credential-protected write access:
GM_ACTUALS_ROOT_GS_URL = 'gs://chromium-skia-gm/gm'
# Root directory where buildbots store skimage actual results json files.
SKIMAGE_ACTUALS_BASE_URL = (
    'http://chromium-skia-gm.commondatastorage.googleapis.com/skimage/actuals')
# Root directory inside trunk where skimage expectations are stored.
SKIMAGE_EXPECTATIONS_ROOT = os.path.join('expectations', 'skimage')
# Pattern used to assemble each image's filename.
# Raw string so the backslash reaches the regex engine intact ('\.' is an
# invalid string escape and triggers a DeprecationWarning otherwise).
IMAGE_FILENAME_PATTERN = r'(.+)_(.+)\.png'  # matches (testname, config)
def CreateGmActualUrl(test_name, hash_type, hash_digest,
                      gm_actuals_root_url=GM_ACTUALS_ROOT_HTTP_URL):
  """Return the URL we can use to download a particular version of
  the actually-generated image for this particular GM test.

  test_name: name of the test, e.g. 'perlinnoise'
  hash_type: string indicating the hash type used to generate hash_digest,
             e.g. JSONKEY_HASHTYPE_BITMAP_64BITMD5
  hash_digest: the hash digest of the image to retrieve
  gm_actuals_root_url: root url where actual images are stored
  """
  return '%s/%s/%s/%s.png' % (gm_actuals_root_url, hash_type, test_name,
                              hash_digest)
def LoadFromString(file_contents):
  """Loads the JSON summary written out by the GM tool.

  Returns a dictionary keyed by the values listed as JSONKEY_ constants
  above."""
  # TODO(epoger): we should add a version number to the JSON file to ensure
  # that the writer and reader agree on the schema (raising an exception
  # otherwise).
  return json.loads(file_contents)
def LoadFromFile(file_path):
  """Loads the JSON summary written out by the GM tool.

  Returns a dictionary keyed by the values listed as JSONKEY_ constants
  above."""
  # Use a context manager so the file handle is closed promptly, even on
  # error; the previous code left the handle open until garbage collection.
  with open(file_path, 'r') as infile:
    return LoadFromString(infile.read())
def WriteToFile(json_dict, file_path):
  """Writes the JSON summary in json_dict out to file_path.

  The file is written Unix-style (each line ends with just LF, not CRLF);
  see https://code.google.com/p/skia/issues/detail?id=1815 for reasons."""
  with io.open(file_path, mode='w', newline='', encoding='utf-8') as outfile:
    # BUGFIX: json.dumps takes no file argument; the previous call passed
    # `outfile` as the second positional parameter (skipkeys), silently
    # enabling skipkeys=True.
    outfile.write(unicode(json.dumps(json_dict, sort_keys=True,
                                     indent=2)))
| trevorlinton/skia | gm/gm_json.py | Python | bsd-3-clause | 5,464 |
from .. utils import TranspileTestCase, UnaryOperationTestCase, BinaryOperationTestCase, InplaceOperationTestCase
class FloatTests(TranspileTestCase):
    """Transpiler tests for the float type: each snippet is executed under
    both CPython and the transpiled runtime, and the outputs are compared.
    """

    def test_setattr(self):
        # Floats must reject attribute assignment with AttributeError.
        self.assertCodeExecution("""
            x = 3.14159
            try:
                x.attr = 42
            except AttributeError as err:
                print(err)
            """)

    def test_getattr(self):
        # Floats must reject unknown attribute access with AttributeError.
        self.assertCodeExecution("""
            x = 3.14159
            try:
                print(x.attr)
            except AttributeError as err:
                print(err)
            """)

    def test_repr(self):
        # repr/str formatting across magnitudes, signed zero, nan and inf.
        self.assertCodeExecution("""
            x = 350000000000000000.0
            print(x)
            x = 3500.0
            print(x)
            x = 35.0
            print(x)
            x = 3.5
            print(x)
            x = 0.35
            print(x)
            x = 0.035
            print(x)
            x = 0.0035
            print(x)
            x = 0.00035
            print(x)
            x = 0.000035
            print(x)
            x = 0.0000035
            print(x)
            x = 0.00000000000000035
            print(x)
            x = 0.0
            print(x)
            x = float('-0.0')
            print(x)
            x = float('nan')
            print(x)
            x = float('inf')
            print(x)
            x = float('-inf')
            print(x)
            """)

    def test_negative_zero_constant(self):
        # -0.0 as a literal must keep its sign distinct from 0.0.
        self.assertCodeExecution("""
            x = -0.0
            y = 0.0
            print(x, y)
            """)

    def test_is_integer(self):
        # float.is_integer() including nan/inf edge cases.
        self.assertCodeExecution("""
            x = 0.0
            print(x.is_integer())
            x = 3.14
            print(x.is_integer())
            x = -1.0
            print(x.is_integer())
            x = -62.5
            print(x.is_integer())
            x = float('nan')
            print(x.is_integer())
            x = float('inf')
            print(x.is_integer())
            x = float('-inf')
            print(x.is_integer())
            """)

    def test_hex(self):
        # float.hex() over extreme magnitudes, subnormals, nan and inf;
        # the snippet is generated from the template for each value.
        numbers = [
            0e0, -0e0, 10000152587890625e-16, -566e85,
            -87336362425182547697e-280, 4.9406564584124654e-324,
            'nan', 'inf', '-inf'
        ]
        template = """
            x = float('{}')
            print(x.hex())
            """
        code = '\n'.join(template.format(number) for number in numbers)
        self.assertCodeExecution(code)
class UnaryFloatOperationTests(UnaryOperationTestCase, TranspileTestCase):
    """Auto-generated unary-operator tests for ``float``.

    ``UnaryOperationTestCase`` generates one test per unary operator for
    ``data_type``; names in ``not_implemented`` are expected failures.
    """
    data_type = 'float'
    # ~float is a TypeError in CPython; the transpiler does not yet match it.
    not_implemented = [
        'test_unary_invert',
    ]
class BinaryFloatOperationTests(BinaryOperationTestCase, TranspileTestCase):
    """Auto-generated binary-operator tests for ``float``.

    ``BinaryOperationTestCase`` generates a test for every (operator,
    right-operand-type) pair; every name listed in ``not_implemented`` is a
    known gap in the transpiled runtime and is treated as an expected
    failure rather than an error.
    """
    data_type = 'float'
    not_implemented = [
        'test_add_class',
        'test_add_frozenset',
        'test_and_class',
        'test_and_frozenset',
        'test_direct_eq_bytes',
        'test_direct_ge_bytes',
        'test_direct_gt_bytes',
        'test_direct_le_bytes',
        'test_direct_lt_bytes',
        'test_direct_ne_bytes',
        'test_direct_eq_frozenset',
        'test_direct_ge_frozenset',
        'test_direct_gt_frozenset',
        'test_direct_le_frozenset',
        'test_direct_lt_frozenset',
        'test_direct_ne_frozenset',
        'test_eq_class',
        'test_eq_frozenset',
        'test_ge_class',
        'test_ge_frozenset',
        'test_gt_class',
        'test_gt_frozenset',
        'test_le_class',
        'test_le_frozenset',
        'test_lshift_class',
        'test_lshift_frozenset',
        'test_lt_class',
        'test_lt_frozenset',
        'test_modulo_class',
        'test_modulo_complex',
        'test_modulo_frozenset',
        'test_multiply_bytearray',
        'test_multiply_bytes',
        'test_multiply_class',
        'test_multiply_complex',
        'test_multiply_frozenset',
        'test_multiply_NotImplemented',
        'test_multiply_range',
        'test_ne_class',
        'test_ne_frozenset',
        'test_or_class',
        'test_or_frozenset',
        'test_power_class',
        'test_power_complex',
        'test_power_float',
        'test_power_frozenset',
        'test_rshift_class',
        'test_rshift_frozenset',
        'test_subscr_bool',
        'test_subscr_bytearray',
        'test_subscr_bytes',
        'test_subscr_class',
        'test_subscr_complex',
        'test_subscr_dict',
        'test_subscr_float',
        'test_subscr_frozenset',
        'test_subscr_int',
        'test_subscr_list',
        'test_subscr_None',
        'test_subscr_NotImplemented',
        'test_subscr_range',
        'test_subscr_set',
        'test_subscr_slice',
        'test_subscr_str',
        'test_subscr_tuple',
        'test_subtract_class',
        'test_subtract_complex',
        'test_subtract_frozenset',
        'test_true_divide_class',
        'test_true_divide_complex',
        'test_true_divide_frozenset',
        'test_xor_class',
        'test_xor_frozenset',
    ]
class InplaceFloatOperationTests(InplaceOperationTestCase, TranspileTestCase):
    """Auto-generated in-place (``+=``, ``*=``, ...) operator tests for
    ``float``.

    Names in ``not_implemented`` are known gaps in the transpiled runtime
    and are treated as expected failures.
    """
    data_type = 'float'
    not_implemented = [
        'test_add_class',
        'test_add_complex',
        'test_add_frozenset',
        'test_and_class',
        'test_and_frozenset',
        'test_floor_divide_class',
        'test_floor_divide_complex',
        'test_floor_divide_frozenset',
        'test_lshift_class',
        'test_lshift_frozenset',
        'test_modulo_class',
        'test_modulo_complex',
        'test_modulo_frozenset',
        'test_multiply_bytearray',
        'test_multiply_bytes',
        'test_multiply_class',
        'test_multiply_complex',
        'test_multiply_frozenset',
        'test_multiply_list',
        'test_multiply_NotImplemented',
        'test_multiply_range',
        'test_multiply_str',
        'test_multiply_tuple',
        'test_or_class',
        'test_or_frozenset',
        'test_power_class',
        'test_power_complex',
        'test_power_float',
        'test_power_frozenset',
        'test_rshift_class',
        'test_rshift_frozenset',
        'test_subtract_class',
        'test_subtract_complex',
        'test_subtract_frozenset',
        'test_true_divide_class',
        'test_true_divide_complex',
        'test_true_divide_frozenset',
        'test_xor_class',
        'test_xor_frozenset',
    ]
| pombredanne/voc | tests/datatypes/test_float.py | Python | bsd-3-clause | 6,425 |
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand
from twisted.conch.interfaces import ISession
from twisted.internet import reactor
from twisted.python import components
from brigitte.gitserver.server import GitSession, GitConchUser, GitServer
class Command(BaseCommand):
    """Django management command that runs brigitte's Twisted git-over-SSH
    server in the foreground."""
    help = 'Starts the GitServer for brigitte.'
    def handle(self, *args, **options):
        # Register GitSession as the ISession adapter for GitConchUser so
        # that incoming SSH sessions are serviced by brigitte's git handler.
        components.registerAdapter(GitSession, GitConchUser, ISession)
        # Bind the SSH server to the configured port and enter the Twisted
        # event loop; reactor.run() blocks until the process is terminated.
        reactor.listenTCP(settings.BRIGITTE_SSH_PORT,
                          GitServer(settings.BRIGITTE_SSH_KEY_PATH))
        reactor.run()
| stephrdev/brigitte | brigitte/gitserver/management/commands/run_gitserver.py | Python | bsd-3-clause | 627 |
#!/usr/bin/env python
# Copyright (c) 2013, Carnegie Mellon University
# All rights reserved.
# Authors: Michael Koval <mkoval@cs.cmu.edu>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# - Neither the name of Carnegie Mellon University nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import base, dependency_manager, logger, ik_ranking, planning, perception, simulation, tsr, viz
from named_config import ConfigurationLibrary
from clone import Clone, Cloned
from bind import bind_subclass
import compatibility
| Stefanos19/prpy | src/prpy/__init__.py | Python | bsd-3-clause | 1,854 |
from taskinit import *
from mpl_toolkits.mplot3d import axes3d, Axes3D
import matplotlib.pyplot as plt
from matplotlib.ticker import LinearLocator, FormatStrFormatter
import matplotlib.cm as cm
import numpy as np
from pylab import ion,ioff
# plot3d is released under a BSD 3-Clause License
# See LICENSE for details
# HISTORY:
# 1.0 12Jul2014 Initial version.
# 1.1 04Aug2014 Fixed up time axis problem; correlation selection improved.
# 1.2 15Aug2014 Added uvrange selection.
# 1.3 25Aug2014 Bug fix: removed vmin from plot_surface.
# 1.4 01Oct2015 Added explicit handling for linear feed basis.
# 1.5 24Oct2016 Minor help file fixes, no change to code
#
def plot3d(vis,fid,datacolumn,corr,uvrange,plotall,spw,timecomp,chancomp,clipamp,outpng):
    """Quickly inspect data for RFI by plotting time vs frequency vs amplitude.

    Task plot3d -- Christopher A. Hales
    Version 1.5 (tested with CASA Version 4.7.0), 24 October 2016

    Fixes applied in review:
      * ``tb.close`` / ``ms.close`` were referenced without parentheses, so
        the CASA table/ms tools were never actually closed.
      * ``vis.strip('.ms')`` strips any leading/trailing '.', 'm', 's'
        characters (mangling e.g. 'sample.ms' -> 'ample'); the figure-name
        root now only trims a genuine '.ms' suffix.
    """
    casalog.origin('plot3d')
    # channel to frequency conversion
    tb.open(vis+'/SPECTRAL_WINDOW')
    vtble=tb.getcol('CHAN_FREQ')
    tb.close()
    nspw=vtble.shape[1]
    # Get mapping between correlation string and number.
    # Assume they don't change throughout observation.
    # This is clunky...
    tb.open(vis+'/DATA_DESCRIPTION')
    if plotall:
        # Get id of a spw in the data, just grab first one within the first
        # scan on the chosen field so that some statistics can be obtained.
        # Note: I won't assume that spw specifies data_desc_id in the main table, even
        #       though in most cases it probably does. Probably overkill given the lack
        #       of checks done elsewhere in this code...
        # later we will gather scan information by looking at
        # a single spw and assuming it represents all spw's
        ms.open(vis)
        ms.msselect({'field':str(fid)})
        tempddid=ms.getdata(["DATA_DESC_ID"])['data_desc_id'][0]
        ms.close()
        spw=tb.getcell('SPECTRAL_WINDOW_ID',tempddid)
        polid=tb.getcell('POLARIZATION_ID',tempddid)
    else:
        temptb=tb.query('SPECTRAL_WINDOW_ID='+str(spw))
        polid=temptb.getcell('POLARIZATION_ID')
    tb.close()
    tb.open(vis+'/POLARIZATION')
    npol=tb.getcell('NUM_CORR',polid)
    tb.close()
    if npol == 2:
        if corr == 'RR' or corr == 'XX':
            corrID = 0
        elif corr == 'LL' or corr == 'YY':
            corrID = 1
        else:
            casalog.post('*** plot3d error: selected correlation doesn\'t exist. Terminating.', 'ERROR')
            return
    elif npol == 4:
        if corr == 'RR' or corr == 'XX':
            corrID = 0
        elif corr == 'RL' or corr == 'XY':
            corrID = 1
        elif corr == 'LR' or corr == 'YX':
            corrID = 2
        elif corr == 'LL' or corr == 'YY':
            corrID = 3
        else:
            casalog.post('*** plot3d error: selected correlation doesn\'t exist. Terminating.', 'ERROR')
            return
    else:
        casalog.post('*** plot3d error: see the code, this is a weird error! Terminating.', 'ERROR')
    corrSTR = corr
    corr = corrID
    # calculate number of effective channels per spw
    # I assume that the end channels of each spw have been flagged.
    # Force individual channels to remain at either end of spw,
    # in order to ensure amplitudes are zero in between
    # non-contiguous spw's. This will also ensure that it is
    # easier to see spw boundaries in between contiguous spw's.
    nchan = int(np.floor((vtble.shape[0]-2)/float(chancomp)))+2
    # guard against the user inputting infinite chancomp
    if nchan == 2:
        nchan = 3
    if plotall:
        # I don't make any effort to set the amplitude to
        # zero in the gaps between spw's (ie if spw's are not
        # contiguous) because I will assume that flagging of
        # spw edge channels has already taken place. Thus
        # there is no need to pad spw's with extra channels
        # if they happen to sit next to a gap in frequency
        # coverage. For a more general code this would not
        # be appropriate.
        N=np.zeros(nchan*nspw)
        t=0
        for i in range(nspw):
            # the following copied from single-spw "else" part below
            k=0
            # 1st channel in spw
            N[t] = vtble[k,i]/1e6
            t += 1
            k += 1
            # middle channels
            # check if we are in the last block
            while k+2*chancomp-1 <= vtble.shape[0]-2:
                for h in range(chancomp):
                    N[t] = N[t] + vtble[k+h,i]
                N[t] = N[t]/1e6/chancomp
                t += 1
                k += chancomp
            # for the last block, just combine everything remaining
            for h in range(k,vtble.shape[0]-1):
                N[t] = N[t] + vtble[h,i]
            N[t] = N[t]/1e6/len(range(k,vtble.shape[0]-1))
            t += 1
            # last channel in spw
            N[t] = vtble[vtble.shape[0]-1,i]/1e6
            t += 1
        ## TESTING: get regular channel data to compare
        #Q=np.zeros([vtble.shape[0]*nspw])
        #t=0
        #for i in range(nspw):
        #    for k in range(vtble.shape[0]):
        #        Q[t] = vtble[k,i]/1e6
        #        t += 1
    else:
        N=np.zeros(nchan)
        t=0
        k=0
        # 1st channel in spw
        N[t] = vtble[k,spw]/1e6
        t += 1
        k += 1
        # middle channels
        # check if we are in the last block
        while k+2*chancomp-1 <= vtble.shape[0]-2:
            for h in range(chancomp):
                N[t] = N[t] + vtble[k+h,spw]
            N[t] = N[t]/1e6/chancomp
            t += 1
            k += chancomp
        # for the last block, just combine everything remaining
        for h in range(k,vtble.shape[0]-1):
            N[t] = N[t] + vtble[h,spw]
        N[t] = N[t]/1e6/len(range(k,vtble.shape[0]-1))
        t += 1
        # last channel in spw
        N[t] = vtble[vtble.shape[0]-1,spw]/1e6
        ## TESTING: get regular channel data to compare
        #Q=np.zeros(vtble.shape[0])
        #t=0
        #for k in range(vtble.shape[0]):
        #    Q[t] = vtble[k,spw]/1e6
        #    t += 1
    ms.open(vis)
    # assume time is same for each spw
    # this is not the most efficient place in the code for this bit, meh
    ms.reset()
    ms.msselect({'field':str(fid),'spw':str(spw)})
    if len(uvrange) > 0:
        ms.msselect({'uvdist':uvrange})
    # get the raw timestamps
    Z=ms.getdata('time')['time']
    # get the unique timestamps and nbaselines for each timestamp
    # (don't assume the same baselines are available in each time step)
    temptime = np.unique(Z)
    nbaselines = []
    for i in range(len(temptime)):
        nbaselines.append(len(Z[Z==temptime[i]]))
    # Get scan summary in prep for calculating time steps.
    # Note that CASA currently reports all spw's in the
    # scan summary, rather than the 1 selected above. meh
    scan_summary = ms.getscansummary()
    scan_list = []
    for scan in scan_summary:
        if scan_summary[scan]['0']['FieldId'] == fid:
            scan_list.append(int(scan))
    scan_list.sort()
    # get integration time in minutes; assume it doesn't change in
    # any way throughout the observation, ie between spw's, etc
    inttime=scan_summary[str(scan_list[0])]['0']['IntegrationTime'] / 60.0
    # Calculate number of true time steps per scan.
    # In the code below, a dummy timestep will be added at each
    # end of each scan to ensure amplitudes are zero in between
    # non-contiguous scans. This will also ensure that it is
    # easier to see scan boundaries in between contiguous
    # scans. The 1st and last timestamp do not contribute to
    # the time compression stuff.
    # Also calculate effective time steps per scan, so that
    # I can call the variable effntime...!
    scan_ntime = []
    scan_effntime = []
    t = 0
    for scan in scan_list:
        i = 0
        bcounter = 0
        while bcounter < scan_summary[str(scan)]['0']['nRow']:
            bcounter += nbaselines[t]
            i += 1
            t += 1
        scan_ntime.append(i)
        tempvar=int(np.floor(i/float(timecomp)))+2
        # guard against the user inputting infinite timecomp
        if tempvar == 2:
            scan_effntime.append(tempvar+1)
        else:
            scan_effntime.append(tempvar)
    ntime = sum(scan_effntime)
    # go through each scan and add a dummy timestep before
    # and after each one, with time difference equal to
    # one ten thousandth of a time step (make this
    # small so that a slope doesn't show up in the plot)
    intdividefactor=10000.0
    M=np.zeros(ntime)
    t=0
    for d in range(len(scan_list)):
        checkfirst=True
        k=0
        while k+2*timecomp-1 <= scan_ntime[d]-1:
            for h in range(timecomp):
                if checkfirst:
                    t+=1
                M[t] += temptime[sum(scan_ntime[:d])+k+h]
            if checkfirst:
                M[t-1] = M[t]-inttime/intdividefactor
                checkfirst=False
            M[t] = M[t]/timecomp
            t += 1
            k += timecomp
        for h in range(scan_ntime[d]-k):
            if checkfirst:
                t+=1
            M[t] += temptime[sum(scan_ntime[:d])+k+h]
        if checkfirst:
            M[t-1] = M[t]-inttime/intdividefactor
            checkfirst=False
        M[t] = M[t]/len(range(scan_ntime[d]-k))
        t+=1
        M[t] = M[t-1]+inttime/intdividefactor
        t+=1
    # time is in seconds from zero modified Julian date...not very aesthetic
    # subtract off the starting time and convert to minutes
    M=(M-M.min())/60
    # For each gap between scans, modify the data so it looks like only 5
    # integration times have passed. For example, this will make it easier
    # to look at your secondary calibrator data. This will of course make
    # your time axis look weird...but it can improve 3D plot rendering speed
    for i in range(len(scan_list)-1):
        i += 1
        tempval = M[sum(scan_effntime[0:i])] - M[sum(scan_effntime[0:i])-1]
        M[sum(scan_effntime[0:i]):] = M[sum(scan_effntime[0:i]):] - tempval + 5*inttime
    # go through each spectral window and extract amplitude data
    if plotall:
        for i in range(nspw):
            ms.reset()
            ms.msselect({'field':str(fid),'spw':str(i)})
            if len(uvrange) > 0:
                ms.msselect({'uvdist':uvrange})
            # visibility data (X,Y,Z) where
            # X=4 (RR,RL,LR,LL) or (XX,XY,YX,YY)
            # Y=number of channels
            # Z=number of rows (visibilities/4)
            tempdata=ms.getdata(datacolumn)
            tempflag=ms.getdata('flag')
            # true flag means I should flag it, so switch to ensure good points remain
            tempflag=np.invert(tempflag['flag'][corr])
            # select amplitude data associated with requested correlation
            # and ensure any existing flagged points are set to zero
            P1 = np.multiply(abs(tempdata[datacolumn][corr]),tempflag)
            # time + baseline compression
            P2=np.zeros([P1.shape[0],ntime])
            # loop over channels
            # yes, this is inefficient, but hopefully easier to understand
            for s in range(P1.shape[0]):
                t=0
                for d in range(len(scan_list)):
                    checkfirst=True
                    k=0
                    while k+2*timecomp-1 <= scan_ntime[d]-1:
                        if checkfirst:
                            t+=1
                        P2[s,t] = max(P1[s,sum(nbaselines[:sum(scan_ntime[:d])+k]):sum(nbaselines[:sum(scan_ntime[:d])+k+timecomp])])
                        if clipamp>=0:
                            P2[s,t] = min(clipamp,P2[s,t])
                        if checkfirst:
                            P2[s,t-1] = 0.0
                            checkfirst=False
                        t += 1
                        k += timecomp
                    if checkfirst:
                        t+=1
                    tempvar=len(range(scan_ntime[d]-k))
                    P2[s,t] = max(P1[s,sum(nbaselines[:sum(scan_ntime[:d])+k]):sum(nbaselines[:sum(scan_ntime[:d])+k+tempvar])])
                    if clipamp>=0:
                        P2[s,t] = min(clipamp,P2[s,t])
                    if checkfirst:
                        P2[s,t-1] = 0.0
                        checkfirst=False
                    t+=1
                    P2[s,t] = 0.0
                    t+=1
            # channel compression
            # for clarity, don't combine this step with the
            # time+baseline compression above
            P3=np.zeros([nchan,ntime])
            # 1st channel in spw
            t=0
            k=0
            P3[t] = P2[t]
            t += 1
            k += 1
            # middle channels
            while k+2*chancomp-1 <= P2.shape[0]-2:
                for h in range(chancomp):
                    P3[t] = np.maximum(P3[t],P2[k+h])
                t += 1
                k += chancomp
            for h in range(k,P2.shape[0]-1):
                P3[t] = np.maximum(P3[t],P2[h])
            t += 1
            # last channel in spw
            P3[t] = P2[P2.shape[0]-1]
            if i == 0:
                P=P3
            else:
                P=np.concatenate((P,P3),axis=0)
            # not needed because of selection above
            # spectral window, with same number of rows as Z above
            #sdata=ms.getdata('data_desc_id')
            # not needed because of selection above
            # field ID
            #fdata=ms.getdata('field_id')
    else:
        # just copy the important steps from above
        ms.reset()
        ms.msselect({'field':str(fid),'spw':str(spw)})
        if len(uvrange) > 0:
            ms.msselect({'uvdist':uvrange})
        tempdata=ms.getdata(datacolumn)
        tempflag=ms.getdata('flag')
        tempflag=np.invert(tempflag['flag'][corr])
        P1=np.multiply(abs(tempdata[datacolumn][corr]),tempflag)
        # time + baseline compression
        P2=np.zeros([P1.shape[0],ntime])
        # loop over channels
        # yes, this is inefficient, but hopefully easier to understand
        for s in range(P1.shape[0]):
            t=0
            for d in range(len(scan_list)):
                checkfirst=True
                k=0
                while k+2*timecomp-1 <= scan_ntime[d]-1:
                    if checkfirst:
                        t+=1
                    P2[s,t] = max(P1[s,sum(nbaselines[:sum(scan_ntime[:d])+k]):sum(nbaselines[:sum(scan_ntime[:d])+k+timecomp])])
                    if clipamp>=0:
                        P2[s,t] = min(clipamp,P2[s,t])
                    if checkfirst:
                        P2[s,t-1] = 0.0
                        checkfirst=False
                    t += 1
                    k += timecomp
                if checkfirst:
                    t+=1
                tempvar=len(range(scan_ntime[d]-k))
                P2[s,t] = max(P1[s,sum(nbaselines[:sum(scan_ntime[:d])+k]):sum(nbaselines[:sum(scan_ntime[:d])+k+tempvar])])
                if clipamp>=0:
                    P2[s,t] = min(clipamp,P2[s,t])
                if checkfirst:
                    P2[s,t-1] = 0.0
                    checkfirst=False
                t+=1
                P2[s,t] = 0.0
                t+=1
        # channel compression
        # for clarity, don't combine this step with the
        # time+baseline compression above
        P=np.zeros([nchan,ntime])
        # 1st channel in spw
        t=0
        k=0
        P[t] = P2[t]
        t += 1
        k += 1
        # middle channels
        while k+2*chancomp-1 <= P2.shape[0]-2:
            for h in range(chancomp):
                P[t] = np.maximum(P[t],P2[k+h])
            t += 1
            k += chancomp
        for h in range(k,P2.shape[0]-1):
            P[t] = np.maximum(P[t],P2[h])
        t += 1
        # last channel in spw
        P[t] = P2[P2.shape[0]-1]
    ms.close()
    # clear memory, not needed any more
    vtble=[]
    Z=[]
    P1=[]
    P2=[]
    P3=[]
    tempdata=[]
    tempflag=[]
    # M=time, N=frequency , P=amplitude
    M2D,N2D=np.meshgrid(M,N)
    ion()
    fig = plt.figure()
    ax = Axes3D(fig)
    ax.set_xlabel('time (mins)')
    ax.set_ylabel('frequency (MHz)')
    ax.set_zlabel('amplitude')
    if len(uvrange) == 0:
        uvrange='ALL'
    # Bug fix: vis.strip('.ms') strips any leading/trailing '.', 'm' or 's'
    # characters (e.g. 'sample.ms' -> 'ample'); only remove a real '.ms' suffix.
    if vis.endswith('.ms'):
        visroot = vis[:-3]
    else:
        visroot = vis
    if plotall:
        plot_title='field:'+str(fid)+' corr:'+corrSTR+' column:'+datacolumn+' uvrange:'+uvrange
        figname=visroot+'_plot3d_fid'+str(fid)+'_corr'+corrSTR+'_'+datacolumn+\
                '_uv'+uvrange+'_t'+str(timecomp)+'_c'+str(chancomp)
    else:
        plot_title='field:'+str(fid)+' corr:'+corrSTR+' spw:'+str(spw)+' column:'+datacolumn+' uvrange:'+uvrange
        figname=visroot+'_plot3d_fid'+str(fid)+'_corr'+corrSTR+'_spw'+str(spw)+'_'+datacolumn+\
                '_uv'+uvrange+'_t'+str(timecomp)+'_c'+str(chancomp)
    ax.set_title(plot_title)
    #ax.set_zscale('log')
    ax.plot_surface(M2D, N2D, P, rstride=1, cstride=1, cmap=cm.jet)
    #if isinstance(plotfig,str):
    #    figname=plotfig
    #    plotfig=1
    if outpng:
        fig.savefig(figname)
    ioff()
| chrishales/plot3d | task_plot3d.py | Python | bsd-3-clause | 18,140 |
from datetime import datetime
import numpy as np
import pandas as pd
from pytest import raises
from featuretools.primitives import Haversine, IsIn, IsNull, Max, TimeSinceLast
from featuretools.primitives.base import TransformPrimitive
def test_call_agg():
    # Invoking an aggregation primitive instance twice checks that its
    # underlying function is reusable, not consumed on first use.
    primitive = Max()
    for _attempt in range(2):
        assert primitive(range(6)) == 5
def test_call_trans():
    # A transform primitive instance must also be callable repeatedly.
    primitive = IsNull()
    expected = pd.Series([False] * 6)
    for _attempt in range(2):
        assert expected.equals(primitive(range(6)))
def test_uses_calc_time():
    # TimeSinceLast consumes the calculation time passed via ``time=``;
    # verify both the default (seconds) and the hours unit.
    seconds_primitive = TimeSinceLast()
    hours_primitive = TimeSinceLast(unit="hours")
    timestamps = pd.Series([datetime(2015, 6, 6), datetime(2015, 6, 7)])
    cutoff = datetime(2015, 6, 8)
    assert seconds_primitive(timestamps, time=cutoff) == 86400.0
    assert hours_primitive(timestamps, time=cutoff) == 24.0
def test_call_multiple_args():
    # Haversine takes two positional inputs; calling twice verifies the
    # multi-argument code path is reusable.
    haversine = Haversine()
    origins = [(42.4, -71.1), (40.0, -122.4)]
    destinations = [(40.0, -122.4), (41.2, -96.75)]
    expected = [2631.231, 1343.289]
    for _attempt in range(2):
        assert np.round(haversine(origins, destinations), 3).tolist() == expected
def test_get_function_called_once():
    # get_function() must be looked up once and cached, no matter how many
    # times the primitive itself is invoked.
    class CountingPrimitive(TransformPrimitive):
        def __init__(self):
            self.get_function_call_count = 0

        def get_function(self):
            self.get_function_call_count += 1

            def identity(values):
                return values

            return identity

    primitive = CountingPrimitive()
    for _attempt in range(2):
        primitive(range(6))
    assert primitive.get_function_call_count == 1
def test_multiple_arg_string():
    # Only init arguments that differ from their defaults should appear in
    # the reconstructed argument string.
    class Primitive(TransformPrimitive):
        def __init__(self, bool=True, int=0, float=None):
            self.bool = bool
            self.int = int
            self.float = float

    args_string = Primitive(bool=True, int=4, float=.1).get_args_string()
    assert args_string == ', int=4, float=0.1'
def test_single_args_string():
    # A single non-default list argument is rendered with its keyword name.
    primitive = IsIn([1, 2, 3])
    assert primitive.get_args_string() == ', list_of_outputs=[1, 2, 3]'
def test_args_string_default():
    # With no arguments supplied, nothing differs from the defaults and the
    # argument string is empty.
    primitive = IsIn()
    assert primitive.get_args_string() == ''
def test_args_string_mixed():
    # Arguments explicitly passed but equal to their default (int=0) must be
    # omitted; only genuinely non-default values (bool=False) are rendered.
    class Primitive(TransformPrimitive):
        def __init__(self, bool=True, int=0, float=None):
            self.bool = bool
            self.int = int
            self.float = float

    args_string = Primitive(bool=False, int=0).get_args_string()
    assert args_string == ', bool=False'
def test_args_string_undefined():
    # Max's __init__ declares no parameters, so its args string is empty.
    assert Max().get_args_string() == ''
def test_args_string_error():
    # Init arguments that are not stored as instance attributes cannot be
    # reconstructed, so get_args_string() must raise.
    class Primitive(TransformPrimitive):
        def __init__(self, bool=True, int=0, float=None):
            pass

    with raises(AssertionError, match='must be attribute'):
        Primitive(bool=True, int=4, float=.1).get_args_string()
| Featuretools/featuretools | featuretools/tests/primitive_tests/test_primitive_base.py | Python | bsd-3-clause | 2,871 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add optional ``location`` and ``url`` fields to ``CalendallUser``.

    Both fields are ``blank=True``, so existing rows need no default data
    and the migration is safe to apply on a populated table.
    """
    dependencies = [
        ('profiles', '0003_auto_20150115_1939'),
    ]
    operations = [
        migrations.AddField(
            model_name='calendalluser',
            name='location',
            field=models.CharField(blank=True, max_length=30, verbose_name='User location'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='calendalluser',
            name='url',
            field=models.URLField(blank=True, verbose_name='User homepage'),
            preserve_default=True,
        ),
    ]
| calendall/calendall | calendall/profiles/migrations/0004_auto_20150117_1017.py | Python | bsd-3-clause | 703 |
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
import json
import mock
import time
from django.conf import settings
from django.core import mail
from olympia import amo
from olympia.abuse.models import AbuseReport
from olympia.access.models import Group, GroupUser
from olympia.activity.models import ActivityLog
from olympia.amo.tests import TestCase
from olympia.amo.tests import (
addon_factory, file_factory, user_factory, version_factory)
from olympia.addons.models import (
Addon, AddonApprovalsCounter, AddonReviewerFlags, AddonUser)
from olympia.files.models import FileValidation
from olympia.ratings.models import Rating
from olympia.reviewers.models import Whiteboard
from olympia.versions.models import (
Version, version_uploaded)
from olympia.files.models import File, WebextPermission
from olympia.reviewers.models import (
AutoApprovalNotEnoughFilesError, AutoApprovalNoValidationResultError,
AutoApprovalSummary, RereviewQueueTheme, ReviewerScore,
ReviewerSubscription, send_notifications, set_reviewing_cache,
ViewFullReviewQueue, ViewPendingQueue, ViewUnlistedAllList)
from olympia.users.models import UserProfile
def create_search_ext(name, version_str, addon_status, file_status,
                      channel):
    """Create (or reuse) a search add-on with one version and file in the
    requested statuses, returning the freshly reloaded Addon."""
    addon, _ = Addon.objects.get_or_create(
        name__localized_string=name,
        defaults={'type': amo.ADDON_SEARCH, 'name': name})
    version, _ = Version.objects.get_or_create(
        addon=addon, version=version_str, defaults={'channel': channel})
    File.objects.create(version=version, filename=u"%s.xpi" % name,
                        platform=amo.PLATFORM_ALL.id, status=file_status)
    # The add-on status may only be updated once files exist, so reload the
    # Addon from the database before setting it.
    addon = Addon.objects.get(pk=addon.id)
    addon.update(status=addon_status)
    return addon
class TestQueue(TestCase):
    """Tests common attributes and coercions that each view must support."""
    __test__ = False # this is an abstract test case
    def test_latest_version(self):
        # Three listed versions created on consecutive days; the queue row
        # must report the newest version string (3.0), not the approved ones.
        addon = self.new_addon()
        v1 = addon.find_latest_version(self.channel)
        v1.update(created=self.days_ago(2))
        v1.all_files[0].update(status=amo.STATUS_PUBLIC)
        version_factory(addon=addon, version='2.0', created=self.days_ago(1),
                        channel=self.channel,
                        file_kw={'status': amo.STATUS_PUBLIC})
        version_factory(addon=addon, version='3.0', created=self.days_ago(0),
                        channel=self.channel,
                        file_kw={'status': amo.STATUS_AWAITING_REVIEW})
        row = self.Queue.objects.get()
        assert row.latest_version == '3.0'
    def test_addons_disabled_by_user_are_hidden(self):
        # User-disabled add-ons must not appear in the review queue.
        self.new_addon(version=u'0.1').update(disabled_by_user=True)
        assert list(self.Queue.objects.all()) == []
    def test_addons_disabled_by_admin_are_hidden(self):
        # Admin-disabled add-ons must not appear in the review queue.
        self.new_addon(version=u'0.1').update(status=amo.STATUS_DISABLED)
        assert list(self.Queue.objects.all()) == []
    def test_reviewed_files_are_hidden(self):
        # Only add-ons awaiting review show up; fully reviewed ones do not.
        self.new_addon(name='Unreviewed')
        addon_factory(name='Already Reviewed')
        assert sorted(q.addon_name for q in self.Queue.objects.all()) == (
            ['Unreviewed'])
    def test_search_extensions(self):
        # Search add-ons are queued too, and expose their add-on type.
        self.new_search_ext('Search Tool', '0.1')
        row = self.Queue.objects.get()
        assert row.addon_name == u'Search Tool'
        assert row.addon_type_id == amo.ADDON_SEARCH
    def test_count_all(self):
        # Each add-on appears once in the queue even with multiple versions.
        # Create two new addons and give each another version.
        version_factory(addon=self.new_addon(), version=u'2.0',
                        channel=self.channel)
        version_factory(addon=self.new_addon(), version=u'2.0',
                        channel=self.channel)
        assert self.Queue.objects.all().count() == 2
class TestPendingQueue(TestQueue):
    """Concrete TestQueue run against the pending (updates) review queue."""
    __test__ = True
    Queue = ViewPendingQueue
    channel = amo.RELEASE_CHANNEL_LISTED
    def new_addon(self, name=u'Pending', version=u'1.0'):
        """Creates an approved addon with two listed versions, one approved,
        the second awaiting review."""
        addon = addon_factory(
            name=name,
            version_kw={'version': u'0.0.1', 'channel': self.channel,
                        'created': self.days_ago(1)})
        version_factory(
            addon=addon, version=version, channel=self.channel,
            file_kw={'status': amo.STATUS_AWAITING_REVIEW,
                     'is_restart_required': False})
        return addon
    def new_search_ext(self, name, version, **kw):
        # Public add-on whose new file awaits review -> pending queue.
        return create_search_ext(name, version,
                                 amo.STATUS_PUBLIC, amo.STATUS_AWAITING_REVIEW,
                                 channel=self.channel, **kw)
    def test_waiting_time(self):
        # A version created "now" has waited 0 days but a non-null number
        # of hours.
        self.new_addon()
        Version.objects.update(created=datetime.utcnow())
        row = self.Queue.objects.all()[0]
        assert row.waiting_time_days == 0
        # Time zone will be off, hard to test this.
        assert row.waiting_time_hours is not None
    def test_flags_needs_admin_code_review(self):
        AddonReviewerFlags.objects.create(
            addon=self.new_addon(), needs_admin_code_review=True)
        q = self.Queue.objects.get()
        assert q.flags == [
            ('needs-admin-code-review', 'Needs Admin Code Review')]
    def test_flags_info_request(self):
        self.new_addon().find_latest_version(self.channel).update(
            has_info_request=True)
        q = self.Queue.objects.get()
        assert q.flags == [('info', 'More Information Requested')]
    def test_flags_reviewer_comment(self):
        self.new_addon().find_latest_version(self.channel).update(
            has_reviewer_comment=True)
        q = self.Queue.objects.get()
        assert q.flags == [('reviewer', 'Contains Reviewer Comment')]
    def test_flags_jetpack(self):
        self.new_addon().find_latest_version(self.channel).all_files[0].update(
            jetpack_version='1.8')
        q = self.Queue.objects.get()
        assert q.flags == [('jetpack', 'Jetpack Add-on')]
    def test_flags_is_restart_required(self):
        self.new_addon().find_latest_version(self.channel).all_files[0].update(
            is_restart_required=True)
        q = self.Queue.objects.get()
        assert q.flags == [('is_restart_required', 'Requires Restart')]
    def test_flags_sources_provided(self):
        self.new_addon().find_latest_version(self.channel).update(
            source='/some/source/file')
        q = self.Queue.objects.get()
        assert q.flags == [('sources-provided', 'Sources provided')]
    def test_flags_webextension(self):
        self.new_addon().find_latest_version(self.channel).all_files[0].update(
            is_webextension=True)
        queue = self.Queue.objects.get()
        assert queue.flags == [('webextension', 'WebExtension')]
    def test_no_flags(self):
        # A plain pending add-on carries no reviewer flags.
        self.new_addon()
        q = self.Queue.objects.get()
        assert q.flags == []
class TestFullReviewQueue(TestQueue):
    """Concrete TestQueue run against the full-review (nominated) queue."""
    __test__ = True
    Queue = ViewFullReviewQueue
    channel = amo.RELEASE_CHANNEL_LISTED
    def new_addon(self, name=u'Nominated', version=u'1.0',
                  addon_status=amo.STATUS_NOMINATED,
                  file_status=amo.STATUS_AWAITING_REVIEW):
        # Nominated add-on with one listed version awaiting review.
        addon = addon_factory(
            name=name, status=addon_status,
            version_kw={'version': version, 'channel': self.channel},
            file_kw={'status': file_status})
        return addon
    def new_search_ext(self, name, version, **kw):
        return create_search_ext(name, version,
                                 amo.STATUS_NOMINATED,
                                 amo.STATUS_AWAITING_REVIEW,
                                 channel=self.channel, **kw)
    def test_waiting_time(self):
        # Full-review waiting time counts from the nomination timestamp.
        self.new_addon()
        Version.objects.update(nomination=datetime.utcnow())
        row = self.Queue.objects.all()[0]
        assert row.waiting_time_days == 0
        # Time zone will be off, hard to test this.
        assert row.waiting_time_hours is not None
class TestUnlistedAllList(TestCase):
Queue = ViewUnlistedAllList
channel = amo.RELEASE_CHANNEL_UNLISTED
fixtures = ['base/users']
def new_addon(self, name=u'Unlisted', version=u'1.0',
addon_status=amo.STATUS_NULL,
file_status=amo.STATUS_PUBLIC):
addon = addon_factory(
name=name, status=addon_status,
version_kw={'version': version, 'channel': self.channel},
file_kw={'status': file_status})
return addon
def test_all_addons_are_in_q(self):
self.new_addon('Public', addon_status=amo.STATUS_PUBLIC,
file_status=amo.STATUS_PUBLIC)
self.new_addon('Nominated', addon_status=amo.STATUS_NOMINATED,
file_status=amo.STATUS_AWAITING_REVIEW)
self.new_addon('Deleted', addon_status=amo.STATUS_PUBLIC,
file_status=amo.STATUS_PUBLIC).delete()
assert sorted(q.addon_name for q in self.Queue.objects.all()) == (
['Deleted', 'Nominated', 'Public'])
def test_authors(self):
addon = self.new_addon()
bert = user_factory(username='bert')
ernie = user_factory(username='ernie')
AddonUser.objects.create(addon=addon, user=bert)
AddonUser.objects.create(addon=addon, user=ernie)
row = self.Queue.objects.all()[0]
self.assertSetEqual(set(row.authors),
{(ernie.id, 'ernie'), (bert.id, 'bert')})
def test_last_reviewed_version(self):
today = datetime.today().date()
addon = self.new_addon(version='1.0')
v2 = version_factory(addon=addon, version='2.0', channel=self.channel)
log = ActivityLog.create(amo.LOG.APPROVE_VERSION, v2, v2.addon,
user=UserProfile.objects.get(pk=999))
version_factory(addon=addon, version='3.0', channel=self.channel)
row = self.Queue.objects.all()[0]
assert row.review_date == today
assert row.review_version_num == '2.0'
assert row.review_log_id == log.id
def test_no_developer_actions(self):
addon = self.new_addon(version='1.0')
ActivityLog.create(amo.LOG.ADD_VERSION, addon.latest_unlisted_version,
addon, user=UserProfile.objects.get(pk=999))
row = self.Queue.objects.all()[0]
assert row.review_version_num is None
ver2 = version_factory(version='2.0', addon=addon,
channel=self.channel)
ActivityLog.create(amo.LOG.APPROVE_VERSION, ver2, addon,
user=UserProfile.objects.get(pk=999))
row = self.Queue.objects.all()[0]
assert row.review_version_num == '2.0'
ver3 = version_factory(version='3.0', addon=addon,
channel=self.channel)
ActivityLog.create(amo.LOG.EDIT_VERSION, ver3, addon,
user=UserProfile.objects.get(pk=999))
row = self.Queue.objects.all()[0]
# v2.0 is still the last reviewed version.
assert row.review_version_num == '2.0'
def test_no_automatic_reviews(self):
ver = self.new_addon(
name='addon789', version='1.0').latest_unlisted_version
ActivityLog.create(
amo.LOG.APPROVE_VERSION, ver, ver.addon,
user=UserProfile.objects.get(pk=settings.TASK_USER_ID))
row = self.Queue.objects.all()[0]
assert row.review_version_num is None
    def test_latest_version(self):
        """The queue row exposes the newest version (by creation date) in
        the channel under test."""
        # Three versions with strictly increasing created timestamps.
        addon = addon_factory(
            version_kw={'version': u'0.1', 'channel': self.channel,
                        'created': self.days_ago(2)},
            file_kw={'created': self.days_ago(2)})
        version_factory(
            addon=addon, version=u'0.2', channel=self.channel,
            created=self.days_ago(1), file_kw={'created': self.days_ago(1)})
        version_factory(
            addon=addon, version=u'0.3', channel=self.channel)
        row = self.Queue.objects.get()
        assert row.latest_version == '0.3'
def test_addons_disabled_by_user_are_hidden(self):
self.new_addon().update(disabled_by_user=True)
assert list(self.Queue.objects.all()) == []
def test_addons_disabled_by_admin_are_hidden(self):
self.new_addon(version=u'0.1').update(status=amo.STATUS_DISABLED)
assert list(self.Queue.objects.all()) == []
def test_count_all(self):
addon1 = self.new_addon()
version_factory(addon=addon1, version=u'0.2')
addon2 = self.new_addon()
version_factory(addon=addon2, version=u'0.2')
assert self.Queue.objects.all().count() == 2
    def test_mixed_listed(self):
        """Add-ons with at least one unlisted version appear in the queue;
        a purely listed add-on does not.  Ordering follows creation order
        and latest_version only considers unlisted versions."""
        # Unlisted 0.1 then listed 0.2: shows up, latest unlisted is 0.1.
        unlisted_listed = addon_factory(
            status=amo.STATUS_NULL, name=u'UnlistedListed',
            version_kw={'version': u'0.1',
                        'channel': amo.RELEASE_CHANNEL_UNLISTED},
            file_kw={'status': amo.STATUS_PUBLIC})
        version_factory(addon=unlisted_listed, version=u'0.2',
                        channel=amo.RELEASE_CHANNEL_LISTED,
                        file_kw={'status': amo.STATUS_PUBLIC})
        # Listed 0.1 then unlisted 0.2: shows up, latest unlisted is 0.2.
        listed_unlisted = addon_factory(
            status=amo.STATUS_NULL, name=u'ListedUnlisted',
            version_kw={'version': u'0.1',
                        'channel': amo.RELEASE_CHANNEL_LISTED},
            file_kw={'status': amo.STATUS_PUBLIC})
        version_factory(addon=listed_unlisted, version=u'0.2',
                        channel=amo.RELEASE_CHANNEL_UNLISTED,
                        file_kw={'status': amo.STATUS_PUBLIC})
        # Only unlisted versions: shows up, latest is 0.2.
        just_unlisted = addon_factory(
            status=amo.STATUS_NULL, name=u'JustUnlisted',
            version_kw={'version': u'0.1',
                        'channel': amo.RELEASE_CHANNEL_UNLISTED},
            file_kw={'status': amo.STATUS_PUBLIC})
        version_factory(addon=just_unlisted, version=u'0.2',
                        channel=amo.RELEASE_CHANNEL_UNLISTED,
                        file_kw={'status': amo.STATUS_PUBLIC})
        # Only listed versions: excluded from the queue entirely.
        just_listed = addon_factory(
            status=amo.STATUS_NULL, name=u'JustListed',
            version_kw={'version': u'0.1',
                        'channel': amo.RELEASE_CHANNEL_LISTED},
            file_kw={'status': amo.STATUS_PUBLIC})
        version_factory(addon=just_listed, version=u'0.2',
                        channel=amo.RELEASE_CHANNEL_LISTED,
                        file_kw={'status': amo.STATUS_PUBLIC})
        assert self.Queue.objects.all().count() == 3
        assert [addon.addon_name for addon in self.Queue.objects.all()] == [
            'UnlistedListed', 'ListedUnlisted', 'JustUnlisted']
        assert ([addon.latest_version for addon in self.Queue.objects.all()] ==
                ['0.1', '0.2', '0.2'])
class TestReviewerSubscription(TestCase):
    """Tests for ReviewerSubscription: reviewers subscribed to an add-on get
    an email when a new version is uploaded."""
    fixtures = ['base/addon_3615', 'base/users']
    def setUp(self):
        # Two reviewers, both subscribed to the fixture add-on and both
        # members of a group with the Addons:Review permission.
        super(TestReviewerSubscription, self).setUp()
        self.addon = Addon.objects.get(pk=3615)
        self.version = self.addon.current_version
        self.user_one = UserProfile.objects.get(pk=55021)
        self.user_two = UserProfile.objects.get(pk=999)
        self.reviewer_group = Group.objects.create(
            name='reviewers', rules='Addons:Review')
        for user in [self.user_one, self.user_two]:
            ReviewerSubscription.objects.create(addon=self.addon, user=user)
            GroupUser.objects.create(group=self.reviewer_group, user=user)
    def test_email(self):
        """A single subscription notification mails only that subscriber."""
        es = ReviewerSubscription.objects.get(user=self.user_one)
        es.send_notification(self.version)
        assert len(mail.outbox) == 1
        assert mail.outbox[0].to == [u'del@icio.us']
        assert mail.outbox[0].subject == (
            'Mozilla Add-ons: Delicious Bookmarks Updated')
    def test_notifications(self):
        """send_notifications() mails every subscribed reviewer."""
        send_notifications(sender=self.version)
        assert len(mail.outbox) == 2
        emails = sorted([o.to for o in mail.outbox])
        assert emails == [[u'del@icio.us'], [u'regular@mozilla.com']]
    def test_notifications_clean(self):
        """Subscriptions are one-shot: cleared after the first notification,
        so a second send produces no mail."""
        send_notifications(Version, self.version)
        assert ReviewerSubscription.objects.count() == 0
        mail.outbox = []
        send_notifications(Version, self.version)
        assert len(mail.outbox) == 0
    def test_notifications_beta(self):
        """Beta uploads do not trigger subscription notifications."""
        self.version.all_files[0].update(status=amo.STATUS_BETA)
        version_uploaded.send(sender=self.version)
        assert len(mail.outbox) == 0
    def test_signal_edit(self):
        """Merely saving an existing version sends no notification."""
        self.version.save()
        assert len(mail.outbox) == 0
    def test_signal_create(self):
        """The version_uploaded signal for a new version notifies both
        subscribed reviewers."""
        v = Version.objects.create(addon=self.addon)
        version_uploaded.send(sender=v)
        assert len(mail.outbox) == 2
        assert mail.outbox[0].subject == (
            'Mozilla Add-ons: Delicious Bookmarks Updated')
    def test_signal_create_twice(self):
        """Subscriptions are consumed by the first upload, so a second new
        version sends nothing."""
        v = Version.objects.create(addon=self.addon)
        version_uploaded.send(sender=v)
        mail.outbox = []
        v = Version.objects.create(addon=self.addon)
        version_uploaded.send(sender=v)
        assert len(mail.outbox) == 0
    def test_no_email_for_ex_reviewers(self):
        """Deleted users and users no longer in a reviewer group are not
        notified."""
        self.user_one.delete()
        # Remove user_two from reviewers.
        GroupUser.objects.get(
            group=self.reviewer_group, user=self.user_two).delete()
        send_notifications(sender=self.version)
        assert len(mail.outbox) == 0
    def test_no_email_address_for_reviewer(self):
        """Reviewers without an email address are skipped silently."""
        self.user_one.update(email=None)
        send_notifications(sender=self.version)
        assert len(mail.outbox) == 1
class TestReviewerScore(TestCase):
    """Tests for ReviewerScore: event mapping, point awards (including
    bonuses), leaderboards and result caching."""
    fixtures = ['base/users']
    def setUp(self):
        # One nominated add-on and the fixture reviewer, shared by most
        # tests below.
        super(TestReviewerScore, self).setUp()
        self.addon = amo.tests.addon_factory(status=amo.STATUS_NOMINATED)
        self.user = UserProfile.objects.get(email='reviewer@mozilla.com')
    def _give_points(self, user=None, addon=None, status=None):
        # Award review points, defaulting to the fixture reviewer/add-on
        # and the add-on's current status.
        user = user or self.user
        addon = addon or self.addon
        ReviewerScore.award_points(
            user, addon, status or addon.status, version=addon.current_version)
    def check_event(self, type, status, event, **kwargs):
        # Assert that get_event() maps (addon type, status) to `event`.
        self.addon.type = type
        assert ReviewerScore.get_event(self.addon, status, **kwargs) == event
    def test_events_addons(self):
        """Every (type, status) pair maps to the REVIEWED_<TYPE>_<STATUS>
        constant when it exists, falling back to REVIEWED_<TYPE>, else
        None."""
        types = {
            amo.ADDON_ANY: None,
            amo.ADDON_EXTENSION: 'ADDON',
            amo.ADDON_THEME: 'THEME',
            amo.ADDON_DICT: 'DICT',
            amo.ADDON_SEARCH: 'SEARCH',
            amo.ADDON_LPAPP: 'LP',
            amo.ADDON_LPADDON: 'LP',
            amo.ADDON_PLUGIN: 'ADDON',
            amo.ADDON_API: 'ADDON',
            amo.ADDON_PERSONA: 'PERSONA',
        }
        statuses = {
            amo.STATUS_NULL: None,
            amo.STATUS_PENDING: None,
            amo.STATUS_NOMINATED: 'FULL',
            amo.STATUS_PUBLIC: 'UPDATE',
            amo.STATUS_DISABLED: None,
            amo.STATUS_BETA: None,
            amo.STATUS_DELETED: None,
            amo.STATUS_REJECTED: None,
            amo.STATUS_REVIEW_PENDING: None,
        }
        for tk, tv in types.items():
            for sk, sv in statuses.items():
                try:
                    event = getattr(amo, 'REVIEWED_%s_%s' % (tv, sv))
                except AttributeError:
                    try:
                        event = getattr(amo, 'REVIEWED_%s' % tv)
                    except AttributeError:
                        event = None
                self.check_event(tk, sk, event)
    def test_events_post_review(self):
        """With post_review=True the event is picked from the auto-approval
        summary's weight (risk level), defaulting to LOW_RISK."""
        self.addon.update(status=amo.STATUS_PUBLIC)
        base_args = (self.addon, self.addon.status)
        # No version.
        assert ReviewerScore.get_event(
            *base_args, version=None,
            post_review=True) == amo.REVIEWED_EXTENSION_LOW_RISK
        # No autoapprovalsummary.
        assert ReviewerScore.get_event(
            *base_args, version=self.addon.current_version,
            post_review=True) == amo.REVIEWED_EXTENSION_LOW_RISK
        # Now with a summary... low risk.
        summary = AutoApprovalSummary.objects.create(
            version=self.addon.current_version, verdict=amo.AUTO_APPROVED,
            weight=-10)
        assert ReviewerScore.get_event(
            *base_args, version=self.addon.current_version,
            post_review=True) is amo.REVIEWED_EXTENSION_LOW_RISK
        # Medium risk.
        summary.update(weight=21)
        assert ReviewerScore.get_event(
            *base_args, version=self.addon.current_version,
            post_review=True) is amo.REVIEWED_EXTENSION_MEDIUM_RISK
        # High risk.
        summary.update(weight=101)
        assert ReviewerScore.get_event(
            *base_args, version=self.addon.current_version,
            post_review=True) is amo.REVIEWED_EXTENSION_HIGH_RISK
        # Highest risk.
        summary.update(weight=151)
        assert ReviewerScore.get_event(
            *base_args, version=self.addon.current_version,
            post_review=True) is amo.REVIEWED_EXTENSION_HIGHEST_RISK
        # Highest risk again.
        summary.update(weight=65535)
        assert ReviewerScore.get_event(
            *base_args, version=self.addon.current_version,
            post_review=True) is amo.REVIEWED_EXTENSION_HIGHEST_RISK
        # Content review is always the same.
        assert ReviewerScore.get_event(
            *base_args, version=self.addon.current_version, post_review=True,
            content_review=True) == amo.REVIEWED_CONTENT_REVIEW
    def test_award_points(self):
        """A full review of a nominated add-on awards ADDON_FULL points."""
        self._give_points()
        assert ReviewerScore.objects.all()[0].score == (
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL])
    def test_award_points_with_extra_note(self):
        """extra_note (including non-ASCII) is stored on the score row."""
        ReviewerScore.award_points(
            self.user, self.addon, self.addon.status, extra_note=u'ÔMG!')
        reviewer_score = ReviewerScore.objects.all()[0]
        assert reviewer_score.note_key == amo.REVIEWED_ADDON_FULL
        assert reviewer_score.score == (
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL])
        assert reviewer_score.note == u'ÔMG!'
    def test_award_points_bonus(self):
        """Reviews past the overdue limit earn a per-day bonus on top of the
        base score."""
        user2 = UserProfile.objects.get(email='admin@mozilla.com')
        bonus_days = 2
        days = amo.REVIEWED_OVERDUE_LIMIT + bonus_days
        bonus_addon = addon_factory(
            status=amo.STATUS_NOMINATED,
            file_kw={'status': amo.STATUS_AWAITING_REVIEW})
        # Push the nomination just past the limit plus `bonus_days`.
        bonus_addon.current_version.update(
            nomination=(datetime.now() - timedelta(days=days, minutes=5))
        )
        self._give_points(user2, bonus_addon, amo.STATUS_NOMINATED)
        score = ReviewerScore.objects.get(user=user2)
        expected = (amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL] +
                    (amo.REVIEWED_OVERDUE_BONUS * bonus_days))
        assert score.score == expected
    def test_award_points_no_bonus_for_content_review(self):
        """Content reviews get the flat content-review score, no overdue
        bonus."""
        self.addon.update(status=amo.STATUS_PUBLIC)
        self.addon.current_version.update(nomination=self.days_ago(28))
        AutoApprovalSummary.objects.create(
            version=self.addon.current_version, verdict=amo.AUTO_APPROVED,
            weight=100)
        ReviewerScore.award_points(
            self.user, self.addon, self.addon.status,
            version=self.addon.current_version,
            post_review=False, content_review=True)
        score = ReviewerScore.objects.get(user=self.user)
        assert score.score == amo.REVIEWED_SCORES[amo.REVIEWED_CONTENT_REVIEW]
    def test_award_points_no_bonus_for_post_review(self):
        """Post-reviews are scored by risk level only, no overdue bonus."""
        self.addon.update(status=amo.STATUS_PUBLIC)
        self.addon.current_version.update(nomination=self.days_ago(29))
        AutoApprovalSummary.objects.create(
            version=self.addon.current_version, verdict=amo.AUTO_APPROVED,
            weight=101)
        ReviewerScore.award_points(
            self.user, self.addon, self.addon.status,
            version=self.addon.current_version,
            post_review=True, content_review=False)
        score = ReviewerScore.objects.get(user=self.user)
        assert score.score == amo.REVIEWED_SCORES[
            amo.REVIEWED_EXTENSION_HIGH_RISK]
    def test_award_moderation_points(self):
        """Moderation awards the ADDON_REVIEW score and note key."""
        ReviewerScore.award_moderation_points(self.user, self.addon, 1)
        score = ReviewerScore.objects.all()[0]
        assert score.score == (
            amo.REVIEWED_SCORES.get(amo.REVIEWED_ADDON_REVIEW))
        assert score.note_key == amo.REVIEWED_ADDON_REVIEW
    def test_get_total(self):
        """get_total() sums a single user's scores only."""
        user2 = UserProfile.objects.get(email='admin@mozilla.com')
        self._give_points()
        self._give_points(status=amo.STATUS_PUBLIC)
        self._give_points(user=user2, status=amo.STATUS_NOMINATED)
        assert ReviewerScore.get_total(self.user) == (
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL] +
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_UPDATE])
        assert ReviewerScore.get_total(user2) == (
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL])
    def test_get_recent(self):
        """get_recent() returns the user's scores newest-first."""
        user2 = UserProfile.objects.get(email='admin@mozilla.com')
        self._give_points()
        # NOTE(review): a real 1-second sleep; could be replaced by updating
        # the score row's `created` explicitly — confirm before changing.
        time.sleep(1)  # Wait 1 sec so ordering by created is checked.
        self._give_points(status=amo.STATUS_PUBLIC)
        self._give_points(user=user2)
        scores = ReviewerScore.get_recent(self.user)
        assert len(scores) == 2
        assert scores[0].score == (
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_UPDATE])
        assert scores[1].score == (
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL])
    def test_get_leaderboards(self):
        """Leaderboards rank users by total score and can be filtered by
        add-on type."""
        user2 = UserProfile.objects.get(email='persona_reviewer@mozilla.com')
        self._give_points()
        self._give_points(status=amo.STATUS_PUBLIC)
        self._give_points(user=user2, status=amo.STATUS_NOMINATED)
        leaders = ReviewerScore.get_leaderboards(self.user)
        assert leaders['user_rank'] == 1
        assert leaders['leader_near'] == []
        assert leaders['leader_top'][0]['rank'] == 1
        assert leaders['leader_top'][0]['user_id'] == self.user.id
        assert leaders['leader_top'][0]['total'] == (
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL] +
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_UPDATE])
        assert leaders['leader_top'][1]['rank'] == 2
        assert leaders['leader_top'][1]['user_id'] == user2.id
        assert leaders['leader_top'][1]['total'] == (
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL])
        # Filtered by persona type, only user2's persona review counts.
        self._give_points(
            user=user2, addon=amo.tests.addon_factory(type=amo.ADDON_PERSONA))
        leaders = ReviewerScore.get_leaderboards(
            self.user, addon_type=amo.ADDON_PERSONA)
        assert len(leaders['leader_top']) == 1
        assert leaders['leader_top'][0]['user_id'] == user2.id
    def test_only_active_reviewers_in_leaderboards(self):
        """Users without reviewer permissions are excluded from the
        leaderboards even if they have scores."""
        user2 = UserProfile.objects.create(username='former-reviewer')
        self._give_points()
        self._give_points(status=amo.STATUS_PUBLIC)
        self._give_points(user=user2, status=amo.STATUS_NOMINATED)
        leaders = ReviewerScore.get_leaderboards(self.user)
        assert leaders['user_rank'] == 1
        assert leaders['leader_near'] == []
        assert leaders['leader_top'][0]['user_id'] == self.user.id
        assert len(leaders['leader_top']) == 1  # Only the reviewer is here.
        assert user2.id not in [l['user_id'] for l in leaders['leader_top']], (
            'Unexpected non-reviewer user found in leaderboards.')
    def test_no_admins_or_staff_in_leaderboards(self):
        """Admin/staff users are excluded from the leaderboards."""
        user2 = UserProfile.objects.get(email='admin@mozilla.com')
        self._give_points()
        self._give_points(status=amo.STATUS_PUBLIC)
        self._give_points(user=user2, status=amo.STATUS_NOMINATED)
        leaders = ReviewerScore.get_leaderboards(self.user)
        assert leaders['user_rank'] == 1
        assert leaders['leader_near'] == []
        assert leaders['leader_top'][0]['user_id'] == self.user.id
        assert len(leaders['leader_top']) == 1  # Only the reviewer is here.
        assert user2.id not in [l['user_id'] for l in leaders['leader_top']], (
            'Unexpected admin user found in leaderboards.')
    def test_get_leaderboards_last(self):
        """A user outside the top list gets a `leader_near` window around
        their own rank."""
        users = []
        for i in range(6):
            user = UserProfile.objects.create(username='user-%s' % i)
            GroupUser.objects.create(group_id=50002, user=user)
            users.append(user)
        last_user = users.pop(len(users) - 1)
        for u in users:
            self._give_points(user=u)
        # Last user gets lower points by reviewing a persona.
        addon = self.addon
        addon.type = amo.ADDON_PERSONA
        self._give_points(user=last_user, addon=addon)
        leaders = ReviewerScore.get_leaderboards(last_user)
        assert leaders['user_rank'] == 6
        assert len(leaders['leader_top']) == 3
        assert len(leaders['leader_near']) == 2
    def test_leaderboard_score_when_in_multiple_reviewer_groups(self):
        """A user in several reviewer groups is listed once with a single
        combined total, not duplicated per group."""
        group_reviewers = Group.objects.create(
            name='Reviewers: Addons', rules='Addons:Review')
        group_content_reviewers = Group.objects.create(
            name='Reviewers: Content', rules='Addons:ContentReview')
        GroupUser.objects.create(group=group_reviewers, user=self.user)
        GroupUser.objects.create(group=group_content_reviewers, user=self.user)
        AutoApprovalSummary.objects.create(
            version=self.addon.current_version, verdict=amo.AUTO_APPROVED,
            weight=101)
        ReviewerScore.award_points(
            self.user, self.addon, self.addon.status,
            version=self.addon.current_version,
            post_review=True, content_review=False)
        assert ReviewerScore._leaderboard_list() == [(
            self.user.id, self.user.name, amo.REVIEWED_SCORES[
                amo.REVIEWED_EXTENSION_HIGH_RISK])]
    def test_all_users_by_score(self):
        """all_users_by_score() returns every scored user ordered by total,
        with the level name reached (empty when below the first level)."""
        user2 = UserProfile.objects.create(
            username='otherreviewer', email='otherreviewer@mozilla.com')
        self.grant_permission(
            user2, 'Personas:Review', name='Reviewers: Themes')
        # NOTE(review): mutates the global amo.REVIEWED_LEVELS in place
        # without restoring it — may leak into other tests; confirm intent.
        amo.REVIEWED_LEVELS[0]['points'] = 180
        self._give_points()
        self._give_points(status=amo.STATUS_PUBLIC)
        self._give_points(user=user2, status=amo.STATUS_NOMINATED)
        users = ReviewerScore.all_users_by_score()
        assert len(users) == 2
        # First user.
        assert users[0]['total'] == 200
        assert users[0]['user_id'] == self.user.id
        assert users[0]['level'] == amo.REVIEWED_LEVELS[0]['name']
        # Second user.
        assert users[1]['total'] == 120
        assert users[1]['user_id'] == user2.id
        assert users[1]['level'] == ''
    def test_caching(self):
        """Totals, recents, leaderboards and breakdowns are cached after the
        first query and invalidated when new points are awarded."""
        self._give_points()
        with self.assertNumQueries(1):
            ReviewerScore.get_total(self.user)
        with self.assertNumQueries(0):
            ReviewerScore.get_total(self.user)
        with self.assertNumQueries(1):
            ReviewerScore.get_recent(self.user)
        with self.assertNumQueries(0):
            ReviewerScore.get_recent(self.user)
        with self.assertNumQueries(3):
            ReviewerScore.get_leaderboards(self.user)
        with self.assertNumQueries(0):
            ReviewerScore.get_leaderboards(self.user)
        with self.assertNumQueries(1):
            ReviewerScore.get_breakdown(self.user)
        with self.assertNumQueries(0):
            ReviewerScore.get_breakdown(self.user)
        # New points invalidates all caches.
        self._give_points()
        with self.assertNumQueries(1):
            ReviewerScore.get_total(self.user)
        with self.assertNumQueries(1):
            ReviewerScore.get_recent(self.user)
        with self.assertNumQueries(3):
            ReviewerScore.get_leaderboards(self.user)
        with self.assertNumQueries(1):
            ReviewerScore.get_breakdown(self.user)
class TestRereviewQueueTheme(TestCase):
    """Tests for the RereviewQueueTheme model and its managers."""

    def _create_rqt(self, addon=None):
        # Build a rereview entry for a (new, unless provided) persona.
        if addon is None:
            addon = addon_factory(type=amo.ADDON_PERSONA)
        return RereviewQueueTheme.objects.create(
            theme=addon.persona, header='', footer='')

    def test_manager_soft_delete_addons(self):
        """Test manager excludes soft delete add-ons."""
        # Normal RQT object.
        self._create_rqt()
        # Deleted add-on RQT object.
        deleted_addon = addon_factory(type=amo.ADDON_PERSONA)
        self._create_rqt(addon=deleted_addon)
        deleted_addon.delete()
        assert RereviewQueueTheme.objects.count() == 1
        assert RereviewQueueTheme.unfiltered.count() == 2

    def test_footer_path_without_footer(self):
        """An empty footer gives an empty filesystem path."""
        assert self._create_rqt().footer_path == ''

    def test_footer_url_without_footer(self):
        """An empty footer gives an empty URL."""
        assert self._create_rqt().footer_url == ''

    def test_filter_for_many_to_many(self):
        # Check https://bugzilla.mozilla.org/show_bug.cgi?id=1142035.
        addon = addon_factory(type=amo.ADDON_PERSONA)
        rqt = RereviewQueueTheme.objects.create(theme=addon.persona)
        assert addon.persona.rereviewqueuetheme_set.get() == rqt
        # Delete the addon: it shouldn't be listed anymore.
        addon.update(status=amo.STATUS_DELETED)
        assert addon.persona.rereviewqueuetheme_set.all().count() == 0
class TestAutoApprovalSummary(TestCase):
    def setUp(self):
        """Create an add-on with an approved 1.0 version and a 1.1
        webextension version awaiting review, each with an empty validation
        result, plus an approvals counter."""
        self.addon = addon_factory(
            average_daily_users=666, version_kw={'version': '1.0'})
        # Empty ('{}') validation for the already-public current version.
        self.current_file_validation = FileValidation.objects.create(
            file=self.addon.current_version.all_files[0], validation=u'{}')
        self.version = version_factory(
            addon=self.addon, version='1.1', file_kw={
                'status': amo.STATUS_AWAITING_REVIEW,
                'is_webextension': True})
        self.file = self.version.all_files[0]
        # Empty validation for the new version under review.
        self.file_validation = FileValidation.objects.create(
            file=self.version.all_files[0], validation=u'{}')
        AddonApprovalsCounter.objects.create(addon=self.addon, counter=1)
def test_negative_weight(self):
summary = AutoApprovalSummary.objects.create(
version=self.version, weight=-300)
summary = AutoApprovalSummary.objects.get(pk=summary.pk)
assert summary.weight == -300
    def test_calculate_weight(self):
        """With no risk factors present every weight component is zero."""
        summary = AutoApprovalSummary(version=self.version)
        weight_info = summary.calculate_weight()
        # One entry per factor calculate_weight() evaluates.
        expected_result = {
            'abuse_reports': 0,
            'admin_code_review': 0,
            'average_daily_users': 0,
            'negative_ratings': 0,
            'reputation': 0,
            'past_rejection_history': 0,
            'uses_custom_csp': 0,
            'uses_eval_or_document_write': 0,
            'uses_implied_eval': 0,
            'uses_innerhtml': 0,
            'uses_native_messaging': 0,
            'size_of_code_changes': 0,
            'uses_remote_scripts': 0,
            'uses_unknown_minified_code': 0,
            'violates_mozilla_conditions': 0,
        }
        assert weight_info == expected_result
def test_calculate_weight_admin_code_review(self):
AddonReviewerFlags.objects.create(
addon=self.addon, needs_admin_code_review=True)
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 100
assert weight_info['admin_code_review'] == 100
    def test_calculate_weight_abuse_reports(self):
        """Recent abuse reports against the add-on or its developers add 10
        each, capped at 100; old reports and other targets are ignored."""
        # Extra abuse report for a different add-on, does not count.
        AbuseReport.objects.create(addon=addon_factory())
        # Extra abuse report for a different user, does not count.
        AbuseReport.objects.create(user=user_factory())
        # Extra old abuse report, does not count either.
        old_report = AbuseReport.objects.create(addon=self.addon)
        old_report.update(created=self.days_ago(370))
        # Recent abuse reports.
        AbuseReport.objects.create(addon=self.addon)
        AbuseReport.objects.create(addon=self.addon)
        # Recent abuse report for one of the developers of the add-on.
        author = user_factory()
        AddonUser.objects.create(addon=self.addon, user=author)
        AbuseReport.objects.create(user=author)
        summary = AutoApprovalSummary(version=self.version)
        weight_info = summary.calculate_weight()
        # 3 counted reports x 10 points.
        assert summary.weight == 30
        assert weight_info['abuse_reports'] == 30
        # Should be capped at 100.
        for i in range(0, 10):
            AbuseReport.objects.create(addon=self.addon)
        weight_info = summary.calculate_weight()
        assert summary.weight == 100
        assert weight_info['abuse_reports'] == 100
    def test_calculate_weight_abuse_reports_use_created_from_instance(self):
        """The abuse-report window is measured from the summary's created
        date, not from today."""
        # Create an abuse report 400 days in the past. It should be ignored it
        # we were calculating from today, but use an AutoApprovalSummary
        # instance that is 40 days old, making the abuse report count.
        report = AbuseReport.objects.create(addon=self.addon)
        report.update(created=self.days_ago(400))
        summary = AutoApprovalSummary.objects.create(version=self.version)
        summary.update(created=self.days_ago(40))
        weight_info = summary.calculate_weight()
        assert summary.weight == 10
        assert weight_info['abuse_reports'] == 10
    def test_calculate_weight_negative_ratings(self):
        """Recent negative (<= 3) ratings add weight past a threshold of 50,
        capped at 100; positive, old, or other-add-on ratings are ignored."""
        # Positive rating, does not count.
        Rating.objects.create(
            user=user_factory(), addon=self.addon, version=self.version,
            rating=5)
        # Negative rating, but too old, does not count.
        old_rating = Rating.objects.create(
            user=user_factory(), addon=self.addon, version=self.version,
            rating=1)
        old_rating.update(created=self.days_ago(370))
        # Negative review on a different add-on, does not count either.
        extra_addon = addon_factory()
        Rating.objects.create(
            user=user_factory(), addon=extra_addon,
            version=extra_addon.current_version, rating=1)
        # Recent negative ratings.
        ratings = [Rating(
            user=user_factory(), addon=self.addon,
            version=self.version, rating=3) for i in range(0, 49)]
        Rating.objects.bulk_create(ratings)
        summary = AutoApprovalSummary(version=self.version)
        weight_info = summary.calculate_weight()
        assert summary.weight == 0  # Not enough negative ratings yet...
        assert weight_info['negative_ratings'] == 0
        # Create one more to get to weight == 1.
        Rating.objects.create(
            user=user_factory(), addon=self.addon, version=self.version,
            rating=2)
        weight_info = summary.calculate_weight()
        assert summary.weight == 1
        assert weight_info['negative_ratings'] == 1
        # Create 5000 more (sorry!) to make sure it's capped at 100.
        ratings = [Rating(
            user=user_factory(), addon=self.addon,
            version=self.version, rating=3) for i in range(0, 5000)]
        Rating.objects.bulk_create(ratings)
        weight_info = summary.calculate_weight()
        assert summary.weight == 100
        assert weight_info['negative_ratings'] == 100
def test_calculate_weight_reputation(self):
summary = AutoApprovalSummary(version=self.version)
self.addon.update(reputation=0)
weight_info = summary.calculate_weight()
assert summary.weight == 0
assert weight_info['reputation'] == 0
self.addon.update(reputation=3)
weight_info = summary.calculate_weight()
assert summary.weight == -300
assert weight_info['reputation'] == -300
self.addon.update(reputation=1000)
weight_info = summary.calculate_weight()
assert summary.weight == -300
assert weight_info['reputation'] == -300
self.addon.update(reputation=-1000)
weight_info = summary.calculate_weight()
assert summary.weight == 0
assert weight_info['reputation'] == 0
def test_calculate_weight_average_daily_users(self):
self.addon.update(average_daily_users=142444)
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 14
assert weight_info['average_daily_users'] == 14
self.addon.update(average_daily_users=1756567658)
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 100
assert weight_info['average_daily_users'] == 100
    def test_calculate_weight_past_rejection_history(self):
        """Recent Mozilla rejections of this add-on add 10 per version
        (counted once even with multiple files), capped at 100; old
        rejections, developer disables and other add-ons are ignored."""
        # Old rejected version, does not count.
        version_factory(
            addon=self.addon,
            file_kw={'reviewed': self.days_ago(370),
                     'status': amo.STATUS_DISABLED})
        # Version disabled by the developer, not Mozilla (original_status
        # is set to something different than STATUS_NULL), does not count.
        version_factory(
            addon=self.addon,
            file_kw={'reviewed': self.days_ago(15),
                     'status': amo.STATUS_DISABLED,
                     'original_status': amo.STATUS_PUBLIC})
        # Rejected version.
        version_factory(
            addon=self.addon,
            file_kw={'reviewed': self.days_ago(14),
                     'status': amo.STATUS_DISABLED})
        # Another rejected version, with multiple files. Only counts once.
        version_with_multiple_files = version_factory(
            addon=self.addon,
            file_kw={'reviewed': self.days_ago(13),
                     'status': amo.STATUS_DISABLED,
                     'platform': amo.PLATFORM_WIN.id})
        file_factory(
            reviewed=self.days_ago(13),
            version=version_with_multiple_files,
            status=amo.STATUS_DISABLED,
            platform=amo.PLATFORM_MAC.id)
        # Rejected version on a different add-on, does not count.
        version_factory(
            addon=addon_factory(),
            file_kw={'reviewed': self.days_ago(12),
                     'status': amo.STATUS_DISABLED})
        # Approved version, does not count.
        new_approved_version = version_factory(
            addon=self.addon,
            file_kw={'reviewed': self.days_ago(11)})
        FileValidation.objects.create(
            file=new_approved_version.all_files[0], validation=u'{}')
        summary = AutoApprovalSummary(version=self.version)
        weight_info = summary.calculate_weight()
        # 2 counted rejections x 10 points.
        assert summary.weight == 20
        assert weight_info['past_rejection_history'] == 20
        # Should be capped at 100.
        for i in range(0, 10):
            version_factory(
                addon=self.addon,
                file_kw={'reviewed': self.days_ago(10),
                         'status': amo.STATUS_DISABLED})
        summary = AutoApprovalSummary(version=self.version)
        weight_info = summary.calculate_weight()
        assert summary.weight == 100
        assert weight_info['past_rejection_history'] == 100
def test_calculate_weight_uses_eval_or_document_write(self):
validation_data = {
'messages': [{
'id': ['DANGEROUS_EVAL'],
}]
}
self.file_validation.update(validation=json.dumps(validation_data))
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 20
assert weight_info['uses_eval_or_document_write'] == 20
validation_data = {
'messages': [{
'id': ['NO_DOCUMENT_WRITE'],
}]
}
self.file_validation.update(validation=json.dumps(validation_data))
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 20
assert weight_info['uses_eval_or_document_write'] == 20
# Still only 20 if both appear.
validation_data = {
'messages': [{
'id': ['DANGEROUS_EVAL'],
}, {
'id': ['NO_DOCUMENT_WRITE'],
}]
}
self.file_validation.update(validation=json.dumps(validation_data))
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 20
assert weight_info['uses_eval_or_document_write'] == 20
def test_calculate_weight_uses_implied_eval(self):
validation_data = {
'messages': [{
'id': ['NO_IMPLIED_EVAL'],
}]
}
self.file_validation.update(validation=json.dumps(validation_data))
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 5
assert weight_info['uses_implied_eval'] == 5
def test_calculate_weight_uses_innerhtml(self):
validation_data = {
'messages': [{
'id': ['UNSAFE_VAR_ASSIGNMENT'],
}]
}
self.file_validation.update(validation=json.dumps(validation_data))
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 20
assert weight_info['uses_innerhtml'] == 20
def test_calculate_weight_uses_custom_csp(self):
validation_data = {
'messages': [{
'id': ['MANIFEST_CSP'],
}]
}
self.file_validation.update(validation=json.dumps(validation_data))
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 30
assert weight_info['uses_custom_csp'] == 30
def test_calculate_weight_uses_native_messaging(self):
WebextPermission.objects.create(
file=self.file, permissions=['nativeMessaging'])
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 20
assert weight_info['uses_native_messaging'] == 20
def test_calculate_weight_uses_remote_scripts(self):
validation_data = {
'messages': [{
'id': ['REMOTE_SCRIPT'],
}]
}
self.file_validation.update(validation=json.dumps(validation_data))
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 40
assert weight_info['uses_remote_scripts'] == 40
def test_calculate_weight_violates_mozilla_conditions_of_use(self):
validation_data = {
'messages': [{
'id': ['MOZILLA_COND_OF_USE'],
}]
}
self.file_validation.update(validation=json.dumps(validation_data))
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 20
assert weight_info['violates_mozilla_conditions'] == 20
def test_calculate_weight_uses_unknown_minified_code_nothing(self):
validation_data = {
'metadata': {
'unknownMinifiedFiles': [] # Empty list: no weight.
}
}
self.file_validation.update(validation=json.dumps(validation_data))
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 0
assert weight_info['uses_unknown_minified_code'] == 0
validation_data = {
'metadata': {
# Missing property: no weight.
}
}
self.file_validation.update(validation=json.dumps(validation_data))
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 0
assert weight_info['uses_unknown_minified_code'] == 0
validation_data = {
# Missing metadata: no weight.
}
self.file_validation.update(validation=json.dumps(validation_data))
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 0
assert weight_info['uses_unknown_minified_code'] == 0
def test_calculate_weight_uses_unknown_minified_code(self):
validation_data = {
'metadata': {
'unknownMinifiedFiles': ['something']
}
}
self.file_validation.update(validation=json.dumps(validation_data))
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.weight == 10
assert weight_info['uses_unknown_minified_code'] == 10
    def test_calculate_size_of_code_changes_no_current_validation(self):
        """A missing validation result for the current version yields the
        maximum 200-point 'no_validation_result' weight."""
        # Delete the validation for the current version and reload the version
        # we're testing (otherwise the file validation has already been loaded
        # and is still attached to the instance...)
        self.current_file_validation.delete()
        self.version = Version.objects.get(pk=self.version.pk)
        summary = AutoApprovalSummary(version=self.version)
        weight_info = summary.calculate_weight()
        assert summary.weight == 200
        assert weight_info['no_validation_result'] == 200
    def test_calculate_size_of_code_changes_no_new_validation(self):
        """A missing validation result for the new version yields the
        maximum 200-point 'no_validation_result' weight."""
        # Delete the validation for the new version and reload that version.
        # (otherwise the file validation has already been loaded and is still
        # attached to the instance...)
        self.file_validation.delete()
        self.version = Version.objects.get(pk=self.version.pk)
        summary = AutoApprovalSummary(version=self.version)
        weight_info = summary.calculate_weight()
        assert summary.weight == 200
        assert weight_info['no_validation_result'] == 200
def test_calculate_size_of_code_changes_no_reported_size(self):
summary = AutoApprovalSummary(version=self.version)
weight_info = summary.calculate_weight()
assert summary.calculate_size_of_code_changes() == 0
assert summary.weight == 0
assert weight_info['size_of_code_changes'] == 0
def test_calculate_size_of_code_changes_no_current_version(self):
    """With no current version to diff against, the new version's whole
    scanned size counts as the change (15000 bytes -> weight 3)."""
    validation_data = {
        'metadata': {
            'totalScannedFileSize': 15000,
        }
    }
    self.file_validation.update(validation=json.dumps(validation_data))
    summary = AutoApprovalSummary(version=self.version)
    assert summary.calculate_size_of_code_changes() == 15000
    weight_info = summary.calculate_weight()
    assert summary.weight == 3
    assert weight_info['size_of_code_changes'] == 3
def test_calculate_size_of_code_changes(self):
    """Size of code changes is the delta between old and new scanned
    sizes (15000 - 5000 = 10000 bytes -> weight 2)."""
    old_validation_data = {
        'metadata': {
            'totalScannedFileSize': 5000,
        }
    }
    self.current_file_validation.update(
        validation=json.dumps(old_validation_data))
    new_validation_data = {
        'metadata': {
            'totalScannedFileSize': 15000,
        }
    }
    self.file_validation.update(
        validation=json.dumps(new_validation_data))
    summary = AutoApprovalSummary(version=self.version)
    assert summary.calculate_size_of_code_changes() == 10000
    weight_info = summary.calculate_weight()
    assert summary.weight == 2
    assert weight_info['size_of_code_changes'] == 2
def test_calculate_size_of_code_changes_no_negative(self):
    """A shrinking codebase still yields a positive (absolute) delta."""
    old_validation_data = {
        'metadata': {
            'totalScannedFileSize': 20000,
        }
    }
    self.current_file_validation.update(
        validation=json.dumps(old_validation_data))
    new_validation_data = {
        'metadata': {
            'totalScannedFileSize': 5000,
        }
    }
    self.file_validation.update(
        validation=json.dumps(new_validation_data))
    summary = AutoApprovalSummary(version=self.version)
    assert summary.calculate_size_of_code_changes() == 15000
    weight_info = summary.calculate_weight()
    assert summary.weight == 3
    assert weight_info['size_of_code_changes'] == 3
def test_calculate_size_of_code_changes_max(self):
    """The size-of-code-changes contribution is capped at 100 weight,
    even for a huge (50MB) delta."""
    old_validation_data = {
        'metadata': {
            'totalScannedFileSize': 50000000,
        }
    }
    self.current_file_validation.update(
        validation=json.dumps(old_validation_data))
    new_validation_data = {
        'metadata': {
            'totalScannedFileSize': 0,
        }
    }
    self.file_validation.update(
        validation=json.dumps(new_validation_data))
    summary = AutoApprovalSummary(version=self.version)
    assert summary.calculate_size_of_code_changes() == 50000000
    weight_info = summary.calculate_weight()
    assert summary.weight == 100
    assert weight_info['size_of_code_changes'] == 100
def test_calculate_weight_sum(self):
    """Total weight is the sum of individual factors
    (30 + 20 + 5 + 20 = 75 for the four linter messages below)."""
    validation_data = {
        'messages': [
            {'id': ['MANIFEST_CSP']},
            {'id': ['UNSAFE_VAR_ASSIGNMENT']},
            {'id': ['NO_IMPLIED_EVAL']},
            {'id': ['DANGEROUS_EVAL']},
        ]
    }
    self.file_validation.update(validation=json.dumps(validation_data))
    summary = AutoApprovalSummary(version=self.version)
    weight_info = summary.calculate_weight()
    assert summary.weight == 75
    expected_result = {
        'abuse_reports': 0,
        'admin_code_review': 0,
        'average_daily_users': 0,
        'negative_ratings': 0,
        'reputation': 0,
        'past_rejection_history': 0,
        'uses_custom_csp': 30,
        'uses_eval_or_document_write': 20,
        'uses_implied_eval': 5,
        'uses_innerhtml': 20,
        'uses_native_messaging': 0,
        'size_of_code_changes': 0,
        'uses_remote_scripts': 0,
        'uses_unknown_minified_code': 0,
        'violates_mozilla_conditions': 0,
    }
    assert weight_info == expected_result
def test_check_uses_custom_csp(self):
    """A MANIFEST_CSP linter message flags the version as using a
    custom CSP."""
    assert AutoApprovalSummary.check_uses_custom_csp(self.version) is False
    validation_data = {
        'messages': [{
            'id': ['MANIFEST_CSP'],
        }]
    }
    self.file_validation.update(validation=json.dumps(validation_data))
    assert AutoApprovalSummary.check_uses_custom_csp(self.version) is True
def test_check_uses_custom_csp_file_validation_missing(self):
    """Missing validation results raise
    AutoApprovalNoValidationResultError, even if only one file of the
    version lacks them."""
    self.file_validation.delete()
    # `del` invalidates the cached all_files property so it is re-fetched.
    del self.version.all_files
    with self.assertRaises(AutoApprovalNoValidationResultError):
        AutoApprovalSummary.check_uses_custom_csp(self.version)
    # Also happens if only one file is missing validation info.
    self.file_validation = FileValidation.objects.create(
        file=self.version.all_files[0], validation=u'{}')
    del self.version.all_files
    file_factory(version=self.version, status=amo.STATUS_AWAITING_REVIEW)
    with self.assertRaises(AutoApprovalNoValidationResultError):
        AutoApprovalSummary.check_uses_custom_csp(self.version)
def test_check_uses_native_messaging(self):
    """Only the 'nativeMessaging' webext permission triggers the
    native-messaging check."""
    assert (
        AutoApprovalSummary.check_uses_native_messaging(self.version)
        is False)
    webext_permissions = WebextPermission.objects.create(
        file=self.file, permissions=['foobar'])
    # `del` invalidates the cached permissions list after each change.
    del self.file.webext_permissions_list
    assert (
        AutoApprovalSummary.check_uses_native_messaging(self.version)
        is False)
    webext_permissions.update(permissions=['nativeMessaging', 'foobar'])
    del self.file.webext_permissions_list
    assert (
        AutoApprovalSummary.check_uses_native_messaging(self.version)
        is True)
def test_check_is_locked(self):
    """A version is 'locked' only when a reviewer other than the task
    user holds the reviewing lock."""
    assert AutoApprovalSummary.check_is_locked(self.version) is False
    set_reviewing_cache(self.version.addon.pk, settings.TASK_USER_ID)
    assert AutoApprovalSummary.check_is_locked(self.version) is False
    set_reviewing_cache(self.version.addon.pk, settings.TASK_USER_ID + 42)
    assert AutoApprovalSummary.check_is_locked(self.version) is True
@mock.patch.object(AutoApprovalSummary, 'calculate_weight', spec=True)
@mock.patch.object(AutoApprovalSummary, 'calculate_verdict', spec=True)
def test_create_summary_for_version(
        self, calculate_verdict_mock, calculate_weight_mock):
    """create_summary_for_version() persists a summary and delegates to
    calculate_weight()/calculate_verdict() exactly once each."""
    calculate_verdict_mock.return_value = {'dummy_verdict': True}
    summary, info = AutoApprovalSummary.create_summary_for_version(
        self.version,)
    assert calculate_weight_mock.call_count == 1
    assert calculate_verdict_mock.call_count == 1
    assert calculate_verdict_mock.call_args == ({
        'dry_run': False,
    },)
    assert summary.pk
    assert summary.version == self.version
    assert info == {'dummy_verdict': True}
@mock.patch.object(AutoApprovalSummary, 'calculate_verdict', spec=True)
def test_create_summary_no_previously_approved_versions(
        self, calculate_verdict_mock):
    """A summary can be created even when the addon has no approvals
    counter rows at all."""
    AddonApprovalsCounter.objects.all().delete()
    self.version.reload()
    calculate_verdict_mock.return_value = {'dummy_verdict': True}
    summary, info = AutoApprovalSummary.create_summary_for_version(
        self.version)
    assert summary.pk
    assert info == {'dummy_verdict': True}
def test_create_summary_already_existing(self):
    """create_summary_for_version() updates an existing summary for the
    same version in place rather than creating a new row."""
    # Create a dummy summary manually, then call the method to create a
    # real one. It should have just updated the previous instance.
    summary = AutoApprovalSummary.objects.create(
        version=self.version, is_locked=True)
    assert summary.pk
    assert summary.version == self.version
    assert summary.verdict == amo.NOT_AUTO_APPROVED
    previous_summary_pk = summary.pk
    summary, info = AutoApprovalSummary.create_summary_for_version(
        self.version)
    assert summary.pk == previous_summary_pk
    assert summary.version == self.version
    assert summary.is_locked is False
    assert summary.verdict == amo.AUTO_APPROVED
    assert info == {
        'is_locked': False,
    }
def test_create_summary_no_files(self):
    """A version without files raises AutoApprovalNotEnoughFilesError."""
    self.file.delete()
    del self.version.all_files
    with self.assertRaises(AutoApprovalNotEnoughFilesError):
        AutoApprovalSummary.create_summary_for_version(self.version)
def test_calculate_verdict_failure_dry_run(self):
    """A locked version in dry-run mode yields
    WOULD_NOT_HAVE_BEEN_AUTO_APPROVED."""
    summary = AutoApprovalSummary.objects.create(
        version=self.version, is_locked=True)
    info = summary.calculate_verdict(dry_run=True)
    assert info == {
        'is_locked': True,
    }
    assert summary.verdict == amo.WOULD_NOT_HAVE_BEEN_AUTO_APPROVED
def test_calculate_verdict_failure(self):
    """A locked version yields NOT_AUTO_APPROVED."""
    summary = AutoApprovalSummary.objects.create(
        version=self.version, is_locked=True)
    info = summary.calculate_verdict()
    assert info == {
        'is_locked': True,
    }
    assert summary.verdict == amo.NOT_AUTO_APPROVED
def test_calculate_verdict_success(self):
    """An unlocked version yields AUTO_APPROVED."""
    summary = AutoApprovalSummary.objects.create(version=self.version)
    info = summary.calculate_verdict()
    assert info == {
        'is_locked': False,
    }
    assert summary.verdict == amo.AUTO_APPROVED
def test_calculate_verdict_success_dry_run(self):
    """An unlocked version in dry-run mode yields
    WOULD_HAVE_BEEN_AUTO_APPROVED."""
    summary = AutoApprovalSummary.objects.create(version=self.version)
    info = summary.calculate_verdict(dry_run=True)
    assert info == {
        'is_locked': False,
    }
    assert summary.verdict == amo.WOULD_HAVE_BEEN_AUTO_APPROVED
def test_calculate_verdict_post_review(self):
    """In post-review mode the verdict is AUTO_APPROVED regardless of
    other flags."""
    summary = AutoApprovalSummary.objects.create(version=self.version)
    info = summary.calculate_verdict()
    assert info == {
        'is_locked': False,
    }
    # Regardless of the many flags that are set, it's approved because we
    # are in post-review mode.
    assert summary.verdict == amo.AUTO_APPROVED
def test_verdict_info_prettifier(self):
    """verdict_info_prettifier() turns the raw verdict dict into
    human-readable strings; an empty dict yields an empty list."""
    verdict_info = {
        'is_locked': True,
    }
    result = list(
        AutoApprovalSummary.verdict_info_prettifier(verdict_info))
    assert result == [
        u'Is locked by a reviewer.',
    ]
    result = list(AutoApprovalSummary.verdict_info_prettifier({}))
    assert result == []
class TestWhiteboardWatchChange(TestCase):
    """Tests that saving a changed whiteboard clears the
    has_info_request flag on an addon's versions (presumably via a
    save signal/watcher on Whiteboard — TODO confirm in model code)."""

    def make_addon(self, whiteboard='', **kwargs):
        """Create a public addon with three versions (two of which have
        an info request) and an associated Whiteboard row."""
        addon = Addon(type=amo.ADDON_EXTENSION, status=amo.STATUS_PUBLIC,
                      **kwargs)
        addon.save()
        addon.versions.create(has_info_request=True)
        addon.versions.create(has_info_request=False)
        addon.versions.create(has_info_request=True)
        whiteboard = Whiteboard(pk=addon.pk, public=whiteboard)
        whiteboard.save()
        return addon

    def assert_has_info_set(self, addon):
        # At least one version still has an open info request.
        assert any([v.has_info_request for v in addon.versions.all()])

    def assert_has_info_not_set(self, addon):
        # No version has an open info request anymore.
        assert all([not v.has_info_request for v in addon.versions.all()])

    def test_has_info_update_whiteboard(self):
        """Test saving with a change to whiteboard clears has_info_request."""
        addon = self.make_addon()
        self.assert_has_info_set(addon)
        addon.whiteboard.public = 'Info about things.'
        addon.whiteboard.save()
        self.assert_has_info_not_set(addon)

    def test_has_info_update_whiteboard_no_change(self):
        """Test saving without a change to whiteboard doesn't clear
        has_info_request."""
        addon = self.make_addon(whiteboard='Info about things.')
        self.assert_has_info_set(addon)
        addon.whiteboard.public = 'Info about things.'
        addon.whiteboard.save()
        self.assert_has_info_set(addon)

    def test_has_info_whiteboard_removed(self):
        """Test saving with an empty whiteboard doesn't clear
        has_info_request."""
        addon = self.make_addon(whiteboard='Info about things.')
        self.assert_has_info_set(addon)
        addon.whiteboard.public = ''
        addon.whiteboard.save()
        self.assert_has_info_set(addon)
| tsl143/addons-server | src/olympia/reviewers/tests/test_models.py | Python | bsd-3-clause | 63,568 |
##
# api.py
#
# This file is the workhorse for the entire web application.
# It implements and provides the API required for the iOS portion
# of the project as well as interacting with Google's datastore
# for persistent storage of our models.
##
# for sending mail
from google.appengine.api import mail
# Used in conjunction with the geomodel library for doing
# proximity based searches
from google.appengine.ext.db import GeoPt
from geo import geotypes
# HttpResponse is what all Django-based views must return
# to render the output. In our web application the
# _json* methods build and return HttpResponse objects
# for rendering JSON data
from django.http import HttpResponse
# For encoding Python objects into JSON strings
from django.utils import simplejson
# Our datastore models
from model import *
# For handling user sessions
from appengine_utilities.sessions import Session
# Provides the sha1 module we use for hashing passwords
import hashlib
# The Python logging module. We use the basicConfig method
# to setup to log to the console (or GoogleAppEngineLauncher
# logs screen)
import logging
logging.basicConfig(level=logging.DEBUG)
##
# CONSTANTS
##
"""
The email address to send from. See the Notes section of the README
for more information on what to set this to.
"""
SENDER_EMAIL_ADDRESS = "VALID@APPENGINE_ADDRESS.COM"
##
# UTILITY METHODS
##
def _hash_password(password):
"""
Returns a sha1-hashed version of the given plaintext password.
"""
return hashlib.sha1(password).hexdigest()
def _json_response(success=True, msg="OK", **kwargs):
    """
    Helper method to build an HTTPResponse with a stock
    JSON object.
    @param success=True: indicates success or failure of the API method
    @param msg: string with details on success or failure
    @kwargs: any number of key/value pairs to be sent with the JSON object
    """
    # Assemble the payload: the stock success/msg fields plus any
    # caller-supplied extras, serialized with simplejson.
    payload = {"success": success, "msg": msg}
    payload.update(kwargs)
    # Views must return an HttpResponse; mark the body as JSON.
    return HttpResponse(simplejson.dumps(payload),
                        mimetype="application/json")
def _json_unauthorized_response(**kwargs):
    """
    Helper method to build an HTTPResponse with a stock JSON object
    that represents unauthorized access to an API method.
    NOTE: Always returns success=false and msg="Unauthorized"
    @kwargs: any number of key/value pairs to be sent with the JSON object
    """
    # Same payload shape as _json_response, but hard-wired to a failure
    # message and delivered with an HTTP 401 status code.
    payload = {"success": False, "msg": "Unauthorized"}
    payload.update(kwargs)
    return HttpResponse(simplejson.dumps(payload), status=401,
                        mimetype="application/json")
##
# DECORATORS
#
# For more information about decorators in Python see:
#
# http://www.python.org/dev/peps/pep-0318/
# http://wiki.python.org/moin/PythonDecorators
# http://www.ibm.com/developerworks/linux/library/l-cpdecor.html
# Google...
##
# Usage: @validate_request(method, p1, p2, ...)
def validate_request(method, *params):
    """
    Decorator for validating the required request method for an API call as
    well as enforcing any required parameters in the request. If either the
    method or parameter checks fail a stock failure JSON object is returned
    with the exact issue in the msg field. If all checks pass then the
    API call proceeds.
    """
    def decorator(view_func):
        def wrapper(request, *args, **kwargs):
            # Guard clause: reject any request made with the wrong method.
            if request.method != method:
                return _json_response(
                    success=False,
                    msg="%s requests are not allowed." % request.method)
            # Every declared parameter must be present and non-empty;
            # report the first one that is missing.
            for required in params:
                if not request.REQUEST.get(required, ""):
                    return _json_response(
                        success=False,
                        msg="'%s' is required." % required)
            # All checks passed -- run the wrapped view.
            return view_func(request, *args, **kwargs)
        return wrapper
    return decorator
# Usage: @validate_session()
def validate_session():
    """
    Decorator for validating that a user is authenticated by checking the
    session for a user object. If this fails the stock json_unauthorized_response
    is returned or else the API call is allowed to proceed.
    """
    def decorator(view_func):
        def wrapper(request, *args, **kwargs):
            # An authenticated caller has a "user" entry in the session;
            # anyone else gets the stock 401 response.
            current_user = Session().get("user")
            if current_user is None:
                return _json_unauthorized_response()
            return view_func(request, *args, **kwargs)
        return wrapper
    return decorator
##
# API METHODS
##
@validate_session()
@validate_request("POST", "question", "latitude", "longitude", "pay_key")
def ask(request):
    """
    API Method - /ask
    Creates a new Question and adds it to the datastore
    @method POST
    @param question: the text of the question
    @param latitude: latitude of the location
    @param longitude: longitude of the location
    @param pay_key: the pay key from a successful PayPal purchase
    @returns stock success or failure JSON response along with
    the question and user objects.
    """
    # authenticated user (guaranteed non-None by @validate_session)
    user = Session().get("user")
    # required parameters (guaranteed present by @validate_request)
    question = request.REQUEST.get("question")
    latitude = float(request.REQUEST.get("latitude"))
    longitude = float(request.REQUEST.get("longitude"))
    pay_key = request.REQUEST.get("pay_key")
    # Using the PayKey you could validate it using PayPal APIs
    # to confirm that a user paid and the transaction is complete.
    # This is left up to the curious coder to implement :)
    # NOTE(review): pay_key is currently read but never verified.
    # Create the question with the required fields and tie it
    # to the authenticated user
    q = Question(question=question,
                 location=GeoPt(latitude, longitude),
                 user=user)
    # update_location() presumably recomputes the geomodel geocells
    # from `location` before saving -- confirm in model.py.
    q.update_location()
    q.put()
    # return stock JSON with the Question object details
    return _json_response(question=q.to_json(), user=user.to_json())
@validate_session()
@validate_request("POST", "question_id", "answer")
def answer(request):
    """
    API Method - /answer
    Creates a new Answer object and adds it to the datastore. Validates
    that the question exists and does not have an accepted answer before
    accepting the answer.
    This method also takes care of sending the owner of the question
    an email saying a new answer has been given with the answer in the
    body of the message.
    @method POST
    @param question_id: id of an existing question
    @param answer: the text for the answer to a question
    @returns one answer object
    """
    # session and authenticated user
    user = Session().get("user")
    # required parameters
    question_id = int(request.REQUEST.get("question_id"))
    answer = request.REQUEST.get("answer")
    # find the question associated with the question_id parameter
    question = Question.get_by_id(question_id)
    # no question with the given id
    if question is None:
        return _json_response(success=False, msg="Question does not exist.")
    # question has already been answered
    if question.closed:
        return _json_response(success=False, msg="Question has an accepted answer and is now closed.")
    # create a new answer and save it to the datastore
    a = Answer(user=user,
               question=question,
               answer=answer)
    a.put()
    # send an email to the owner of the question
    # NOTE(review): mail.send_mail raises on failure, which would abort
    # the request after the answer was already stored -- confirm this is
    # the desired behavior.
    question_owner_email = question.user.email
    mail.send_mail(sender=SENDER_EMAIL_ADDRESS,
                   to=question_owner_email,
                   subject="Your question has a new answer!",
                   body="""
    This is to inform you that one of your questions has
    received a new answer.

    Your question:

    %s

    The answer:

    %s

    Regards,

    Inquire Application
    """ % (question.question, answer))
    # return stock JSON with details of the answer object
    return _json_response(answer=a.to_json())
@validate_session()
@validate_request("POST", "answer_id")
def accept(request):
    """
    API Method - /accept
    Accepts an answer for a question. The question must be owned by the
    current authenticated user accepting the question and not already
    have an accepted answer.
    This method also takes care of sending the owner of the answer
    an email saying their answer was accepted. The accepted answer
    owner will also be given one karma point.
    @method POST
    @param answer_id: id of the answer being accepted
    @returns stock JSON object
    """
    # session and authenticated user
    user = Session().get("user")
    # required parameters
    answer_id = int(request.REQUEST.get("answer_id"))
    # find the answer associated with the answer_id
    answer = Answer.get_by_id(answer_id)
    # no answer with the given id
    if answer is None:
        return _json_response(success=False, msg="Answer does not exist.")
    # associated question. (Fixed: this lookup used to be performed
    # twice in a row; a single dereference is sufficient.)
    question = answer.question
    # make sure the question for this answer is owned by this user
    if question.user.key().id() != user.key().id():
        return _json_response(success=False, msg="You must be the owner of the question to accept an answer.")
    # also make sure the question is not already answered
    if question.closed:
        return _json_response(success=False, msg="Question already has an accepted answer.")
    # change the accepted flag of the answer and save it.
    answer.accepted_answer = True
    answer.put()
    # close the question and save it
    question.closed = True
    question.put()
    # update the answer owner's karma points
    answer.user.karma += 1
    answer.user.put()
    # send an email to the address assigned to the answer
    answer_owner_email = answer.user.email
    mail.send_mail(sender=SENDER_EMAIL_ADDRESS,
                   to=answer_owner_email,
                   subject="Your answer was accepted!",
                   body="""
    This is to inform you that one of your answers has
    been accepted! You have been given one karma point.

    The question you answered:

    %s

    Your answer:

    %s

    Regards,

    Inquire Application
    """ % (question.question, answer.answer))
    # return stock success JSON
    return _json_response()
@validate_session()
@validate_request("GET", "question_id")
def answers(request):
    """
    API Method - /answers
    Returns a list of answers for a given question id.
    @method GET
    @param question_id: The question id to retrieve answers for
    @returns list of answer objects
    """
    # Look up the question; fail fast if the id doesn't match anything.
    question = Question.get_by_id(int(request.GET.get("question_id")))
    if question is None:
        return _json_response(success=False,
                              msg="Question does not exist!")
    # Serialize every answer attached to the question.
    answer_list = [answer.to_json() for answer in question.answer_set]
    return _json_response(answers=answer_list)
@validate_session()
@validate_request("GET", "latitude", "longitude")
def questions(request):
    """
    API Method - /questions
    Returns a list of questions that are within geographical proximity
    to the passed in latitude/longitude.
    @method GET
    @param latitude: latitude of the location
    @param longitude longitude of the location
    @optional max_results: max number of questions to return, default=25
    @optional max_distance: max distance to search in miles
    @returns list of question objects
    """
    # required parameters
    latitude = float(request.GET.get("latitude"))
    longitude = float(request.GET.get("longitude"))
    # defines the center of our proximity search
    # geotypes.Point provided by geomodel project
    center = geotypes.Point(latitude, longitude)
    # default
    max_results = int(request.GET.get("max_results", 25))  # 25 results default
    max_distance = int(request.GET.get("max_distance", 50))  # 50 mile default
    # convert miles to meters: miles / 0.621371192 gives kilometers,
    # and the x1000 scales to meters (geomodel presumably expects its
    # max_distance in meters -- confirm against the geomodel docs).
    max_distance = 1000*max_distance/0.621371192
    # Get all unclosed questions within the proximity max_distance and
    # limit to max_results
    base_query = Question.all().filter("closed =", False)
    questions = Question.proximity_fetch(base_query,
                                         center,
                                         max_results=max_results,
                                         max_distance=max_distance)
    return _json_response(questions=[q.to_json() for q in questions])
@validate_request("POST", "email", "password")
def register(request):
    """
    API Method - /register
    Creates a new user and adds it to the datastore. If a user already
    exists with the given email address the request fails and an
    appropriate JSON response is returned.
    @method POST
    @param email: email address for the user
    @param password: password for the user
    @returns newly created user object or failure JSON
    """
    # required parameters
    email = request.POST.get("email")
    password = request.POST.get("password")
    # Reject the registration when the email address is already taken.
    existing = User.all()
    existing.filter("email =", email)
    taken = existing.count()
    if taken != 0:
        return _json_response(success=False,
                              msg="Email address already exists.",
                              users=taken)
    # Store only the hashed form of the password, never the plaintext.
    new_user = User(email=email, password=_hash_password(password))
    new_user.put()
    return _json_response()
def logout(request):
    """
    API Method - /logout
    Destroys the active user's session object. Any further use of
    protected API methods will require a new session via the auth
    API.
    @method GET
    @returns stock JSON response
    """
    # Drop the whole session; the user must re-authenticate afterwards.
    Session().delete()
    return _json_response(msg="User has been logged out.")
@validate_request("POST", "email", "password")
def auth(request):
    """
    API Method - /auth
    If credentials are correct a new session is created for this
    user which authorizes them to use protected API methods.
    @method POST
    @param email: user's email address
    @param password: user's password
    @returns stock JSON response
    """
    # required parameters
    email = request.POST.get("email")
    password = request.POST.get("password")
    # hash the password so it can be compared to the stored hash
    password = _hash_password(password)
    # Look up a User object that matches the email/password
    users = User.all()
    users \
        .filter("email =", email) \
        .filter("password =", password)
    # No user found, return a failure message
    if users.count() == 0:
        return _json_response(success=False,
                              msg="Email or password is invalid.")
    # Somehow more than one client with the same user/password have
    # been created, which should never happen. Error out here.
    if users.count() > 1:
        return _json_response(details=None,
                              success=False,
                              msg="Internal security error. Contact an administrator")
    # Pull the User from the datastore
    user = users.get()
    # Build a new session object and store the user
    session = Session()
    session["user"] = user
    # return stock JSON with user details
    return _json_response(user=user.to_json())
# Utility method for generating random questions around a
# given point. The point is currently Apple's headquarters
# so this works well with testing with the simulator.
def randomize(request):
    """
    Dev/test helper - /randomize
    Seeds the datastore with random questions scattered around a fixed
    point (Apple HQ), assigned to randomly chosen existing users.
    """
    import random
    # location to generate questions around
    near_lat, near_lon = 37.331693, -122.030457
    # ~50 miles expressed in degrees of latitude (69 miles per degree)
    dx = 50.0/69.0
    # Number of questions to generate
    num_questions = 10
    # Possible users to assign questions to. These
    # users will be looked up by the email addresses
    # supply in this list and they must exist
    email_accounts = ["email1@example.com", "email2@example.com"]
    # no more editing
    # look up the user objects associated with the
    # given email addresses
    users = []
    for email in email_accounts:
        user = User.all().filter("email =", email).get()
        if user is not None:
            users.append(user)
    # return false if there were no user objects found
    if not users:
        return _json_response(success=False, msg="No users found")
    # generate num_questions random questions around the given
    # point (near_lat, near_lon) within some distance dx and
    # assigning a random user to the question
    for i in range(num_questions):
        lat = random.uniform(near_lat-dx, near_lat+dx)
        lon = random.uniform(near_lon-dx, near_lon+dx)
        user = random.sample(users, 1)[0]
        # NOTE(review): this uses db.GeoPt while the rest of the module
        # imports GeoPt directly; `db` is presumably re-exported by
        # `from model import *` -- confirm, otherwise this is a NameError.
        q = Question(user=user,
                     question="Question %d" % i,
                     location=db.GeoPt(lat, lon))
        q.update_location()
        q.put()
    # return true
    return _json_response()
| zaffra/Inquire | GAE/api.py | Python | bsd-3-clause | 17,740 |